
Commit f40de2d

change PADDLE_THROW to std::runtime_error
1 parent efd0c8b commit f40de2d


csrc/gpu/moe/fused_moe/cutlass_kernels/moe_gemm/fused_moe_gemm_kernels_template.h

Lines changed: 34 additions & 32 deletions
@@ -69,8 +69,8 @@ void generic_moe_gemm_kernelLauncher(const T* A,
     cudaStream_t stream,
     int* kernel_occupancy = nullptr) {
   if (gemm_config.split_k_style != SplitKStyle::NO_SPLIT_K) {
-    PADDLE_THROW(
-        phi::errors::Fatal("[MoeGemm] Grouped gemm does not support split-k"));
+    throw std::runtime_error(
+        "[MoeGemm] Grouped gemm does not support split-k");
   }
 
 #ifdef PADDLE_CUDA_BF16
@@ -170,9 +170,9 @@ void generic_moe_gemm_kernelLauncher(const T* A,
   int occupancy = std::min(2, GemmGrouped::maximum_active_blocks());
 
   if (occupancy == 0) {
-    PADDLE_THROW(phi::errors::Fatal(
+    throw std::runtime_error(
         "[MoE Runner] GPU lacks the shared memory resources to run "
-        "GroupedGEMM kernel"));
+        "GroupedGEMM kernel");
   }
   const int threadblock_count = multi_processor_count * occupancy;
 
@@ -198,7 +198,7 @@ void generic_moe_gemm_kernelLauncher(const T* A,
   if (can_implement != cutlass::Status::kSuccess) {
     std::string err_msg = "MoEFC kernel will fail for params. Error: " +
                           std::string(cutlassGetStatusString(can_implement));
-    PADDLE_THROW(phi::errors::Fatal("[MoE Runner] " + err_msg));
+    throw std::runtime_error("[MoE Runner] " + err_msg);
   }
 
   auto init_status = gemm.initialize(args);
@@ -244,7 +244,7 @@ struct dispatch_stages {
     std::string err_msg = "Cutlass fpA_intB gemm. Not instantiates for arch " +
                           std::to_string(arch::kMinComputeCapability) +
                           " with stages set to " + std::to_string(Stages);
-    PADDLE_THROW(phi::errors::Fatal("[dispatch_stages::dispatch] " + err_msg));
+    throw std::runtime_error("[dispatch_stages::dispatch] " + err_msg);
   }
 };
 
@@ -395,8 +395,8 @@ void dispatch_gemm_config(const T* A,
     default:
       std::string err_msg = "dispatch_gemm_config does not support stages " +
                             std::to_string(gemm_config.stages);
-      PADDLE_THROW(
-          phi::errors::Fatal("[MoE][dispatch_gemm_config] " + err_msg));
+      throw std::runtime_error(
+          "[MoE][dispatch_gemm_config] " + err_msg);
       break;
   }
 }
@@ -454,18 +454,18 @@ void dispatch_moe_gemm_to_cutlass(const T* A,
      dispatch_gemm_config_macro(64, 128, 64, 32, 64, 64);
      dispatch_gemm_config_macro(128, 128, 64, 64, 32, 64);
     case CutlassTileConfig::Undefined:
-      PADDLE_THROW(common::errors::InvalidArgument(
-          "[dispatch_moe_gemm_to_cutlass] gemm config undefined."));
+      throw std::runtime_error(
+          "[dispatch_moe_gemm_to_cutlass] gemm config undefined.");
       break;
     case CutlassTileConfig::ChooseWithHeuristic:
-      PADDLE_THROW(common::errors::InvalidArgument(
+      throw std::runtime_error(
           "[dispatch_moe_gemm_to_cutlass] gemm config should have "
-          "already been set by heuristic."));
+          "already been set by heuristic.");
       break;
     default:
-      PADDLE_THROW(common::errors::InvalidArgument(
+      throw std::runtime_error(
           "[dispatch_moe_gemm_to_cutlass] Config is invalid for same "
-          "type MoE tensorop GEMM."));
+          "type MoE tensorop GEMM.");
       break;
   }
 }
@@ -500,18 +500,18 @@ void dispatch_moe_gemm_to_cutlass(const T* A,
      dispatch_gemm_config_macro(32, 128, 64, 32, 32, 64);
      dispatch_gemm_config_macro(64, 128, 64, 64, 64, 64);
     case CutlassTileConfig::Undefined:
-      PADDLE_THROW(common::errors::InvalidArgument(
-          "[dispatch_moe_gemm_to_cutlass] gemm config undefined."));
+      throw std::runtime_error(
+          "[dispatch_moe_gemm_to_cutlass] gemm config undefined.");
       break;
     case CutlassTileConfig::ChooseWithHeuristic:
-      PADDLE_THROW(common::errors::InvalidArgument(
+      throw std::runtime_error(
           "[dispatch_moe_gemm_to_cutlass] gemm config should have "
-          "already been set by heuristic."));
+          "already been set by heuristic.");
       break;
     default:
-      PADDLE_THROW(common::errors::InvalidArgument(
+      throw std::runtime_error(
           "[dispatch_moe_gemm_to_cutlass] Config is invalid for "
-          "mixed type tensorop GEMM."));
+          "mixed type tensorop GEMM.");
       break;
   }
 } else {
@@ -526,18 +526,20 @@ void dispatch_moe_gemm_to_cutlass(const T* A,
      dispatch_gemm_config_macro(128, 128, 64, 128, 32, 64);
      dispatch_gemm_config_macro(128, 256, 64, 64, 64, 64);
      dispatch_gemm_config_macro(64, 128, 64, 64, 32, 64);
+      dispatch_gemm_config_macro(256, 128, 64, 64, 64, 64);
     case CutlassTileConfig::Undefined:
-      PADDLE_THROW(common::errors::InvalidArgument(
-          "[dispatch_moe_gemm_to_cutlass] gemm config undefined."));
+      throw std::runtime_error(
+          "[dispatch_moe_gemm_to_cutlass] gemm config undefined.");
       break;
     case CutlassTileConfig::ChooseWithHeuristic:
-      PADDLE_THROW(common::errors::InvalidArgument(
+      throw std::runtime_error(
           "[dispatch_moe_gemm_to_cutlass] gemm config should have "
-          "already been set by heuristic."));
+          "already been set by heuristic.");
       break;
     default:
-      PADDLE_THROW(common::errors::InvalidArgument(
-          "[dispatch_moe_gemm_to_cutlass] gemm config undefined."));
+      throw std::runtime_error(
+          "[dispatch_moe_gemm_to_cutlass] Config is invalid for "
+          "mixed type tensorop GEMM.");
       break;
   }
 }
@@ -568,19 +570,19 @@ void dispatch_moe_gemm_to_cutlass(const T* A,
   switch (gemm_config.tile_config) {
     dispatch_gemm_config_macro(128, 128, 8, 64, 64, 8);
     case CutlassTileConfig::Undefined:
-      PADDLE_THROW(common::errors::InvalidArgument(
+      throw std::runtime_error(
           "[dispatch_moe_gemm_to_cutlass][SIMT] gemm config "
-          "undefined."));
+          "undefined.");
       break;
     case CutlassTileConfig::ChooseWithHeuristic:
-      PADDLE_THROW(common::errors::InvalidArgument(
+      throw std::runtime_error(
           "[dispatch_moe_gemm_to_cutlass][SIMT] gemm config should "
-          "have already been set by heuristic."));
+          "have already been set by heuristic.");
       break;
     default:
-      PADDLE_THROW(common::errors::InvalidArgument(
+      throw std::runtime_error(
           "[dispatch_moe_gemm_to_cutlass][SIMT] Unsupported config "
-          "for float MoE gemm."));
+          "for float MoE gemm.");
       break;
   }
 }
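
With this change the MoE GEMM launchers report failures as plain C++ exceptions rather than through Paddle's PADDLE_THROW macro, so a caller can catch std::runtime_error directly. Below is a minimal sketch of a hypothetical call site; the wrapper name and the commented-out launcher call are placeholders, not part of this commit.

#include <iostream>
#include <stdexcept>

// Hypothetical wrapper; the real launcher also takes the input/weight/output
// pointers, the problem sizes, the gemm_config, and a CUDA stream.
void run_moe_gemm_or_report() {
  try {
    // generic_moe_gemm_kernelLauncher(/* A, B, weight_scales, ... */);
    throw std::runtime_error(
        "[MoeGemm] Grouped gemm does not support split-k");  // stand-in failure
  } catch (const std::runtime_error& e) {
    // Messages keep the same "[MoeGemm]" / "[MoE Runner]" prefixes as before.
    std::cerr << "MoE GEMM failed: " << e.what() << std::endl;
    throw;  // rethrow, or translate into the framework's own error type
  }
}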
