
Commit 61e879e

Author: unknown (committed)

add fc1 for fused moe

1 parent 662f981

File tree

1 file changed: +5 -0 lines changed


vllm_ascend/ops/common_fused_moe.py

Lines changed: 5 additions & 0 deletions
@@ -331,6 +331,11 @@ def forward_impl(self, hidden_states: torch.Tensor,
         moe_comm_method_name = forward_context.moe_comm_method_name
 
         forward_context.moe_comm_method = getattr(self, moe_comm_method_name)
+        flashcomm_v1_enabled = forward_context.flashcomm_v1_enabled
+        if flashcomm_v1_enabled:
+            router_logits = torch.ops.vllm.maybe_all_gather_and_maybe_unpad(router_logits, True)
+            if router_logits.shape[0] != hidden_states.shape[0]:
+                hidden_states = torch.ops.vllm.maybe_all_gather_and_maybe_unpad(hidden_states, True)
 
         hidden_states, router_logits = forward_context.moe_comm_method.prepare(
             hidden_states=hidden_states, router_logits=router_logits)
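
Context for the added branch, as a minimal sketch: under flashcomm_v1 the token dimension is assumed to be padded and split across the communication group, so router_logits is all-gathered back to the full token count, and hidden_states gets the same treatment if its token dimension still differs. The helper name sketch_all_gather_and_unpad, the pad_len argument, and the group handling below are hypothetical illustrations and do not describe the actual torch.ops.vllm.maybe_all_gather_and_maybe_unpad custom op.

    # Hypothetical sketch (NOT the vllm custom op): gather the token
    # dimension from every rank in the group, then trim the padding that
    # was appended so the tokens could be split evenly.
    import torch
    import torch.distributed as dist

    def sketch_all_gather_and_unpad(x: torch.Tensor,
                                    pad_len: int = 0,
                                    group=None) -> torch.Tensor:
        world_size = dist.get_world_size(group=group)
        if world_size == 1:
            return x
        chunks = [torch.empty_like(x) for _ in range(world_size)]
        dist.all_gather(chunks, x, group=group)
        full = torch.cat(chunks, dim=0)
        # Drop trailing pad rows so full.shape[0] matches the real token count.
        return full[:full.shape[0] - pad_len] if pad_len else full

Read this way, the shape check router_logits.shape[0] != hidden_states.shape[0] in the diff appears to detect whether hidden_states still carries the per-rank (un-gathered) token count and therefore needs the same gather-and-unpad step before prepare() is called.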
