
Commit 0541c7a

Fix relu2 blockscale after rebase
Signed-off-by: Neta Zmora <[email protected]>
1 parent e84963f commit 0541c7a

File tree

1 file changed: +1 −1 lines changed


tests/unittest/_torch/auto_deploy/unit/singlegpu/custom_ops/test_trtllm_moe.py

Lines changed: 1 addition & 1 deletion
@@ -706,7 +706,7 @@ def round_up(x, y):
     elif mlp_style == "mlp":
         # For non-gated MLP with ReLU^2
         fc1_expert_weights_fp4 = w1_q_fp4
-        fc1_weight_blockscale_fp8 = torch.cat([w3_blockscale, w1_blockscale], dim=1)
+        fc1_weight_blockscale_fp8 = w1_blockscale.view(torch.long)
         fc1_weight_gs = w1_gs
         if activation_func != "relu2":
             raise ValueError(f"Unsupported activation '{activation_func}' for mlp. Use 'relu2'.")
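
For context, a minimal sketch of the distinction this fix relies on: in the gated MLP style, fc1 fuses w3 and w1, so their block scales are concatenated, whereas in the non-gated "mlp" style with ReLU^2, fc1 is only w1, so only w1's block scales are passed (reinterpreted as 64-bit words via .view(torch.long)). The shapes, dtypes, and variable names below are illustrative assumptions, not taken from the test file.

import torch

# Hypothetical stand-ins for the per-expert FP8 block-scale tensors
# (the real test builds these from NVFP4-quantized expert weights).
num_experts, rows, cols = 8, 128, 16
w1_blockscale = torch.randint(0, 127, (num_experts, rows, cols), dtype=torch.uint8)
w3_blockscale = torch.randint(0, 127, (num_experts, rows, cols), dtype=torch.uint8)

# Gated MLP: fc1 fuses w3 and w1, so both sets of block scales are
# concatenated along dim=1 (this matches the pre-fix line that was removed).
fc1_blockscale_gated = torch.cat([w3_blockscale, w1_blockscale], dim=1)

# Non-gated MLP with ReLU^2: fc1 is just w1, so only w1's block scales are
# used; .view(torch.long) reinterprets the raw bytes as 64-bit words
# (requires a contiguous tensor whose last-dim byte count is a multiple of 8).
fc1_blockscale_mlp = w1_blockscale.view(torch.long)

print(fc1_blockscale_gated.shape)  # torch.Size([8, 256, 16])
print(fc1_blockscale_mlp.shape)    # torch.Size([8, 128, 2])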
