Commit 39b7067

Gossity authored and zhaotianyi committed
fix: fix rope_forward.
1 parent ff5558e commit 39b7067

2 files changed: +4 −1 lines changed

xllm/core/layers/common/fused_moe.cpp

Lines changed: 1 addition, 0 deletions

@@ -110,6 +110,7 @@ torch::Tensor FusedMoEImpl::forward_expert(
   fused_moe_params.gating_output = router_logits;
   fused_moe_params.w1 = w13_;
   fused_moe_params.w2 = w2_;
+  fused_moe_params.residual = shared_output;
   fused_moe_params.e_score_correction_bias = e_score_correction_bias;
   fused_moe_params.topk = topk_;
   fused_moe_params.renormalize = renormalize_;
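
For context: the added `residual` field appears to pass the shared-expert output into the fused MoE kernel so it can be folded into the routed-expert result in one pass rather than added separately afterwards. A minimal sketch of that pattern, assuming a params struct like the one populated above (`FusedMoEParams` and `fused_moe_forward` here are illustrative stand-ins, not confirmed xllm API):

#include <torch/torch.h>
#include <optional>

// Illustrative params struct mirroring the fields set in forward_expert.
struct FusedMoEParams {
  torch::Tensor hidden_states;            // token activations
  torch::Tensor gating_output;            // router logits
  torch::Tensor w1, w2;                   // expert weights
  std::optional<torch::Tensor> residual;  // shared-expert output (new field)
  int64_t topk = 0;
  bool renormalize = false;
};

torch::Tensor fused_moe_forward(const FusedMoEParams& p) {
  // Routed-expert computation elided; a clone keeps the sketch runnable.
  torch::Tensor out = p.hidden_states.clone();
  // Fold in the shared-expert contribution when one is provided.
  if (p.residual.has_value()) {
    out += *p.residual;
  }
  return out;
}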

xllm/core/layers/common/rotary_embedding.cpp

Lines changed: 3 additions, 1 deletion

@@ -90,8 +90,10 @@ void RotaryEmbeddingImpl::forward(torch::Tensor& q,
   rotary_params.interleaved = interleaved_;
   rotary_params.discrete = discrete;
   rotary_params.max_query_len = max_query_len;
-
   xllm::kernel::apply_rotary(rotary_params);
+
+  q = rotary_params.q;
+  k = rotary_params.k;
 }
 
 } // namespace layer
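
The copy-back above is the actual fix: `xllm::kernel::apply_rotary` evidently returns its results through the params struct rather than rotating q and k in place, so without the reassignment the caller's tensors keep their pre-rotation values. A minimal sketch of the failure mode, with stand-in names (`RotaryParams` and `apply_rotary` here are illustrative, and `torch::roll` stands in for the real rotation):

#include <torch/torch.h>

struct RotaryParams {
  torch::Tensor q, k;  // inputs on entry, rotated outputs on return
};

// Out-of-place kernel: allocates fresh result tensors instead of
// writing into the storage behind p.q / p.k.
void apply_rotary(RotaryParams& p) {
  p.q = torch::roll(p.q, /*shifts=*/1, /*dims=*/-1);  // stand-in for RoPE
  p.k = torch::roll(p.k, /*shifts=*/1, /*dims=*/-1);
}

void forward(torch::Tensor& q, torch::Tensor& k) {
  RotaryParams params{q, k};
  apply_rotary(params);
  // Without this copy-back, q and k still reference the unrotated tensors.
  q = params.q;
  k = params.k;
}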

0 commit comments