Fix model bug.

Colin 2024-03-20 22:23:52 +08:00
parent 72718e6b72
commit b248d1d890
1 changed file with 1 addition and 2 deletions


@@ -305,8 +305,7 @@ class QwenRunner:
     layernorm_output = block.ln_1(hidden_states)
     attn_outputs = self.forwardAttention(block.attn, layernorm_output, rotary_pos_emb_list)
-    attn_output = attn_outputs[0]
-    layernorm_input = attn_output + hidden_states
+    layernorm_input = attn_outputs + hidden_states
     layernorm_output = block.ln_2(layernorm_input)
     a1 = block.mlp.w1(layernorm_output)
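
The change drops the tuple indexing of the attention output before the residual add. Below is a minimal sketch of how the surrounding block forward reads after this fix; it assumes forwardAttention now returns the attention tensor directly, and the gated MLP layout (w2, c_proj, SiLU gating) beyond the w1 line shown in the diff is an assumption based on Qwen's usual block structure, not part of this commit.

import torch
import torch.nn.functional as F

def forward_block(runner, block, hidden_states, rotary_pos_emb_list):
    """Forward one transformer block with pre-norm residual connections (sketch)."""
    # Attention branch: ln_1 -> attention -> residual add.
    layernorm_output = block.ln_1(hidden_states)
    attn_outputs = runner.forwardAttention(block.attn, layernorm_output, rotary_pos_emb_list)
    # After the fix, forwardAttention is assumed to return the attention tensor
    # directly, so no attn_outputs[0] indexing is needed before the residual add.
    layernorm_input = attn_outputs + hidden_states

    # MLP branch: ln_2 -> gated MLP -> residual add.
    layernorm_output = block.ln_2(layernorm_input)
    a1 = block.mlp.w1(layernorm_output)
    a2 = block.mlp.w2(layernorm_output)            # w2/c_proj/SiLU gating assumed
    mlp_output = block.mlp.c_proj(a1 * F.silu(a2))
    return layernorm_input + mlp_output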