Hotfix for not saving correctly (#762)
src/axolotl/monkeypatch/fused_modules.py
DELETED (file without changes)
src/axolotl/monkeypatch/llama_attn_hijack_flash.py
CHANGED

```diff
@@ -152,6 +152,7 @@ class FusedAttention(LlamaAttention):
         new_attn.q_proj.weight.data = q_proj
         new_attn.k_proj.weight.data = k_proj
         new_attn.v_proj.weight.data = v_proj
+        new_attn.o_proj.weight.data = self.o_proj.weight.data
 
         set_module_name(model, name, new_attn)
 
```
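For context, the diff suggests a weight-copy bug: when the fused attention module is unpacked back into a module with separate q/k/v/o projections (before `set_module_name` swaps it into the model), only `q_proj`, `k_proj`, and `v_proj` were copied, so the replacement module's `o_proj` kept its fresh random initialization and checkpoints were saved with an untrained output projection. Below is a minimal sketch of that pattern; `ToyAttention`, `unfuse`, and the tensor shapes are illustrative assumptions, not the repo's actual code.

```python
import torch
import torch.nn as nn


class ToyAttention(nn.Module):
    """Stand-in for LlamaAttention: four separate projections (assumption)."""

    def __init__(self, hidden: int):
        super().__init__()
        self.q_proj = nn.Linear(hidden, hidden, bias=False)
        self.k_proj = nn.Linear(hidden, hidden, bias=False)
        self.v_proj = nn.Linear(hidden, hidden, bias=False)
        self.o_proj = nn.Linear(hidden, hidden, bias=False)


def unfuse(fused_qkv: nn.Linear, fused_o: nn.Linear, hidden: int) -> ToyAttention:
    """Rebuild a plain attention module from fused weights before saving."""
    new_attn = ToyAttention(hidden)
    # Split the stacked QKV weight of shape (3*hidden, hidden) into three
    # (hidden, hidden) chunks, mirroring the q/k/v assignments in the diff.
    q_proj, k_proj, v_proj = fused_qkv.weight.data.split(hidden, dim=0)
    new_attn.q_proj.weight.data = q_proj
    new_attn.k_proj.weight.data = k_proj
    new_attn.v_proj.weight.data = v_proj
    # The hotfix line: without it, new_attn.o_proj keeps its random
    # initialization and the trained output projection is lost on save.
    new_attn.o_proj.weight.data = fused_o.weight.data
    return new_attn


# Usage sketch: the rebuilt module carries the trained o_proj weights.
hidden = 16
fused_qkv = nn.Linear(hidden, 3 * hidden, bias=False)
fused_o = nn.Linear(hidden, hidden, bias=False)
attn = unfuse(fused_qkv, fused_o, hidden)
torch.testing.assert_close(attn.o_proj.weight, fused_o.weight)
```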