zetavg committed
fix
llama_lora/ui/inference_ui.py
CHANGED
@@ -53,9 +53,8 @@ def do_inference(
         yield message, '[0]'
         return
 
-
-
-    else:
+    model = get_base_model()
+    if not lora_model_name == "None":
         model = get_model_with_lora(lora_model_name)
     tokenizer = get_tokenizer()
 
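In effect, do_inference now always loads the base model first and only swaps in the LoRA-adapted model when a LoRA model is actually selected in the UI. Below is a minimal sketch of that logic; the import path, the simplified signature, and the generation step are assumptions for illustration, while get_base_model, get_model_with_lora, get_tokenizer, and the lora_model_name check come from the diff itself.

# Minimal sketch of the updated model-selection logic; only the helper calls
# and the "None" check mirror the diff, everything else is assumed.
from llama_lora.models import (  # assumed module path for the helpers
    get_base_model,
    get_model_with_lora,
    get_tokenizer,
)


def do_inference(lora_model_name, prompt):  # simplified, assumed signature
    # Always start from the base model.
    model = get_base_model()
    # Swap in the LoRA-adapted model only when a LoRA model is selected.
    if not lora_model_name == "None":
        model = get_model_with_lora(lora_model_name)
    tokenizer = get_tokenizer()
    # Tokenize and generate with whichever model ended up selected
    # (assumed generation step, not part of the diff).
    inputs = tokenizer(prompt, return_tensors="pt")
    return model.generate(**inputs)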