Update app.py
app.py CHANGED

@@ -92,8 +92,8 @@ def load_model(model_path, progress=gr.Progress()):
 
     try:
         progress(0.3, desc="Loading tokenizer...")
-        config = AutoConfig.from_pretrained(model_path, trust_remote_code=True)
-        current_tokenizer = AutoTokenizer.from_pretrained(model_path, use_fast=False,trust_remote_code=True)
+        config = AutoConfig.from_pretrained(model_path, trust_remote_code=True,force_download=True)
+        current_tokenizer = AutoTokenizer.from_pretrained(model_path, use_fast=False,trust_remote_code=True,force_download=True)
 
         progress(0.5, desc="Loading model...")
         current_model = AutoModelForCausalLM.from_pretrained(
@@ -101,7 +101,7 @@ def load_model(model_path, progress=gr.Progress()):
             device_map="auto",
             torch_dtype=torch.float16,
             config=config,
-            trust_remote_code=True
+            trust_remote_code=True,force_download=True
         )
 
         current_model_path = model_path
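For context, here is a minimal sketch of how the updated calls sit inside load_model after this change. Only the lines visible in the diff (the hunk header, the progress calls, and the three from_pretrained calls) come from the actual app.py; the imports, the module-level current_model / current_tokenizer / current_model_path globals, the positional model_path argument on line 100, and the return/except handling are assumptions added for illustration. The effect of the change is that force_download=True makes transformers re-fetch the config, tokenizer, and weights from the Hub instead of reusing a possibly stale local cache.

# Sketch only: reconstructed around the diff hunks. Everything outside the
# lines shown in the diff is an assumption, not the real app.py.
import gradio as gr
import torch
from transformers import AutoConfig, AutoModelForCausalLM, AutoTokenizer

current_model = None
current_tokenizer = None
current_model_path = None

def load_model(model_path, progress=gr.Progress()):
    global current_model, current_tokenizer, current_model_path
    try:
        progress(0.3, desc="Loading tokenizer...")
        # force_download=True: always re-download from the Hub, bypassing the cache.
        config = AutoConfig.from_pretrained(
            model_path, trust_remote_code=True, force_download=True
        )
        current_tokenizer = AutoTokenizer.from_pretrained(
            model_path, use_fast=False, trust_remote_code=True, force_download=True
        )

        progress(0.5, desc="Loading model...")
        current_model = AutoModelForCausalLM.from_pretrained(
            model_path,  # assumed: line 100 is not shown in the diff
            device_map="auto",
            torch_dtype=torch.float16,
            config=config,
            trust_remote_code=True,
            force_download=True,
        )

        current_model_path = model_path
        return f"Loaded {model_path}"  # assumed return value, not in the diff
    except Exception as e:  # assumed handler; the diff only shows the try: line
        return f"Failed to load model: {e}"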