Update app.py
app.py
CHANGED
@@ -10,13 +10,16 @@ from langchain.memory import ConversationBufferMemory
 # Language model configuration
 MODEL_NAME = "mistralai/Mistral-7B-Instruct-v0.3"
 device = "cuda" if torch.cuda.is_available() else "cpu"
+HF_TOKEN = os.getenv("HF_TOKEN")  # Get the token from the environment variable

 print("🔄 Loading language model...")
-tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
+tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME,
+                                          use_auth_token=HF_TOKEN)
 model = AutoModelForCausalLM.from_pretrained(
     MODEL_NAME,
     torch_dtype=torch.float16 if device == "cuda" else torch.float32,
-    device_map="auto"
+    device_map="auto",
+    use_auth_token=HF_TOKEN
 ).to(device)

 # Conversational memory
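For context, a minimal, self-contained sketch of the loading block this commit produces, assuming the HF_TOKEN secret is set in the Space settings so the gated Mistral repo can be downloaded. Recent transformers releases accept token= in place of the deprecated use_auth_token=, and device_map="auto" already places the weights, so the trailing .to(device) call is omitted here:

import os

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

MODEL_NAME = "mistralai/Mistral-7B-Instruct-v0.3"
device = "cuda" if torch.cuda.is_available() else "cpu"
HF_TOKEN = os.getenv("HF_TOKEN")  # access token read from the environment

print("🔄 Loading language model...")
tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME, token=HF_TOKEN)
model = AutoModelForCausalLM.from_pretrained(
    MODEL_NAME,
    torch_dtype=torch.float16 if device == "cuda" else torch.float32,
    device_map="auto",  # lets accelerate handle device placement; no .to(device) needed
    token=HF_TOKEN,
)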