Update llm_model.py
llm_model.py  CHANGED  +4 -6
@@ -3,16 +3,14 @@ from transformers import AutoTokenizer, AutoModelForCausalLM, AutoModelForSequen
 from log import log
 from pydantic import BaseModel
 
-global model, tokenizer, eos_token_id
-
-model = None
-tokenizer = None
-eos_token_id = None
-
 class Message(BaseModel):
     user_input: str
 
 def setup_model(s_config):
+    global model, tokenizer, eos_token_id
+    model = None
+    tokenizer = None
+    eos_token_id = None
     try:
         log("🧠 setup_model() başladı")
         device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
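The commit moves the model/tokenizer/eos_token_id globals and their None initialization from module level into setup_model(). For context, here is a minimal sketch of how setup_model() presumably continues past this hunk. Only the imports named in the hunk header, the global/None initialization, the try block, the Turkish log line ("başladı" = "started"), and the device selection come from the diff; the s_config.model_name attribute, the from_pretrained loading calls, the eos_token_id caching, and the except branch are assumptions added purely for illustration.

# Hedged sketch: everything below the "device = ..." line is assumed, not taken
# from the diff. "s_config.model_name" is a hypothetical attribute name.
import torch
from transformers import AutoTokenizer, AutoModelForCausalLM
from log import log  # project-local logging helper used in the diff

def setup_model(s_config):
    # After this commit the globals are created only when setup_model() runs.
    global model, tokenizer, eos_token_id
    model = None
    tokenizer = None
    eos_token_id = None
    try:
        log("🧠 setup_model() başladı")  # "setup_model() started"
        device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
        # Assumed continuation: load tokenizer and model, cache the EOS token id.
        tokenizer = AutoTokenizer.from_pretrained(s_config.model_name)
        model = AutoModelForCausalLM.from_pretrained(s_config.model_name).to(device)
        eos_token_id = tokenizer.eos_token_id
    except Exception as e:
        log(f"setup_model() failed: {e}")  # hypothetical error log
        raise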