Update fine_tune_inference_test_mistral.py
fine_tune_inference_test_mistral.py
CHANGED
@@ -14,6 +14,7 @@ MODEL_BASE = "mistralai/Mistral-7B-Instruct-v0.2"
 FINE_TUNE_ZIP = "trained_model_000_009.zip"
 FINE_TUNE_REPO = "UcsTurkey/trained-zips"
 USE_SAMPLING = False
+USE_FINE_TUNE = False  # ✅ Ana model ile test için False yap
 CONFIDENCE_THRESHOLD = -1.5
 FALLBACK_ANSWERS = [
     "Bu konuda maalesef bilgim yok.",
@@ -116,27 +117,33 @@ def detect_env():
 def setup_model():
     global model, tokenizer
     try:
-        log("📦 Zip indiriliyor...")
-        zip_path = hf_hub_download(
-            repo_id=FINE_TUNE_REPO,
-            filename=FINE_TUNE_ZIP,
-            repo_type="model",
-            token=HF_TOKEN
-        )
         extract_path = "/app/extracted"
-        os.makedirs(extract_path, exist_ok=True)
-        with zipfile.ZipFile(zip_path, "r") as zip_ref:
-            zip_ref.extractall(extract_path)
-        tokenizer = AutoTokenizer.from_pretrained(os.path.join(extract_path, "output"))
-        if tokenizer.pad_token is None:
-            tokenizer.pad_token = tokenizer.eos_token
-
         device, supports_bf16 = detect_env()
         dtype = torch.bfloat16 if supports_bf16 else torch.float32
         log(f"🧠 Ortam: {device.upper()}, dtype: {dtype}")
-        base = AutoModelForCausalLM.from_pretrained(MODEL_BASE, torch_dtype=dtype).to(device)
-        peft = PeftModel.from_pretrained(base, os.path.join(extract_path, "output"))
-        model = peft.model.to(device)
+
+        if USE_FINE_TUNE:
+            log("📦 Zip indiriliyor...")
+            zip_path = hf_hub_download(
+                repo_id=FINE_TUNE_REPO,
+                filename=FINE_TUNE_ZIP,
+                repo_type="model",
+                token=HF_TOKEN
+            )
+            os.makedirs(extract_path, exist_ok=True)
+            with zipfile.ZipFile(zip_path, "r") as zip_ref:
+                zip_ref.extractall(extract_path)
+            tokenizer = AutoTokenizer.from_pretrained(os.path.join(extract_path, "output"))
+            if tokenizer.pad_token is None:
+                tokenizer.pad_token = tokenizer.eos_token
+            base = AutoModelForCausalLM.from_pretrained(MODEL_BASE, torch_dtype=dtype).to(device)
+            peft = PeftModel.from_pretrained(base, os.path.join(extract_path, "output"))
+            model = peft.model.to(device)
+        else:
+            log("🧪 Sadece ana model yüklenecek.")
+            tokenizer = AutoTokenizer.from_pretrained(MODEL_BASE)
+            model = AutoModelForCausalLM.from_pretrained(MODEL_BASE, torch_dtype=dtype).to(device)
+
         model.eval()
         log("✅ Model yüklendi.")
     except Exception as e:
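In short, this commit guards the fine-tune download and extraction behind a new USE_FINE_TUNE flag so the script can also be smoke-tested against the plain base Mistral model. For reference, a condensed standalone sketch of the resulting setup_model() logic follows. It is not the script verbatim: the global model/tokenizer state, the log()/detect_env() helpers and the try/except wrapper are omitted, HF_TOKEN is assumed to come from the environment, and the PeftModel is kept wrapped rather than unwrapped via .model as the script does.

# Condensed sketch of the post-commit setup_model() flow (assumptions noted above).
import os
import zipfile

import torch
from huggingface_hub import hf_hub_download
from peft import PeftModel
from transformers import AutoModelForCausalLM, AutoTokenizer

MODEL_BASE = "mistralai/Mistral-7B-Instruct-v0.2"
FINE_TUNE_REPO = "UcsTurkey/trained-zips"
FINE_TUNE_ZIP = "trained_model_000_009.zip"
USE_FINE_TUNE = False                   # False -> test with the base model only
HF_TOKEN = os.environ.get("HF_TOKEN")   # assumed: Hub token provided via the environment


def setup_model(extract_path: str = "/app/extracted"):
    device = "cuda" if torch.cuda.is_available() else "cpu"
    use_bf16 = device == "cuda" and torch.cuda.is_bf16_supported()
    dtype = torch.bfloat16 if use_bf16 else torch.float32

    if USE_FINE_TUNE:
        # Download the zipped adapter from the Hub and unpack it locally.
        zip_path = hf_hub_download(
            repo_id=FINE_TUNE_REPO,
            filename=FINE_TUNE_ZIP,
            repo_type="model",
            token=HF_TOKEN,
        )
        os.makedirs(extract_path, exist_ok=True)
        with zipfile.ZipFile(zip_path, "r") as zip_ref:
            zip_ref.extractall(extract_path)

        adapter_dir = os.path.join(extract_path, "output")
        tokenizer = AutoTokenizer.from_pretrained(adapter_dir)
        if tokenizer.pad_token is None:
            tokenizer.pad_token = tokenizer.eos_token

        # Load the base model, then attach the extracted PEFT/LoRA adapter.
        base = AutoModelForCausalLM.from_pretrained(MODEL_BASE, torch_dtype=dtype).to(device)
        model = PeftModel.from_pretrained(base, adapter_dir).to(device)
    else:
        # Base model only: no download, no adapter.
        tokenizer = AutoTokenizer.from_pretrained(MODEL_BASE)
        model = AutoModelForCausalLM.from_pretrained(MODEL_BASE, torch_dtype=dtype).to(device)

    model.eval()
    return model, tokenizer

With USE_FINE_TUNE = False the Hub download and zip extraction are skipped entirely, which is what enables the quick base-model test this commit describes.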
|