# fine-tune-inference-test / fine_tune_inference_test.py
# ciyidogan's picture
# Update fine_tune_inference_test.py
# d0e6741 verified
# raw
# history blame
# 2.44 kB
import os
import threading
import uvicorn
from fastapi import FastAPI
from fastapi.responses import HTMLResponse
from pydantic import BaseModel
from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
from datasets import load_dataset
from peft import PeftModel
import torch
from huggingface_hub import hf_hub_download
import zipfile
from datetime import datetime
# ✅ Timestamped log helper (flush-enabled so output appears immediately)
def log(message):
    """Print *message* prefixed with the current HH:MM:SS timestamp.

    Parameters
    ----------
    message : str
        Text to log.

    Uses ``print(..., flush=True)`` instead of the original
    ``os.sys.stdout.flush()`` — reaching ``sys`` through ``os``'s
    transitive import is fragile and unidiomatic; ``flush=True`` is the
    supported way to force unbuffered output (important in container /
    Space logs where stdout is line-captured).
    """
    timestamp = datetime.now().strftime("%H:%M:%S")
    print(f"[{timestamp}] {message}", flush=True)
# ✅ Constants / configuration
HF_TOKEN = os.environ.get("HF_TOKEN")  # Hugging Face token; None when the env var is unset
MODEL_BASE = "UcsTurkey/kanarya-750m-fixed"  # base causal-LM checkpoint on the Hub
FINE_TUNE_ZIP = "trained_model_000_100.zip"  # zip archive containing the fine-tuned weights
FINE_TUNE_REPO = "UcsTurkey/trained-zips"  # Hub repo that hosts the zip above
RAG_DATA_FILE = "merged_dataset_000_100.parquet"  # parquet file with tokenized data
RAG_DATA_REPO = "UcsTurkey/turkish-general-culture-tokenized"  # Hub repo hosting that parquet
app = FastAPI()  # ASGI application serving "/" (HTML UI) and "/chat" (JSON API)
chat_history = []  # in-memory conversation turns; presumably dicts with 'user'/'bot' keys — TODO confirm (source truncated)
pipe = None  # global text-generation pipeline; stays None until the model finishes loading
class Message(BaseModel):
    """Request body for POST /chat."""

    # Raw user text to feed into the text-generation pipeline.
    user_input: str
@app.get("/", response_class=HTMLResponse)
def root():
    """Serve the minimal in-browser chat page.

    The page holds a textarea and a button; its inline script POSTs the
    entered text as JSON to ``/chat`` and renders ``answer`` (or
    ``error``) from the response into a ``<pre>`` element.
    """
    page = """
    <html>
        <head><title>Fine-Tune Chat</title></head>
        <body>
            <h2>📘 Fine-tune Chat Test</h2>
            <textarea id="input" rows="4" cols="60" placeholder="Bir şeyler yaz..."></textarea><br><br>
            <button onclick="send()">Gönder</button>
            <pre id="output"></pre>
            <script>
                async function send() {
                    const input = document.getElementById("input").value;
                    const res = await fetch("/chat", {
                        method: "POST",
                        headers: { "Content-Type": "application/json" },
                        body: JSON.stringify({ user_input: input })
                    });
                    const data = await res.json();
                    document.getElementById("output").innerText = data.answer || data.error || "Hata oluştu.";
                }
            </script>
        </body>
    </html>
    """
    return page
@app.post("/chat")
def chat(msg: Message):
try:
global pipe
if pipe is None:
log("🚫 Hata: Model henüz yüklenmedi.")
return {"error": "Model yüklenmedi. Lütfen birkaç saniye sonra tekrar deneyin."}
user_input = msg.user_input.strip()
if not user_input:
return {"error": "Boş giriş"}
full_prompt = ""
for turn in chat_history:
full_prompt += f"Kullanıcı: {turn['user']}\nAsistan: {turn['bot]()_