import os
import threading
import uvicorn
from fastapi import FastAPI
from fastapi.responses import HTMLResponse
from pydantic import BaseModel
from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
from datasets import load_dataset
from peft import PeftModel
import torch
from huggingface_hub import hf_hub_download
import zipfile
from datetime import datetime
# ✅ Timestamped log helper (flushes immediately so output shows up in real time)
def log(message):
    timestamp = datetime.now().strftime("%H:%M:%S")
    print(f"[{timestamp}] {message}", flush=True)
# ✅ Constants
HF_TOKEN = os.environ.get("HF_TOKEN")
MODEL_BASE = "UcsTurkey/kanarya-750m-fixed"
FINE_TUNE_ZIP = "trained_model_000_100.zip"
FINE_TUNE_REPO = "UcsTurkey/trained-zips"
RAG_DATA_FILE = "merged_dataset_000_100.parquet"
RAG_DATA_REPO = "UcsTurkey/turkish-general-culture-tokenized"
app = FastAPI()
chat_history = []
pipe = None # global text-generation pipeline
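# NOTE: `pipe` stays None until a background loader fills it in (sketched at the
# end of this file); requests that arrive before then get an error response.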
class Message(BaseModel):
    user_input: str

@app.get("/", response_class=HTMLResponse)
def root():
    return """
    <html>
    <head><title>Fine-Tune Chat</title></head>
    <body>
        <h2>📘 Fine-tune Chat Test</h2>
        <textarea id="input" rows="4" cols="60" placeholder="Type something..."></textarea><br><br>
        <button onclick="send()">Send</button>
        <pre id="output"></pre>
        <script>
        async function send() {
            const input = document.getElementById("input").value;
            const res = await fetch("/chat", {
                method: "POST",
                headers: { "Content-Type": "application/json" },
                body: JSON.stringify({ user_input: input })
            });
            const data = await res.json();
            document.getElementById("output").innerText = data.answer || data.error || "An error occurred.";
        }
        </script>
    </body>
    </html>
    """

@app.post("/chat")
def chat(msg: Message):
    try:
        global pipe
        if pipe is None:
            log("🚫 Error: model has not been loaded yet.")
            return {"error": "Model not loaded. Please try again in a few seconds."}
        user_input = msg.user_input.strip()
        if not user_input:
            return {"error": "Empty input"}
        # Rebuild the conversation as one prompt. The Turkish role markers
        # ("Kullanıcı"/"Asistan") are kept verbatim: they are what the
        # fine-tuned Turkish model expects.
        full_prompt = ""
        for turn in chat_history:
            full_prompt += f"Kullanıcı: {turn['user']}\nAsistan: {turn['bot']}\n"
        full_prompt += f"Kullanıcı: {user_input}\nAsistan:"
        # The source file is truncated past this point; the following completion
        # is a plausible reconstruction, assuming `pipe` is a standard
        # transformers text-generation pipeline:
        output = pipe(full_prompt, max_new_tokens=200, do_sample=True)
        answer = output[0]["generated_text"][len(full_prompt):].strip()
        chat_history.append({"user": user_input, "bot": answer})
        return {"answer": answer}
    except Exception as e:
        log(f"❌ Error: {e}")
        return {"error": str(e)}
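
# --- The file's remaining lines (truncated above) presumably download the
# fine-tuned weights and start the server, given the otherwise-unused imports
# (threading, uvicorn, zipfile, hf_hub_download, PeftModel). The sketch below
# is an assumption, not the original code: it treats FINE_TUNE_ZIP as a zipped
# PEFT adapter extracted to a hypothetical /tmp/model directory, and omits
# loading of the RAG parquet (RAG_DATA_REPO / load_dataset).
def load_model():
    global pipe
    log("⬇️ Downloading fine-tuned weights...")
    zip_path = hf_hub_download(repo_id=FINE_TUNE_REPO, filename=FINE_TUNE_ZIP, token=HF_TOKEN)
    with zipfile.ZipFile(zip_path) as zf:
        zf.extractall("/tmp/model")  # assumed extraction target
    log("📦 Loading base model and adapter...")
    tokenizer = AutoTokenizer.from_pretrained(MODEL_BASE, token=HF_TOKEN)
    base = AutoModelForCausalLM.from_pretrained(MODEL_BASE, torch_dtype=torch.float32, token=HF_TOKEN)
    model = PeftModel.from_pretrained(base, "/tmp/model")
    pipe = pipeline("text-generation", model=model, tokenizer=tokenizer)
    log("✅ Model loaded.")

# Load in a background thread so FastAPI can start serving immediately;
# port 7860 is the Hugging Face Spaces default (also an assumption here).
threading.Thread(target=load_model, daemon=True).start()
uvicorn.run(app, host="0.0.0.0", port=7860)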