import gradio as gr
import requests
from transformers import AutoTokenizer, AutoModelForCausalLM, TextGenerationPipeline
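# NOTE (assumption): this script expects gradio, requests, transformers and torch
# to be installed; accelerate is also required for device_map="auto" below.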

# Fetch the AlpDroid system prompt from GitHub
alp_prompt_url = "https://raw.githubusercontent.com/ALPERALL/AlpDroid/main/prompt.txt"
prompt_response = requests.get(alp_prompt_url, timeout=30)
prompt_response.raise_for_status()  # fail fast with a clear error if the prompt cannot be fetched
alp_prompt = prompt_response.text.strip()

# New model (fully open source)
# Older transformers releases may additionally need trust_remote_code=True for Falcon
model_id = "tiiuae/falcon-7b-instruct"
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id, device_map="auto", torch_dtype="auto")

# Build the text-generation pipeline
pipeline = TextGenerationPipeline(model=model, tokenizer=tokenizer)

# Gradio callback: prepend the AlpDroid system prompt and generate a reply
def chat(prompt):
    full_prompt = f"{alp_prompt}\n\nKullanıcı: {prompt}\nAlpDroid:"
    # do_sample=True is required, otherwise temperature/top_p are ignored
    result = pipeline(full_prompt, max_new_tokens=200, do_sample=True, temperature=0.8, top_p=0.9)[0]["generated_text"]
    # The pipeline returns the prompt plus the completion; keep only the reply after "AlpDroid:"
    response = result.split("AlpDroid:")[-1].strip()
    return response

# Gradio interface
gr.Interface(fn=chat,
             inputs=gr.Textbox(label="Mesajını yaz, AlpDroid cevaplasın"),
             outputs=gr.Textbox(label="AlpDroid"),
             title="🧠 AlpDroid (Falcon 7B)",
             description="AlpDroid şu anda Falcon 7B ile çalışıyor. Özgür, eğlenceli ve asi.").launch()