import gradio as gr
import requests
from transformers import pipeline

# Model: Mistral-7B-Instruct-v0.2 (instruction-tuned)
model_name = "mistralai/Mistral-7B-Instruct-v0.2"
chat = pipeline(
    "text-generation",
    model=model_name,
    torch_dtype="auto",
    device_map="auto",
    trust_remote_code=True,
    max_new_tokens=512,
)

# Fetch the AlpDroid system prompt from GitHub
prompt_url = "https://raw.githubusercontent.com/ALPERALL/AlpDroid/main/prompt.txt"
system_prompt = requests.get(prompt_url).text

def alp_droid_chat(user_input):
    # Build the full prompt: system prompt, the user's message, then the assistant tag
    full_prompt = system_prompt + "\n\nKullanıcı: " + user_input + "\nAlpDroid:"
    # do_sample=True is required for temperature/top_p to actually take effect
    output = chat(full_prompt, do_sample=True, temperature=0.7, top_p=0.9)[0]["generated_text"]
    # The pipeline echoes the prompt, so keep only the text after the last "AlpDroid:" tag
    return output.split("AlpDroid:")[-1].strip()

iface = gr.Interface(
    fn=alp_droid_chat,
    inputs=gr.Textbox(lines=4, placeholder="Sorunu yaz..."),
    outputs="text",
    title="AlpDroid (Mistral-7B-Instruct-v0.2)",
    description="Mistral-7B-Instruct-v0.2 tabanlı AlpDroid asistanı.",
)
iface.launch()
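
# --- Optional variant (a sketch, not part of the original AlpDroid code) ---
# Mistral-7B-Instruct-v0.2 ships a chat template ([INST] ... [/INST]); this shows how
# the prompt could be built with tokenizer.apply_chat_template instead of the manual
# "Kullanıcı:"/"AlpDroid:" concatenation above. The function name alp_droid_chat_templated
# is hypothetical; to use it, define it before gr.Interface and pass fn=alp_droid_chat_templated.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained(model_name)

def alp_droid_chat_templated(user_input):
    # The v0.2 template has no separate system role, so the system prompt is
    # prepended to the user turn.
    messages = [{"role": "user", "content": system_prompt + "\n\n" + user_input}]
    prompt = tokenizer.apply_chat_template(
        messages, tokenize=False, add_generation_prompt=True
    )
    # return_full_text=False makes the pipeline return only the newly generated continuation.
    output = chat(
        prompt, do_sample=True, temperature=0.7, top_p=0.9, return_full_text=False
    )[0]["generated_text"]
    return output.strip()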