from huggingface_hub import InferenceClient
import gradio as gr
client = InferenceClient("mistralai/Mistral-7B-Instruct-v0.3")
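# Note (assumption about deployment): if the model endpoint requires authentication,
# pass token=... to InferenceClient or log in beforehand with `huggingface-cli login`;
# anonymous calls to the hosted Inference API may be rate-limited.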
# Prompt formatter
def format_prompt(message, history):
    prompt = "<s>"
    for user_prompt, bot_response in history:
        prompt += f"[INST] {user_prompt} [/INST]{bot_response}</s>"
    prompt += f"[INST] {message} [/INST]"
    return prompt
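# Illustration (not executed): with history [("Hi", "Hello!")] and message "How are you?",
# format_prompt returns:
#   "<s>[INST] Hi [/INST]Hello!</s>[INST] How are you? [/INST]"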
# Generate a response from the model
def generate(message, history, file, temperature, max_new_tokens, top_p, repetition_penalty):
    if file:
        try:
            # gr.File may hand over a filepath string or a tempfile-like object
            # depending on the Gradio version; handle both cases.
            path = file if isinstance(file, str) else file.name
            with open(path, "r", encoding="utf-8") as f:
                file_content = f.read()
            message += f"\n\n📎 File contents:\n{file_content}"
        except Exception as e:
            message += f"\n\n📎 (Could not read file: {str(e)})"
    prompt = format_prompt(message, history)
    stream = client.text_generation(
        prompt,
        temperature=temperature,
        max_new_tokens=max_new_tokens,
        top_p=top_p,
        repetition_penalty=repetition_penalty,
        do_sample=True,
        seed=42,
        stream=True,
        details=True,
        return_full_text=False,
    )
    output = ""
    for response in stream:
        output += response.token.text
        yield output
# Check user consent
def start_chat(approved):
    if approved:
        return gr.update(visible=False), gr.update(visible=True)
    else:
        raise gr.Error("You can't continue without checking the box, Boss!")
# Interface
with gr.Blocks(theme="Nymbo/Alyx_Theme") as app:
    # Intro screen
    with gr.Group(visible=True) as intro:
        gr.Markdown("""
        ## 📜 Terms of Use and Disclaimer
        This application contains an experimental AI.
        It does not provide medical, legal, ethical, or financial advice.
        All responsibility lies with the user.
        Check the box to continue.
        """)
        agree = gr.Checkbox(label="✅ I have read and agree")
        start_btn = gr.Button("🚀 Start")
    # Chat screen
    with gr.Group(visible=False) as chat:
        chatbot = gr.Chatbot(label="🧠 AlpDroid", show_label=False)
        with gr.Row():
            user_input = gr.Textbox(placeholder="Type your message...", scale=4)
            file_upload = gr.File(label="📎", file_types=[".txt", ".csv", ".md", ".json"], scale=1)
        send_btn = gr.Button("Send")
        # Settings
        with gr.Accordion("Settings", open=False):
            temperature = gr.Slider(0.1, 1.0, value=0.9, label="🔥 Temperature")
            max_new_tokens = gr.Slider(64, 1024, value=256, step=64, label="🧠 Max New Tokens")
            top_p = gr.Slider(0.1, 1.0, value=0.95, label="🎯 Top-p")
            repetition_penalty = gr.Slider(1.0, 2.0, value=1.2, label="🔁 Repetition Penalty")
    # Conversation history
    state = gr.State([])
    def user_submit(msg, history):
        # Append the new user turn with an empty bot slot.
        history = history + [[msg, None]]
        return "", history
    def bot_response(history, file, temperature, max_new_tokens, top_p, repetition_penalty):
        message = history[-1][0]
        # Stream partial completions into the last chat turn.
        for partial in generate(message, history[:-1], file, temperature, max_new_tokens, top_p, repetition_penalty):
            history[-1][1] = partial
            yield history, history
    # Chain the handlers so the bot responds after the user turn is recorded.
    send_btn.click(fn=user_submit, inputs=[user_input, state], outputs=[user_input, state]).then(
        fn=bot_response,
        inputs=[state, file_upload, temperature, max_new_tokens, top_p, repetition_penalty],
        outputs=[chatbot, state],
    )
    # Transition from the intro screen to the chat
    start_btn.click(fn=start_chat, inputs=[agree], outputs=[intro, chat])
app.launch()