alpdroidchat / app.py
alperall's picture
Update app.py
c6b2106 verified
raw
history blame
1.05 kB
import gradio as gr
import requests
from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
import torch
# --- Model + tokenizer setup -------------------------------------------------
# Loads Mistral-7B-OpenOrca in fp16 and lets accelerate place it on the
# available device(s) via device_map="auto".
model_name = "Open-Orca/Mistral-7B-OpenOrca"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name, device_map="auto", torch_dtype=torch.float16)
# Text-generation pipeline reused for every chat request.
chat = pipeline("text-generation", model=model, tokenizer=tokenizer, device_map="auto")

# Fetch the system prompt once at startup.
# Fix: the original requests.get() had no timeout — a stalled connection would
# hang the app forever — and no status check, so an HTTP error page would
# silently become the system prompt. Fail fast and loud instead.
prompt_url = "https://raw.githubusercontent.com/ALPERALL/AlpDroid/main/prompt.txt"
_prompt_response = requests.get(prompt_url, timeout=30)
_prompt_response.raise_for_status()
system_prompt = _prompt_response.text
def alp_droid_chat(user_input):
    """Generate one AlpDroid reply for *user_input*.

    Builds a single prompt from the module-level ``system_prompt`` plus the
    user's message, samples a completion from the ``chat`` pipeline, and
    returns only the text after the final "AlpDroid:" marker.
    """
    prompt = system_prompt + "\n\nKullanıcı: " + user_input + "\nAlpDroid:"
    generations = chat(
        prompt,
        max_new_tokens=256,
        do_sample=True,
        temperature=0.7,
        top_p=0.9,
    )
    generated = generations[0]["generated_text"]
    # Keep only what follows the last "AlpDroid:" occurrence; if the marker is
    # somehow absent, rpartition leaves the whole text in `reply`.
    _, _, reply = generated.rpartition("AlpDroid:")
    return reply.strip()
# --- Gradio UI ---------------------------------------------------------------
# Single-textbox interface wired to the chat function.
question_box = gr.Textbox(lines=4, placeholder="Sorunu yaz...")
app = gr.Interface(
    alp_droid_chat,
    question_box,
    "text",
    title="AlpDroid - OpenOrca Mistral 7B",
    description="Kolay deploy, zahmetsiz AlpDroid.",
)
app.launch()