TinyChat / app.py
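"""TinyChat: a minimal Gradio chat UI around the cognitivecomputations/TinyDolphin-2.8-1.1b model."""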
from threading import Thread

from transformers import TextIteratorStreamer, pipeline
import gradio

# Load the model once at startup instead of rebuilding the pipeline on every request.
pipe = pipeline(
    "text-generation", model="cognitivecomputations/TinyDolphin-2.8-1.1b"
)

# Conversation history as (user, assistant) pairs, the format gradio.Chatbot expects.
history = []
def get_history_messages():
    """Convert the (user, assistant) history pairs into chat-format messages."""
    messages = []
    for user, assist in history:
        messages.append({"role": "user", "content": user})
        messages.append({"role": "assistant", "content": assist})
    return messages
def predict(prompt):
    messages = [
        *get_history_messages(),
        {"role": "user", "content": prompt},
    ]
    # Stream tokens as they are generated so the chat window updates incrementally.
    # Keyword arguments such as max_new_tokens are forwarded to model.generate();
    # 256 is an arbitrary cap on the reply length.
    streamer = TextIteratorStreamer(
        pipe.tokenizer, skip_prompt=True, skip_special_tokens=True
    )
    thread = Thread(
        target=pipe,
        args=(messages,),
        kwargs={"streamer": streamer, "max_new_tokens": 256},
    )
    thread.start()
    history.append((prompt, ""))
    message = ""
    for chunk in streamer:
        message += chunk
        history[-1] = (prompt, message)
        yield "", history
    thread.join()
with gradio.Blocks(fill_height=True) as demo:
    chat = gradio.Chatbot(scale=1)
    with gradio.Row(variant="compact"):
        prompt = gradio.Textbox(show_label=False, scale=6, autofocus=True)
        button = gradio.Button(scale=1)
    # Both clicking the button and pressing Enter in the textbox send the prompt.
    for handler in [button.click, prompt.submit]:
        handler(predict, inputs=[prompt], outputs=[prompt, chat])
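# Start the web UI when the script is executed directly.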
if __name__ == '__main__':
    demo.launch()