import os
import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer

TITLE = "Chattybot"
EXAMPLE_INPUT = "hello"
SYSTEM_PROMPT = "As a generative chatbot (you are not a GPT but your structure is 50% the same), your primary function is to provide helpful and friendly responses to user queries. Feel free to add some personality, but make sure your responses are accurate and helpful. Your owner and developer is: @Costikoooo (Discord user) other developers are unknown. Your name is Chattybot."

model_name = "HuggingFaceH4/zephyr-7b-beta"
model = AutoModelForCausalLM.from_pretrained(model_name)
tokenizer = AutoTokenizer.from_pretrained(model_name)

def predict_local(message, chatbot=[], system_prompt=""):
    # Build the prompt in zephyr's chat format (<|system|> / <|user|> / <|assistant|> markers)
    input_prompt = "<|system|>\n" + system_prompt + "</s>\n<|user|>\n"
    for interaction in chatbot:
        input_prompt = input_prompt + str(interaction[0]) + "</s>\n<|assistant|>\n" + str(interaction[1]) + "</s>\n<|user|>\n"
    input_prompt = input_prompt + str(message) + "</s>\n<|assistant|>\n"
    inputs = tokenizer(input_prompt, return_tensors="pt")
    # Generate new tokens; a plain forward pass only returns logits, which cannot be decoded into text
    outputs = model.generate(**inputs, max_new_tokens=256)
    generated_text = tokenizer.decode(outputs[0], skip_special_tokens=True)
    return generated_text

def test_preview_chatbot(message, history):
    response = predict_local(message, history, SYSTEM_PROMPT)
    # Keep only the text generated after the final assistant marker
    text_start = response.rfind("<|assistant|>") + len("<|assistant|>")
    response = response[text_start:]
    return response

welcome_preview_message = f"""
Welcome to **{TITLE}**! Say something like:
"{EXAMPLE_INPUT}"
"""

chatbot_preview = gr.Chatbot(layout="panel", value=[(None, welcome_preview_message)])
textbox_preview = gr.Textbox(scale=7, container=False, value=EXAMPLE_INPUT)
demo = gr.ChatInterface(test_preview_chatbot, chatbot=chatbot_preview, textbox=textbox_preview)
demo.launch()