# digitalWDF / app.py — Hugging Face Space for ChatGLM-6B chat demo.
# (Header residue from the hosting page: author "bigPear", commit "Update
# app.py" 38a35b2, "raw / history / blame", file size 1.72 kB.)
import time

import gradio as gr
from transformers import AutoModel, AutoTokenizer
# Load the ChatGLM-6B checkpoint from the Hub. trust_remote_code is required
# because this model ships its own modeling/tokenization code in the repo.
tokenizer = AutoTokenizer.from_pretrained("THUDM/chatglm-6b", trust_remote_code=True)
model = AutoModel.from_pretrained("THUDM/chatglm-6b", trust_remote_code=True)
#.half().cuda()  # append to the line above for fp16 GPU inference; disabled here (CPU Space)
model = model.eval()  # inference mode: disables dropout/training behavior
# def add_text(history, text):
# history = history + [(text, None)]
# return history, ""
# def add_file(history, file):
# history = history + [((file.name,), None)]
# return history
# def bot(history):
# response = "**That's cool!**"
# history[-1][1] = response
# return history
def predict(input, history=None):
    """Run one chat turn through ChatGLM-6B.

    Args:
        input: The user's message for this turn.
        history: Prior turns as (query, response) pairs; None starts a
            fresh conversation.

    Returns:
        The updated history twice — the Gradio pattern of feeding the same
        value to both the chatbot display and the state component.
    """
    chat_history = [] if history is None else history
    # model.chat returns (response, updated_history); only the history is
    # propagated to the UI, which already contains the new response.
    _, chat_history = model.chat(tokenizer, input, chat_history)
    return chat_history, chat_history
with gr.Blocks() as demo:
    chatbot = gr.Chatbot()
    msg = gr.Textbox()
    clear = gr.Button("Clear")

    def user(user_message, history):
        """Append the user's turn (reply pending) and clear the textbox."""
        return "", history + [[user_message, None]]

    def bot(msg, history):
        """Generate the model's reply for the pending turn and stream it
        into the chatbot one character at a time."""
        # By the time bot() runs, user() has already cleared the textbox, so
        # `msg` is "" — the actual question lives in the pending history pair.
        user_message = history[-1][0]
        # predict() returns (history, history) with the new turn appended;
        # the model's reply is the second element of that last pair.
        # (history[:-1] holds the completed [query, response] pairs — assumes
        # model.chat accepts list pairs as well as tuples; TODO confirm.)
        new_history, _ = predict(user_message, history[:-1])
        bot_message = new_history[-1][1]
        history[-1][1] = ""
        for character in bot_message:
            history[-1][1] += character
            time.sleep(0.05)  # typewriter effect; needs `import time` at top
            yield history

    # On submit: user() appends the turn and clears the box, then bot() streams.
    msg.submit(user, [msg, chatbot], [msg, chatbot], queue=False).then(
        bot, [msg, chatbot], [msg, chatbot]
    )
    clear.click(lambda: None, None, chatbot, queue=False)

demo.queue()  # queuing is required for generator (streaming) handlers
demo.launch()
# Alternative, non-streaming wiring kept for reference:
# txt.submit(predict, [txt, state], [chatbot, state])
# button.click(predict, [txt, state], [chatbot, state])
# btn.upload(add_file, [chatbot, btn], [chatbot]).then(
#     bot, chatbot, chatbot
# )
# NOTE(review): removed a duplicate `demo.launch()` that appeared here — the
# demo is already launched above; launching the same Blocks twice is a bug.