from transformers import BlenderbotTokenizer, BlenderbotForConditionalGeneration
import gradio as grad

# load the distilled BlenderBot tokenizer and conversational model
chat_tkn = BlenderbotTokenizer.from_pretrained("facebook/blenderbot-400M-distill")
mdl = BlenderbotForConditionalGeneration.from_pretrained("facebook/blenderbot-400M-distill")
def createHistory(message):
    # grad.get_state()/grad.set_state() belong to an older Gradio release;
    # newer versions manage per-session state with gr.State (see the sketch below)
    history = grad.get_state() or []
    print(history)
    response = chat(message)
    history.append((message, response))
    grad.set_state(history)
    # render the dialog as HTML; with the Textbox output below it is not returned,
    # but it could feed an HTML output component instead
    html = "<div class='chatbot'>"
    for user_msg, resp_msg in history:
        html += f"<div class='user_msg'>{user_msg}</div>"
        html += f"<div class='resp_msg'>{resp_msg}</div>"
    html += "</div>"
    return response
def chat(input):
    # tokenize the user utterance; the eos token marks the end of the turn
    tkn_ids = chat_tkn(input + chat_tkn.eos_token, return_tensors='pt')
    # bot responds
    chat_ids = mdl.generate(**tkn_ids)
    # decode the generated ids back into text
    response = "Alicia: {}".format(chat_tkn.decode(chat_ids[0], skip_special_tokens=True))
    return response
out = grad.Textbox(lines=20, label="dialog", placeholder="start conversation")
grad.Interface(createHistory, inputs="text", outputs=out).launch()
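
# Note: grad.get_state()/grad.set_state() were removed in later Gradio releases.
# Below is a minimal sketch of the same dialog-history idea with the current
# gr.State component (assumes Gradio 3.x or newer; chatWithState is an
# illustrative name, not part of the original listing). It would replace the
# Interface call above rather than run alongside it.
def chatWithState(message, history):
    history = history or []                 # per-session state, starts empty
    response = chat(message)                # reuse the chat() helper defined above
    history.append((message, response))
    dialog = "\n".join(f"you: {u}\n{r}" for u, r in history)
    return dialog, history                  # rendered dialog + updated state

grad.Interface(
    chatWithState,
    inputs=[grad.Textbox(placeholder="start conversation"), grad.State(value=[])],
    outputs=[grad.Textbox(lines=20, label="dialog"), grad.State()],
).launch()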