# SmartMirror-AI / app.py
import gradio as gr
from transformers import BlenderbotSmallTokenizer, BlenderbotSmallForConditionalGeneration
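# (If running outside a Hugging Face Space, install the dependencies first:
#  pip install gradio transformers torch)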
# Load a lightweight model to fit in Spaces memory
model_name = "facebook/blenderbot_small-90M"
# blenderbot_small checkpoints use the BlenderbotSmall* classes, not the larger Blenderbot ones
tokenizer = BlenderbotSmallTokenizer.from_pretrained(model_name)
model = BlenderbotSmallForConditionalGeneration.from_pretrained(model_name)
# Conversation history
chat_history = ""
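# Note: as a module-level global, this history is shared by every visitor to the Space
# and keeps growing for as long as the app stays up.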
def chatbot_response(user_message):
    global chat_history
    counseling_prefix = (
        "You are a friendly counselor and caring friend. "
        "When the user is sad, comfort them with empathy and motivational quotes or jokes. "
        "When the user is happy, encourage and celebrate with them.\n"
    )
    # Build the prompt: persona + running history + the new user message
    full_input = counseling_prefix + chat_history + f"User: {user_message}\nAI:"
    # Truncate so a long history cannot exceed the model's maximum input length
    inputs = tokenizer([full_input], return_tensors="pt", truncation=True)
    reply_ids = model.generate(**inputs, max_length=200, pad_token_id=tokenizer.eos_token_id)
    reply = tokenizer.decode(reply_ids[0], skip_special_tokens=True)
    # Save the exchange so later turns have context
    chat_history += f"User: {user_message}\nAI: {reply}\n"
    return reply
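# Optional local sanity check (illustrative, not needed for the Space itself):
# print(chatbot_response("I had a rough day at work."))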
# Create Gradio interface
with gr.Blocks() as demo:
    gr.Markdown("<h1 style='text-align:center;'>🤖 Counseling Chatbot</h1><p style='text-align:center;'>Your caring AI friend</p>")
    chatbot_ui = gr.Chatbot()
    user_input = gr.Textbox(placeholder="Type your message here...", label="Your message")

    def respond(message, history):
        bot_reply = chatbot_response(message)
        history.append((message, bot_reply))
        # Return the updated chat plus an empty string to clear the textbox
        return history, ""

    user_input.submit(respond, [user_input, chatbot_ui], [chatbot_ui, user_input])
# Launch app
if __name__ == "__main__":
    demo.launch()