import gradio as gr
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM

# Load a lightweight model to fit in Spaces memory
model_name = "facebook/blenderbot_small-90M"
# blenderbot_small-90M uses the BlenderbotSmall* classes; the Auto* classes resolve them
tokenizer = AutoTokenizer.from_pretrained(model_name)
tokenizer.truncation_side = "left"  # keep the most recent turns when the prompt is too long
model = AutoModelForSeq2SeqLM.from_pretrained(model_name)

# Conversation history
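# Note: this is a module-level global, so the history is shared by every user of the Space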
chat_history = ""

def chatbot_response(user_message):
    global chat_history
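    # BlenderbotSmall is not instruction-tuned, so this prefix only adds
    # conversational context; it is not an enforced system prompt.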
    counseling_prefix = (
        "You are a friendly counselor and caring friend. "
        "When the user is sad, comfort them with empathy and motivational quotes or jokes. "
        "When the user is happy, encourage and celebrate with them.\n"
    )

    # Append to conversation
    full_input = counseling_prefix + chat_history + f"User: {user_message}\nAI:"
    inputs = tokenizer([full_input], return_tensors="pt", truncation=True)
    reply_ids = model.generate(**inputs, max_length=200, pad_token_id=tokenizer.eos_token_id)
    reply = tokenizer.decode(reply_ids[0], skip_special_tokens=True)

    # Save conversation
    chat_history += f"User: {user_message}\nAI: {reply}\n"
    return reply

# Create Gradio interface
with gr.Blocks() as demo:
    gr.Markdown("<h1 style='text-align:center;'>🤖 Counseling Chatbot</h1><p style='text-align:center;'>Your caring AI friend</p>")
    chatbot_ui = gr.Chatbot()
    user_input = gr.Textbox(placeholder="Type your message here...", label="Your message")

    def respond(message, history):
        bot_reply = chatbot_response(message)
        history.append((message, bot_reply))
        return history, ""

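    # Pressing Enter submits the message, updates the chat display, and clears the textbox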
    user_input.submit(respond, [user_input, chatbot_ui], [chatbot_ui, user_input])

# Launch app
if __name__ == "__main__":
    demo.launch()