Parishri07 committed
Commit 31ef859 · verified · 1 Parent(s): 27630fb

Update app.py

Files changed (1)
  1. app.py +33 -17
app.py CHANGED
@@ -1,7 +1,7 @@
 import gradio as gr
 import os
 import pandas as pd
-from transformers import pipeline, Conversation
+from transformers import pipeline
 
 # ----- Config -----
 BASE_DIR = "data"
@@ -65,29 +65,45 @@ def reset_all():
         {} # data_state (gr.State) reset with raw dict
     )
 
-# ----- Initialize Hugging Face conversational pipeline -----
-chatbot = pipeline(model="facebook/blenderbot-400M-distill", task="conversational")
+# ----- Chatbot setup -----
+# Use a text2text-generation pipeline with a chat-capable model
+chat_model_name = "facebook/blenderbot-400M-distill" # You can replace this with any chat-capable model
+chat_generator = pipeline("text2text-generation", model=chat_model_name)
 
-# To keep conversation history
-message_list = []
-response_list = []
+# We'll maintain chat history as a list of strings (user and bot messages)
+chat_history = []
 
-def chat_with_bot(message, history):
-    global message_list, response_list
+def chat_with_bot(user_message, history):
+    global chat_history
     if history is None:
         history = []
-    message_list.append(message)
-    conversation = Conversation(text=message, past_user_inputs=message_list[:-1], generated_responses=response_list)
-    conversation = chatbot(conversation)
-    response = conversation.generated_responses[-1]
-    response_list.append(response)
-    history.append((message, response))
+        chat_history = []
+
+    # Append user message to history
+    chat_history.append(f"User: {user_message}")
+
+    # Create prompt by joining conversation history with a Bot prompt
+    prompt = "\n".join(chat_history) + "\nBot:"
+
+    # Generate response (adjust max_length and other params as needed)
+    response = chat_generator(prompt, max_length=100, do_sample=True)[0]['generated_text']
+
+    # Extract bot reply by removing prompt prefix if present
+    bot_reply = response[len(prompt):].strip()
+    if not bot_reply:
+        bot_reply = response.strip() # fallback if slicing fails
+
+    # Append bot reply to history
+    chat_history.append(f"Bot: {bot_reply}")
+
+    # Update Gradio chat history (list of (user, bot) tuples)
+    history.append((user_message, bot_reply))
+
     return history, history
 
 def clear_chat():
-    global message_list, response_list
-    message_list.clear()
-    response_list.clear()
+    global chat_history
+    chat_history = []
     return [], []
 
 # ----- UI -----
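The diff stops at the "# ----- UI -----" marker, so the Gradio wiring for the updated chat_with_bot and clear_chat functions is not shown. Below is a minimal sketch of how they could be hooked up, assuming the UI section uses a gr.Blocks layout with a gr.Chatbot plus a gr.State for the history; the component names (chatbot_ui, chat_state, msg_box, clear_btn) are hypothetical and not taken from this commit.

# Hypothetical UI wiring sketch (not part of this commit); assumes it sits in
# app.py after chat_with_bot and clear_chat are defined, where gradio is
# already imported as gr at the top of the file.
with gr.Blocks() as demo:
    chatbot_ui = gr.Chatbot(label="Chat")   # renders the list of (user, bot) tuples
    chat_state = gr.State([])               # holds the history passed back into chat_with_bot
    msg_box = gr.Textbox(placeholder="Type a message and press Enter")
    clear_btn = gr.Button("Clear chat")

    # chat_with_bot(user_message, history) returns (history, history): one copy
    # refreshes the Chatbot display, the other refreshes the State.
    msg_box.submit(chat_with_bot, inputs=[msg_box, chat_state],
                   outputs=[chatbot_ui, chat_state])

    # clear_chat() returns ([], []), emptying both the display and the state.
    clear_btn.click(clear_chat, inputs=None, outputs=[chatbot_ui, chat_state])

demo.launch()

Returning the history twice is what lets a single callback update both the visible Chatbot and the State that is fed back in on the next turn, which is why both functions end with a pair of values.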