Hashim998 committed on
Commit
50ce9bf
·
verified ·
1 Parent(s): 7614512

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +64 -60
app.py CHANGED
@@ -4,7 +4,11 @@ import json
4
  import time
5
  import gradio as gr
6
  import logging
 
 
7
  from dotenv import load_dotenv
 
 
8
  import google.generativeai as genai
9
 
10
  from langgraph.graph import START, MessagesState, StateGraph
@@ -18,19 +22,20 @@ from langchain_core.prompts.chat import (
18
  )
19
  from langchain_google_genai import ChatGoogleGenerativeAI
20
 
21
-
22
- # === Setup Logging & API Key ===
23
- logging.basicConfig(level=logging.INFO)
24
  logger = logging.getLogger(__name__)
25
- load_dotenv() # For local .env use
26
 
27
- GEMINI_API_KEY = os.getenv("GEMINI_API_KEY")
28
- if not GEMINI_API_KEY:
29
- raise ValueError("Missing GEMINI_API_KEY")
30
- genai.configure(api_key=GEMINI_API_KEY)
 
 
31
 
 
32
 
33
- # === Chat History Storage ===
34
  HISTORY_FILE = "chat_history.json"
35
 
36
  def load_all_sessions():
@@ -45,8 +50,7 @@ def save_all_sessions(sessions):
45
 
46
  sessions = load_all_sessions()
47
 
48
-
49
- # === Gemini Chatbot Class ===
50
  class GeminiChatbot:
51
  def __init__(self):
52
  self.setup_model()
@@ -54,7 +58,7 @@ class GeminiChatbot:
54
  def setup_model(self):
55
  system_template = """
56
  You are a helpful, respectful and honest assistant. Always answer as helpfully as possible, while being safe.
57
- Your answers should be informative, engaging, and accurate. If a question doesn't make any sense, or isn't factually coherent, explain why instead of answering something incorrect.
58
  If you don't know the answer to a question, please don't share false information.
59
  """
60
 
@@ -75,12 +79,7 @@ class GeminiChatbot:
75
  def call_model(state: MessagesState):
76
  chat_history = state["messages"][:-1]
77
  user_input = state["messages"][-1].content
78
-
79
- formatted_messages = self.prompt.format_messages(
80
- chat_history=chat_history,
81
- input=user_input
82
- )
83
-
84
  response = self.model.invoke(formatted_messages)
85
  return {"messages": response}
86
 
@@ -92,52 +91,50 @@ class GeminiChatbot:
92
  self.app = workflow.compile(checkpointer=self.memory)
93
 
94
  def get_response(self, user_message, history, thread_id):
95
- try:
96
- from langchain_core.messages import HumanMessage, AIMessage
97
 
98
- messages = []
 
 
99
  for user, bot in history:
100
- messages.append(HumanMessage(content=user))
101
- messages.append(AIMessage(content=bot))
102
 
103
  input_msg = HumanMessage(content=user_message)
104
- full_history = messages + [input_msg]
105
  config = {"configurable": {"thread_id": thread_id}}
106
 
 
107
  response = self.app.invoke({"messages": full_history}, config)
108
- final_text = response["messages"][-1].content
109
 
110
  full_response = ""
111
- for char in final_text:
112
  full_response += char
113
  yield full_response
114
  time.sleep(0.01)
115
 
116
  except Exception as e:
117
  logger.error(f"Response error: {e}")
118
- yield f"Error: {str(e)}"
119
 
120
 
121
- # === Gradio UI ===
122
  chatbot = GeminiChatbot()
123
 
124
-
125
  def launch_interface():
126
  with gr.Blocks(
127
  theme=gr.themes.Base(),
128
  css="""
129
- body { background-color: black; }
130
- .gr-textbox textarea { background-color: #2f2f2f; color: white; }
131
- .gr-chatbot { background-color: #2f2f2f; color: white; }
132
- .gr-button, .gr-dropdown {
133
- margin: 5px auto;
134
- display: block;
135
- width: 50%;
136
- }
137
- .gr-markdown h2 {
138
- text-align: center;
139
- color: white;
140
- }
141
  """
142
  ) as demo:
143
  demo.title = "LangChain Powered ChatBot"
@@ -147,23 +144,28 @@ def launch_interface():
147
  session_names = gr.State()
148
  history = gr.State([])
149
 
150
- # Startup session
151
  if not sessions:
152
  new_id = str(uuid.uuid4())
153
  sessions[new_id] = []
154
  save_all_sessions(sessions)
 
 
155
  else:
156
- new_id = next(iter(sessions.keys()))
157
-
158
- current_thread_id.value = new_id
159
- session_names.value = [f"PREVIOUS: {k}" for k in sessions if sessions[k]]
160
 
161
  def get_dropdown_choices():
162
- return [f"PREVIOUS: {k}" for k in sessions if sessions[k]] + \
163
- [f"NEW: {current_thread_id.value}"]
164
 
 
165
  new_chat_btn = gr.Button("New Chat", variant="primary")
166
- session_selector = gr.Dropdown(label="Chats", choices=get_dropdown_choices(), value=f"NEW: {new_id}", interactive=True)
 
 
 
 
 
 
167
  chatbot_ui = gr.Chatbot(label="Conversation", height=350)
168
 
169
  with gr.Row():
@@ -172,25 +174,28 @@ def launch_interface():
172
 
173
  clear = gr.Button("Clear Current Chat")
174
 
 
175
  def start_new_chat():
176
  new_id = str(uuid.uuid4())
177
  sessions[new_id] = []
178
  save_all_sessions(sessions)
179
- updated_choices = [f"PREVIOUS: {k}" for k in sessions if sessions[k]] + [f"NEW: {new_id}"]
180
- return new_id, [], updated_choices, f"NEW: {new_id}"
 
181
 
182
- def switch_chat(display_name):
183
- true_id = display_name.split(": ", 1)[-1]
184
- return true_id, sessions.get(true_id, []), display_name
185
 
186
  def respond(message, history, thread_id):
 
 
 
187
  history.append((message, ""))
188
  yield history
189
 
190
- full_response = ""
191
  for chunk in chatbot.get_response(message, history[:-1], thread_id):
192
- full_response = chunk
193
- history[-1] = (message, full_response)
194
  yield history
195
 
196
  sessions[thread_id] = history
@@ -201,7 +206,7 @@ def launch_interface():
201
  save_all_sessions(sessions)
202
  return []
203
 
204
- # Events
205
  new_chat_btn.click(start_new_chat, outputs=[current_thread_id, chatbot_ui, session_selector, session_selector])
206
  session_selector.change(switch_chat, inputs=session_selector, outputs=[current_thread_id, chatbot_ui, session_selector])
207
  send.click(respond, [msg, chatbot_ui, current_thread_id], [chatbot_ui]).then(lambda: "", None, msg)
@@ -210,8 +215,7 @@ def launch_interface():
210
 
211
  return demo
212
 
213
-
214
- # === Launch ===
215
  if __name__ == "__main__":
216
  try:
217
  demo = launch_interface()
 
4
  import time
5
  import gradio as gr
6
  import logging
7
+
8
+ # Load local .env only if it exists
9
  from dotenv import load_dotenv
10
+ load_dotenv()
11
+
12
  import google.generativeai as genai
13
 
14
  from langgraph.graph import START, MessagesState, StateGraph
 
22
  )
23
  from langchain_google_genai import ChatGoogleGenerativeAI
24
 
25
+ # === Logging ===
26
+ logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s")
 
27
  logger = logging.getLogger(__name__)
 
28
 
29
+ # === Load API Key ===
30
+ os.environ["GOOGLE_API_KEY"] = os.getenv("GEMINI_API_KEY")
31
+
32
+ # GEMINI_API_KEY = os.getenv("GEMINI_API_KEY")
33
+ # if not GEMINI_API_KEY:
34
+ # raise ValueError("GEMINI_API_KEY is missing. Set it as an environment variable or Hugging Face Secret.")
35
 
36
+ genai.configure(api_key=os.getenv("GEMINI_API_KEY"))
37
 
38
+ # === Chat Storage ===
39
  HISTORY_FILE = "chat_history.json"
40
 
41
  def load_all_sessions():
 
50
 
51
  sessions = load_all_sessions()
52
 
53
+ # === Gemini LLM Chatbot ===
 
54
  class GeminiChatbot:
55
  def __init__(self):
56
  self.setup_model()
 
58
  def setup_model(self):
59
  system_template = """
60
  You are a helpful, respectful and honest assistant. Always answer as helpfully as possible, while being safe.
61
+ Your answers should be informative, engaging, and accurate. If a question doesn't make any sense, or isn't factually coherent, explain why.
62
  If you don't know the answer to a question, please don't share false information.
63
  """
64
 
 
79
  def call_model(state: MessagesState):
80
  chat_history = state["messages"][:-1]
81
  user_input = state["messages"][-1].content
82
+ formatted_messages = self.prompt.format_messages(chat_history=chat_history, input=user_input)
 
 
 
 
 
83
  response = self.model.invoke(formatted_messages)
84
  return {"messages": response}
85
 
 
91
  self.app = workflow.compile(checkpointer=self.memory)
92
 
93
  def get_response(self, user_message, history, thread_id):
94
+ from langchain_core.messages import HumanMessage, AIMessage
 
95
 
96
+ try:
97
+ # Format chat history for LangChain
98
+ langchain_history = []
99
  for user, bot in history:
100
+ langchain_history.append(HumanMessage(content=user))
101
+ langchain_history.append(AIMessage(content=bot))
102
 
103
  input_msg = HumanMessage(content=user_message)
104
+ full_history = langchain_history + [input_msg]
105
  config = {"configurable": {"thread_id": thread_id}}
106
 
107
+ # Get final response
108
  response = self.app.invoke({"messages": full_history}, config)
109
+ full_text = response["messages"][-1].content
110
 
111
  full_response = ""
112
+ for char in full_text:
113
  full_response += char
114
  yield full_response
115
  time.sleep(0.01)
116
 
117
  except Exception as e:
118
  logger.error(f"Response error: {e}")
119
+ yield f"Error: {type(e).__name__} — {str(e)}"
120
 
121
 
 
122
  chatbot = GeminiChatbot()
123
 
124
+ # === Gradio UI ===
125
  def launch_interface():
126
  with gr.Blocks(
127
  theme=gr.themes.Base(),
128
  css="""
129
+ body { background-color: black; }
130
+ .gr-textbox textarea { background-color: #2f2f2f; color: white; }
131
+ .gr-chatbot { background-color: #2f2f2f; color: white; }
132
+ .gr-button, .gr-dropdown {
133
+ margin: 5px auto;
134
+ display: block;
135
+ width: 50%;
136
+ }
137
+ .gr-markdown h2 { text-align: center; color: white; }
 
 
 
138
  """
139
  ) as demo:
140
  demo.title = "LangChain Powered ChatBot"
 
144
  session_names = gr.State()
145
  history = gr.State([])
146
 
 
147
  if not sessions:
148
  new_id = str(uuid.uuid4())
149
  sessions[new_id] = []
150
  save_all_sessions(sessions)
151
+ current_thread_id.value = new_id
152
+ session_names.value = [f"NEW: {new_id}"]
153
  else:
154
+ current_thread_id.value = next(iter(sessions))
155
+ session_names.value = [f"PREVIOUS: {k}" for k in sessions if sessions[k]]
 
 
156
 
157
  def get_dropdown_choices():
158
+ return [f"PREVIOUS: {k}" for k in sessions if sessions[k]] + [f"NEW: {current_thread_id.value}"]
 
159
 
160
+ # UI
161
  new_chat_btn = gr.Button("New Chat", variant="primary")
162
+ session_selector = gr.Dropdown(
163
+ label="Chats",
164
+ choices=get_dropdown_choices(),
165
+ value=f"NEW: {current_thread_id.value}",
166
+ interactive=True
167
+ )
168
+
169
  chatbot_ui = gr.Chatbot(label="Conversation", height=350)
170
 
171
  with gr.Row():
 
174
 
175
  clear = gr.Button("Clear Current Chat")
176
 
177
+ # === Event Functions ===
178
  def start_new_chat():
179
  new_id = str(uuid.uuid4())
180
  sessions[new_id] = []
181
  save_all_sessions(sessions)
182
+ display = f"NEW: {new_id}"
183
+ updated = [f"PREVIOUS: {k}" for k in sessions if sessions[k]] + [display]
184
+ return new_id, [], gr.update(choices=updated, value=display), display
185
 
186
+ def switch_chat(display_id):
187
+ true_id = display_id.split(": ", 1)[-1]
188
+ return true_id, sessions.get(true_id, []), display_id
189
 
190
  def respond(message, history, thread_id):
191
+ if not message.strip():
192
+ yield history
193
+ return
194
  history.append((message, ""))
195
  yield history
196
 
 
197
  for chunk in chatbot.get_response(message, history[:-1], thread_id):
198
+ history[-1] = (message, chunk)
 
199
  yield history
200
 
201
  sessions[thread_id] = history
 
206
  save_all_sessions(sessions)
207
  return []
208
 
209
+ # === Bind Events ===
210
  new_chat_btn.click(start_new_chat, outputs=[current_thread_id, chatbot_ui, session_selector, session_selector])
211
  session_selector.change(switch_chat, inputs=session_selector, outputs=[current_thread_id, chatbot_ui, session_selector])
212
  send.click(respond, [msg, chatbot_ui, current_thread_id], [chatbot_ui]).then(lambda: "", None, msg)
 
215
 
216
  return demo
217
 
218
+ # === Run App ===
 
219
  if __name__ == "__main__":
220
  try:
221
  demo = launch_interface()