supratipb committed on
Commit
f632a0e
·
verified ·
1 Parent(s): b1df0e1

Upload agent.py

Browse files
Files changed (1) hide show
  1. agent.py +7 -3
agent.py CHANGED
@@ -156,13 +156,10 @@ def build_graph(provider: str = "groq"):
156
  """Build the graph"""
157
  # Load environment variables from .env file
158
  if provider == "google":
159
- # Google Gemini
160
  llm = ChatGoogleGenerativeAI(model="gemini-2.0-flash", temperature=0)
161
  elif provider == "groq":
162
- # Groq https://console.groq.com/docs/models
163
  llm = ChatGroq(model="qwen-qwq-32b", temperature=0) # optional : qwen-qwq-32b gemma2-9b-it
164
  elif provider == "huggingface":
165
- # TODO: Add huggingface endpoint
166
  llm = ChatHuggingFace(
167
  llm=HuggingFaceEndpoint(
168
  url="https://api-inference.huggingface.co/models/Meta-DeepLearning/llama-2-7b-chat-hf",
@@ -181,14 +178,21 @@ def build_graph(provider: str = "groq"):
181
 
182
def retriever(state: MessagesState):
    """Retriever node: prefix the system prompt onto the conversation and,
    when the vector store holds a similar question, append it as a
    reference example message."""
    hits = vector_store.similarity_search(state["messages"][0].content)

    # Base message list is always system prompt + incoming messages.
    base = [sys_msg] + state["messages"]
    if not hits:
        return {"messages": base}

    example_msg = HumanMessage(
        content=f"Here I provide a similar question and answer for reference: \n\n{hits[0].page_content}",
    )
    return {"messages": base + [example_msg]}
193
 
194
 
 
156
  """Build the graph"""
157
  # Load environment variables from .env file
158
  if provider == "google":
 
159
  llm = ChatGoogleGenerativeAI(model="gemini-2.0-flash", temperature=0)
160
  elif provider == "groq":
 
161
  llm = ChatGroq(model="qwen-qwq-32b", temperature=0) # optional : qwen-qwq-32b gemma2-9b-it
162
  elif provider == "huggingface":
 
163
  llm = ChatHuggingFace(
164
  llm=HuggingFaceEndpoint(
165
  url="https://api-inference.huggingface.co/models/Meta-DeepLearning/llama-2-7b-chat-hf",
 
178
 
179
def retriever(state: MessagesState):
    """Retriever node: prepend the system prompt and, when available, a
    similar reference Q&A retrieved from the vector store.

    Emits DEBUG prints tracing the incoming state and the retrieval result.
    """
    print("DEBUG: Starting retriever function")
    incoming = state["messages"]
    print(f"DEBUG: Incoming state messages count: {len(incoming)}")
    print(f"DEBUG: Content of first message: {incoming[0].content}")

    matches = vector_store.similarity_search(incoming[0].content)
    print(f"DEBUG: Found {len(matches)} similar questions")

    # Always lead with the system prompt; append the example only on a hit.
    augmented = [sys_msg] + incoming
    if matches:
        example_msg = HumanMessage(
            content=f"Here I provide a similar question and answer for reference: \n\n{matches[0].page_content}",
        )
        print(f"DEBUG: Example message content preview: {example_msg.content[:100]}...")  # preview first 100 chars only
        augmented = augmented + [example_msg]
    else:
        print("DEBUG: No similar question found")
    return {"messages": augmented}
197
 
198