Manojkumarpandi committed on
Commit
3c5bf9e
·
verified ·
1 Parent(s): 4041d23

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +5 -13
app.py CHANGED
@@ -170,24 +170,16 @@ def orchestrator(query):
170
  }
171
  }
172
  """
173
-
174
- # Create content for the orchestrator
175
- contents = [
176
- {
177
- "role": "user",
178
- "parts": [orchestrator_prompt]
179
- }
180
- ]
181
 
182
  # Generate content
183
- response = genai.generate_content(
184
  model="gemini-1.5-flash",
185
- contents=contents,
186
  )
187
 
188
  # Parse the response
189
  try:
190
- decision_text = response.text
191
  # Extract JSON from the response (it might be wrapped in markdown code blocks)
192
  if "```json" in decision_text:
193
  json_str = decision_text.split("```json")[1].split("```")[0].strip()
@@ -319,8 +311,8 @@ if prompt := st.chat_input("Ask me anything about finance or investing..."):
319
  # Stream the response
320
  response_stream = client.models.generate_content_stream(
321
  model="gemini-1.5-flash",
322
- contents=contents,
323
- config=generate_content_config,
324
  )
325
 
326
  # Process streaming response
 
170
  }
171
  }
172
  """
 
 
 
 
 
 
 
 
173
 
174
  # Generate content
175
+ response = genai.generate_text(
176
  model="gemini-1.5-flash",
177
+ prompt=orchestrator_prompt,
178
  )
179
 
180
  # Parse the response
181
  try:
182
+ decision_text = response.result
183
  # Extract JSON from the response (it might be wrapped in markdown code blocks)
184
  if "```json" in decision_text:
185
  json_str = decision_text.split("```json")[1].split("```")[0].strip()
 
311
  # Stream the response
312
  response_stream = client.models.generate_content_stream(
313
  model="gemini-1.5-flash",
314
+ prompt=user_query,
315
+ stream=True,
316
  )
317
 
318
  # Process streaming response