Tim Luka Horstmann committed on
Commit 5bf6ded · 1 Parent(s): 43799fd

fix argument

Files changed (1)
  1. app.py +3 -3
app.py CHANGED

@@ -168,14 +168,14 @@ async def stream_response_gemini(query, history):
         contents.append(
             types.Content(
                 role=msg["role"],
-                parts=[ types.Part.from_text(msg["content"]) ]
+                parts=[ types.Part.from_text(text=msg["content"]) ]
             )
         )
     # finally append the new user question
     contents.append(
         types.Content(
             role="user",
-            parts=[ types.Part.from_text(query) ]
+            parts=[ types.Part.from_text(text=query) ]
         )
     )
 
@@ -206,7 +206,7 @@ async def stream_response_gemini(query, history):
         yield f"data: Sorry, I encountered an error with Gemini API: {str(e)}\n\n"
         yield "data: [DONE]\n\n"
 
-
+
 async def stream_response_local(query, history):
     """Stream response using local model"""
     logger.info(f"Processing query with local model: {query}")
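
For context: in recent releases of the google-genai SDK, types.Part.from_text takes its argument as keyword-only, so the positional calls removed here would raise a TypeError. Below is a minimal sketch of the fixed pattern, assuming history is a list of {"role": ..., "content": ...} dicts as suggested by the diff; the helper name build_contents is illustrative and not taken from app.py.

# Minimal sketch, not the exact app.py code.
from google.genai import types

def build_contents(query, history):
    """Assemble the Gemini `contents` list from chat history plus the new query."""
    contents = []
    for msg in history:
        contents.append(
            types.Content(
                role=msg["role"],
                # keyword argument required: Part.from_text(text=...) is
                # keyword-only in recent google-genai versions
                parts=[types.Part.from_text(text=msg["content"])],
            )
        )
    # finally append the new user question
    contents.append(
        types.Content(
            role="user",
            parts=[types.Part.from_text(text=query)],
        )
    )
    return contents

The resulting list can then be passed as the contents argument of a Gemini generate/stream call, as stream_response_gemini presumably does elsewhere in app.py.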