Nullpointer-KK committed on
Commit d355eb6 · verified · Parent: 1a16b4c

Update app.py

Files changed (1)
  1. app.py +4 -5
app.py CHANGED
@@ -120,7 +120,7 @@ def stream_gemini(
 ) -> Generator[str, None, None]:
     """
     Streams from Google Gemini via google-genai SDK.
-    Uses the correct streaming interface: client.models.stream_generate_content(...)
+    Uses the correct streaming interface: client.models.generate_content_stream(...)
     """
     if genai is None:
         yield "❌ Gemini SDK not installed. Add `google-genai` to requirements.txt."
@@ -128,7 +128,6 @@ def stream_gemini(
 
     client = genai.Client(api_key=api_key)
 
-    # Build generation config (Gemini supports these fields; seed is optional)
     cfg_kwargs: Dict[str, Any] = {
         "temperature": float(temperature),
         "top_p": float(top_p),
@@ -139,19 +138,18 @@ def stream_gemini(
 
     response_text = ""
     try:
-        stream = client.models.stream_generate_content(
+        stream = client.models.generate_content_stream(
             model=model,
             contents=prompt,
             config=genai_types.GenerateContentConfig(**cfg_kwargs),
         )
         for chunk in stream:
-            # Each chunk typically exposes incremental text as .text
            txt = getattr(chunk, "text", None)
            if txt:
                response_text += txt
                yield response_text
 
-        # Some drivers may finish with a final aggregate text
+        # Some drivers may expose a final aggregate; safe no-op if absent.
        final = getattr(stream, "text", None)
        if final and final not in response_text:
            response_text += final
@@ -161,6 +159,7 @@ def stream_gemini(
         yield f"❌ Gemini error: {type(e).__name__}: {e}"
 
 
+
 # -------- Gradio callback --------
 def multi_llm_complete(
     provider: str,
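
Note: the renamed call matches the streaming method the google-genai SDK exposes (the old stream_generate_content name does not exist on client.models). A minimal standalone sketch of the corrected loop, outside the app's wrapper; the API key, model name, and prompt below are placeholder values for illustration:

from google import genai
from google.genai import types as genai_types

# Placeholder key and model name for illustration; substitute real values.
client = genai.Client(api_key="YOUR_API_KEY")
stream = client.models.generate_content_stream(
    model="gemini-2.0-flash",
    contents="Say hello in one short sentence.",
    config=genai_types.GenerateContentConfig(temperature=0.7, top_p=0.95),
)
for chunk in stream:
    # Each streamed chunk carries an incremental .text fragment (may be None).
    if chunk.text:
        print(chunk.text, end="", flush=True)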