Nullpointer-KK committed on
Commit 1a16b4c · verified · 1 Parent(s): f783fff

Update app.py

Files changed (1)
  1. app.py +19 -31
app.py CHANGED
@@ -119,7 +119,8 @@ def stream_gemini(
     api_key: str,
 ) -> Generator[str, None, None]:
     """
-    Streams from Google Gemini via google.genai SDK.
+    Streams from Google Gemini via google-genai SDK.
+    Uses the correct streaming interface: client.models.stream_generate_content(...)
     """
     if genai is None:
         yield "❌ Gemini SDK not installed. Add `google-genai` to requirements.txt."
@@ -128,47 +129,34 @@ def stream_gemini(
     client = genai.Client(api_key=api_key)
 
     # Build generation config (Gemini supports these fields; seed is optional)
-    gen_cfg = {
+    cfg_kwargs: Dict[str, Any] = {
         "temperature": float(temperature),
         "top_p": float(top_p),
         "max_output_tokens": int(max_tokens),
     }
     if seed is not None:
-        gen_cfg["seed"] = int(seed)
+        cfg_kwargs["seed"] = int(seed)
 
     response_text = ""
     try:
-        with client.responses.stream(
+        stream = client.models.stream_generate_content(
             model=model,
-            input=prompt,
-            config=genai_types.GenerateContentConfig(
-                temperature=gen_cfg["temperature"],
-                top_p=gen_cfg["top_p"],
-                max_output_tokens=gen_cfg["max_output_tokens"],
-                seed=gen_cfg.get("seed"),
-            ),
-        ) as stream:
-            for event in stream:
-                # Each event may carry incremental text in candidates[0].content.parts[…].text
-                try:
-                    for cand in getattr(event, "candidates", []) or []:
-                        parts = getattr(cand, "content", None)
-                        if parts and getattr(parts, "parts", None):
-                            for p in parts.parts:
-                                txt = getattr(p, "text", None)
-                                if txt:
-                                    response_text += txt
-                                    yield response_text
-                except Exception:
-                    # Best-effort incremental parse
-                    pass
-
-            # Ensure final text is yielded (for some drivers, the last event is summary)
-            final = getattr(stream, "text", None)
-            if final and final not in response_text:
-                response_text += final
+            contents=prompt,
+            config=genai_types.GenerateContentConfig(**cfg_kwargs),
+        )
+        for chunk in stream:
+            # Each chunk typically exposes incremental text as .text
+            txt = getattr(chunk, "text", None)
+            if txt:
+                response_text += txt
                 yield response_text
 
+        # Some drivers may finish with a final aggregate text
+        final = getattr(stream, "text", None)
+        if final and final not in response_text:
+            response_text += final
+            yield response_text
+
     except Exception as e:
         yield f"❌ Gemini error: {type(e).__name__}: {e}"
 
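For context, a minimal sketch of how the updated generator might be consumed. The keyword names are inferred from the fields the diff uses (prompt, model, temperature, top_p, max_tokens, seed, api_key); the model id and the GEMINI_API_KEY environment variable are placeholders, not values taken from this repository.

import os

# Hypothetical call site; stream_gemini's full signature is not shown in this
# diff, so these keyword names are assumptions based on the diff above.
last = ""
for partial in stream_gemini(
    prompt="Summarize the change in one sentence.",
    model="gemini-2.0-flash",               # placeholder model id
    temperature=0.7,
    top_p=0.95,
    max_tokens=256,
    seed=None,                               # omit for non-deterministic sampling
    api_key=os.environ["GEMINI_API_KEY"],    # assumed environment variable
):
    last = partial  # each yield is the accumulated response text so far
print(last)

Because the generator yields the accumulated text rather than deltas, a UI layer (e.g. a Gradio streaming callback) can simply replace the displayed text with each yielded value.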