Update app.py
app.py CHANGED
@@ -354,6 +354,9 @@ def summarize_web_results(query: str, search_results: List[Dict[str, str]], conv
     return f"An error occurred during summarization: {str(e)}"
 
 
+import os
+import google.generativeai as genai
+
 def get_response_from_gemini(query, context, file_type, num_calls=1, temperature=0.2):
     # Configure the Gemini API
     genai.configure(api_key=os.environ["GEMINI_API_KEY"])
@@ -365,7 +368,7 @@ def get_response_from_gemini(query, context, file_type, num_calls=1, temperature
             "temperature": temperature,
             "top_p": 1,
             "top_k": 1,
-            "max_output_tokens":
+            "max_output_tokens": 20000,
         },
     )
 
@@ -383,22 +386,17 @@ def get_response_from_gemini(query, context, file_type, num_calls=1, temperature
     else:
         raise ValueError("Invalid file type. Use 'excel' or 'pdf'.")
 
-
-    chat_session = model.start_chat(history=[])
+    full_prompt = f"{system_instruction}\n\nContext:\n{context}\n\nUser query: {query}"
 
-    full_response = ""
     for _ in range(num_calls):
         try:
-            #
-            response =
-
-
-
+            # Generate content with streaming enabled
+            response = model.generate_content(full_prompt, stream=True)
+            for chunk in response:
+                if chunk.text:
+                    yield chunk.text
         except Exception as e:
-
-            return f"An error occurred with the Gemini model: {str(e)}. Please try again."
-
-    return full_response.strip()
+            yield f"An error occurred with the Gemini model: {str(e)}. Please try again."
 
 def get_response_from_excel(query, model, context, num_calls=3, temperature=0.2):
     logging.info(f"Getting response from Excel using model: {model}")