Update app.py
app.py CHANGED
@@ -542,10 +542,14 @@ def get_response_from_llama(query, model, selected_docs, file_type, num_calls=1,
                 stream=True,
                 top_p=0.9,
             ):
-
-
+                # Updated response handling
+                if hasattr(response, 'text'):  # Check if 'text' attribute exists
+                    chunk = response.text
                 full_response += chunk
                 yield full_response  # Yield the accumulated response so far
+                else:
+                    logging.error("No 'text' attribute found in response object.")
+                    break
         except Exception as e:
             logging.error(f"Error during API call: {str(e)}")
             yield f"An error occurred with the Llama model: {str(e)}. Please try again."
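For reference, a minimal runnable sketch of the accumulation loop this change produces. The diff does not show the surrounding API call or the type of the streamed chunks, so the `stream_llama_response` helper and the `Chunk` stand-in below are hypothetical; only the `hasattr(response, 'text')` guard, the `full_response` accumulation, and the two error messages come from the committed change, and the sketch assumes the accumulation and yield sit inside the new guard.

```python
import logging
from dataclasses import dataclass
from typing import Iterable, Iterator


@dataclass
class Chunk:
    """Stand-in for a streamed response object; the real type is not shown in the diff."""
    text: str


def stream_llama_response(chunks: Iterable[object]) -> Iterator[str]:
    """Accumulate streamed chunks, mirroring the updated response handling."""
    full_response = ""
    try:
        for response in chunks:
            # Updated response handling: only consume chunks that expose .text
            if hasattr(response, "text"):
                full_response += response.text
                yield full_response  # Yield the accumulated response so far
            else:
                logging.error("No 'text' attribute found in response object.")
                break
    except Exception as e:
        logging.error(f"Error during API call: {str(e)}")
        yield f"An error occurred with the Llama model: {str(e)}. Please try again."


if __name__ == "__main__":
    # Drive the generator with fake chunks to see the accumulated partial responses.
    for partial in stream_llama_response([Chunk("Hello"), Chunk(", world"), object()]):
        print(partial)
```

As in the added lines of the diff, the `break` in the else branch stops the stream rather than silently reusing the previous chunk when an object without a `text` attribute arrives.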