Update app.py
Browse files
app.py
CHANGED
@@ -236,18 +236,37 @@ def ask_llama(conversation_history, category, is_final_guess=False):
|
|
236 |
|
237 |
def ask_help_agent(query):
|
238 |
try:
|
239 |
-
|
240 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
241 |
|
242 |
-
|
243 |
-
|
244 |
-
|
245 |
-
|
|
|
|
|
246 |
|
247 |
-
|
248 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
249 |
except Exception as e:
|
250 |
-
return f"Assistant
|
251 |
|
252 |
# Main game logic with enhanced UI
|
253 |
def main():
|
|
|
236 |
|
237 |
def ask_help_agent(query):
    """Answer *query* via a locally running Ollama LLM, using prior help-chat
    turns stored in Streamlit session state as conversation context.

    Parameters:
        query: the user's current question (plain string).

    Returns:
        The assistant's reply string (may contain Markdown). On any failure
        this returns a user-facing error message instead of raising, so the
        caller can render it directly in the chat UI.
    """
    try:
        import ollama
        import requests

        # (1) Fail fast with setup instructions if the Ollama server is not
        #     reachable on its default local port. Catch only request-level
        #     failures — a bare `except:` here would also swallow
        #     KeyboardInterrupt/SystemExit.
        try:
            requests.get("http://localhost:11434", timeout=5)
        except requests.exceptions.RequestException:
            return "🛑 **Ollama is not running!**\n\nPlease:\n1. [Download Ollama](https://ollama.com)\n2. Run `ollama serve` in terminal\n3. Pull a model (`ollama pull llama3`)"

        # (2) Rebuild the transcript in the shape ollama.chat() expects:
        #     a system prompt followed by alternating user/assistant turns
        #     taken from the stored help conversation.
        messages = [{"role": "system", "content": "You are a helpful AI assistant."}]

        if "help_conversation" in st.session_state:
            for msg in st.session_state.help_conversation:
                if msg.get("query"):
                    messages.append({"role": "user", "content": msg["query"]})
                if msg.get("response"):
                    messages.append({"role": "assistant", "content": msg["response"]})

        messages.append({"role": "user", "content": query})

        # (3) Ask the local model for the next assistant turn.
        response = ollama.chat(
            model="llama3",  # or "mistral" for lighter model
            messages=messages,
            options={"temperature": 0.7}
        )
        return response['message']['content']
    except Exception as e:
        # Boundary handler: surface any other failure (missing model, import
        # error, malformed response) as a chat message rather than crashing
        # the Streamlit app.
        return f"⚠️ **Assistant Error**\n\n{str(e)}\n\nPlease ensure Ollama is installed and running."
|
270 |
|
271 |
# Main game logic with enhanced UI
|
272 |
def main():
|