Spaces:
Runtime error
Runtime error
Update app.py
Browse files
app.py
CHANGED
@@ -1,55 +1,95 @@
|
|
1 |
import gradio as gr
|
|
|
|
|
2 |
from search import search_google
|
3 |
from llm import generate_answer
|
4 |
from memory import ConversationMemory
|
|
|
5 |
|
6 |
# Initialize conversation memory
|
7 |
memory = ConversationMemory()
|
8 |
|
9 |
-
|
|
|
|
|
|
|
|
|
10 |
# Retrieve conversation context
|
11 |
context = memory.get_context()
|
12 |
|
13 |
-
# Search for information
|
14 |
-
search_results =
|
15 |
|
16 |
if not search_results:
|
17 |
return "I couldn't find any relevant information about that. Could you try rephrasing your question?"
|
18 |
|
|
|
|
|
|
|
19 |
# Generate human-like response
|
20 |
-
answer = generate_answer(
|
21 |
question=question,
|
22 |
context=context,
|
23 |
search_results=search_results
|
24 |
)
|
25 |
|
26 |
# Update conversation history
|
27 |
-
memory.add_exchange(question, answer)
|
28 |
|
29 |
# Format response with sources
|
30 |
formatted_response = f"""
|
31 |
-
π€ **Assistant**: {answer['response']}
|
32 |
-
|
33 |
-
π **Sources I used**:
|
34 |
"""
|
35 |
for source in answer['sources']:
|
36 |
formatted_response += f"- [{source['title']}]({source['url']})\n"
|
37 |
|
|
|
|
|
38 |
return formatted_response
|
39 |
|
40 |
-
#
|
41 |
-
|
42 |
-
|
43 |
-
|
44 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
45 |
clear = gr.Button("Clear History")
|
|
|
46 |
|
47 |
-
def respond(message, chat_history):
|
48 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
49 |
chat_history.append((message, bot_message))
|
50 |
return "", chat_history
|
51 |
|
52 |
msg.submit(respond, [msg, chatbot], [msg, chatbot])
|
53 |
-
clear.click(lambda: None, None, chatbot, queue=False)
|
54 |
|
55 |
-
|
|
|
|
1 |
import gradio as gr
|
2 |
+
import asyncio
|
3 |
+
from functools import lru_cache
|
4 |
from search import search_google
|
5 |
from llm import generate_answer
|
6 |
from memory import ConversationMemory
|
7 |
+
from utils import async_timeout
|
8 |
|
# Initialize conversation memory
# Module-level singleton: one shared history for the whole process, so all
# concurrent chat sessions share the same memory — TODO confirm intended.
memory = ConversationMemory()
@async_timeout(30)  # Abort the whole request if it takes longer than 30 seconds
async def ask_agent(question, progress=gr.Progress()):
    """Answer *question* using cached web search plus conversation memory.

    Args:
        question: The user's natural-language question.
        progress: Progress reporter, called as ``progress(fraction, desc=...)``.
            Defaults to a Gradio tracker (the documented Gradio pattern for
            event handlers).

    Returns:
        A Markdown-formatted answer string with a list of source links, or an
        apology string when the search returned no results.
    """
    progress(0.1, desc="📡 Searching the web...")

    # Retrieve conversation context
    context = memory.get_context()

    # Search for information (with caching). The cache helper may be a plain
    # function or a coroutine function; awaiting unconditionally breaks the
    # sync case (TypeError: not awaitable), so only await an actual coroutine.
    search_results = cached_search_async(question, num_results=5)
    if asyncio.iscoroutine(search_results):
        search_results = await search_results

    if not search_results:
        return "I couldn't find any relevant information about that. Could you try rephrasing your question?"

    progress(0.4, desc="🧠 Processing information...")

    # Generate human-like response
    answer = await generate_answer(
        question=question,
        context=context,
        search_results=search_results
    )

    # Update conversation history (store the answer text, not the whole dict)
    memory.add_exchange(question, answer['response'])

    # Format response with sources
    formatted_response = f"""
🤖 **Assistant**: {answer['response']}\n
🔍 **Sources I used**:\n
"""
    for source in answer['sources']:
        formatted_response += f"- [{source['title']}]({source['url']})\n"

    progress(1.0, desc="✅ Response ready")
    return formatted_response
50 |
|
51 |
+
# Cached async search
|
52 |
+
@lru_cache(maxsize=100)
|
53 |
+
def cached_search_async(query, num_results=5):
|
54 |
+
return search_google(query, num_results)
|
55 |
+
|
# Gradio chat interface with progress tracking
with gr.Blocks(theme=gr.themes.Soft(), css=".gradio-container {max-width: 800px; margin: auto;}") as demo:
    gr.Markdown("""
    <div style="text-align: center;">
    <h1>🧠 AI Research Assistant</h1>
    <p>I can help you find information on any topic!</p>
    </div>
    """)

    chatbot = gr.Chatbot(height=400, bubble_full_width=False)
    msg = gr.Textbox(label="Your Question", placeholder="Ask me anything...")
    clear = gr.Button("Clear History")
    status = gr.Textbox("", label="Status", interactive=False)

    async def respond(message, chat_history):
        """Handle one chat turn: ask the agent, then append to the history."""
        # Collect (fraction, description) progress events from ask_agent.
        tracker = []

        def record_progress(fraction, desc=None, **_ignored):
            # ask_agent reports progress with a KEYWORD argument
            # (progress(0.1, desc="...")); the old positional-only lambda
            # raised TypeError("unexpected keyword argument 'desc'") on
            # every request, which the broad except below then swallowed.
            tracker.append((fraction, desc))

        try:
            bot_message = await ask_agent(message, progress=record_progress)

            if tracker:
                # NOTE(review): assigning a component's .value inside a
                # handler does not push an update to the browser; `status`
                # would need to be an output of this event to refresh.
                status.value = tracker[-1][1]
        except Exception as e:
            # Surface a trimmed error message instead of crashing the UI.
            bot_message = f"⚠️ Sorry, I encountered an error: {str(e)[:100]}"
            status.value = "Error occurred"

        chat_history.append((message, bot_message))
        return "", chat_history

    msg.submit(respond, [msg, chatbot], [msg, chatbot])

    def _clear_history():
        # Wipe stored memory AND the visible chat window. The old lambda
        # returned a 2-tuple to a single output component, which Gradio
        # cannot map; return exactly one value (None clears the Chatbot).
        memory.clear()
        return None

    clear.click(_clear_history, None, chatbot, queue=False)
if __name__ == "__main__":
    # Gradio 3.x spells the queue size queue(concurrency_count=...); Gradio 4
    # removed that keyword (renamed default_concurrency_limit), making the
    # old call raise TypeError at startup — a likely cause of the Space's
    # "Runtime error". Try the old spelling and fall back to the new one.
    try:
        demo.queue(concurrency_count=4)
    except TypeError:
        demo.queue(default_concurrency_limit=4)
    demo.launch()