Update app.py
app.py
CHANGED
@@ -126,7 +126,8 @@ def init_session_state():
         'enable_audio': True, 'download_link_cache': {}, 'username': None,
         'autosend': True, 'autosearch': True, 'last_message': "", 'last_query': "",
         'mp3_files': {}, 'timer_start': time.time(), 'quote_index': 0,
-        'quote_source': "famous", 'last_sent_transcript': "", 'old_val': None
+        'quote_source': "famous", 'last_sent_transcript': "", 'old_val': None,
+        'last_refresh': time.time()
     }
     for k, v in defaults.items():
         if k not in st.session_state:
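Note: the new `last_refresh` key rides on the existing bootstrap loop, which only seeds keys that are not already in `st.session_state`. A minimal sketch of that pattern, assuming only `streamlit` and `time`; the `refresh_rate` default of 10 is an assumption and is not shown in this hunk:

```python
import time
import streamlit as st

def init_session_state():
    defaults = {
        'refresh_rate': 10,           # assumed default; not visible in this diff
        'timer_start': time.time(),
        'last_refresh': time.time(),  # new key added by this commit
    }
    for k, v in defaults.items():
        # Seed once per session; existing values are never overwritten on rerun.
        if k not in st.session_state:
            st.session_state[k] = v
```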
@@ -258,7 +259,7 @@ async def save_chat_entry(username, message, voice, is_markdown=False):
     await broadcast_message(f"{username}|{message}", "chat")
     st.session_state.last_chat_update = time.time()
     st.session_state.chat_history.append(entry)
-    st.session_state.last_transcript = message
+    st.session_state.last_transcript = message
     return md_file, audio_file
 
 async def load_chat():
@@ -268,7 +269,6 @@ async def load_chat():
     with open(CHAT_FILE, 'r') as f:
         content = f.read().strip()
         lines = content.split('\n')
-    # Remove duplicates and empty lines
    unique_lines = list(dict.fromkeys(line for line in lines if line.strip()))
    numbered_content = "\n".join(f"{i+1}. {line}" for i, line in enumerate(unique_lines))
    return numbered_content
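Note: the dedupe step in `load_chat` relies on `dict.fromkeys`, which keeps first occurrences and preserves insertion order. A standalone sketch with hypothetical chat lines:

```python
lines = ["alice|hi", "", "bob|hey", "alice|hi", "bob|hey", "carol|yo"]

# dict.fromkeys preserves insertion order and drops later duplicates,
# so repeated and empty lines are removed without reordering the chat.
unique_lines = list(dict.fromkeys(line for line in lines if line.strip()))
numbered_content = "\n".join(f"{i+1}. {line}" for i, line in enumerate(unique_lines))

print(numbered_content)
# 1. alice|hi
# 2. bob|hey
# 3. carol|yo
```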
@@ -276,7 +276,7 @@ async def load_chat():
 # Claude Search Function
 async def perform_claude_search(query, username):
     if not query.strip() or query == st.session_state.last_transcript:
-        return None, None
+        return None, None, None
     client = anthropic.Anthropic(api_key=anthropic_key)
     response = client.messages.create(
         model="claude-3-sonnet-20240229",
@@ -286,26 +286,25 @@ async def perform_claude_search(query, username):
     result = response.content[0].text
     st.markdown(f"### Claude's Reply 🧠\n{result}")
 
-    # Save to chat history with audio
     voice = FUN_USERNAMES.get(username, "en-US-AriaNeural")
     md_file, audio_file = await save_chat_entry(username, f"Claude Search: {query}\nResponse: {result}", voice, True)
-    return md_file, audio_file
+    return md_file, audio_file, result
 
 # ArXiv Search Function
-async def perform_arxiv_search(query, username):
+async def perform_arxiv_search(query, username, claude_result=None):
     if not query.strip() or query == st.session_state.last_transcript:
         return None, None
-    #
-
-
-
-
-
-
-
-
-
+    # Use Claude result if provided, otherwise perform Claude search
+    if claude_result is None:
+        client = anthropic.Anthropic(api_key=anthropic_key)
+        claude_response = client.messages.create(
+            model="claude-3-sonnet-20240229",
+            max_tokens=1000,
+            messages=[{"role": "user", "content": query}]
+        )
+        claude_result = claude_response.content[0].text
+        st.markdown(f"### Claude's Reply 🧠\n{claude_result}")
+
     enhanced_query = f"{query}\n\n{claude_result}"
     gradio_client = Client("awacke1/Arxiv-Paper-Search-And-QA-RAG-Pattern")
     refs = gradio_client.predict(
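Note: with this change `perform_claude_search` returns a third value (Claude's text) and `perform_arxiv_search` takes an optional `claude_result`, so the ArXiv step can reuse the answer instead of calling Claude twice. A hedged sketch of how a caller might chain the two coroutines defined above; the query and username are placeholders:

```python
import asyncio

username = "alice"  # placeholder; in the app this is st.session_state.username
query = "What is retrieval-augmented generation?"  # placeholder query

md_claude, audio_claude, claude_result = asyncio.run(
    perform_claude_search(query, username)
)
if claude_result is not None:
    # Passing claude_result skips the duplicate Claude call inside the ArXiv search.
    md_arxiv, audio_arxiv = asyncio.run(
        perform_arxiv_search(query, username, claude_result)
    )
```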
@@ -314,7 +313,6 @@ async def perform_arxiv_search(query, username):
     result = f"π {enhanced_query}\n\n{refs}"
     st.markdown(f"### ArXiv Results π\n{result}")
 
-    # Save to chat history with audio
     voice = FUN_USERNAMES.get(username, "en-US-AriaNeural")
     md_file, audio_file = await save_chat_entry(username, f"ArXiv Search: {query}\nClaude Response: {claude_result}\nArXiv Results: {refs}", voice, True)
     return md_file, audio_file
@@ -514,7 +512,7 @@ def load_votes(file):
     with open(file, 'r') as f:
         lines = f.read().strip().split('\n')
     votes = {}
-    for line in lines[2:]:
+    for line in lines[2:]:
         if line.strip() and 'voted for' in line:
             item = line.split('voted for ')[1]
             votes[item] = votes.get(item, 0) + 1
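Note: the tally in `load_votes` skips the first two lines (assumed to be a file header) and counts one vote per "X voted for Y" entry. A small sketch with hypothetical file contents:

```python
lines = [
    "# Vote Log",              # the first two lines are skipped by lines[2:]
    "",
    "alice voted for cats",
    "bob voted for dogs",
    "carol voted for cats",
]

votes = {}
for line in lines[2:]:
    if line.strip() and 'voted for' in line:
        item = line.split('voted for ')[1]
        votes[item] = votes.get(item, 0) + 1

print(votes)  # {'cats': 2, 'dogs': 1}
```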
@@ -599,6 +597,7 @@ def main():
     message = st.text_input(f"Message as {st.session_state.username}", key="message_input")
     paste_result = paste_image_button("π Paste Image or Text", key="paste_button_msg")
     if paste_result.image_data is not None:
+        voice = FUN_USERNAMES.get(st.session_state.username, "en-US-AriaNeural")
         if isinstance(paste_result.image_data, str):
             st.session_state.message_text = paste_result.image_data
             message = st.text_input(f"Message as {st.session_state.username}", key="message_input_paste", value=st.session_state.message_text)
@@ -607,7 +606,20 @@ def main():
             filename = asyncio.run(save_pasted_image(paste_result.image_data, st.session_state.username))
             if filename:
                 st.session_state.pasted_image_data = filename
-
+                # Generate a placeholder speech text for ASR-like behavior
+                asr_text = f"User {st.session_state.username} requested analysis of an image uploaded at {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}"
+                # Run Claude search first
+                md_file_claude, audio_file_claude, claude_result = asyncio.run(perform_claude_search(asr_text, st.session_state.username))
+                if audio_file_claude:
+                    play_and_download_audio(audio_file_claude)
+                # Feed Claude result to ArXiv search
+                md_file_arxiv, audio_file_arxiv = asyncio.run(perform_arxiv_search(asr_text, st.session_state.username, claude_result))
+                if audio_file_arxiv:
+                    play_and_download_audio(audio_file_arxiv)
+                st.session_state.timer_start = time.time()
+                save_username(st.session_state.username)
+                st.rerun()
+
     if (message and message != st.session_state.last_message) or st.session_state.pasted_image_data:
         st.session_state.last_message = message
         col_send, col_claude, col_arxiv = st.columns([1, 1, 1])
@@ -630,7 +642,7 @@ def main():
         if st.button("🧠 Claude", key="claude_button"):
             voice = FUN_USERNAMES.get(st.session_state.username, "en-US-AriaNeural")
             if message.strip():
-                md_file, audio_file = asyncio.run(perform_claude_search(message, st.session_state.username))
+                md_file, audio_file, _ = asyncio.run(perform_claude_search(message, st.session_state.username))
                 if audio_file:
                     play_and_download_audio(audio_file)
                 st.session_state.timer_start = time.time()
@@ -742,6 +754,57 @@ def main():
     if zip_name:
         st.sidebar.markdown(get_download_link(zip_name, "zip"), unsafe_allow_html=True)
 
+    # Refresh Timer in Sidebar
+    st.sidebar.subheader("Set Refresh Rate ⏳")
+    st.markdown("""
+    <style>
+    .timer {
+        font-size: 24px;
+        color: #ffcc00;
+        text-align: center;
+        animation: pulse 1s infinite;
+    }
+    @keyframes pulse {
+        0% { transform: scale(1); }
+        50% { transform: scale(1.1); }
+        100% { transform: scale(1); }
+    }
+    </style>
+    """, unsafe_allow_html=True)
+
+    refresh_rate = st.sidebar.slider("Refresh Rate (seconds)", min_value=1, max_value=300, value=st.session_state.refresh_rate, step=1)
+    if refresh_rate != st.session_state.refresh_rate:
+        st.session_state.refresh_rate = refresh_rate
+        st.session_state.timer_start = time.time()
+        save_username(st.session_state.username)
+
+    col1, col2, col3 = st.sidebar.columns(3)
+    with col1:
+        if st.button("π Small (1s)"):
+            st.session_state.refresh_rate = 1
+            st.session_state.timer_start = time.time()
+            save_username(st.session_state.username)
+    with col2:
+        if st.button("π’ Medium (5s)"):
+            st.session_state.refresh_rate = 5
+            st.session_state.timer_start = time.time()
+            save_username(st.session_state.username)
+    with col3:
+        if st.button("π Large (5m)"):
+            st.session_state.refresh_rate = 300
+            st.session_state.timer_start = time.time()
+            save_username(st.session_state.username)
+
+    timer_placeholder = st.sidebar.empty()
+    start_time = st.session_state.timer_start
+    remaining_time = int(st.session_state.refresh_rate - (time.time() - start_time))
+    if remaining_time <= 0:
+        st.session_state.timer_start = time.time()
+        st.session_state.last_refresh = time.time()
+        st.rerun()
+    else:
+        timer_placeholder.markdown(f"<p class='timer'>⏳ Next refresh in: {remaining_time} seconds</p>", unsafe_allow_html=True)
+
     # Start WebSocket server in a separate thread
     if not st.session_state.server_running and not st.session_state.server_task:
         st.session_state.server_task = threading.Thread(target=start_websocket_server, daemon=True)
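Note: the sidebar timer is a simple elapsed-time check — it computes how many of `refresh_rate` seconds remain since `timer_start`, reruns the script when the countdown reaches zero, and otherwise just renders the remaining time. A minimal sketch of that logic in isolation, assuming the session keys seeded in `init_session_state` (the styled `<p class='timer'>` markup is left out):

```python
import time
import streamlit as st

# Assumed fallbacks in case init_session_state() has not run yet.
st.session_state.setdefault('refresh_rate', 10)
st.session_state.setdefault('timer_start', time.time())
st.session_state.setdefault('last_refresh', time.time())

elapsed = time.time() - st.session_state.timer_start
remaining_time = int(st.session_state.refresh_rate - elapsed)

if remaining_time <= 0:
    # Reset the countdown and force a fresh script run.
    st.session_state.timer_start = time.time()
    st.session_state.last_refresh = time.time()
    st.rerun()
else:
    st.sidebar.markdown(f"⏳ Next refresh in: {remaining_time} seconds")
```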