Docfile committed on
Commit
8beb442
·
verified ·
1 Parent(s): 2cd6b7a

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +23 -14
app.py CHANGED
@@ -45,25 +45,22 @@ def format_search_results(data):
45
 
46
  result = ""
47
 
48
- # Knowledge Graph
49
  if 'knowledgeGraph' in data:
50
  kg = data['knowledgeGraph']
51
  result += f"### {kg.get('title', '')}\n"
52
  result += f"*{kg.get('type', '')}*\n\n"
53
  result += f"{kg.get('description', '')}\n\n"
54
 
55
- # Organic Results
56
  if 'organic' in data:
57
  result += "### Résultats principaux:\n"
58
- for item in data['organic'][:3]: # Limit to top 3 results
59
  result += f"- **{item['title']}**\n"
60
  result += f" {item['snippet']}\n"
61
  result += f" [Lien]({item['link']})\n\n"
62
 
63
- # People Also Ask
64
  if 'peopleAlsoAsk' in data:
65
  result += "### Questions fréquentes:\n"
66
- for item in data['peopleAlsoAsk'][:2]: # Limit to top 2 questions
67
  result += f"- **{item['question']}**\n"
68
  result += f" {item['snippet']}\n\n"
69
 
@@ -129,16 +126,28 @@ if prompt := st.chat_input("Hey?"):
129
  formatted_results = format_search_results(web_results)
130
  prompt = f"""Question: {prompt}\n\nRésultats de recherche web:\n{formatted_results}\n\nPourrais-tu analyser ces informations et me donner une réponse complète?"""
131
 
132
- # Send message to Gemini
133
- if uploaded_gemini_file:
134
- response = st.session_state.chat.send_message([uploaded_gemini_file, "\n\n", prompt])
135
- else:
136
- response = st.session_state.chat.send_message(prompt)
137
-
138
- print(response.text)
139
- # Display assistant response
140
  with st.chat_message("assistant"):
141
- st.markdown(response.text)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
142
 
143
  except Exception as e:
144
  st.error(f"Erreur lors de l'envoi du message : {e}")
 
45
 
46
  result = ""
47
 
 
48
  if 'knowledgeGraph' in data:
49
  kg = data['knowledgeGraph']
50
  result += f"### {kg.get('title', '')}\n"
51
  result += f"*{kg.get('type', '')}*\n\n"
52
  result += f"{kg.get('description', '')}\n\n"
53
 
 
54
  if 'organic' in data:
55
  result += "### Résultats principaux:\n"
56
+ for item in data['organic'][:3]:
57
  result += f"- **{item['title']}**\n"
58
  result += f" {item['snippet']}\n"
59
  result += f" [Lien]({item['link']})\n\n"
60
 
 
61
  if 'peopleAlsoAsk' in data:
62
  result += "### Questions fréquentes:\n"
63
+ for item in data['peopleAlsoAsk'][:2]:
64
  result += f"- **{item['question']}**\n"
65
  result += f" {item['snippet']}\n\n"
66
 
 
126
  formatted_results = format_search_results(web_results)
127
  prompt = f"""Question: {prompt}\n\nRésultats de recherche web:\n{formatted_results}\n\nPourrais-tu analyser ces informations et me donner une réponse complète?"""
128
 
129
+ # Add user message to chat history first
130
+ response_text = ""
131
+
132
+ # Display assistant message with streaming
 
 
 
 
133
  with st.chat_message("assistant"):
134
+ message_placeholder = st.empty()
135
+
136
+ # Send message to chat with streaming
137
+ if uploaded_gemini_file:
138
+ response = st.session_state.chat.send_message([uploaded_gemini_file, "\n\n", prompt], stream=True)
139
+ else:
140
+ response = st.session_state.chat.send_message(prompt, stream=True)
141
+
142
+ # Process the streaming response
143
+ for chunk in response:
144
+ if chunk.text:
145
+ response_text += chunk.text
146
+ # Update the message placeholder with the accumulated response
147
+ message_placeholder.markdown(response_text + "▌")
148
+
149
+ # Remove the cursor and display final response
150
+ message_placeholder.markdown(response_text)
151
 
152
  except Exception as e:
153
  st.error(f"Erreur lors de l'envoi du message : {e}")