Spaces:
Runtime error
Runtime error
Update src/streamlit_app.py
Browse files- src/streamlit_app.py +50 -33
src/streamlit_app.py
CHANGED
|
@@ -1,35 +1,52 @@
|
|
| 1 |
-
import streamlit as st
|
| 2 |
import os
|
|
|
|
|
|
|
|
|
|
|
|
|
| 3 |
from google_search import google_search
|
| 4 |
-
from mistral_llm import
|
| 5 |
-
|
| 6 |
-
|
| 7 |
-
|
| 8 |
-
st.
|
| 9 |
-
|
| 10 |
-
|
| 11 |
-
|
| 12 |
-
|
| 13 |
-
|
| 14 |
-
|
| 15 |
-
|
| 16 |
-
|
| 17 |
-
|
| 18 |
-
|
| 19 |
-
|
| 20 |
-
|
| 21 |
-
|
| 22 |
-
|
| 23 |
-
|
| 24 |
-
|
| 25 |
-
st.
|
| 26 |
-
|
| 27 |
-
|
| 28 |
-
|
| 29 |
-
|
| 30 |
-
|
| 31 |
-
|
| 32 |
-
|
| 33 |
-
|
| 34 |
-
|
| 35 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
"""Streamlit UI for the Real-Time Brand Crisis Detector.

Searches Google for a brand/keyword entered by the user, then runs the
project's LLM helpers (summarization, sentiment analysis, fake-news
detection) over the fetched snippets and renders one card per article.
"""
import os
import time
from datetime import datetime

import streamlit as st
from dotenv import load_dotenv

from google_search import google_search
from mistral_llm import summarize_texts, detect_fake_news, analyze_sentiment

# Load API credentials from the repo-root .env (one directory above src/).
load_dotenv(dotenv_path=os.path.join(os.path.dirname(__file__), '..', '.env'))

st.set_page_config(page_title="π₯ Brand Crisis Detector", layout="wide", page_icon="π₯")
st.title("π₯ Real-Time Brand Crisis Detector")
st.markdown("Analyze web content about your brand in real-time using AI β‘")
st.caption(f"π Last refreshed: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}")

query = st.text_input("π Enter a brand or keyword", placeholder="e.g., Nvidia, Nestle, Jio", label_visibility="visible")

# Run on an explicit button press, or automatically when the query text
# changes between reruns (tracked via st.session_state.last_query).
if st.button("Start Analysis π") or (query and st.session_state.get("last_query") != query):
    st.session_state.last_query = query

    with st.spinner(f"π Searching Google for **{query}**..."):
        t1 = time.time()
        articles = google_search(query, num_results=10)
        fetch_time = round(time.time() - t1, 2)

    if not articles:
        st.warning("β No results found. Try a different query.")
        st.stop()  # abort the remainder of this script run

    st.success(f"β Fetched {len(articles)} articles in {fetch_time} seconds.")

    # Each article is expected to be a dict with 'title', 'link', 'snippet'
    # keys (shape produced by google_search — confirm if that helper changes).
    titles = [a['title'] for a in articles]
    links = [a['link'] for a in articles]
    contents = [a['snippet'] for a in articles]

    with st.spinner("π§ Summarizing, classifying, and detecting fake news..."):
        t2 = time.time()
        summaries = summarize_texts(contents)
        sentiments = analyze_sentiment(contents)
        fakeness = detect_fake_news(contents)
        process_time = round(time.time() - t2, 2)

    st.info(f"β±οΈ AI analysis completed in {process_time} seconds.")

    # One card per article; enumerate/zip instead of range(len(...)) indexing.
    for idx, (title, link, sentiment, fake, summary) in enumerate(
            zip(titles, links, sentiments, fakeness, summaries), start=1):
        with st.container():
            st.subheader(f"{idx}. {title}")
            st.markdown(f"**π Link:** [{link}]({link})")
            st.markdown(f"**π¬ Sentiment:** `{sentiment}`")
            st.markdown(f"**π΅οΈ Fake News Score:** `{fake}`")
            st.markdown(f"**π Summary:** {summary}")
            st.markdown("---")