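# Streamlit app: searches Google for recent content about a brand or keyword and
# runs Mistral-based summarization, sentiment analysis, and fake-news scoring on
# the result snippets (helpers imported from the sibling google_search and
# mistral_ll modules).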
import os
import time
import streamlit as st
from datetime import datetime
from dotenv import load_dotenv

from google_search import google_search
from mistral_ll import summarize_texts, detect_fake_news, analyze_sentiment
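# NOTE: the helper functions above live in sibling modules not shown in this file.
# Based on how they are used below, they are assumed to behave roughly like:
#   google_search(query, num_results=10) -> list of dicts with 'title', 'link', 'snippet' keys
#   summarize_texts(snippets), analyze_sentiment(snippets), detect_fake_news(snippets)
#       -> lists of strings, one entry per input snippet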

# Load environment variables (presumably the API keys used by the search and Mistral helpers)
load_dotenv(dotenv_path=os.path.join(os.path.dirname(__file__), '.env'))

st.set_page_config(page_title="πŸ”₯ Brand Crisis Detector", layout="wide", page_icon="πŸ”₯")
st.title("πŸ”₯ Real-Time Brand Crisis Detector")
st.markdown("Analyze web content about your brand in real-time using AI ⚑")
st.caption(f"πŸ•’ Last refreshed: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}")

query = st.text_input("πŸ” Enter a brand or keyword", placeholder="e.g., Nvidia, Nestle, Jio")

# Run the analysis when the button is clicked, or automatically when a new query is submitted
if st.button("Start Analysis πŸš€") or (query and st.session_state.get("last_query") != query):
    if not query:
        st.warning("⚠️ Please enter a brand or keyword first.")
        st.stop()
    st.session_state.last_query = query

    with st.spinner(f"πŸ”Ž Searching Google for **{query}**..."):
        start_time = time.time()
        articles = google_search(query, num_results=10)
        fetch_time = round(time.time() - start_time, 2)

    if not articles:
        st.warning("❌ No results found. Try a different query.")
        st.stop()

    st.success(f"βœ… Fetched {len(articles)} articles in {fetch_time} seconds.")

    # Pull the fields used for display and analysis out of each search result
    titles = [a['title'] for a in articles]
    links = [a['link'] for a in articles]
    snippets = [a['snippet'] for a in articles]

    with st.spinner("🧠 Summarizing, classifying, and detecting fake news..."):
        start_ai = time.time()
        summaries = summarize_texts(snippets)
        sentiments = analyze_sentiment(snippets)
        fakeness = detect_fake_news(snippets)
        ai_time = round(time.time() - start_ai, 2)

    st.info(f"⏱️ AI analysis completed in {ai_time} seconds.")

    # One card per article: title, link, sentiment, fake-news score, and summary
    for i, (title, link, sentiment, fake_score, summary) in enumerate(
        zip(titles, links, sentiments, fakeness, summaries), start=1
    ):
        with st.container():
            st.subheader(f"{i}. {title}")
            st.markdown(f"**πŸ”— Link:** [{link}]({link})")
            st.markdown(f"**πŸ’¬ Sentiment:** `{sentiment}`")
            st.markdown(f"**πŸ•΅οΈ Fake News Score:** `{fake_score}`")
            st.markdown(f"**πŸ“ Summary:** {summary}")
            st.markdown("---")
else:
    st.info("Enter a brand or keyword and click 'Start Analysis πŸš€' to begin.")