"""Streamlit app: real-time brand crisis detector.

Searches Google for a brand/keyword, then runs AI summarization,
sentiment analysis, and fake-news scoring over the result snippets.
"""

import os
import time
from datetime import datetime

import streamlit as st
from dotenv import load_dotenv

from google_search import google_search
from mistral_ll import summarize_texts, detect_fake_news, analyze_sentiment

# Load environment variables from the .env file next to this script.
load_dotenv(dotenv_path=os.path.join(os.path.dirname(__file__), '.env'))

st.set_page_config(page_title="🔥 Brand Crisis Detector", layout="wide", page_icon="🔥")
st.title("🔥 Real-Time Brand Crisis Detector")
st.markdown("Analyze web content about your brand in real-time using AI ⚡")
st.caption(f"🕒 Last refreshed: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}")

query = st.text_input("🔍 Enter a brand or keyword", placeholder="e.g., Nvidia, Nestle, Jio")

# Run on an explicit button click, or automatically whenever the query
# differs from the one analyzed on the previous rerun.
run_requested = st.button("Start Analysis 🚀") or (
    query and st.session_state.get("last_query") != query
)

# Guard: never kick off a search for an empty/whitespace-only query,
# even if the button was clicked (the original code did).
if run_requested and query and query.strip():
    st.session_state.last_query = query

    with st.spinner(f"🔎 Searching Google for **{query}**..."):
        start_time = time.time()
        articles = google_search(query, num_results=10)
        fetch_time = round(time.time() - start_time, 2)

    if not articles:
        st.warning("❌ No results found. Try a different query.")
        st.stop()

    st.success(f"✅ Fetched {len(articles)} articles in {fetch_time} seconds.")

    # .get() with a default guards against malformed result dicts that
    # are missing a key (the original a['title'] would raise KeyError).
    titles = [a.get('title', '') for a in articles]
    links = [a.get('link', '') for a in articles]
    snippets = [a.get('snippet', '') for a in articles]

    with st.spinner("🧠 Summarizing, classifying, and detecting fake news..."):
        start_ai = time.time()
        summaries = summarize_texts(snippets)
        sentiments = analyze_sentiment(snippets)
        fakeness = detect_fake_news(snippets)
        ai_time = round(time.time() - start_ai, 2)

    st.info(f"⏱️ AI analysis completed in {ai_time} seconds.")

    # zip keeps the five parallel sequences aligned, so a helper that
    # returns fewer rows than articles truncates instead of raising
    # IndexError mid-render.
    for idx, (title, link, sentiment, fake_score, summary) in enumerate(
        zip(titles, links, sentiments, fakeness, summaries), start=1
    ):
        with st.container():
            st.subheader(f"{idx}. {title}")
            st.markdown(f"**🔗 Link:** [{link}]({link})")
            st.markdown(f"**💬 Sentiment:** `{sentiment}`")
            st.markdown(f"**🕵️ Fake News Score:** `{fake_score}`")
            st.markdown(f"**📝 Summary:** {summary}")
            st.markdown("---")
else:
    st.info("Enter a brand or keyword and click 'Start Analysis 🚀' to begin.")