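"""Gradio app: fetch recent AI / Machine Learning news from NewsAPI and attach a short
GPT-2 generated analysis to each article, rendered as cards in a 3-column layout."""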
import requests
import torch
import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer
from datetime import datetime
# GPT-2 setup
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
model_name = "gpt2"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name).to(device)
# GPT-2 has no pad token by default, so reuse the EOS token for padding
if tokenizer.pad_token is None:
    tokenizer.pad_token = tokenizer.eos_token
model.config.pad_token_id = tokenizer.pad_token_id
# NewsAPI Setup (Replace with your own API key)
news_api_key = "35cbd14c45184a109fc2bbb5fff7fb1b" # Replace with your NewsAPI key
def fetch_trending_topics(search_term="artificial intelligence OR machine learning", page=1, page_size=9):
    try:
        # Fetch AI and Machine Learning related news from NewsAPI for the given search term
        url = f"https://newsapi.org/v2/everything?q={search_term}&sortBy=publishedAt&pageSize={page_size + 5}&page={page}&language=en&apiKey={news_api_key}"
        response = requests.get(url)
        data = response.json()
        # Check for a valid response
        if response.status_code == 200 and "articles" in data:
            trending_topics = []
            seen_titles = set()
            for article in data["articles"]:
                title = article["title"]
                if title not in seen_titles:  # Avoid duplicate titles
                    seen_titles.add(title)
                    trending_topics.append({
                        "title": title,
                        "description": article["description"] if article["description"] else "No description available.",
                        "url": article["url"],
                        "publishedAt": article["publishedAt"],
                    })
            if not trending_topics:
                return [{"title": "No news available", "description": "", "url": "", "publishedAt": ""}]
            return trending_topics
        else:
            print(f"Error: {data.get('message', 'No articles found')}")
            return [{"title": "No news available", "description": "", "url": "", "publishedAt": ""}]
    except Exception as e:
        print(f"Error fetching news: {e}")
        return [{"title": "Error fetching news", "description": "", "url": "", "publishedAt": ""}]
# Analyze the trending topic using GPT-2
def generate_analysis(trending_topic):
    input_text = f"Provide a concise analysis about the following topic: '{trending_topic['title']}'. Please summarize its significance in the AI and Machine Learning field."
    # Tokenize and generate text with a cap on the total output length
    inputs = tokenizer(input_text, return_tensors="pt").to(device)
    outputs = model.generate(**inputs, max_length=80, num_return_sequences=1, do_sample=True, top_k=50, top_p=0.95)
    analysis = tokenizer.decode(outputs[0], skip_special_tokens=True)
    return analysis
# Combine both functions for Gradio
def analyze_trends(search_term="artificial intelligence OR machine learning", page=1, page_size=9):
    trending_topics = fetch_trending_topics(search_term=search_term, page=page, page_size=page_size)
    topic_analysis = []
    for topic in trending_topics:
        if topic["title"] not in ["Error fetching news", "No news available"]:
            analysis = generate_analysis(topic)
            topic_analysis.append({
                "title": topic["title"],
                "description": topic["description"],
                "analysis": analysis,
                "url": topic["url"],
                "publishedAt": topic["publishedAt"],
            })
        else:
            topic_analysis.append({
                "title": topic["title"],
                "description": topic["description"],
                "analysis": "Unable to retrieve or analyze data.",
                "url": topic["url"],
                "publishedAt": topic["publishedAt"],
            })
    # Limit the results to the requested page size
    return topic_analysis[:page_size]
# Gradio UI with 3 Columns Layout for Displaying News
def display_news_cards(search_term="artificial intelligence OR machine learning", page=1, page_size=9):
    analysis_results = analyze_trends(search_term=search_term, page=page, page_size=page_size)
    current_date = datetime.now().strftime("%d-%m-%Y")  # Format: DD-MM-YYYY
    # The output component is gr.HTML, so emit the heading as HTML rather than Markdown
    display = f"<h3><b>AI &amp; Machine Learning News for {current_date}</b></h3>"
    # Create a 3-column layout
    display += "<div style='display:flex; flex-wrap:wrap; justify-content:space-between;'>"
    for news_item in analysis_results:
        display += f"""
        <div style='flex: 1 1 30%; border:1px solid black; margin:10px; padding:10px; box-sizing:border-box;'>
            <b>{news_item['title']}</b><br/>
            <i>{news_item['publishedAt']}</i><br/><br/>
            {news_item['description']}<br/><br/>
            <a href='{news_item['url']}' target='_blank'>Read more</a><br/><br/>
            <b>Analysis:</b> {news_item['analysis']}<br/><br/>
        </div>
        """
    display += "</div>"
    return display
# Gradio UI with Header, Search Option, and Submit Button
def gradio_interface():
    with gr.Blocks() as demo:
        # Header with background color
        gr.Markdown(
            """<h1 style='text-align:center; color:white; background-color:#007BFF; padding:20px; border-radius:10px;'>AI & Machine Learning News Analyzer</h1>""",
            elem_id="header",
        )
        # Search bar and paging controls
        search_term = gr.Textbox(label="Search for News", placeholder="Search 'AI' or 'Machine Learning'", value="artificial intelligence OR machine learning")
        page = gr.Slider(minimum=1, maximum=5, step=1, label="Page Number", value=1)
        page_size = gr.Slider(minimum=6, maximum=15, step=3, label="News per Page", value=9)
        # Button to fetch and analyze news
        analyze_button = gr.Button("Submit")
        # Output area for displaying the news
        news_output = gr.HTML()
        # Link the button click to the display function
        analyze_button.click(display_news_cards, inputs=[search_term, page, page_size], outputs=news_output)
    return demo
# Launch the Gradio UI
if __name__ == "__main__":
    gradio_interface().launch(share=True)