Update app.py
app.py CHANGED
@@ -3,7 +3,6 @@ import gradio as gr
 import requests
 from bs4 import BeautifulSoup
 import google.generativeai as genai
-from newspaper import Article
 import os

 # Configure Gemini API
@@ -11,13 +10,17 @@ GEMINI_API_KEY = os.getenv("GEMINI_API_KEY")
 genai.configure(api_key=GEMINI_API_KEY)

 def fetch_article_content(url):
-    """
+    """Fetch article content using requests and BeautifulSoup"""
     try:
-
-
-
-
-
+        headers = {'User-Agent': 'Mozilla/5.0'}
+        response = requests.get(url, headers=headers, timeout=10)
+        response.raise_for_status()
+        soup = BeautifulSoup(response.text, 'html.parser')
+
+        # Extract text from <p> tags
+        paragraphs = soup.find_all('p')
+        content = ' '.join([p.get_text(strip=True) for p in paragraphs])
+        return content
     except Exception as e:
         return f"Error fetching article: {str(e)}"

@@ -85,7 +88,7 @@ app = gr.Interface(
     examples=[
         ["https://example.com/sample-article"]
     ],
-    title="Article to
+    title="Article to Reddit/Quora Post Converter",
     description="Convert news articles into optimized Reddit/Quora-style posts with AI-generated formatting and image descriptions"
 )
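For reference, a minimal sketch of how the updated fetch_article_content slots into the app. Only the lines inside the hunks above come from this commit; the convert_to_post wrapper, the Gemini model name, the Interface fn/inputs/outputs, and the launch call are assumptions added here for illustration.

import os
import requests
from bs4 import BeautifulSoup
import google.generativeai as genai
import gradio as gr

genai.configure(api_key=os.getenv("GEMINI_API_KEY"))

def fetch_article_content(url):
    """Fetch article content using requests and BeautifulSoup (mirrors the diff above)."""
    try:
        headers = {'User-Agent': 'Mozilla/5.0'}
        response = requests.get(url, headers=headers, timeout=10)
        response.raise_for_status()
        soup = BeautifulSoup(response.text, 'html.parser')
        # Extract text from <p> tags
        paragraphs = soup.find_all('p')
        return ' '.join(p.get_text(strip=True) for p in paragraphs)
    except Exception as e:
        return f"Error fetching article: {str(e)}"

def convert_to_post(url):
    # Hypothetical wrapper (not shown in the diff): fetch the article, then ask Gemini to reformat it.
    content = fetch_article_content(url)
    if content.startswith("Error fetching article:"):
        return content  # the fetcher reports failures as strings rather than raising
    model = genai.GenerativeModel("gemini-1.5-flash")  # model name is an assumption
    prompt = f"Rewrite this article as an engaging Reddit/Quora-style post:\n\n{content}"
    return model.generate_content(prompt).text

app = gr.Interface(
    fn=convert_to_post,                           # assumption: the real fn lies outside the shown hunks
    inputs=gr.Textbox(label="Article URL"),       # assumption
    outputs=gr.Markdown(label="Generated post"),  # assumption
    examples=[["https://example.com/sample-article"]],
    title="Article to Reddit/Quora Post Converter",
    description="Convert news articles into optimized Reddit/Quora-style posts with AI-generated formatting and image descriptions",
)

if __name__ == "__main__":
    app.launch()

Note that because fetch_article_content returns an "Error fetching article: ..." string instead of raising, any caller has to check for that prefix before passing the text on to the model.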