# app.py
import gradio as gr
import requests
from bs4 import BeautifulSoup
import google.generativeai as genai
import os

# Configure Gemini API
GEMINI_API_KEY = os.getenv("GEMINI_API_KEY")
genai.configure(api_key=GEMINI_API_KEY)
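# Assumption: GEMINI_API_KEY is supplied as an environment variable (e.g. a
# Hugging Face Space secret); genai.configure() receives None if it is unset.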

def fetch_article_content(url):
    """Fetch article content using requests and BeautifulSoup"""
    try:
        headers = {'User-Agent': 'Mozilla/5.0'}
        response = requests.get(url, headers=headers, timeout=10)
        response.raise_for_status()
        soup = BeautifulSoup(response.text, 'html.parser')
        # Extract text from <p> tags
        paragraphs = soup.find_all('p')
        content = ' '.join([p.get_text(strip=True) for p in paragraphs])
        return content
    except Exception as e:
        return f"Error fetching article: {str(e)}"

def generate_platform_post(article_text):
    """Generate an optimized post using the Gemini API"""
    try:
        model = genai.GenerativeModel('gemini-1.5-pro')
        # Truncate the article so the prompt stays within token limits
        prompt = f"""
Analyze this article content and create:
1. A compelling title (max 100 characters)
2. An optimized post in HTML format for Reddit/Quora
3. Include an image tag with descriptive alt text

Article content:
{article_text[:5000]}

Format your response as:
[TITLE]
[HTML_CONTENT]

Requirements:
- Clean HTML formatting with paragraphs
- Add a relevant image tag with descriptive alt text
- Mobile-friendly design
- Minimal CSS styling
"""
        response = model.generate_content(prompt)
        return parse_gemini_response(response.text)
    except Exception as e:
        return "Error generating post", f"<p>{str(e)}</p>"

def parse_gemini_response(response):
    """Parse Gemini's response into a (title, content) pair"""
    try:
        title = response.split("[TITLE]")[1].split("[HTML_CONTENT]")[0].strip()
        content = response.split("[HTML_CONTENT]")[1].strip()
    except IndexError:
        title = "Content Generation Error"
        content = "<p>Failed to parse response from AI</p>"
    return title, content

def process_url(url):
    """Main processing pipeline"""
    article_text = fetch_article_content(url)
    if article_text.startswith("Error"):
        return "Processing Error", f"<p>{article_text}</p>"
    return generate_platform_post(article_text)
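# process_url returns a (title, html) pair, matching the two output components
# declared in the Gradio interface below.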

# Create Gradio interface
url_input = gr.Textbox(label="Article URL", placeholder="https://example.com/article...")
title_output = gr.Textbox(label="Generated Title")
content_output = gr.HTML(label="Formatted Post")

app = gr.Interface(
    fn=process_url,
    inputs=url_input,
    outputs=[title_output, content_output],
    examples=[
        ["https://example.com/sample-article"]
    ],
    title="Article to Reddit/Quora Post Converter",
    description="Convert news articles into optimized Reddit/Quora-style posts with AI-generated formatting and image descriptions"
)

if __name__ == "__main__":
    app.launch()
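    # When running locally rather than on a hosted Space, app.launch(share=True)
    # can be used to get a temporary public link.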