Create app.py
app.py
ADDED
@@ -0,0 +1,75 @@
import requests
from bs4 import BeautifulSoup
import openai
import gradio as gr
import os
from dotenv import load_dotenv

# Load environment variables from .env file
load_dotenv()
openai_api_key = os.getenv("OPENAI_API_KEY")

# Function to scrape content from a URL
def scrape_content(url):
    response = requests.get(url, timeout=30)
    response.raise_for_status()
    soup = BeautifulSoup(response.content, 'html.parser')

    # Example of extracting title and body content - adjust based on the actual structure of the target websites
    title_tag = soup.find('title')
    title = title_tag.get_text() if title_tag else url  # fall back to the URL if the page has no <title>
    paragraphs = soup.find_all('p')
    content = '\n'.join(para.get_text() for para in paragraphs)

    return title, content

# Function to create the newsletter using OpenAI
def create_newsletter(contents):
    openai.api_key = openai_api_key

    prompt = "Create a newsletter with the following content:\n\n"
    for title, body, url in contents:
        prompt += f"Title: {title}\nURL: {url}\n\n{body}\n\n"

    response = openai.ChatCompletion.create(
        model="gpt-4",
        messages=[
            {"role": "system", "content": "You are a helpful assistant."},
            {"role": "user", "content": prompt}
        ],
        max_tokens=1500
    )

    newsletter = response.choices[0].message['content'].strip()
    return newsletter

# Function to process the URLs and generate the newsletter
def process_urls(url1, url2, url3, url4, url5):
    urls = [url for url in [url1, url2, url3, url4, url5] if url]

    if not urls:
        return "No URLs provided."

    contents = []
    for url in urls:
        title, content = scrape_content(url)
        contents.append((title, content, url))

    newsletter = create_newsletter(contents)
    return newsletter

# Gradio interface
iface = gr.Interface(
    fn=process_urls,
    inputs=[
        gr.Textbox(label="URL 1"),
        gr.Textbox(label="URL 2"),
        gr.Textbox(label="URL 3"),
        gr.Textbox(label="URL 4"),
        gr.Textbox(label="URL 5")
    ],
    outputs="html",
    title="Newsletter Generator",
    description="Enter up to 5 URLs to generate a newsletter."
)

iface.launch()
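Two notes on the file above, both sketches rather than part of the commit.

First, for the Space to build, a requirements.txt next to app.py would presumably need to list the imported packages, for example:

requests
beautifulsoup4
openai<1.0
gradio
python-dotenv

The openai<1.0 pin matters because the code calls openai.ChatCompletion.create, which was removed in version 1.0 of the openai SDK. If the Space were updated to a 1.x SDK instead, the call inside create_newsletter would look roughly like the following sketch (create_newsletter_v1 is a hypothetical name; the model and token settings are copied from the diff):

from openai import OpenAI

client = OpenAI()  # reads OPENAI_API_KEY from the environment

def create_newsletter_v1(prompt: str) -> str:
    # Same request as above, expressed with the 1.x client API
    response = client.chat.completions.create(
        model="gpt-4",
        messages=[
            {"role": "system", "content": "You are a helpful assistant."},
            {"role": "user", "content": prompt},
        ],
        max_tokens=1500,
    )
    # In the 1.x SDK the message content is an attribute, not a dict key
    return response.choices[0].message.content.strip()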