dindizz committed
Commit 82f9412 · verified · Parent: 091c644

Update app.py

Files changed (1):
  1. app.py +6 -6
app.py CHANGED
(All six changed lines are blank, so this appears to be a whitespace-only cleanup, e.g. stripping trailing spaces.)
@@ -13,12 +13,12 @@ openai_api_key = os.getenv("OPENAI_API_KEY")
 def scrape_content(url):
     response = requests.get(url)
     soup = BeautifulSoup(response.content, 'html.parser')
-
+
     # Example of extracting title and body content - modify based on actual structure of the websites
     title = soup.find('title').get_text()
     paragraphs = soup.find_all('p')
     content = '\n'.join([para.get_text() for para in paragraphs])
-
+
     return title, content

 # Function to create newsletter using OpenAI
@@ -38,22 +38,22 @@ def create_newsletter(contents):
         ],
         max_tokens=1500
     )
-
+
     newsletter = response.choices[0].message['content'].strip()
     return newsletter

 # Function to process URLs and generate the newsletter
 def process_urls(url1, url2, url3, url4, url5):
     urls = [url for url in [url1, url2, url3, url4, url5] if url]
-
+
     if not urls:
         return "No URLs provided."
-
+
     contents = []
     for url in urls:
         title, content = scrape_content(url)
         contents.append((title, content, url))
-
+
     newsletter = create_newsletter(contents)
     return newsletter
 
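For context beyond the diff: the second hunk shows only the tail of create_newsletter, and the dict-style response.choices[0].message['content'] access implies the legacy pre-1.0 openai package. A sketch of what the elided body might look like under that API follows; the model name, prompt wording, and the article-joining step are assumptions for illustration, not code from this repository.

import openai

def create_newsletter(contents):
    # Hypothetical reconstruction: only the closing "]," of the messages list,
    # the max_tokens argument, and the response handling are visible in the diff.
    articles = "\n\n".join(
        f"Title: {title}\nURL: {url}\n{content}" for title, content, url in contents
    )
    response = openai.ChatCompletion.create(  # legacy (pre-1.0) openai client
        model="gpt-3.5-turbo",  # assumption: the model is not visible in the hunk
        messages=[
            {"role": "system", "content": "You write concise email newsletters."},
            {"role": "user", "content": f"Write a newsletter covering:\n\n{articles}"},
        ],
        max_tokens=1500
    )
    newsletter = response.choices[0].message['content'].strip()
    return newsletter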
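Since app.py here looks like a Hugging Face Space entry point, and process_urls takes five optional URL strings and returns a single text blob, the front end is presumably a small Gradio interface. A minimal wiring sketch under that assumption; none of the UI code appears in this diff, so the component choices and labels are hypothetical.

import gradio as gr

# Hypothetical wiring, not part of this commit: five optional URL fields feed
# process_urls, which scrapes each page and asks OpenAI to draft a newsletter.
demo = gr.Interface(
    fn=process_urls,
    inputs=[gr.Textbox(label=f"URL {i}") for i in range(1, 6)],
    outputs=gr.Textbox(label="Newsletter"),
    title="Newsletter Generator",
)

if __name__ == "__main__":
    demo.launch()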