Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
|
@@ -3,13 +3,9 @@ import requests
|
|
| 3 |
import json
|
| 4 |
import os
|
| 5 |
from datetime import datetime, timedelta
|
| 6 |
-
from bs4 import BeautifulSoup  # 웹 페이지에서 텍스트를 추출하기 위해 사용
|
| 7 |
from huggingface_hub import InferenceClient  # LLM 사용을 위해 필요
|
| 8 |
|
| 9 |
-
#
|
| 10 |
-
# !pip install bs4 huggingface_hub
|
| 11 |
-
|
| 12 |
-
# 환경 변수에서 API 키 가져오기 (API 키는 안전하게 관리되어야 합니다)
|
| 13 |
API_KEY = os.getenv("SERPHOUSE_API_KEY")  # 본인의 SerpHouse API 키를 환경 변수로 설정하세요.
|
| 14 |
HF_TOKEN = os.getenv("HF_TOKEN")  # Hugging Face API 토큰을 환경 변수로 설정하세요.
|
| 15 |
|
|
@@ -37,7 +33,7 @@ def search_serphouse(query, country, page=1, num_result=10):
|
|
| 37 |
"data": {
|
| 38 |
"q": query,
|
| 39 |
"domain": "google.com",
|
| 40 |
-
"loc": country,
|
| 41 |
"lang": "en",
|
| 42 |
"device": "desktop",
|
| 43 |
"serp_type": "news",
|
|
@@ -60,7 +56,7 @@ def search_serphouse(query, country, page=1, num_result=10):
|
|
| 60 |
return response.json()
|
| 61 |
except requests.RequestException as e:
|
| 62 |
error_msg = f"Error: {str(e)}"
|
| 63 |
-
if response
|
| 64 |
error_msg += f"\nResponse content: {response.text}"
|
| 65 |
return {"error": error_msg}
|
| 66 |
|
|
@@ -72,12 +68,11 @@ def format_results_from_raw(results):
|
|
| 72 |
if not isinstance(results, dict):
|
| 73 |
raise ValueError("๊ฒฐ๊ณผ๊ฐ ์ฌ์ ํ์์ด ์๋๋๋ค.")
|
| 74 |
|
| 75 |
-
# 'results' ํค ๋ด๋ถ์ ๊ตฌ์กฐ ํ์ธ
|
| 76 |
if 'results' in results:
|
| 77 |
results_content = results['results']
|
| 78 |
if 'results' in results_content:
|
| 79 |
results_content = results_content['results']
|
| 80 |
-
# 'news' ํค ํ์ธ
|
| 81 |
if 'news' in results_content:
|
| 82 |
news_results = results_content['news']
|
| 83 |
else:
|
|
@@ -101,6 +96,7 @@ def format_results_from_raw(results):
|
|
| 101 |
image_url = result.get("img", result.get("thumbnail", ""))
|
| 102 |
|
| 103 |
articles.append({
|
|
|
|
| 104 |
"title": title,
|
| 105 |
"link": link,
|
| 106 |
"snippet": snippet,
|
|
@@ -116,7 +112,6 @@ def format_results_from_raw(results):
|
|
| 116 |
return "Error: " + error_message, []
|
| 117 |
|
| 118 |
def serphouse_search(query, country):
|
| 119 |
-
# ํ์ด์ง์ ๊ฒฐ๊ณผ ์์ ๊ธฐ๋ณธ๊ฐ์ ์ค์ ํฉ๋๋ค.
|
| 120 |
page = 1
|
| 121 |
num_result = 10
|
| 122 |
results = search_serphouse(query, country, page, num_result)
|
|
@@ -126,19 +121,10 @@ def serphouse_search(query, country):
|
|
| 126 |
# LLM ์ค์
|
| 127 |
hf_client = InferenceClient("CohereForAI/c4ai-command-r-plus-08-2024", token=HF_TOKEN)
|
| 128 |
|
| 129 |
-
def summarize_article(
|
| 130 |
try:
|
| 131 |
-
#
|
| 132 |
-
|
| 133 |
-
response.raise_for_status()
|
| 134 |
-
soup = BeautifulSoup(response.text, 'html.parser')
|
| 135 |
-
# 모든 텍스트를 추출 (간단한 예시)
|
| 136 |
-
text = ' '.join([p.get_text() for p in soup.find_all('p')])
|
| 137 |
-
if not text.strip():
|
| 138 |
-
return "๊ธฐ์ฌ ๋ด์ฉ์ ๊ฐ์ ธ์ฌ ์ ์์ต๋๋ค."
|
| 139 |
-
|
| 140 |
-
# ์์ฝ ์์ฑ
|
| 141 |
-
prompt = f"๋ค์ ์์ด ๊ธฐ์ฌ๋ฅผ ํ๊ตญ์ด๋ก 3๋ฌธ์ฅ์ผ๋ก ์์ฝํ์ธ์:\n{text}"
|
| 142 |
summary = hf_client.text_generation(prompt, max_new_tokens=500)
|
| 143 |
return summary
|
| 144 |
except Exception as e:
|
|
@@ -160,43 +146,112 @@ with gr.Blocks(theme="Nymbo/Nymbo_Theme", css=css, title="NewsAI ์๋น์ค") as
|
|
| 160 |
country = gr.Dropdown(MAJOR_COUNTRIES, label="๊ตญ๊ฐ", value="South Korea")
|
| 161 |
search_button = gr.Button("๊ฒ์")
|
| 162 |
|
| 163 |
-
|
| 164 |
-
|
| 165 |
-
|
| 166 |
-
|
| 167 |
-
|
| 168 |
-
|
| 169 |
-
|
| 170 |
-
|
| 171 |
-
|
| 172 |
-
|
| 173 |
-
|
| 174 |
-
|
| 175 |
-
|
| 176 |
-
|
| 177 |
-
|
| 178 |
-
|
| 179 |
-
|
| 180 |
-
|
| 181 |
-
|
| 182 |
-
|
| 183 |
-
|
| 184 |
-
|
| 185 |
-
|
| 186 |
-
|
| 187 |
-
|
| 188 |
-
|
| 189 |
-
|
| 190 |
-
|
| 191 |
-
|
| 192 |
-
|
| 193 |
-
|
| 194 |
-
|
| 195 |
-
|
| 196 |
-
|
| 197 |
-
|
| 198 |
-
|
| 199 |
-
|
| 200 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 201 |
|
| 202 |
iface.launch(auth=("gini", "pick"))
|
|
|
|
| 3 |
import json
|
| 4 |
import os
|
| 5 |
from datetime import datetime, timedelta
|
|
|
|
| 6 |
from huggingface_hub import InferenceClient # LLM ์ฌ์ฉ์ ์ํด ํ์
|
| 7 |
|
| 8 |
+
# 환경 변수에서 API 키 가져오기
|
|
|
|
|
|
|
|
|
|
| 9 |
API_KEY = os.getenv("SERPHOUSE_API_KEY") # ๋ณธ์ธ์ SerpHouse API ํค๋ฅผ ํ๊ฒฝ ๋ณ์๋ก ์ค์ ํ์ธ์.
|
| 10 |
HF_TOKEN = os.getenv("HF_TOKEN") # Hugging Face API ํ ํฐ์ ํ๊ฒฝ ๋ณ์๋ก ์ค์ ํ์ธ์.
|
| 11 |
|
|
|
|
| 33 |
"data": {
|
| 34 |
"q": query,
|
| 35 |
"domain": "google.com",
|
| 36 |
+
"loc": country, # ๊ตญ๊ฐ ์ด๋ฆ์ ์ง์ ์ฌ์ฉํฉ๋๋ค.
|
| 37 |
"lang": "en",
|
| 38 |
"device": "desktop",
|
| 39 |
"serp_type": "news",
|
|
|
|
| 56 |
return response.json()
|
| 57 |
except requests.RequestException as e:
|
| 58 |
error_msg = f"Error: {str(e)}"
|
| 59 |
+
if hasattr(response, 'text'):
|
| 60 |
error_msg += f"\nResponse content: {response.text}"
|
| 61 |
return {"error": error_msg}
|
| 62 |
|
|
|
|
| 68 |
if not isinstance(results, dict):
|
| 69 |
raise ValueError("๊ฒฐ๊ณผ๊ฐ ์ฌ์ ํ์์ด ์๋๋๋ค.")
|
| 70 |
|
| 71 |
+
# 'results' ํค ๋ด๋ถ์ ๊ตฌ์กฐ ํ์ธ
|
| 72 |
if 'results' in results:
|
| 73 |
results_content = results['results']
|
| 74 |
if 'results' in results_content:
|
| 75 |
results_content = results_content['results']
|
|
|
|
| 76 |
if 'news' in results_content:
|
| 77 |
news_results = results_content['news']
|
| 78 |
else:
|
|
|
|
| 96 |
image_url = result.get("img", result.get("thumbnail", ""))
|
| 97 |
|
| 98 |
articles.append({
|
| 99 |
+
"index": idx,
|
| 100 |
"title": title,
|
| 101 |
"link": link,
|
| 102 |
"snippet": snippet,
|
|
|
|
| 112 |
return "Error: " + error_message, []
|
| 113 |
|
| 114 |
def serphouse_search(query, country):
|
|
|
|
| 115 |
page = 1
|
| 116 |
num_result = 10
|
| 117 |
results = search_serphouse(query, country, page, num_result)
|
|
|
|
| 121 |
# LLM ์ค์
|
| 122 |
hf_client = InferenceClient("CohereForAI/c4ai-command-r-plus-08-2024", token=HF_TOKEN)
|
| 123 |
|
| 124 |
+
def summarize_article(title, snippet):
|
| 125 |
try:
|
| 126 |
+
# ๊ธฐ์ฌ ์ ๋ชฉ๊ณผ ์ค๋ํซ์ ๊ธฐ๋ฐ์ผ๋ก ์์ฝ ์์ฑ
|
| 127 |
+
prompt = f"๋ค์ ๋ด์ค ์ ๋ชฉ๊ณผ ์์ฝ์ ๋ฐํ์ผ๋ก ํ๊ตญ์ด๋ก 3๋ฌธ์ฅ์ผ๋ก ์์ฝํ์ธ์:\n์ ๋ชฉ: {title}\n์์ฝ: {snippet}"
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 128 |
summary = hf_client.text_generation(prompt, max_new_tokens=500)
|
| 129 |
return summary
|
| 130 |
except Exception as e:
|
|
|
|
| 146 |
country = gr.Dropdown(MAJOR_COUNTRIES, label="๊ตญ๊ฐ", value="South Korea")
|
| 147 |
search_button = gr.Button("๊ฒ์")
|
| 148 |
|
| 149 |
+
# ์ต๋ 10๊ฐ์ ๊ธฐ์ฌ์ ๋ํ ์ปดํฌ๋ํธ๋ฅผ ๋ฏธ๋ฆฌ ์์ฑํฉ๋๋ค.
|
| 150 |
+
article_components = []
|
| 151 |
+
for i in range(10):
|
| 152 |
+
with gr.Group(visible=False) as article_group:
|
| 153 |
+
title = gr.Markdown()
|
| 154 |
+
image = gr.Image(width=200, height=150)
|
| 155 |
+
snippet = gr.Markdown()
|
| 156 |
+
info = gr.Markdown()
|
| 157 |
+
analyze_button = gr.Button("๋ถ์")
|
| 158 |
+
summary_output = gr.Markdown(visible=False)
|
| 159 |
+
|
| 160 |
+
article_components.append({
|
| 161 |
+
'group': article_group,
|
| 162 |
+
'title': title,
|
| 163 |
+
'image': image,
|
| 164 |
+
'snippet': snippet,
|
| 165 |
+
'info': info,
|
| 166 |
+
'analyze_button': analyze_button,
|
| 167 |
+
'summary_output': summary_output,
|
| 168 |
+
})
|
| 169 |
+
|
| 170 |
+
def search_and_display(query, country):
|
| 171 |
+
error_message, articles = serphouse_search(query, country)
|
| 172 |
+
outputs = []
|
| 173 |
+
if error_message:
|
| 174 |
+
outputs.append(gr.update(value=error_message, visible=True))
|
| 175 |
+
# ๋๋จธ์ง ์ปดํฌ๋ํธ ์จ๊ธฐ๊ธฐ
|
| 176 |
+
for comp in article_components:
|
| 177 |
+
outputs.extend([
|
| 178 |
+
gr.update(visible=False), # group
|
| 179 |
+
gr.update(), # title
|
| 180 |
+
gr.update(), # image
|
| 181 |
+
gr.update(), # snippet
|
| 182 |
+
gr.update(), # info
|
| 183 |
+
gr.update(), # analyze_button
|
| 184 |
+
gr.update(visible=False), # summary_output
|
| 185 |
+
])
|
| 186 |
+
return outputs
|
| 187 |
+
else:
|
| 188 |
+
# ๊ธฐ์ฌ ์ปดํฌ๋ํธ ์
๋ฐ์ดํธ
|
| 189 |
+
for idx, comp in enumerate(article_components):
|
| 190 |
+
if idx < len(articles):
|
| 191 |
+
article = articles[idx]
|
| 192 |
+
comp['group'].visible = True
|
| 193 |
+
comp['title'].value = f"### [{article['title']}]({article['link']})"
|
| 194 |
+
if article['image_url'] and not article['image_url'].startswith("data:image"):
|
| 195 |
+
comp['image'].value = article['image_url']
|
| 196 |
+
comp['image'].visible = True
|
| 197 |
+
else:
|
| 198 |
+
comp['image'].visible = False
|
| 199 |
+
comp['snippet'].value = f"**์์ฝ:** {article['snippet']}"
|
| 200 |
+
comp['info'].value = f"**์ถ์ฒ:** {article['channel']} | **์๊ฐ:** {article['time']}"
|
| 201 |
+
comp['summary_output'].visible = False # ์ด๊ธฐ์๋ ์์ฝ ์จ๊น
|
| 202 |
+
|
| 203 |
+
# ๋ถ์ ๋ฒํผ ํด๋ฆญ ์ด๋ฒคํธ ์ ์
|
| 204 |
+
def create_analyze_function(article_title, article_snippet):
|
| 205 |
+
def analyze_article():
|
| 206 |
+
summary = summarize_article(article_title, article_snippet)
|
| 207 |
+
return gr.update(value=summary, visible=True)
|
| 208 |
+
return analyze_article
|
| 209 |
+
|
| 210 |
+
comp['analyze_button'].click(
|
| 211 |
+
create_analyze_function(article['title'], article['snippet']),
|
| 212 |
+
inputs=[],
|
| 213 |
+
outputs=comp['summary_output']
|
| 214 |
+
)
|
| 215 |
+
|
| 216 |
+
outputs.extend([
|
| 217 |
+
gr.update(visible=True), # group
|
| 218 |
+
gr.update(), # title
|
| 219 |
+
gr.update(), # image
|
| 220 |
+
gr.update(), # snippet
|
| 221 |
+
gr.update(), # info
|
| 222 |
+
gr.update(), # analyze_button
|
| 223 |
+
gr.update(visible=False), # summary_output
|
| 224 |
+
])
|
| 225 |
+
else:
|
| 226 |
+
# ๋จ์ ์ปดํฌ๋ํธ ์จ๊ธฐ๊ธฐ
|
| 227 |
+
comp['group'].visible = False
|
| 228 |
+
outputs.extend([
|
| 229 |
+
gr.update(visible=False), # group
|
| 230 |
+
gr.update(), # title
|
| 231 |
+
gr.update(), # image
|
| 232 |
+
gr.update(), # snippet
|
| 233 |
+
gr.update(), # info
|
| 234 |
+
gr.update(), # analyze_button
|
| 235 |
+
gr.update(visible=False), # summary_output
|
| 236 |
+
])
|
| 237 |
+
return outputs
|
| 238 |
+
|
| 239 |
+
# search_button ํด๋ฆญ ์ ์
๋ฐ์ดํธ๋ ์ถ๋ ฅ ์ปดํฌ๋ํธ ๋ชฉ๋ก ์์ฑ
|
| 240 |
+
search_outputs = []
|
| 241 |
+
search_outputs.append(gr.Markdown(visible=False)) # ์ค๋ฅ ๋ฉ์์ง ์ถ๋ ฅ์ฉ
|
| 242 |
+
for comp in article_components:
|
| 243 |
+
search_outputs.append(comp['group'])
|
| 244 |
+
search_outputs.append(comp['title'])
|
| 245 |
+
search_outputs.append(comp['image'])
|
| 246 |
+
search_outputs.append(comp['snippet'])
|
| 247 |
+
search_outputs.append(comp['info'])
|
| 248 |
+
search_outputs.append(comp['analyze_button'])
|
| 249 |
+
search_outputs.append(comp['summary_output'])
|
| 250 |
+
|
| 251 |
+
search_button.click(
|
| 252 |
+
search_and_display,
|
| 253 |
+
inputs=[query, country],
|
| 254 |
+
outputs=search_outputs
|
| 255 |
+
)
|
| 256 |
|
| 257 |
iface.launch(auth=("gini", "pick"))
|