Update app.py
Browse files
app.py
CHANGED
@@ -1,265 +1,155 @@
|
|
|
|
1 |
import time
|
2 |
-
import hashlib
|
3 |
import hmac
|
|
|
4 |
import base64
|
5 |
import requests
|
6 |
-
import gradio as gr
|
7 |
-
import urllib.request
|
8 |
-
import urllib.parse
|
9 |
-
import json
|
10 |
import pandas as pd
|
11 |
-
from concurrent.futures import ThreadPoolExecutor
|
12 |
-
import os
|
13 |
import tempfile
|
14 |
-
|
15 |
-
from dotenv import load_dotenv # dotenv μΆκ°
|
16 |
-
|
17 |
-
# .env νμΌμ νκ²½ λ³μλ₯Ό λ‘λν©λλ€.
|
18 |
-
load_dotenv()
|
19 |
-
|
20 |
-
# νκ²½ λ³μμμ API ν€μ μν¬λ¦Ώ ν€λ₯Ό λΆλ¬μ΅λλ€.
|
21 |
-
BASE_URL = "https://api.searchad.naver.com"
|
22 |
-
API_KEY = os.environ.get("NAVER_API_KEY")
|
23 |
-
SECRET_KEY = os.environ.get("NAVER_SECRET_KEY")
|
24 |
-
CUSTOMER_ID = 2666992
|
25 |
-
|
26 |
-
# νκ²½ λ³μμμ ν΄λΌμ΄μΈνΈ IDμ μν¬λ¦Ώμ λΆλ¬μ΅λλ€.
|
27 |
-
CLIENT_ID = os.environ.get("NAVER_CLIENT_ID")
|
28 |
-
CLIENT_SECRET = os.environ.get("NAVER_CLIENT_SECRET")
|
29 |
-
|
30 |
-
# νκ²½ λ³μ λ‘λ νμΈ
|
31 |
-
if not API_KEY or not SECRET_KEY or not CLIENT_ID or not CLIENT_SECRET:
|
32 |
-
raise ValueError("νμ νκ²½ λ³μκ° μ€μ λμ§ μμμ΅λλ€. .env νμΌμ νμΈνμΈμ.")
|
33 |
-
else:
|
34 |
-
print("νκ²½ λ³μκ° μ μμ μΌλ‘ λ‘λλμμ΅λλ€.")
|
35 |
-
|
36 |
-
class NaverAPI:
|
37 |
-
def __init__(self, base_url, api_key, secret_key, customer_id):
|
38 |
-
self.base_url = base_url
|
39 |
-
self.api_key = api_key
|
40 |
-
self.secret_key = secret_key
|
41 |
-
self.customer_id = customer_id
|
42 |
-
|
43 |
-
def generate_signature(self, timestamp, method, path):
|
44 |
-
sign = f"{timestamp}.{method}.{path}"
|
45 |
-
signature = hmac.new(self.secret_key.encode('utf-8'), sign.encode('utf-8'), hashlib.sha256).digest()
|
46 |
-
return base64.b64encode(signature).decode('utf-8')
|
47 |
-
|
48 |
-
def get_timestamp(self):
|
49 |
-
return str(int(time.time() * 1000))
|
50 |
-
|
51 |
-
def get_headers(self, method, uri):
|
52 |
-
timestamp = self.get_timestamp()
|
53 |
-
headers = {
|
54 |
-
'Content-Type': 'application/json; charset=UTF-8',
|
55 |
-
'X-Timestamp': timestamp,
|
56 |
-
'X-API-KEY': self.api_key,
|
57 |
-
'X-Customer': str(self.customer_id),
|
58 |
-
'X-Signature': self.generate_signature(timestamp, method, uri),
|
59 |
-
}
|
60 |
-
return headers
|
61 |
-
|
62 |
-
def get_keywords_data(self, keywords):
|
63 |
-
uri = "/keywordstool"
|
64 |
-
method = "GET"
|
65 |
-
query = {
|
66 |
-
'hintKeywords': ','.join(keywords),
|
67 |
-
'showDetail': 1
|
68 |
-
}
|
69 |
-
headers = self.get_headers(method, uri)
|
70 |
-
response = requests.get(self.base_url + uri, headers=headers, params=query)
|
71 |
-
response.raise_for_status() # HTTP μ€λ₯ λ°μ μ μμΈ λ°μ
|
72 |
-
return response.json()
|
73 |
-
|
74 |
-
def get_blog_count(keyword):
|
75 |
-
# ν΄λΌμ΄μΈνΈ IDμ μν¬λ¦Ώμ νκ²½ λ³μμμ λΆλ¬μ΅λλ€.
|
76 |
-
client_id = CLIENT_ID
|
77 |
-
client_secret = CLIENT_SECRET
|
78 |
-
|
79 |
-
# keywordκ° λ°μ΄νΈ νμ
μΌ κ²½μ° λμ½λ©
|
80 |
-
if isinstance(keyword, bytes):
|
81 |
-
keyword = keyword.decode('utf-8')
|
82 |
-
elif not isinstance(keyword, str):
|
83 |
-
keyword = str(keyword)
|
84 |
|
85 |
-
|
86 |
-
|
87 |
-
|
88 |
-
|
89 |
-
|
90 |
-
|
91 |
-
|
92 |
-
|
93 |
-
|
94 |
-
|
95 |
-
|
96 |
-
|
97 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
98 |
return 0
|
99 |
-
except Exception as e:
|
100 |
-
print(f"Error fetching blog count for keyword '{keyword}': {e}")
|
101 |
-
return 0
|
102 |
|
103 |
-
|
104 |
-
|
105 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
106 |
|
107 |
-
|
108 |
-
|
|
|
|
|
|
|
|
|
109 |
|
110 |
-
|
|
|
111 |
"""
|
112 |
-
|
|
|
|
|
|
|
113 |
"""
|
114 |
-
|
115 |
-
|
116 |
-
|
117 |
-
|
118 |
-
|
119 |
-
|
120 |
-
|
121 |
-
|
122 |
-
|
123 |
-
|
124 |
-
|
125 |
-
|
126 |
-
try:
|
127 |
-
monthly_pc = int(monthly_pc)
|
128 |
-
except ValueError:
|
129 |
-
monthly_pc = 0
|
130 |
-
if isinstance(monthly_mobile, str):
|
131 |
-
monthly_mobile = monthly_mobile.replace(',', '').replace('< 10', '0')
|
132 |
-
try:
|
133 |
-
monthly_mobile = int(monthly_mobile)
|
134 |
-
except ValueError:
|
135 |
-
monthly_mobile = 0
|
136 |
-
|
137 |
-
total_searches = monthly_pc + monthly_mobile
|
138 |
-
return (keyword, monthly_pc, monthly_mobile, total_searches)
|
139 |
-
# μ
λ ₯ν ν€μλμ μΌμΉνλ νλͺ©μ΄ μμ κ²½μ°
|
140 |
-
return (keyword, 0, 0, 0)
|
141 |
else:
|
142 |
-
|
143 |
-
|
144 |
-
print(f"Error fetching search volumes for keyword '{keyword}': {e}")
|
145 |
-
return (keyword, 0, 0, 0)
|
146 |
-
|
147 |
-
def get_monthly_search_volumes(keywords, include_related_keywords=True):
|
148 |
-
all_data = []
|
149 |
-
results = []
|
150 |
-
|
151 |
-
if include_related_keywords:
|
152 |
-
chunk_size = 10 # ν€μλλ₯Ό 10κ°μ© λλμ΄ μμ²
|
153 |
-
# API λ³λ ¬ μμ²
|
154 |
-
with ThreadPoolExecutor(max_workers=5) as executor:
|
155 |
-
futures = [executor.submit(get_keywords_data_chunk, keywords[i:i+chunk_size]) for i in range(0, len(keywords), chunk_size)]
|
156 |
-
for future in futures:
|
157 |
-
try:
|
158 |
-
data = future.result()
|
159 |
-
if 'keywordList' in data:
|
160 |
-
all_data.extend(data['keywordList'])
|
161 |
-
except Exception as e:
|
162 |
-
print(f"Error fetching keywords data chunk: {e}")
|
163 |
-
|
164 |
-
if not all_data:
|
165 |
-
return [("Error", "λ°μ΄ν°κ° λ°νλμ§ μμκ±°λ API μλ΅μ΄ μ ν¨νμ§ μμ΅λλ€.", "", "", "")]
|
166 |
|
167 |
-
|
168 |
-
|
169 |
-
|
170 |
-
if
|
171 |
-
|
172 |
-
|
173 |
-
|
174 |
-
|
175 |
-
|
176 |
-
monthly_pc = monthly_pc.replace(',', '').replace('< 10', '0')
|
177 |
-
try:
|
178 |
-
monthly_pc = int(monthly_pc)
|
179 |
-
except ValueError:
|
180 |
-
monthly_pc = 0
|
181 |
-
if isinstance(monthly_mobile, str):
|
182 |
-
monthly_mobile = monthly_mobile.replace(',', '').replace('< 10', '0')
|
183 |
-
try:
|
184 |
-
monthly_mobile = int(monthly_mobile)
|
185 |
-
except ValueError:
|
186 |
-
monthly_mobile = 0
|
187 |
-
|
188 |
-
total_searches = monthly_pc + monthly_mobile
|
189 |
-
results.append((keyword, monthly_pc, monthly_mobile, total_searches))
|
190 |
-
|
191 |
-
if len(results) >= 100:
|
192 |
-
break
|
193 |
-
|
194 |
else:
|
195 |
-
|
196 |
-
with ThreadPoolExecutor(max_workers=5) as executor:
|
197 |
-
futures = [executor.submit(get_search_volumes, keyword) for keyword in keywords]
|
198 |
-
for future in futures:
|
199 |
-
try:
|
200 |
-
result = future.result()
|
201 |
-
results.append(result)
|
202 |
-
except Exception as e:
|
203 |
-
print(f"Error fetching search volumes for keyword '{keyword}': {e}")
|
204 |
-
results.append((keyword, 0, 0, 0))
|
205 |
|
206 |
-
|
207 |
-
|
208 |
|
209 |
-
|
210 |
-
|
211 |
-
|
212 |
-
|
213 |
-
|
214 |
-
|
215 |
-
|
216 |
-
|
217 |
-
|
218 |
-
|
219 |
-
|
220 |
-
|
221 |
-
|
222 |
-
|
223 |
-
|
224 |
-
|
225 |
-
|
226 |
-
|
227 |
-
|
228 |
-
|
229 |
-
|
230 |
-
|
231 |
-
|
232 |
-
return results
|
233 |
-
|
234 |
-
def save_to_excel(results, keyword):
|
235 |
-
df = pd.DataFrame(results, columns=["ν€μλ", "PCμκ²μλ", "λͺ¨λ°μΌμκ²μλ", "ν νμκ²μλ", "λΈλ‘κ·Έλ¬Έμμ"])
|
236 |
-
now = datetime.now().strftime('%Y-%m-%d')
|
237 |
-
sanitized_keyword = keyword.replace(' ', '_')
|
238 |
-
filename = f"{now}_{sanitized_keyword}_μ°κ΄κ²μμ΄.xlsx"
|
239 |
-
file_path = os.path.join(tempfile.gettempdir(), filename)
|
240 |
-
df.to_excel(file_path, index=False)
|
241 |
-
return file_path
|
242 |
-
|
243 |
-
def display_search_volumes(keywords, include_related):
|
244 |
-
keyword_list = [keyword.strip() for keyword in keywords.split(',') if keyword.strip()]
|
245 |
-
if not keyword_list:
|
246 |
-
return [("Error", "μ
λ ₯λ ν€μλκ° μμ΅λλ€.", "", "", "")], None
|
247 |
-
results = get_monthly_search_volumes(keyword_list, include_related_keywords=include_related)
|
248 |
-
file_path = save_to_excel(results, keywords)
|
249 |
-
return results, file_path
|
250 |
-
|
251 |
-
iface = gr.Interface(
|
252 |
-
fn=display_search_volumes,
|
253 |
-
inputs=[
|
254 |
-
gr.Textbox(placeholder="ν€μλλ₯Ό μ
λ ₯νμΈμ (μΌνλ‘ κ΅¬λΆ)", lines=2),
|
255 |
-
gr.Checkbox(label="μ°κ΄κ²μμ΄ ν¬ν¨", value=True) # μ°κ΄κ²μμ΄ ν κΈ μΆκ°
|
256 |
-
],
|
257 |
-
outputs=[
|
258 |
-
gr.Dataframe(headers=["ν€μλ", "PCμκ²μλ", "λͺ¨λ°μΌμκ²μλ", "ν νμκ²μλ", "λΈλ‘κ·Έλ¬Έμμ"]),
|
259 |
-
gr.File(label="λ€μ΄λ‘λ μμ
νμΌ")
|
260 |
-
],
|
261 |
-
title="λ€μ΄λ² μκ²μλ κ²μκΈ°",
|
262 |
-
description="ν€μλμ μ κ²μλκ³Ό λΈλ‘κ·Έ λ¬Έμ μλ₯Ό νμΈν μ μμ΅λλ€. μ°κ΄κ²μμ΄λ₯Ό ν¬ν¨ν μ§ μ ννμΈμ.",
|
263 |
-
)
|
264 |
|
265 |
-
|
|
|
|
1 |
+
import os
|
2 |
import time
|
|
|
3 |
import hmac
|
4 |
+
import hashlib
|
5 |
import base64
|
6 |
import requests
|
|
|
|
|
|
|
|
|
7 |
import pandas as pd
|
|
|
|
|
8 |
import tempfile
|
9 |
+
import gradio as gr
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
10 |
|
11 |
+
# λ€μ΄λ² κ΄κ³ API νΈμΆ μ μ¬μ©ν μλͺ
μμ± ν¨μ
|
12 |
+
def generate_signature(timestamp, method, uri, secret_key):
    """Build the HMAC-SHA256 request signature for the Naver Search Ad API.

    The signed message is "{timestamp}.{method}.{uri}"; the digest is
    returned base64-encoded as a str.
    """
    payload = ".".join([timestamp, method, uri])
    mac = hmac.new(secret_key.encode("utf-8"), payload.encode("utf-8"), hashlib.sha256)
    return base64.b64encode(mac.digest()).decode()
|
16 |
+
|
17 |
+
# λ€μ΄λ² κ΄κ³ API νΈμΆ ν€λ μμ± ν¨μ
|
18 |
+
def get_header(method, uri, api_key, secret_key, customer_id):
    """Assemble the authentication headers for one Naver Search Ad API call.

    A fresh millisecond timestamp is generated per call and signed together
    with the HTTP method and request path.
    """
    ts = str(round(time.time() * 1000))
    return {
        "Content-Type": "application/json; charset=UTF-8",
        "X-Timestamp": ts,
        "X-API-KEY": api_key,
        "X-Customer": str(customer_id),
        "X-Signature": generate_signature(ts, method, uri, secret_key),
    }
|
28 |
+
|
29 |
+
# λ€μ΄λ² κ΄κ³ APIλ₯Ό ν΅ν΄ λ¨μΌ ν€μλμ μ°κ΄κ²μμ΄ λ° κ²μλ μ 보λ₯Ό κ°μ Έμ€λ ν¨μ
|
30 |
+
def fetch_related_keywords(keyword):
    """Query the Naver Search Ad keyword tool for *keyword*.

    Returns a DataFrame with the keyword-stat columns used by the UI,
    capped at 100 rows. Returns an empty DataFrame when the response
    carries no 'keywordList' (auth failure or an API error payload).

    Raises KeyError when a required environment variable is missing —
    intentional, since the app cannot work without credentials.
    """
    API_KEY = os.environ["NAVER_API_KEY"]
    SECRET_KEY = os.environ["NAVER_SECRET_KEY"]
    CUSTOMER_ID = os.environ["NAVER_CUSTOMER_ID"]

    # BUG FIX: the Search Ad API is served from api.searchad.naver.com;
    # "https://api.naver.com" is not a valid host for /keywordstool
    # (the previous revision of this file used the correct host).
    BASE_URL = "https://api.searchad.naver.com"
    uri = "/keywordstool"
    method = "GET"
    headers = get_header(method, uri, API_KEY, SECRET_KEY, CUSTOMER_ID)
    params = {
        "hintKeywords": [keyword],
        "showDetail": "1"
    }
    response = requests.get(BASE_URL + uri, params=params, headers=headers)
    data = response.json()
    if "keywordList" not in data:
        # Error payloads have no keyword list; soft-fail with an empty frame
        # so the caller can skip this keyword instead of crashing the app.
        return pd.DataFrame()
    df = pd.DataFrame(data["keywordList"])
    if len(df) > 100:
        # Cap the result set to keep the UI and per-row blog lookups fast.
        df = df.head(100)

    def parse_count(x):
        """Convert counts like '1,234' to int; non-numeric markers → 0."""
        try:
            return int(str(x).replace(",", ""))
        except (ValueError, TypeError):
            # e.g. the API's "< 10" placeholder lands here.
            return 0

    df["PCμκ²μλ"] = df["monthlyPcQcCnt"].apply(parse_count)
    df["λͺ¨λ°μΌμκ²μλ"] = df["monthlyMobileQcCnt"].apply(parse_count)
    df["ν νμκ²μλ"] = df["PCμκ²μλ"] + df["λͺ¨λ°μΌμκ²μλ"]
    df.rename(columns={"relKeyword": "μ 보ν€μλ"}, inplace=True)
    result_df = df[["μ 보ν€μλ", "PCμκ²μλ", "λͺ¨λ°μΌμκ²μλ", "ν νμκ²μλ"]]
    return result_df
|
65 |
+
|
66 |
+
# λ€μ΄λ² κ²μ κ°λ° APIλ₯Ό νμ©νμ¬ λΈλ‘κ·Έ λ¬Έμμλ₯Ό μ‘°ννλ ν¨μ
|
67 |
+
def fetch_blog_count(keyword):
    """Return the total number of Naver blog documents matching *keyword*.

    Uses the Naver Open API blog-search endpoint with credentials taken
    from the environment. Any non-200 response is treated as "no
    documents" and yields 0 rather than raising.
    """
    endpoint = "https://openapi.naver.com/v1/search/blog.json"
    auth_headers = {
        "X-Naver-Client-Id": os.environ["NAVER_SEARCH_CLIENT_ID"],
        "X-Naver-Client-Secret": os.environ["NAVER_SEARCH_CLIENT_SECRET"],
    }
    query = {"query": keyword, "display": 1}
    response = requests.get(endpoint, headers=auth_headers, params=query)
    if response.status_code != 200:
        return 0
    return response.json().get("total", 0)
|
83 |
|
84 |
+
# μμ μμ
νμΌ μμ± ν¨μ
|
85 |
+
def create_excel_file(df):
    """Write *df* to a temporary .xlsx file and return the file path.

    delete=False keeps the file alive after the handle closes so Gradio
    can serve it as a download.
    """
    tmp = tempfile.NamedTemporaryFile(suffix=".xlsx", delete=False)
    tmp.close()
    df.to_excel(tmp.name, index=False)
    return tmp.name
|
90 |
|
91 |
+
# μ
λ ₯λ μ¬λ¬ ν€μλλ₯Ό μ²λ¦¬νλ ν¨μ
|
92 |
+
def process_keyword(keywords: str, include_related: bool):
    """Look up search volumes (and optionally related keywords) for the input.

    1. Splits the textbox input on newlines into individual keywords.
    2. For each keyword, queries the Naver ad API and keeps the row
       matching the input keyword itself.
    3. When *include_related* is True, related keywords (all rows except
       the input keyword) are appended for the FIRST keyword only.
    4. Finally a blog-document count is fetched per result keyword and the
       table is sorted by total monthly searches, descending.

    Returns a (DataFrame, excel_file_path) pair for the two Gradio outputs.
    """
    # Split on newlines, dropping blank lines.
    input_keywords = [k.strip() for k in keywords.splitlines() if k.strip() != ""]
    result_dfs = []

    for idx, kw in enumerate(input_keywords):
        df_kw = fetch_related_keywords(kw)
        if df_kw.empty:
            # API returned nothing usable for this keyword; skip it.
            continue
        # Prefer the row that matches the input keyword exactly.
        row_kw = df_kw[df_kw["μ 보ν€μλ"] == kw]
        if not row_kw.empty:
            result_dfs.append(row_kw)
        else:
            # No exact match — fall back to the first returned row.
            result_dfs.append(df_kw.head(1))

        # Related keywords only for the first input keyword, and only
        # when the checkbox was ticked.
        if include_related and idx == 0:
            df_related = df_kw[df_kw["μ 보ν€μλ"] != kw]
            if not df_related.empty:
                result_dfs.append(df_related)

    if result_dfs:
        result_df = pd.concat(result_dfs, ignore_index=True)
        result_df.drop_duplicates(subset=["μ 보ν€μλ"], inplace=True)
    else:
        # Nothing found at all — keep the column layout so the code below
        # and the Gradio table still work on an empty frame.
        result_df = pd.DataFrame(columns=["μ 보ν€μλ", "PCμκ²μλ", "λͺ¨λ°μΌμκ²μλ", "ν νμκ²μλ"])

    # One blog-search API call per result row.
    # NOTE(review): this is O(rows) sequential network calls — can be slow
    # for ~100 rows; consider batching/threading if latency matters.
    result_df["λΈλ‘κ·Έλ¬Έμμ"] = result_df["μ 보ν€μλ"].apply(fetch_blog_count)

    result_df.sort_values(by="ν νμκ²μλ", ascending=False, inplace=True)
    return result_df, create_excel_file(result_df)
|
132 |
+
|
133 |
+
# Gradio UI layout: one row of inputs (multi-line keyword box, related-keyword
# checkbox, search button) and one row of outputs (result table, Excel download).
with gr.Blocks() as demo:
    gr.Markdown("### λ€μ΄λ² μ°κ΄κ²μμ΄ λ° κ²μλ, λΈλ‘κ·Έ λ¬Έμμ μ‘°ν μ±")
    gr.Markdown(
        "μ¬λ¬ ν€μλλ₯Ό μν°λ‘ ꡬλΆνμ¬ μ λ ₯νλ©΄ κ° ν€μλμ κ²μλ μ 보λ₯Ό μ‘°ννκ³ , "
        "첫 λ²μ§Έ ν€μλμ κ²½μ° 'μ°κ΄κ²μμ΄ ν¬ν¨' μ²΄ν¬ μ μ°κ΄κ²μμ΄λ ν¨κ» μ‘°νν©λλ€. "
        "λν, κ° μ 보ν€μλμ λν λ€μ΄λ² λΈλ‘κ·Έ λ¬Έμμλ ν¨κ» μΆλ ₯λ©λλ€."
    )

    with gr.Row():
        # Multi-line textbox: one keyword per line.
        keyword_input = gr.Textbox(label="ν€μλ μ λ ₯ (μ¬λ¬ κ°μΌ κ²½μ° μν°λ‘ κ΅¬λΆ)", lines=5, placeholder="μ:\nκ°μλνλΉλΌ\nμλ°μ€ν¬λ¦½νΈ")
        # Related keywords are added for the first keyword only (see process_keyword).
        include_checkbox = gr.Checkbox(label="μ°κ΄κ²μμ΄ ν¬ν¨ (첫λ²μ§Έ ν€μλμ νν¨)", value=False)
        search_button = gr.Button("κ²μ")

    with gr.Row():
        df_output = gr.Dataframe(label="κ²μ κ²°κ³Ό")
        excel_output = gr.File(label="μμ  λ€μ΄λ‘λ")

    # Wire the button to the processing function: two inputs, two outputs.
    search_button.click(fn=process_keyword, inputs=[keyword_input, include_checkbox], outputs=[df_output, excel_output])

# Launch the app (also works when deployed on Hugging Face Spaces).
demo.launch()
|