Update app.py
app.py
CHANGED
@@ -12,7 +12,6 @@ import time
 import hmac
 import hashlib
 import base64
-import concurrent.futures  # module for concurrent execution
 
 # Debugging (logging) function
 def debug_log(message: str):
@@ -144,7 +143,7 @@ def get_header(method, uri, api_key, secret_key, customer_id):
         "X-Signature": signature
     }
 
-# Existing single-keyword function (
+# Existing single-keyword function (for reference)
 def fetch_related_keywords(keyword):
     debug_log(f"fetch_related_keywords called, keyword: {keyword}")
     API_KEY = os.environ["NAVER_API_KEY"]
@@ -181,7 +180,8 @@ def fetch_related_keywords(keyword):
     debug_log("fetch_related_keywords finished")
     return result_df
 
-# Newly added: bundle keywords into groups of 10 and make one
+# Newly added: function that bundles keywords into groups of 10 and makes one API call per group
+# (note: each group is called sequentially)
 def fetch_related_keywords_batch(keywords: list):
     debug_log(f"fetch_related_keywords_batch called, keyword group: {keywords}")
     API_KEY = os.environ["NAVER_API_KEY"]
@@ -192,9 +192,8 @@ def fetch_related_keywords_batch(keywords: list):
     uri = "/keywordstool"
     method = "GET"
     headers = get_header(method, uri, API_KEY, SECRET_KEY, CUSTOMER_ID)
-    # When passing multiple keywords to the API, pass the list as-is
     params = {
-        "hintKeywords": keywords,
+        "hintKeywords": keywords,  # pass the list as-is (up to 10 keywords)
         "showDetail": "1"
     }
     response = requests.get(BASE_URL + uri, params=params, headers=headers)
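For context on the hintKeywords change above: when requests is given a list as a params value, it encodes it as a repeated query parameter rather than a single comma-joined string. The minimal sketch below only demonstrates that encoding; the URL and keyword list are placeholders, and whether the keyword tool endpoint expects repeated parameters or a pre-joined string is not something this diff shows.

import requests

# Placeholder values, only to show how the query string is built.
keywords = ["coffee", "espresso", "latte"]

prepared = requests.Request(
    "GET",
    "https://api.example.com/keywordstool",  # placeholder, not the app's BASE_URL
    params={"hintKeywords": keywords, "showDetail": "1"},
).prepare()

# A list value becomes repeated parameters:
# ...?hintKeywords=coffee&hintKeywords=espresso&hintKeywords=latte&showDetail=1
print(prepared.url)

# If the endpoint expected one comma-separated value instead, the call site
# would pass ",".join(keywords) rather than the raw list.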
@@ -219,29 +218,18 @@ def fetch_related_keywords_batch(keywords: list):
     debug_log("fetch_related_keywords_batch finished")
     return result_df
 
-# The process_keyword function
+# process_keyword improved to process keywords group by group (each group sequentially)
 def process_keyword(keywords: str, include_related: bool):
     debug_log(f"process_keyword called, keywords: {keywords}, include related terms: {include_related}")
     input_keywords = [k.strip() for k in keywords.splitlines() if k.strip()]
     groups = [input_keywords[i:i+10] for i in range(0, len(input_keywords), 10)]
     result_dfs = []
-
-
-    # Run the per-group API calls concurrently
-    with concurrent.futures.ThreadPoolExecutor() as executor:
-        future_to_group = {executor.submit(fetch_related_keywords_batch, group): group for group in groups}
-        for future in concurrent.futures.as_completed(future_to_group):
-            group = future_to_group[future]
-            try:
-                df_batch = future.result()
-                group_results[tuple(group)] = df_batch
-            except Exception as exc:
-                debug_log(f"Error in group {group}: {exc}")
-
-    # Process each group's results in order
+
+    # Process each group sequentially (no concurrent calls)
     for idx, group in enumerate(groups):
-
-
+        debug_log(f"Group {idx+1} processing started: {group}")
+        df_batch = fetch_related_keywords_batch(group)
+        if df_batch.empty:
             continue
         # Extract results for each keyword in the group
         for kw in group:
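The reworked process_keyword above takes newline-separated keyword text, splits it into groups of at most 10, and then calls fetch_related_keywords_batch once per group, in order, instead of submitting the groups to a thread pool. A minimal sketch of just the grouping step, using the same slicing expression as the diff (the sample keyword text is made up):

# Hypothetical input: one keyword per line, as process_keyword expects
keywords_text = "keyword a\nkeyword b\nkeyword c"

input_keywords = [k.strip() for k in keywords_text.splitlines() if k.strip()]
groups = [input_keywords[i:i+10] for i in range(0, len(input_keywords), 10)]

print(groups)  # [['keyword a', 'keyword b', 'keyword c']], a single group of up to 10

# With 23 keywords the same expression would yield three groups of 10, 10 and 3,
# and the loop in process_keyword would call the batch function three times.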
@@ -250,12 +238,13 @@ def process_keyword(keywords: str, include_related: bool):
                 result_dfs.append(row_kw)
             else:
                 result_dfs.append(df_batch.head(1))
-        # For the first group
+        # Apply the related-search-term option only to the first group (excluding the first keyword)
         if include_related and idx == 0:
             first_keyword = group[0]
             df_related = df_batch[df_batch["정보키워드"] != first_keyword]
             if not df_related.empty:
                 result_dfs.append(df_related)
+        debug_log(f"Group {idx+1} processing finished")
 
     if result_dfs:
         result_df = pd.concat(result_dfs, ignore_index=True)
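The first-group branch above keeps every row of the batch result whose 정보키워드 value differs from the first keyword, and appends those rows as related terms. A small sketch with a made-up DataFrame (every column other than 정보키워드 is invented for illustration):

import pandas as pd

# Toy stand-in for df_batch; only the 정보키워드 column matters here.
df_batch = pd.DataFrame({
    "정보키워드": ["coffee", "coffee beans", "espresso machine"],
    "monthly_volume": [1000, 400, 250],  # invented column
})

first_keyword = "coffee"
df_related = df_batch[df_batch["정보키워드"] != first_keyword]

print(df_related)
# Rows for "coffee beans" and "espresso machine" remain; these are the
# related-term rows that process_keyword appends for the first group only.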