Kims12 commited on
Commit
cb9d18f
·
verified ·
1 Parent(s): ff406c1

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +40 -16
app.py CHANGED
@@ -12,6 +12,10 @@ from concurrent.futures import ThreadPoolExecutor
12
  import os
13
  import tempfile
14
  from datetime import datetime
 
 
 
 
15
 
16
  # ํ™˜๊ฒฝ ๋ณ€์ˆ˜์—์„œ API ํ‚ค์™€ ์‹œํฌ๋ฆฟ ํ‚ค๋ฅผ ๋ถˆ๋Ÿฌ์˜ต๋‹ˆ๋‹ค.
17
  BASE_URL = "https://api.searchad.naver.com"
@@ -19,6 +23,14 @@ API_KEY = os.environ.get("NAVER_API_KEY")
19
  SECRET_KEY = os.environ.get("NAVER_SECRET_KEY")
20
  CUSTOMER_ID = 2666992
21
 
 
 
 
 
 
 
 
 
22
  class NaverAPI:
23
  def __init__(self, base_url, api_key, secret_key, customer_id):
24
  self.base_url = base_url
@@ -58,20 +70,25 @@ class NaverAPI:
58
  return response.json()
59
 
60
def get_blog_count(keyword):
    """Return the total number of Naver blog posts matching *keyword*.

    Queries the Naver Open API blog-search endpoint and returns the
    ``total`` field of the JSON response. Returns 0 when the HTTP status
    is not 200 or when the request fails for any reason.
    """
    # SECURITY FIX: credentials were hardcoded in source; load them from
    # the environment instead so secrets never live in the repository.
    client_id = os.environ.get("NAVER_CLIENT_ID")
    client_secret = os.environ.get("NAVER_CLIENT_SECRET")
    encText = urllib.parse.quote(keyword)
    url = "https://openapi.naver.com/v1/search/blog?query=" + encText
    request = urllib.request.Request(url)
    request.add_header("X-Naver-Client-Id", client_id)
    request.add_header("X-Naver-Client-Secret", client_secret)
    try:
        response = urllib.request.urlopen(request)
        if response.getcode() == 200:
            data = json.loads(response.read().decode('utf-8'))
            return data['total']
        # Non-200 responses are treated as "no results" rather than an error.
        return 0
    except Exception as e:
        # Best-effort: a failed lookup yields 0 so one bad keyword
        # does not abort the whole batch.
        print(f"Error fetching blog count for keyword '{keyword}': {e}")
        return 0
76
 
77
  def get_keywords_data_chunk(chunk):
@@ -89,9 +106,12 @@ def get_monthly_search_volumes(keywords):
89
  with ThreadPoolExecutor(max_workers=5) as executor:
90
  futures = [executor.submit(get_keywords_data_chunk, keywords[i:i+chunk_size]) for i in range(0, len(keywords), chunk_size)]
91
  for future in futures:
92
- data = future.result()
93
- if 'keywordList' in data:
94
- all_data.extend(data['keywordList'])
 
 
 
95
 
96
  if not all_data:
97
  return [("Error", "๋ฐ์ดํ„ฐ๊ฐ€ ๋ฐ˜ํ™˜๋˜์ง€ ์•Š์•˜๊ฑฐ๋‚˜ API ์‘๋‹ต์ด ์œ ํšจํ•˜์ง€ ์•Š์Šต๋‹ˆ๋‹ค.", "", "", "")] # ๋ธ”๋กœ๊ทธ ๋ฌธ์„œ ์ˆ˜ ์นผ๋Ÿผ ์ถ”๊ฐ€
@@ -120,9 +140,13 @@ def get_monthly_search_volumes(keywords):
120
  with ThreadPoolExecutor(max_workers=5) as executor:
121
  blog_futures = [executor.submit(get_blog_count_parallel, result[0]) for result in results]
122
  for i, future in enumerate(blog_futures):
123
- keyword, blog_count = future.result()
124
- results[i] = (results[i][0], results[i][1], results[i][2], results[i][3], blog_count)
125
-
 
 
 
 
126
  return results
127
 
128
  def save_to_excel(results, keyword):
@@ -150,4 +174,4 @@ iface = gr.Interface(
150
  title="๋„ค์ด๋ฒ„ ์›”๊ฒ€์ƒ‰๋Ÿ‰ ๊ฒ€์ƒ‰๊ธฐ",
151
  )
152
 
153
- iface.launch()
 
12
  import os
13
  import tempfile
14
  from datetime import datetime
15
+ from dotenv import load_dotenv # dotenv ์ถ”๊ฐ€
16
+
17
+ # .env ํŒŒ์ผ์˜ ํ™˜๊ฒฝ ๋ณ€์ˆ˜๋ฅผ ๋กœ๋“œํ•ฉ๋‹ˆ๋‹ค.
18
+ load_dotenv()
19
 
20
  # ํ™˜๊ฒฝ ๋ณ€์ˆ˜์—์„œ API ํ‚ค์™€ ์‹œํฌ๋ฆฟ ํ‚ค๋ฅผ ๋ถˆ๋Ÿฌ์˜ต๋‹ˆ๋‹ค.
21
  BASE_URL = "https://api.searchad.naver.com"
 
23
  SECRET_KEY = os.environ.get("NAVER_SECRET_KEY")
24
  CUSTOMER_ID = 2666992
25
 
26
+ # ํ™˜๊ฒฝ ๋ณ€์ˆ˜์—์„œ ํด๋ผ์ด์–ธํŠธ ID์™€ ์‹œํฌ๋ฆฟ์„ ๋ถˆ๋Ÿฌ์˜ต๋‹ˆ๋‹ค.
27
+ CLIENT_ID = os.environ.get("NAVER_CLIENT_ID")
28
+ CLIENT_SECRET = os.environ.get("NAVER_CLIENT_SECRET")
29
+
30
+ # ํ™˜๊ฒฝ ๋ณ€์ˆ˜ ๋กœ๋“œ ํ™•์ธ
31
+ if not API_KEY or not SECRET_KEY or not CLIENT_ID or not CLIENT_SECRET:
32
+ raise ValueError("ํ•„์ˆ˜ ํ™˜๊ฒฝ ๋ณ€์ˆ˜๊ฐ€ ์„ค์ •๋˜์ง€ ์•Š์•˜์Šต๋‹ˆ๋‹ค. .env ํŒŒ์ผ์„ ํ™•์ธํ•˜์„ธ์š”.")
33
+
34
  class NaverAPI:
35
  def __init__(self, base_url, api_key, secret_key, customer_id):
36
  self.base_url = base_url
 
70
  return response.json()
71
 
72
def get_blog_count(keyword):
    """Return the number of Naver blog documents found for *keyword*.

    Uses the module-level CLIENT_ID / CLIENT_SECRET (loaded from the
    environment at import time) and returns the ``total`` field of the
    search response, or 0 on a non-200 status or any exception.
    """
    query_url = (
        "https://openapi.naver.com/v1/search/blog?query="
        + urllib.parse.quote(keyword)
    )
    req = urllib.request.Request(query_url)
    # Credentials come from the environment; see module-level setup.
    req.add_header("X-Naver-Client-Id", CLIENT_ID)
    req.add_header("X-Naver-Client-Secret", CLIENT_SECRET)
    try:
        resp = urllib.request.urlopen(req)
        if resp.getcode() == 200:
            payload = json.loads(resp.read().decode('utf-8'))
            return payload['total']
        return 0
    except Exception as e:
        # Failures are logged and reported as zero results.
        print(f"Error fetching blog count for keyword '{keyword}': {e}")
        return 0
93
 
94
  def get_keywords_data_chunk(chunk):
 
106
  with ThreadPoolExecutor(max_workers=5) as executor:
107
  futures = [executor.submit(get_keywords_data_chunk, keywords[i:i+chunk_size]) for i in range(0, len(keywords), chunk_size)]
108
  for future in futures:
109
+ try:
110
+ data = future.result()
111
+ if 'keywordList' in data:
112
+ all_data.extend(data['keywordList'])
113
+ except Exception as e:
114
+ print(f"Error fetching keywords data chunk: {e}")
115
 
116
  if not all_data:
117
  return [("Error", "๋ฐ์ดํ„ฐ๊ฐ€ ๋ฐ˜ํ™˜๋˜์ง€ ์•Š์•˜๊ฑฐ๋‚˜ API ์‘๋‹ต์ด ์œ ํšจํ•˜์ง€ ์•Š์Šต๋‹ˆ๋‹ค.", "", "", "")] # ๋ธ”๋กœ๊ทธ ๋ฌธ์„œ ์ˆ˜ ์นผ๋Ÿผ ์ถ”๊ฐ€
 
140
  with ThreadPoolExecutor(max_workers=5) as executor:
141
  blog_futures = [executor.submit(get_blog_count_parallel, result[0]) for result in results]
142
  for i, future in enumerate(blog_futures):
143
+ try:
144
+ keyword, blog_count = future.result()
145
+ results[i] = (results[i][0], results[i][1], results[i][2], results[i][3], blog_count)
146
+ except Exception as e:
147
+ print(f"Error fetching blog count for keyword '{results[i][0]}': {e}")
148
+ results[i] = (results[i][0], results[i][1], results[i][2], results[i][3], "Error")
149
+
150
  return results
151
 
152
  def save_to_excel(results, keyword):
 
174
  title="๋„ค์ด๋ฒ„ ์›”๊ฒ€์ƒ‰๋Ÿ‰ ๊ฒ€์ƒ‰๊ธฐ",
175
  )
176
 
177
+ iface.launch(share=True) # share=True๋ฅผ ์ถ”๊ฐ€ํ•˜์—ฌ ๊ณต๊ฐœ ๋งํฌ ์ƒ์„ฑ