Kims12 committed
Commit cb9d18f · verified · 1 Parent(s): ff406c1

Update app.py

Files changed (1)
  1. app.py +40 -16
app.py CHANGED
@@ -12,6 +12,10 @@ from concurrent.futures import ThreadPoolExecutor
 import os
 import tempfile
 from datetime import datetime
+from dotenv import load_dotenv  # added dotenv
+
+# Load environment variables from the .env file.
+load_dotenv()
 
 # Load the API key and secret key from environment variables.
 BASE_URL = "https://api.searchad.naver.com"
@@ -19,6 +23,14 @@ API_KEY = os.environ.get("NAVER_API_KEY")
 SECRET_KEY = os.environ.get("NAVER_SECRET_KEY")
 CUSTOMER_ID = 2666992
 
+# Load the client ID and secret from environment variables.
+CLIENT_ID = os.environ.get("NAVER_CLIENT_ID")
+CLIENT_SECRET = os.environ.get("NAVER_CLIENT_SECRET")
+
+# Check that the environment variables were loaded.
+if not API_KEY or not SECRET_KEY or not CLIENT_ID or not CLIENT_SECRET:
+    raise ValueError("Required environment variables are not set. Check the .env file.")
+
 class NaverAPI:
     def __init__(self, base_url, api_key, secret_key, customer_id):
         self.base_url = base_url
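
The two hunks above switch the app to environment-based configuration. As a minimal sketch of the setup they assume (not part of the commit), a .env file beside app.py would provide the four variables the code reads; the names come from the diff, the values below are placeholders.

# .env (placeholder values):
#   NAVER_API_KEY=your-searchad-api-key
#   NAVER_SECRET_KEY=your-searchad-secret-key
#   NAVER_CLIENT_ID=your-openapi-client-id
#   NAVER_CLIENT_SECRET=your-openapi-client-secret
import os
from dotenv import load_dotenv

load_dotenv()  # finds a nearby .env file and adds its entries to os.environ
print(bool(os.environ.get("NAVER_CLIENT_ID")))  # True once the file is in place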
@@ -58,20 +70,25 @@ class NaverAPI:
         return response.json()
 
 def get_blog_count(keyword):
-    client_id = "421ZKFMM5TS1xmvsF7C0"
-    client_secret = "h47UQHAOGV"
+    # Load the client ID and secret from environment variables.
+    client_id = CLIENT_ID
+    client_secret = CLIENT_SECRET
     encText = urllib.parse.quote(keyword)
     url = "https://openapi.naver.com/v1/search/blog?query=" + encText
     request = urllib.request.Request(url)
     request.add_header("X-Naver-Client-Id", client_id)
     request.add_header("X-Naver-Client-Secret", client_secret)
-    response = urllib.request.urlopen(request)
-    rescode = response.getcode()
-    if rescode == 200:
-        response_body = response.read()
-        data = json.loads(response_body.decode('utf-8'))
-        return data['total']
-    else:
+    try:
+        response = urllib.request.urlopen(request)
+        rescode = response.getcode()
+        if rescode == 200:
+            response_body = response.read()
+            data = json.loads(response_body.decode('utf-8'))
+            return data['total']
+        else:
+            return 0
+    except Exception as e:
+        print(f"Error fetching blog count for keyword '{keyword}': {e}")
         return 0
 
 def get_keywords_data_chunk(chunk):
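
The hunk above replaces the hardcoded Open API credentials and wraps the request in try/except. Since urllib.request.urlopen raises urllib.error.HTTPError for 4xx/5xx responses, failures mostly surface in the new except branch rather than the else branch. A small illustrative sketch of the response handling follows; the sample body is invented, since only the 'total' field is used by the function.

import json

# Invented sample of the JSON body returned by the blog search endpoint;
# 'total' is the only key get_blog_count() reads.
sample_body = b'{"total": 1234}'
data = json.loads(sample_body.decode('utf-8'))
print(data['total'])  # -> 1234, the value get_blog_count() would return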
@@ -89,9 +106,12 @@ def get_monthly_search_volumes(keywords):
     with ThreadPoolExecutor(max_workers=5) as executor:
         futures = [executor.submit(get_keywords_data_chunk, keywords[i:i+chunk_size]) for i in range(0, len(keywords), chunk_size)]
         for future in futures:
-            data = future.result()
-            if 'keywordList' in data:
-                all_data.extend(data['keywordList'])
+            try:
+                data = future.result()
+                if 'keywordList' in data:
+                    all_data.extend(data['keywordList'])
+            except Exception as e:
+                print(f"Error fetching keywords data chunk: {e}")
 
     if not all_data:
         return [("Error", "No data was returned or the API response is invalid.", "", "", "")]  # blog document count column added
@@ -120,9 +140,13 @@ def get_monthly_search_volumes(keywords):
     with ThreadPoolExecutor(max_workers=5) as executor:
         blog_futures = [executor.submit(get_blog_count_parallel, result[0]) for result in results]
         for i, future in enumerate(blog_futures):
-            keyword, blog_count = future.result()
-            results[i] = (results[i][0], results[i][1], results[i][2], results[i][3], blog_count)
-
+            try:
+                keyword, blog_count = future.result()
+                results[i] = (results[i][0], results[i][1], results[i][2], results[i][3], blog_count)
+            except Exception as e:
+                print(f"Error fetching blog count for keyword '{results[i][0]}': {e}")
+                results[i] = (results[i][0], results[i][1], results[i][2], results[i][3], "Error")
+
     return results
 
 def save_to_excel(results, keyword):
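
The two hunks above apply the same change: each future.result() call is wrapped in try/except so that one failed task is logged instead of aborting the whole collection loop. A standalone sketch of that pattern, using a hypothetical work() function and toy data in place of the app's API calls:

from concurrent.futures import ThreadPoolExecutor

def work(chunk):  # hypothetical worker standing in for the API calls
    return sum(chunk)

items = list(range(10))
chunk_size = 3
with ThreadPoolExecutor(max_workers=5) as executor:
    futures = [executor.submit(work, items[i:i + chunk_size])
               for i in range(0, len(items), chunk_size)]
    for future in futures:
        try:
            print(future.result())      # re-raises any exception from work()
        except Exception as e:          # logged, loop continues with the next chunk
            print(f"chunk failed: {e}")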
@@ -150,4 +174,4 @@ iface = gr.Interface(
     title="Naver Monthly Search Volume Finder",
 )
 
-iface.launch()
+iface.launch(share=True)  # add share=True to create a public link
 
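Finally, share=True asks Gradio to expose the app through a temporary public URL. For a local-only run, a sketch of the alternative; the stand-in interface and the server_name/server_port arguments are assumptions for illustration, not part of the commit.

import gradio as gr

demo = gr.Interface(fn=lambda s: s, inputs="text", outputs="text")  # stand-in app
demo.launch(server_name="0.0.0.0", server_port=7860)  # LAN only, no public share link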