Kims12 committed
Commit 9c4b33c · verified · 1 Parent(s): 2da13b5

Update app.py

Files changed (1)
app.py +39 -19
app.py CHANGED
@@ -100,12 +100,12 @@ def get_keywords_data_chunk(chunk):
 def get_blog_count_parallel(keyword):
     return (keyword, get_blog_count(keyword))
 
-def get_monthly_search_volumes(keywords, include_related):
+def get_monthly_search_volumes(keywords, include_related_keywords=True):
     all_data = []
     chunk_size = 10  # request keywords in chunks of 10
 
-    if include_related:
-        # related keywords included: fetch them from the API
+    if include_related_keywords:
+        # fetch keyword data from the API in parallel
         with ThreadPoolExecutor(max_workers=5) as executor:
             futures = [executor.submit(get_keywords_data_chunk, keywords[i:i+chunk_size]) for i in range(0, len(keywords), chunk_size)]
             for future in futures:
@@ -116,8 +116,14 @@ def get_monthly_search_volumes(keywords, include_related):
             except Exception as e:
                 print(f"Error fetching keywords data chunk: {e}")
     else:
-        # related keywords excluded: use only the input keywords
-        all_data = [{'relKeyword': keyword, 'monthlyPcQcCnt': '0', 'monthlyMobileQcCnt': '0'} for keyword in keywords]
+        # related keywords excluded, so process only the input keywords
+        for keyword in keywords:
+            # placeholder entries matching the API's data structure
+            all_data.append({
+                'relKeyword': keyword,
+                'monthlyPcQcCnt': '0',  # real volumes would need a separate API request
+                'monthlyMobileQcCnt': '0'
+            })
 
     if not all_data:
         return [("Error", "No data was returned or the API response is invalid.", "", "", "")]  # extra element for the blog post count column
@@ -139,19 +145,33 @@ def get_monthly_search_volumes(keywords, include_related):
         total_searches = monthly_pc + monthly_mobile
         results.append((keyword, monthly_pc, monthly_mobile, total_searches))
 
-        if len(results) >= 100:
+        if len(results) >= 100 and include_related_keywords:
             break
 
-    # fetch blog post counts in parallel
-    with ThreadPoolExecutor(max_workers=5) as executor:
-        blog_futures = [executor.submit(get_blog_count_parallel, result[0]) for result in results]
-        for i, future in enumerate(blog_futures):
-            try:
-                keyword, blog_count = future.result()
-                results[i] = (results[i][0], results[i][1], results[i][2], results[i][3], blog_count)
-            except Exception as e:
-                print(f"Error fetching blog count for keyword '{results[i][0]}': {e}")
-                results[i] = (results[i][0], results[i][1], results[i][2], results[i][3], "Error")
+    if include_related_keywords:
+        # fetch blog post counts in parallel
+        with ThreadPoolExecutor(max_workers=5) as executor:
+            blog_futures = [executor.submit(get_blog_count_parallel, result[0]) for result in results]
+            for i, future in enumerate(blog_futures):
+                try:
+                    keyword, blog_count = future.result()
+                    results[i] = (results[i][0], results[i][1], results[i][2], results[i][3], blog_count)
+                except Exception as e:
+                    print(f"Error fetching blog count for keyword '{results[i][0]}': {e}")
+                    results[i] = (results[i][0], results[i][1], results[i][2], results[i][3], "Error")
+    else:
+        # fetch blog post counts in parallel (one request per input keyword)
+        with ThreadPoolExecutor(max_workers=5) as executor:
+            blog_futures = [executor.submit(get_blog_count_parallel, result[0]) for result in results]
+            temp_results = []
+            for result, future in zip(results, blog_futures):
+                try:
+                    keyword, blog_count = future.result()
+                    temp_results.append((keyword, 0, 0, 0, blog_count))
+                except Exception as e:
+                    print(f"Error fetching blog count for keyword '{result[0]}': {e}")
+                    temp_results.append((result[0], 0, 0, 0, "Error"))
+            results = temp_results
 
     return results
 
@@ -166,7 +186,7 @@ def save_to_excel(results, keyword):
 
 def display_search_volumes(keywords, include_related):
     keyword_list = [keyword.strip() for keyword in keywords.split(',')]
-    results = get_monthly_search_volumes(keyword_list, include_related)
+    results = get_monthly_search_volumes(keyword_list, include_related_keywords=include_related)
     file_path = save_to_excel(results, keywords)
     return results, file_path
 
@@ -174,14 +194,14 @@ iface = gr.Interface(
     fn=display_search_volumes,
     inputs=[
         gr.Textbox(placeholder="Enter keywords"),
-        gr.Checkbox(label="Include related keywords", value=True)  # related-keywords ON/OFF checkbox
+        gr.Checkbox(label="Include related keywords", value=True)  # related-keywords toggle
     ],
     outputs=[
         gr.Dataframe(headers=["Keyword", "Monthly PC Searches", "Monthly Mobile Searches", "Total Monthly Searches", "Blog Post Count"]),
         gr.File(label="Download Excel file")
     ],
     title="Naver Monthly Search Volume Checker",
-    description="Enter keywords and choose whether to include related keywords."
+    description="Check monthly search volumes and blog post counts for keywords. Choose whether to include related keywords.",
 )
 
 iface.launch(share=True)  # share=True generates a public link
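
For reference, a minimal self-contained sketch of the pattern this commit settles on: split the keyword list into chunks of 10, fetch each chunk on a worker thread, then enrich every row with a blog count in a second parallel pass. The fetch_chunk and fetch_blog_count stubs below are hypothetical stand-ins for the app's real Naver API helpers (get_keywords_data_chunk, get_blog_count), which are not shown in this diff.

from concurrent.futures import ThreadPoolExecutor

# Hypothetical stubs; the real app calls the Naver keyword and blog search APIs.
def fetch_chunk(chunk):
    return [{'relKeyword': kw, 'monthlyPcQcCnt': '10', 'monthlyMobileQcCnt': '20'} for kw in chunk]

def fetch_blog_count(keyword):
    return 42

def volumes_with_blog_counts(keywords, chunk_size=10, max_workers=5):
    all_data = []
    # First pass: request keyword data in chunks of `chunk_size`, in parallel.
    with ThreadPoolExecutor(max_workers=max_workers) as executor:
        futures = [executor.submit(fetch_chunk, keywords[i:i + chunk_size])
                   for i in range(0, len(keywords), chunk_size)]
        for future in futures:
            try:
                all_data.extend(future.result())
            except Exception as e:
                print(f"Error fetching keywords data chunk: {e}")

    results = [(d['relKeyword'], int(d['monthlyPcQcCnt']), int(d['monthlyMobileQcCnt']))
               for d in all_data]

    # Second pass: one blog-count request per keyword, also in parallel.
    with ThreadPoolExecutor(max_workers=max_workers) as executor:
        blog_futures = [executor.submit(fetch_blog_count, kw) for kw, _, _ in results]
        enriched = []
        for (kw, pc, mobile), future in zip(results, blog_futures):
            try:
                enriched.append((kw, pc, mobile, pc + mobile, future.result()))
            except Exception as e:
                print(f"Error fetching blog count for '{kw}': {e}")
                enriched.append((kw, pc, mobile, pc + mobile, "Error"))
    return enriched

print(volumes_with_blog_counts(["coffee", "tea"]))

Submitting all futures before consuming any result is what makes each pass parallel, and iterating the futures in submission order (here via zip) keeps every blog count aligned with the row it belongs to, which the commit's enumerate-based branch relies on as well.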