Update app.py
Browse files
app.py
CHANGED
@@ -8,13 +8,13 @@ import pandas as pd
|
|
8 |
import tempfile
|
9 |
import gradio as gr
|
10 |
|
11 |
-
#
|
12 |
def generate_signature(timestamp, method, uri, secret_key):
|
13 |
message = f"{timestamp}.{method}.{uri}"
|
14 |
digest = hmac.new(secret_key.encode("utf-8"), message.encode("utf-8"), hashlib.sha256).digest()
|
15 |
return base64.b64encode(digest).decode()
|
16 |
|
17 |
-
# API 호출
|
18 |
def get_header(method, uri, api_key, secret_key, customer_id):
|
19 |
timestamp = str(round(time.time() * 1000))
|
20 |
signature = generate_signature(timestamp, method, uri, secret_key)
|
@@ -26,13 +26,9 @@ def get_header(method, uri, api_key, secret_key, customer_id):
|
|
26 |
"X-Signature": signature
|
27 |
}
|
28 |
|
29 |
-
# λ€μ΄λ² μ°κ΄κ²μμ΄ λ° κ²μλ
|
30 |
def fetch_related_keywords(keyword):
|
31 |
-
|
32 |
-
λ¨μΌ ν€μλμ λν΄ λ€μ΄λ² κ²μκ΄κ³ APIλ₯Ό νΈμΆνμ¬ μ°κ΄κ²μμ΄ λ° κ²μλ λ°μ΄ν°λ₯Ό DataFrameμΌλ‘ λ°νν©λλ€.
|
33 |
-
λ°ν 컬λΌ: "μ 보ν€μλ", "PCμκ²μλ", "λͺ¨λ°μΌμκ²μλ", "ν νμκ²μλ"
|
34 |
-
"""
|
35 |
-
# νκ²½λ³μμμ API ν€ λ±μ μ½μ΄μ΅λλ€ (κΈ°λ³Έκ° μμ΄ μ¬μ©)
|
36 |
API_KEY = os.environ["NAVER_API_KEY"]
|
37 |
SECRET_KEY = os.environ["NAVER_SECRET_KEY"]
|
38 |
CUSTOMER_ID = os.environ["NAVER_CUSTOMER_ID"]
|
@@ -41,7 +37,6 @@ def fetch_related_keywords(keyword):
|
|
41 |
uri = "/keywordstool"
|
42 |
method = "GET"
|
43 |
headers = get_header(method, uri, API_KEY, SECRET_KEY, CUSTOMER_ID)
|
44 |
-
# API νλΌλ―Έν°: hintKeywordsλ 리μ€νΈλ‘ μ λ¬
|
45 |
params = {
|
46 |
"hintKeywords": [keyword],
|
47 |
"showDetail": "1"
|
@@ -51,11 +46,9 @@ def fetch_related_keywords(keyword):
|
|
51 |
if "keywordList" not in data:
|
52 |
return pd.DataFrame()
|
53 |
df = pd.DataFrame(data["keywordList"])
|
54 |
-
# μ΅λ 100κ°κΉμ§ κ²°κ³Ό μ¬μ©
|
55 |
if len(df) > 100:
|
56 |
df = df.head(100)
|
57 |
|
58 |
-
# λ¬Έμμ΄ ννμ κ²μλμ μ μλ‘ λ³ννλ ν¨μ
|
59 |
def parse_count(x):
|
60 |
try:
|
61 |
x_str = str(x).replace(",", "")
|
@@ -63,16 +56,31 @@ def fetch_related_keywords(keyword):
|
|
63 |
except:
|
64 |
return 0
|
65 |
|
66 |
-
# κ° κ²μλ νλλ₯Ό μ μνμΌλ‘ λ³ν ν ν ν κ²μλ κ³μ°
|
67 |
df["PCμκ²μλ"] = df["monthlyPcQcCnt"].apply(parse_count)
|
68 |
df["λͺ¨λ°μΌμκ²μλ"] = df["monthlyMobileQcCnt"].apply(parse_count)
|
69 |
df["ν νμκ²μλ"] = df["PCμκ²μλ"] + df["λͺ¨λ°μΌμκ²μλ"]
|
70 |
-
# 'relKeyword' 컬λΌλͺ
μ "μ 보ν€μλ"λ‘ λ³κ²½
|
71 |
df.rename(columns={"relKeyword": "μ 보ν€μλ"}, inplace=True)
|
72 |
-
# νμν 컬λΌλ§ μ ν
|
73 |
result_df = df[["μ 보ν€μλ", "PCμκ²μλ", "λͺ¨λ°μΌμκ²μλ", "ν νμκ²μλ"]]
|
74 |
return result_df
|
75 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
76 |
# μμ μμ
νμΌ μμ± ν¨μ
|
77 |
def create_excel_file(df):
|
78 |
with tempfile.NamedTemporaryFile(suffix=".xlsx", delete=False) as tmp:
|
@@ -83,10 +91,10 @@ def create_excel_file(df):
|
|
83 |
# μ
λ ₯λ μ¬λ¬ ν€μλλ₯Ό μ²λ¦¬νλ ν¨μ
|
84 |
def process_keyword(keywords: str, include_related: bool):
|
85 |
"""
|
86 |
-
|
87 |
-
|
88 |
-
|
89 |
-
|
90 |
"""
|
91 |
# μ€λ°κΏμΌλ‘ λΆλ¦¬νμ¬ μ
λ ₯ ν€μλ 리μ€νΈ μμ± (λΉ μ€ μ μΈ)
|
92 |
input_keywords = [k.strip() for k in keywords.splitlines() if k.strip() != ""]
|
@@ -96,15 +104,15 @@ def process_keyword(keywords: str, include_related: bool):
|
|
96 |
df_kw = fetch_related_keywords(kw)
|
97 |
if df_kw.empty:
|
98 |
continue
|
99 |
-
# μ
λ ₯
|
100 |
row_kw = df_kw[df_kw["μ 보ν€μλ"] == kw]
|
101 |
if not row_kw.empty:
|
102 |
result_dfs.append(row_kw)
|
103 |
else:
|
104 |
-
#
|
105 |
result_dfs.append(df_kw.head(1))
|
106 |
|
107 |
-
# 체ν¬λ°μ€κ° Trueμ΄κ³ , 첫 λ²μ§Έ
|
108 |
if include_related and idx == 0:
|
109 |
df_related = df_kw[df_kw["μ 보ν€μλ"] != kw]
|
110 |
if not df_related.empty:
|
@@ -112,18 +120,24 @@ def process_keyword(keywords: str, include_related: bool):
|
|
112 |
|
113 |
if result_dfs:
|
114 |
result_df = pd.concat(result_dfs, ignore_index=True)
|
115 |
-
# μ€λ³΅ ν μ κ±° (λ§μ½ μ
λ ₯ ν€μλκ° μ°κ΄κ²μμ΄ κ²°κ³Όμ ν¬ν¨λ κ²½μ°)
|
116 |
result_df.drop_duplicates(subset=["μ 보ν€μλ"], inplace=True)
|
117 |
else:
|
118 |
result_df = pd.DataFrame(columns=["μ 보ν€μλ", "PCμκ²μλ", "λͺ¨λ°μΌμκ²μλ", "ν νμκ²μλ"])
|
119 |
-
|
|
|
|
|
|
|
120 |
result_df.sort_values(by="ν νμκ²μλ", ascending=False, inplace=True)
|
121 |
return result_df, create_excel_file(result_df)
|
122 |
|
123 |
# Gradio UI 구성
|
124 |
with gr.Blocks() as demo:
|
125 |
-
gr.Markdown("### λ€μ΄λ² μ°κ΄κ²μμ΄ λ°
|
126 |
-
gr.Markdown(
|
|
|
|
|
|
|
|
|
127 |
|
128 |
with gr.Row():
|
129 |
keyword_input = gr.Textbox(label="ν€μλ μ
λ ₯ (μ¬λ¬ κ°μΌ κ²½μ° μν°λ‘ ꡬλΆ)", lines=5, placeholder="μ:\nκ°μλνλΉλΌ\nμλ°μ€ν¬λ¦½νΈ")
|
@@ -134,7 +148,7 @@ with gr.Blocks() as demo:
|
|
134 |
df_output = gr.Dataframe(label="κ²μ κ²°κ³Ό")
|
135 |
excel_output = gr.File(label="μμ
λ€μ΄λ‘λ")
|
136 |
|
137 |
-
# λ²νΌ ν΄λ¦ μ process_keyword
|
138 |
search_button.click(fn=process_keyword, inputs=[keyword_input, include_checkbox], outputs=[df_output, excel_output])
|
139 |
|
140 |
# 앱 실행 (Hugging Face Spaces 배포 가능)
|
|
|
8 |
import tempfile
|
9 |
import gradio as gr
|
10 |
|
11 |
+
# λ€μ΄λ² κ΄κ³ API νΈμΆ μ μ¬μ©ν μλͺ
μμ± ν¨μ
|
12 |
def generate_signature(timestamp, method, uri, secret_key):
    """Build the base64-encoded HMAC-SHA256 signature for a Naver Ads API call.

    The signed message is the timestamp, HTTP method, and request URI joined
    with dots, as required by the API's authentication scheme.
    """
    payload = ".".join((timestamp, method, uri)).encode("utf-8")
    mac = hmac.new(secret_key.encode("utf-8"), payload, hashlib.sha256)
    return base64.b64encode(mac.digest()).decode()
|
16 |
|
17 |
+
# λ€μ΄λ² κ΄κ³ API νΈμΆ ν€λ μμ± ν¨μ
|
18 |
def get_header(method, uri, api_key, secret_key, customer_id):
|
19 |
timestamp = str(round(time.time() * 1000))
|
20 |
signature = generate_signature(timestamp, method, uri, secret_key)
|
|
|
26 |
"X-Signature": signature
|
27 |
}
|
28 |
|
29 |
+
# λ€μ΄λ² κ΄κ³ APIλ₯Ό ν΅ν΄ λ¨μΌ ν€μλμ μ°κ΄κ²μμ΄ λ° κ²μλ μ 보λ₯Ό κ°μ Έμ€λ ν¨μ
|
30 |
def fetch_related_keywords(keyword):
|
31 |
+
# νκ²½λ³μμμ κ΄κ³ API ν€κ°λ€μ λΆλ¬μ΅λλ€.
|
|
|
|
|
|
|
|
|
32 |
API_KEY = os.environ["NAVER_API_KEY"]
|
33 |
SECRET_KEY = os.environ["NAVER_SECRET_KEY"]
|
34 |
CUSTOMER_ID = os.environ["NAVER_CUSTOMER_ID"]
|
|
|
37 |
uri = "/keywordstool"
|
38 |
method = "GET"
|
39 |
headers = get_header(method, uri, API_KEY, SECRET_KEY, CUSTOMER_ID)
|
|
|
40 |
params = {
|
41 |
"hintKeywords": [keyword],
|
42 |
"showDetail": "1"
|
|
|
46 |
if "keywordList" not in data:
|
47 |
return pd.DataFrame()
|
48 |
df = pd.DataFrame(data["keywordList"])
|
|
|
49 |
if len(df) > 100:
|
50 |
df = df.head(100)
|
51 |
|
|
|
52 |
def parse_count(x):
|
53 |
try:
|
54 |
x_str = str(x).replace(",", "")
|
|
|
56 |
except:
|
57 |
return 0
|
58 |
|
|
|
59 |
df["PCμκ²μλ"] = df["monthlyPcQcCnt"].apply(parse_count)
|
60 |
df["λͺ¨λ°μΌμκ²μλ"] = df["monthlyMobileQcCnt"].apply(parse_count)
|
61 |
df["ν νμκ²μλ"] = df["PCμκ²μλ"] + df["λͺ¨λ°μΌμκ²μλ"]
|
|
|
62 |
df.rename(columns={"relKeyword": "μ 보ν€μλ"}, inplace=True)
|
|
|
63 |
result_df = df[["μ 보ν€μλ", "PCμκ²μλ", "λͺ¨λ°μΌμκ²μλ", "ν νμκ²μλ"]]
|
64 |
return result_df
|
65 |
|
66 |
+
# λ€μ΄λ² κ²μ κ°λ° APIλ₯Ό νμ©νμ¬ λΈλ‘κ·Έ λ¬Έμμλ₯Ό μ‘°ννλ ν¨μ
|
67 |
+
def fetch_blog_count(keyword):
    """Return the total number of Naver blog posts matching *keyword*.

    Calls the Naver Search (blog) API using the credentials read from the
    NAVER_SEARCH_CLIENT_ID / NAVER_SEARCH_CLIENT_SECRET environment variables
    (raises KeyError if they are unset, matching the other API helpers here).

    Returns:
        int: the API's "total" hit count, or 0 on any HTTP error status or
        network failure (best-effort, mirroring the non-200 fallback).
    """
    client_id = os.environ["NAVER_SEARCH_CLIENT_ID"]
    client_secret = os.environ["NAVER_SEARCH_CLIENT_SECRET"]
    url = "https://openapi.naver.com/v1/search/blog.json"
    headers = {
        "X-Naver-Client-Id": client_id,
        "X-Naver-Client-Secret": client_secret
    }
    # display=1: we only need the "total" field, not the result items.
    params = {"query": keyword, "display": 1}
    try:
        # Fix: without a timeout a stalled connection hangs the whole app;
        # also, network errors used to propagate while HTTP error statuses
        # silently returned 0 — treat both failure modes the same way.
        response = requests.get(url, headers=headers, params=params, timeout=10)
    except requests.exceptions.RequestException:
        return 0
    if response.status_code == 200:
        data = response.json()
        return data.get("total", 0)
    else:
        return 0
|
83 |
+
|
84 |
# μμ μμ
νμΌ μμ± ν¨μ
|
85 |
def create_excel_file(df):
|
86 |
with tempfile.NamedTemporaryFile(suffix=".xlsx", delete=False) as tmp:
|
|
|
91 |
# μ
λ ₯λ μ¬λ¬ ν€μλλ₯Ό μ²λ¦¬νλ ν¨μ
|
92 |
def process_keyword(keywords: str, include_related: bool):
|
93 |
"""
|
94 |
+
1. ν
μ€νΈλ°μ€μ μν°λ‘ ꡬλΆλ μ¬λ¬ ν€μλλ₯Ό λ°μ κ° ν€μλμ λν΄ λ€μ΄λ² κ΄κ³ APIλ₯Ό ν΅ν΄ κ²μλ μ 보λ₯Ό μ‘°νν©λλ€.
|
95 |
+
2. κ° ν€μλμ λν΄ μ
λ ₯ν ν€μλ μ체μ κ²°κ³Όλ₯Ό ν¬ν¨ν©λλ€.
|
96 |
+
3. 체ν¬λ°μ€(True)μΈ κ²½μ°, 첫 λ²μ§Έ ν€μλμ λν΄μλ§ μ°κ΄κ²μμ΄(μ
λ ₯ ν€μλλ₯Ό μ μΈν κ²°κ³Ό)λ₯Ό μΆκ°ν©λλ€.
|
97 |
+
4. λ§μ§λ§μΌλ‘, κ° "μ 보ν€μλ"μ λν΄ λ€μ΄λ² κ²μ APIλ₯Ό νΈμΆνμ¬ λΈλ‘κ·Έ λ¬Έμμλ₯Ό μ‘°ννκ³ "λΈλ‘κ·Έλ¬Έμμ" 컬λΌμ μΆκ°ν©λλ€.
|
98 |
"""
|
99 |
# μ€λ°κΏμΌλ‘ λΆλ¦¬νμ¬ μ
λ ₯ ν€μλ 리μ€νΈ μμ± (λΉ μ€ μ μΈ)
|
100 |
input_keywords = [k.strip() for k in keywords.splitlines() if k.strip() != ""]
|
|
|
104 |
df_kw = fetch_related_keywords(kw)
|
105 |
if df_kw.empty:
|
106 |
continue
|
107 |
+
# μ
λ ₯ ν€μλ μ체μ κ²°κ³Όλ₯Ό μ°μ ν¬ν¨
|
108 |
row_kw = df_kw[df_kw["μ 보ν€μλ"] == kw]
|
109 |
if not row_kw.empty:
|
110 |
result_dfs.append(row_kw)
|
111 |
else:
|
112 |
+
# μ
λ ₯ ν€μλμ ν΄λΉνλ νμ΄ μμΌλ©΄ 첫 λ²μ§Έ νμ λμ²΄λ‘ μΆκ°
|
113 |
result_dfs.append(df_kw.head(1))
|
114 |
|
115 |
+
# 체ν¬λ°μ€κ° Trueμ΄κ³ , 첫 λ²μ§Έ ν€μλμ λν΄μλ§ μ°κ΄κ²μμ΄ μΆκ° (μ
λ ₯ ν€μλ μ μΈ)
|
116 |
if include_related and idx == 0:
|
117 |
df_related = df_kw[df_kw["μ 보ν€μλ"] != kw]
|
118 |
if not df_related.empty:
|
|
|
120 |
|
121 |
if result_dfs:
|
122 |
result_df = pd.concat(result_dfs, ignore_index=True)
|
|
|
123 |
result_df.drop_duplicates(subset=["μ 보ν€μλ"], inplace=True)
|
124 |
else:
|
125 |
result_df = pd.DataFrame(columns=["μ 보ν€μλ", "PCμκ²μλ", "λͺ¨λ°μΌμκ²μλ", "ν νμκ²μλ"])
|
126 |
+
|
127 |
+
# λΈλ‘κ·Έ λ¬Έμμ μ»¬λΌ μΆκ°: κ° μ 보ν€μλλ§λ€ λ€μ΄λ² λΈλ‘κ·Έ κ²μ APIλ‘ μ΄ λ¬Έμμλ₯Ό μ‘°ν
|
128 |
+
result_df["λΈλ‘κ·Έλ¬Έμμ"] = result_df["μ 보ν€μλ"].apply(fetch_blog_count)
|
129 |
+
|
130 |
result_df.sort_values(by="ν νμκ²μλ", ascending=False, inplace=True)
|
131 |
return result_df, create_excel_file(result_df)
|
132 |
|
133 |
# Gradio UI ꡬμ±
|
134 |
with gr.Blocks() as demo:
|
135 |
+
gr.Markdown("### λ€μ΄λ² μ°κ΄κ²μμ΄ λ° κ²μλ, λΈλ‘κ·Έ λ¬Έμμ μ‘°ν μ±")
|
136 |
+
gr.Markdown(
|
137 |
+
"μ¬λ¬ ν€μλλ₯Ό μν°λ‘ ꡬλΆνμ¬ μ
λ ₯νλ©΄ κ° ν€μλμ κ²μλ μ 보λ₯Ό μ‘°ννκ³ , "
|
138 |
+
"첫 λ²μ§Έ ν€μλμ κ²½μ° 'μ°κ΄κ²μμ΄ ν¬ν¨' μ²΄ν¬ μ μ°κ΄κ²μμ΄λ ν¨κ» μ‘°νν©λλ€. "
|
139 |
+
"λν, κ° μ 보ν€μλμ λν λ€μ΄λ² λΈλ‘κ·Έ λ¬Έμμλ ν¨κ» μΆλ ₯λ©λλ€."
|
140 |
+
)
|
141 |
|
142 |
with gr.Row():
|
143 |
keyword_input = gr.Textbox(label="ν€μλ μ
λ ₯ (μ¬λ¬ κ°μΌ κ²½μ° μν°λ‘ ꡬλΆ)", lines=5, placeholder="μ:\nκ°μλνλΉλΌ\nμλ°μ€ν¬λ¦½νΈ")
|
|
|
148 |
df_output = gr.Dataframe(label="κ²μ κ²°κ³Ό")
|
149 |
excel_output = gr.File(label="μμ
λ€μ΄λ‘λ")
|
150 |
|
151 |
+
# λ²νΌ ν΄λ¦ μ process_keyword ν¨μ μ€ν
|
152 |
search_button.click(fn=process_keyword, inputs=[keyword_input, include_checkbox], outputs=[df_output, excel_output])
|
153 |
|
154 |
# μ± μ€ν (Hugging Face Spaces λ°°ν¬ κ°λ₯)
|