"""
Square Theory Generator (10 best variations)
============================================
2025-05-28 v4: fixes broken Korean text rendering and returns the Top 10 results
---------------------------------------------------------------------------------

Summary of changes
------------------
1. **Stronger Korean font registration**
   * If none of the preferred "Malgun / Nanum / Apple" fonts are installed, the
     **NanumGothic TTF** is downloaded from Google Fonts and registered at runtime.
2. **More robust draw_square**
   * Every corner label is cast with `str()` to guard against missing values.
3. **Misc**
   * Code comments and cleanup.

How to run
----------
```bash
pip install --upgrade gradio matplotlib openai requests
export OPENAI_API_KEY="sk-..."
python square_theory_gradio.py
```
"""

import os
import json
import tempfile
import urllib.request

import requests
import gradio as gr
import matplotlib.pyplot as plt
from matplotlib import patches, font_manager, rcParams
from openai import OpenAI


PREFERRED_FONTS = ["Malgun Gothic", "NanumGothic", "AppleGothic", "DejaVu Sans"]
NANUM_URL = (
    "https://github.com/google/fonts/raw/main/ofl/nanumgothic/"
    "NanumGothic-Regular.ttf"
)

def _set_korean_font():
    """Use an installed Korean-capable font, or download NanumGothic as a fallback."""
    available = {f.name for f in font_manager.fontManager.ttflist}
    for cand in PREFERRED_FONTS:
        if cand in available:
            rcParams["font.family"] = cand
            break
    else:
        # None of the preferred fonts are installed: download NanumGothic once
        # into the temp directory and register it with matplotlib at runtime.
        try:
            tmp_dir = tempfile.gettempdir()
            font_path = os.path.join(tmp_dir, "NanumGothic-Regular.ttf")
            if not os.path.exists(font_path):
                print("[INFO] Downloading NanumGothic font...")
                urllib.request.urlretrieve(NANUM_URL, font_path)
            font_manager.fontManager.addfont(font_path)
            rcParams["font.family"] = font_manager.FontProperties(fname=font_path).get_name()
            print(f"[INFO] Registered fallback font: {rcParams['font.family']}")
        except Exception as e:
            print("[WARN] Font download failed, Korean text may break:", e)


rcParams["axes.unicode_minus"] = False  # avoid broken minus glyphs with CJK fonts
_set_korean_font()

if not os.getenv("OPENAI_API_KEY"):
    raise EnvironmentError("Set the OPENAI_API_KEY environment variable.")

client = OpenAI()

def draw_square(words):
    """Draw a unit square with one word at each corner and return the figure."""
    fig, ax = plt.subplots(figsize=(4, 4))
    ax.add_patch(patches.Rectangle((0, 0), 1, 1, fill=False, linewidth=2))
    # Cast every label with str() so a missing or non-string value cannot break rendering.
    ax.text(-0.05, 1.05, str(words.get("tl", "")), ha="right", va="bottom", fontsize=14, fontweight="bold")
    ax.text(1.05, 1.05, str(words.get("tr", "")), ha="left", va="bottom", fontsize=14, fontweight="bold")
    ax.text(1.05, -0.05, str(words.get("br", "")), ha="left", va="top", fontsize=14, fontweight="bold")
    ax.text(-0.05, -0.05, str(words.get("bl", "")), ha="right", va="top", fontsize=14, fontweight="bold")
    ax.set_xticks([])
    ax.set_yticks([])
    ax.set_xlim(-0.2, 1.2)
    ax.set_ylim(-0.2, 1.2)
    ax.set_aspect("equal")
    return fig

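
# Illustrative call of draw_square (the corner words are made-up placeholders, not model output):
#   fig = draw_square({"tl": "word1", "tr": "word2", "br": "word3", "bl": "word4"})
# The returned matplotlib Figure is what the gr.Plot component renders in the UI below.
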
SYSTEM_PROMPT = (
    "You are a Korean copywriting and brand-naming expert and a Square Theory assistant. "
    "From the single word the user provides (tl), return a JSON array of 10 proposals, "
    "starting with the highest-quality one. "
    "Each array element has the fields tl, tr, br, bl, top_phrase, bottom_phrase, slogan and brand, "
    "and the four corners of the square (tl>tr>br>bl) must connect naturally with the two phrases, "
    "the slogan and the brand name. "
    "The array must put the best proposal at index 0, the next at index 1, and so on through index 9. "
    "The result must not contain any text other than the JSON."
)

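
# For reference, one element of the array requested above would look roughly like the
# sketch below (field values are invented placeholders, not real model output):
#   {
#     "tl": "<seed word>",
#     "tr": "<word completing the top phrase with tl>",
#     "br": "<word completing the bottom phrase with bl>",
#     "bl": "<word placed below the seed word>",
#     "top_phrase": "<tl + tr>",
#     "bottom_phrase": "<bl + br>",
#     "slogan": "<one-line copy tying the square together>",
#     "brand": "<suggested brand name>"
#   }
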
def call_llm(seed: str):
    """Ask the model for 10 Square Theory proposals and return the parsed list."""
    resp = client.chat.completions.create(
        model="gpt-4o-mini",
        messages=[
            {"role": "system", "content": SYSTEM_PROMPT},
            {"role": "user", "content": seed},
        ],
        temperature=0.9,
        max_tokens=1024,
    )
    raw = resp.choices[0].message.content.strip()
    try:
        data = json.loads(raw)
        if not isinstance(data, list) or len(data) != 10:
            raise ValueError("JSON array does not have length 10")
    except Exception as exc:
        raise ValueError(f"Failed to parse LLM JSON: {exc}\nRaw output: {raw[:300]} ...")
    return data

def generate(seed_word: str):
    """Call the LLM, draw the top-ranked square, and build a Top 10 markdown report."""
    results = call_llm(seed_word)
    fig = draw_square({k: results[0][k] for k in ("tl", "tr", "br", "bl")})
    md_lines = []
    for idx, item in enumerate(results, 1):
        md_lines.append(
            f"### {idx}. {item['top_phrase']} / {item['bottom_phrase']}\n"
            f"- **Slogan**: {item['slogan']}\n"
            f"- **Brand name**: {item['brand']}\n"
            f"- (tl={item['tl']}, tr={item['tr']}, br={item['br']}, bl={item['bl']})\n"
        )
    return fig, "\n".join(md_lines)

with gr.Blocks(title="Square Theory - Top 10 🇰🇷") as demo:
    gr.Markdown("""# Square Theory proposals: Top 10\nOne seed word → 10 squares / copy lines / brand names scored and ranked by the LLM""")
    seed = gr.Textbox(label="Seed word (TL)", placeholder="e.g. gold")
    run = gr.Button("Generate")
    fig_out = gr.Plot(label="Top-ranked square")
    md_out = gr.Markdown(label="Top 10 proposals")

    run.click(generate, inputs=seed, outputs=[fig_out, md_out])


if __name__ == "__main__":
    demo.launch()