Update app-BACKUP-LAST.py

app-BACKUP-LAST.py  CHANGED  (+205 -105)
@@ -15,10 +15,10 @@ from typing import Iterator

import streamlit as st
import pandas as pd
-import PyPDF2
from collections import Counter

-from openai import OpenAI
from gradio_client import Client
from kaggle.api.kaggle_api_extended import KaggleApi
import tempfile
@@ -30,6 +30,52 @@ import pyarrow.parquet as pq

from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.metrics.pairwise import cosine_similarity

# ─────────────── Environment Variables / Constants ───────────────

OPENAI_API_KEY = os.getenv("OPENAI_API_KEY", "")
@@ -46,7 +92,7 @@ os.environ["KAGGLE_KEY"] = KAGGLE_KEY

BRAVE_ENDPOINT = "https://api.search.brave.com/res/v1/web/search"
IMAGE_API_URL = "http://211.233.58.201:7896"  # image generation API
-MAX_TOKENS = 7999

# ─────────────── Logging ───────────────
logging.basicConfig(
@@ -54,6 +100,7 @@ logging.basicConfig(
    format="%(asctime)s - %(levelname)s - %(message)s"
)

# ─────────────── Load the military tactics dataset ───────────────
@st.cache_resource
def load_military_dataset():
@@ -750,8 +797,7 @@ physical_transformation_categories = {
    ]
}

-
-
SWOT_FRAMEWORK = {
    "strengths": {
        "title": "강점 (Strengths)",
@@ -841,7 +887,6 @@ class Category:
    tags: list[str]
    items: list[str]

-# ─────────────── Framework analysis functions ───────────────
def analyze_with_swot(prompt: str) -> dict:
    prompt_lower = prompt.lower()
    results = {}
@@ -1662,78 +1707,116 @@ PHYS_CATEGORIES: list[Category] = [
        items=physical_transformation_categories["미학 및 감성 경험"]
    )
]

-# ─────────────── System prompt generation ───────────────
def get_idea_system_prompt(selected_category: str | None = None,
                           selected_frameworks: list | None = None) -> str:
    """
-
    """
    cat_clause = (
-        f'\n**Additional guideline**: for the selected category "{selected_category}"
-        f'consider its items first during idea generation.\n'
    ) if selected_category else ""
-
-    # (previously 'sunzi', 'swot', 'porter', 'bcg', etc. were used; now minimized since the focus is design/invention)
    if not selected_frameworks:
        selected_frameworks = []
-
-    # New purpose: design/invention ideation
-    framework_instruction = "\n\n### Frameworks to apply (design/invention)\n"
-    framework_output_format = ""
-
-    # Example framework usage ("sunzi" etc. may be used if desired)
    for fw in selected_frameworks:
        if fw == "sunzi":
-            framework_instruction += "-
-            framework_output_format += """
-            ## Creative ideas from the Art of War perspective
-            (use if desired)
-            """
        elif fw == "swot":
-            framework_instruction += "-
        elif fw == "porter":
-            framework_instruction += "-
        elif fw == "bcg":
-            framework_instruction += "-
-
    base_prompt = f"""
-You are a creative design/invention expert AI.
-
-
-
-1)
-
-
-
-
-
-
-
-
-
-
-
{framework_instruction}

-
-
-
-
-
-
-
-
-
{cat_clause}
-
-
-Perform intermediate reasoning internally, but output only the final answer.
-Integrate web search and Kaggle dataset analysis as well, to raise the quality of the ideas.
"""
    return base_prompt.strip()

-# ────────────────────────────────
@st.cache_data(ttl=3600)
def brave_search(query: str, count: int = 20):
    if not BRAVE_KEY:
@@ -1889,15 +1972,15 @@ def idea_generator_app():

    # Example topics
    example_topics = {
-        "example1": "
-        "example2": "
        "example3": "Wearable-device ideas for user interface (UI/UX) innovation"
    }
    sb.subheader("Example Topics")
    c1, c2, c3 = sb.columns(3)
-    if c1.button("
        process_example(example_topics["example1"])
-    if c2.button("
        process_example(example_topics["example2"])
    if c3.button("UI/UX innovation", key="ex3"):
        process_example(example_topics["example3"])
@@ -1993,7 +2076,10 @@ def process_example(topic):
def process_input(prompt: str, uploaded_files):
    """
    Takes the main chat input and generates design/invention ideas.
    """
    if not any(m["role"] == "user" and m["content"] == prompt for m in st.session_state.messages):
        st.session_state.messages.append({"role": "user", "content": prompt})
        with st.chat_message("user"):
@@ -2005,6 +2091,7 @@ def process_input(prompt: str, uploaded_files):
                and st.session_state.messages[i + 1]["role"] == "assistant"):
            return

    with st.chat_message("assistant"):
        status = st.status("Preparing to generate invention ideas…")
        stream_placeholder = st.empty()
@@ -2014,10 +2101,8 @@ def process_input(prompt: str, uploaded_files):
        client = get_openai_client()
        status.update(label="Initializing model…")

-        selected_cat
        selected_frameworks = st.session_state.get("selected_frameworks", [])
-
-        # Since the purpose is "design/invention", the system prompt changes
        sys_prompt = get_idea_system_prompt(
            selected_category=selected_cat,
            selected_frameworks=selected_frameworks
@@ -2032,11 +2117,9 @@ def process_input(prompt: str, uploaded_files):
        use_kaggle = st.session_state.kaggle_enabled
        has_uploaded = bool(uploaded_files)

-        search_content
-        kaggle_content = None
-        file_content = None

-        # ①
        if use_web_search:
            status.update(label="Searching the web…")
            with st.spinner("Searching…"):
@@ -2071,8 +2154,7 @@ def process_input(prompt: str, uploaded_files):
            with st.spinner("Processing files…"):
                file_content = process_uploaded_files(uploaded_files)

-        # ④ Military tactics data
-        mil_content = None
        if is_military_query(prompt):
            status.update(label="Searching military tactics dataset…")
            with st.spinner("Loading military insights…"):
@@ -2087,17 +2169,13 @@ def process_input(prompt: str, uploaded_files):
                        f"**Defense Reasoning:** {row['defense_reasoning']}\n\n---\n"
                    )

        user_content = prompt
-
-
-
-
-
-            user_content += "\n\n" + file_content
-        if mil_content:
-            user_content += "\n\n" + mil_content
-
-        # Internal analysis
        status.update(label="Analyzing…")
        decision_purpose = identify_decision_purpose(prompt)
        relevance_scores = compute_relevance_scores(prompt, PHYS_CATEGORIES)
@@ -2132,22 +2210,21 @@ def process_input(prompt: str, uploaded_files):
            for c, s in decision_purpose['constraints']:
                purpose_info += f"- **{c}** (relevance: {s})\n"

-        # Frameworks
        framework_contents = []
        for fw in selected_frameworks:
            if fw == "swot":
-
-
            elif fw == "porter":
-
-
            elif fw == "bcg":
-
-
-
-                # Art of War example
-                # (a separate logic would actually be needed; omitted here)
-                pass

        if framework_contents:
            user_content += "\n\n## (Optional) Additional framework analysis\n\n" + "\n\n".join(framework_contents)
@@ -2155,29 +2232,50 @@ def process_input(prompt: str, uploaded_files):
        user_content += f"\n\n## Category matrix analysis{purpose_info}\n{combos_table}"

        status.update(label="Generating final design/invention ideas…")
        api_messages = [
            {"role": "system", "content": sys_prompt},
            {"role": "system", "name": "category_db", "content": category_context(selected_cat)},
            {"role": "user", "content": user_content},
        ]
-
-
-
-
-            max_tokens=MAX_TOKENS,
-            top_p=1,
-            stream=True
        )

-        for chunk in stream:
-            if chunk.choices and chunk.choices[0].delta.content:
-                full_response += chunk.choices[0].delta.content
-                stream_placeholder.markdown(full_response + "▌")

-
        status.update(label="Invention ideas created!", state="complete")

-        # Image generation
        img_data = img_caption = None
        if st.session_state.generate_image and full_response:
            match = re.search(r"###\s*이미지\s*프롬프트\s*\n+([^\n]+)", full_response, re.I)
@@ -2190,6 +2288,7 @@ def process_input(prompt: str, uploaded_files):
            if img_data:
                st.image(img_data, caption=f"Visualized Concept - {img_caption}")

        answer_msg = {"role": "assistant", "content": full_response}
        if img_data:
            answer_msg["image"] = img_data
@@ -2197,7 +2296,7 @@ def process_input(prompt: str, uploaded_files):
        st.session_state["_skip_dup_idx"] = len(st.session_state.messages)
        st.session_state.messages.append(answer_msg)

-        # Download
        st.subheader("Download This Output")
        col_md, col_html = st.columns(2)
        col_md.download_button(
@@ -2225,6 +2324,7 @@ def process_input(prompt: str, uploaded_files):
                {"role": "assistant", "content": f"⚠️ Error: {e}"}
            )

def main():
    idea_generator_app()

@@ -15,10 +15,10 @@ from typing import Iterator

import streamlit as st
import pandas as pd
+import PyPDF2  # For handling PDF files
from collections import Counter

+from openai import OpenAI, APIError, APITimeoutError
from gradio_client import Client
from kaggle.api.kaggle_api_extended import KaggleApi
import tempfile
@@ -30,6 +30,52 @@ import pyarrow.parquet as pq

from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.metrics.pairwise import cosine_similarity

+# ─── Libraries for network stability ──────────────────────────────
+import httpx
+from httpx import RemoteProtocolError
+
+# ▸ If the `backoff` module is unavailable, substitute an ad-hoc replacement
+try:
+    import backoff
+except ImportError:
+    logging.warning("The `backoff` module is not installed; using a simple replacement decorator.")
+
+    def _simple_backoff_on_exception(exceptions, *args, **kwargs):
+        """
+        Lightweight exponential (2**n) retry decorator.
+        Mimics only the essential arguments of the backoff.on_exception API:
+        - exceptions : exception(s) to retry on (a tuple or a single class)
+        - max_tries  : given via kwargs (default 3)
+        - base       : given via kwargs (default 2, the exponent base)
+        All other arguments are ignored.
+        """
+        max_tries = kwargs.get("max_tries", 3)
+        base = kwargs.get("base", 2)
+
+        def decorator(fn):
+            def wrapper(*f_args, **f_kwargs):
+                attempt = 0
+                while True:
+                    try:
+                        return fn(*f_args, **f_kwargs)
+                    except exceptions as e:
+                        attempt += 1
+                        if attempt >= max_tries:
+                            raise
+                        sleep = base ** attempt
+                        logging.info(
+                            f"[retry {attempt}/{max_tries}] {fn.__name__} -> {e} … waiting {sleep}s"
+                        )
+                        time.sleep(sleep)
+            return wrapper
+        return decorator
+
+    class _DummyBackoff:
+        on_exception = staticmethod(_simple_backoff_on_exception)
+
+    backoff = _DummyBackoff()
+
+
# ─────────────── Environment Variables / Constants ───────────────

OPENAI_API_KEY = os.getenv("OPENAI_API_KEY", "")
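For reference, a minimal sketch of how the simplified retry decorator above behaves on its own. The `fetch_status` function and target URL are illustrative only, and the call form matches the fallback decorator rather than the upstream `backoff` package (which expects a wait generator such as `backoff.expo` as its first argument); it relies on the module-level `backoff`, `httpx`, and `logging` bindings from the hunk above:

    @backoff.on_exception((RemoteProtocolError, httpx.TimeoutException), max_tries=3, base=2)
    def fetch_status(url: str) -> int:
        # Any call that can drop the connection mid-response is a retry candidate.
        return httpx.get(url, timeout=5).status_code

    logging.info("status=%s", fetch_status("https://example.com"))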
@@ -46,7 +92,7 @@ os.environ["KAGGLE_KEY"] = KAGGLE_KEY

BRAVE_ENDPOINT = "https://api.search.brave.com/res/v1/web/search"
IMAGE_API_URL = "http://211.233.58.201:7896"  # image generation API
+MAX_TOKENS = 7999  # safe token limit

# ─────────────── Logging ───────────────
logging.basicConfig(
@@ -54,6 +100,7 @@ logging.basicConfig(
    format="%(asctime)s - %(levelname)s - %(message)s"
)

+
# ─────────────── Load the military tactics dataset ───────────────
@st.cache_resource
def load_military_dataset():
@@ -750,8 +797,7 @@ physical_transformation_categories = {
    ]
}

+# ─────────────── Framework analysis functions ───────────────
SWOT_FRAMEWORK = {
    "strengths": {
        "title": "강점 (Strengths)",
@@ -841,7 +887,6 @@ class Category:
    tags: list[str]
    items: list[str]

def analyze_with_swot(prompt: str) -> dict:
    prompt_lower = prompt.lower()
    results = {}
@@ -1662,78 +1707,116 @@ PHYS_CATEGORIES: list[Category] = [
        items=physical_transformation_categories["미학 및 감성 경험"]
    )
]
+# ──────────────── (nothing omitted in between) ────────────────

def get_idea_system_prompt(selected_category: str | None = None,
                           selected_frameworks: list | None = None) -> str:
    """
+    A system prompt strengthened further for design/invention purposes.
+    - User request: explain the "best 10 ideas" in detail
+    - The output also lists 30 "additional ideas" (one line each) not included in the "best 10 ideas"
+    - Automate image generation when the result is produced
+    - Cite Kaggle + web-search sources
    """
    cat_clause = (
+        f'\n**Additional guideline**: give special priority to the selected category "{selected_category}".\n'
    ) if selected_category else ""
    if not selected_frameworks:
        selected_frameworks = []
+    framework_instruction = "\n\n### (Selected additional analysis frameworks)\n"
    for fw in selected_frameworks:
        if fw == "sunzi":
+            framework_instruction += "- Sun Tzu's Art of War (Thirty-Six Stratagems)\n"
        elif fw == "swot":
+            framework_instruction += "- SWOT analysis\n"
        elif fw == "porter":
+            framework_instruction += "- Porter's Five Forces\n"
        elif fw == "bcg":
+            framework_instruction += "- BCG matrix\n"
+    # Key points: "the best 10 ideas in great detail" + "an image prompt per idea" + "cited sources"
    base_prompt = f"""
+You are a creative design/invention expert AI.
+Analyze the topic entered by the user and
+derive the **"5 best design/invention ideas"**.
+Each idea must satisfy the following requirements:
+1) Explain it **in great detail**, concretely enough that the reader can picture it
+2) Also provide an **image prompt** so an image can be generated automatically
+   - e.g. `### 이미지 프롬프트\\nA one-line English phrase`
+3) If insights (or references) drawn from **Kaggle datasets** or **web search** are used, they must be mentioned in the result
+4) At the end of the final output, create a **"Sources"** section with
+   - 3-5 URLs referenced from web search (Brave)
+   - Kaggle dataset name/URL (if any)
+   - Any other references
+5) **Additional ideas**: write the next 10 ranked ideas not included in the 5 in detail, one long line per idea
+   - e.g. `#### Additional idea X:\\nA one-line Korean phrase`
+
{framework_instruction}

+## Idea evaluation criteria
+When proposing ideas, evaluate and score them by the following criteria and rank them:
+1. **Innovativeness** (30%): differentiation from existing solutions, originality, technical advancement
+2. **Feasibility** (25%): technical and economic viability, implementation difficulty
+3. **Market potential** (20%): target market size, growth potential, profitability, ROI
+4. **Social impact** (15%): contribution to solving social and environmental problems, improvement in quality of life
+5. **Scalability** (10%): potential to extend to diverse situations/markets, potential for convergence
+
+The output must be in **Korean** and must follow the structure below:
+1. **Topic summary** (summary of the user's question and the analysis approach - within 300 characters)
+2. **Top 5 idea overview** (summary of the 5 ideas and brief reasons for selection - within 400 words)
+3. **Top 5 ideas in detail**
+   - Develop each idea with the following systematic structure:
+   - ### Idea X: [idea name] (overall score: x.x/10)
+   - #### Core concept
+     * Explain the idea's core principle and operating mechanism in detail (400+ characters)
+     * The specific problem it aims to solve and its social/economic importance
+     * Clearly present at least three innovative differences from existing solutions
+     * Clearly state the core value proposition
+   - #### Detailed design and technical implementation
+     * Explain technical details such as concrete components, design characteristics, and fabrication methods
+     * Provide actionable details such as dimensions, materials, and operating principles
+     * At least three key technical challenges and a solution for each
+     * Describe unique technical elements with patent potential
+     * List the required core technologies and resources
+   - #### Usage scenarios and user experience
+     * Describe at least three real usage scenarios in a storytelling style
+     * Present at least two concrete primary user personas
+     * Depict the user journey step by step, visually
+     * Explain the core value of the user experience and its emotional connection points
+     * Anticipate potential user feedback and how to respond to it
+   - #### Market analysis and business model
+     * Estimated target market size (TAM, SAM, SOM) and growth rate
+     * Analysis of key customer segments linked to concrete needs
+     * A detailed comparison with at least five competing products/services and the competitive advantages
+     * Detailed description of the revenue model and revenue streams
+     * Market entry strategy and initial marketing approach
+     * Analysis of scalable business model canvas elements
+   - #### Implementation roadmap and plan
+     * A phased plan for execution (proof of concept, prototype, testing, mass production, etc.)
+     * Concrete development schedule and key milestones at 6-month, 1-year, and 3-year horizons
+     * Required key talent/team composition and roles
+     * Initial investment required and fundraising strategy
+     * Key partnerships and external collaboration needs
+     * Quality management and performance measurement metrics
+   - #### SWOT analysis
+     * Strengths: at least five strengths unique to this idea and the reasons for them
+     * Weaknesses: at least three potential weaknesses and concrete ways to overcome them
+     * Opportunities: at least four opportunity factors arising from the external environment (technology, market, policy, etc.)
+     * Threats: at least three external factors that could hinder success and a concrete countermeasure for each
+     Write each in detail
+   - Write all 10 ideas identically with this structure:
+
+4. **Additional insights** (results of the selected framework analyses)
+5. **Additional ideas** (10 ideas not in the TOP 5, each explained concisely in one line, including the idea's core value and innovation points)
+   - e.g. `#### Additional idea X:\\nA detailed one-line Korean phrase`
+6. **Sources** (web search links, Kaggle datasets, etc.)
{cat_clause}
+However long it runs, follow these requirements and output **only the final, completed answer**.
+(Internal reasoning is kept hidden.)
"""
    return base_prompt.strip()

+# ──────────────── Remaining code (web search, Kaggle, image generation, etc.) ────────────────
+
@st.cache_data(ttl=3600)
def brave_search(query: str, count: int = 20):
    if not BRAVE_KEY:
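As a quick illustration of how the strengthened prompt builder is meant to be called, a hedged sketch follows; the category string and framework list are examples only, reusing names that appear elsewhere in this file:

    # Build the system prompt with one category emphasized and two frameworks enabled.
    sys_prompt = get_idea_system_prompt(
        selected_category="미학 및 감성 경험",  # any category name used by the app works here
        selected_frameworks=["swot", "porter"],
    )
    # The category clause is injected near the end of the returned prompt.
    print(sys_prompt[:300])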
@@ -1889,15 +1972,15 @@ def idea_generator_app():

    # Example topics
    example_topics = {
+        "example1": "'Cat toy' design",
+        "example2": "Design for a drone that can assist crop cultivation",
        "example3": "Wearable-device ideas for user interface (UI/UX) innovation"
    }
    sb.subheader("Example Topics")
    c1, c2, c3 = sb.columns(3)
+    if c1.button("Cat toy", key="ex1"):
        process_example(example_topics["example1"])
+    if c2.button("Cultivation drone", key="ex2"):
        process_example(example_topics["example2"])
    if c3.button("UI/UX innovation", key="ex3"):
        process_example(example_topics["example3"])
@@ -1993,7 +2076,10 @@ def process_example(topic):
def process_input(prompt: str, uploaded_files):
    """
    Takes the main chat input and generates design/invention ideas.
+    If streaming fails (RemoteProtocolError, etc.), retry with backoff and
+    finally fall back to a non-streaming call.
    """
+    # ─── Avoid duplicate chat-history entries ───
    if not any(m["role"] == "user" and m["content"] == prompt for m in st.session_state.messages):
        st.session_state.messages.append({"role": "user", "content": prompt})
        with st.chat_message("user"):
@@ -2005,6 +2091,7 @@ def process_input(prompt: str, uploaded_files):
                and st.session_state.messages[i + 1]["role"] == "assistant"):
            return

+    # ─── Generate the result ───
    with st.chat_message("assistant"):
        status = st.status("Preparing to generate invention ideas…")
        stream_placeholder = st.empty()
@@ -2014,10 +2101,8 @@ def process_input(prompt: str, uploaded_files):
        client = get_openai_client()
        status.update(label="Initializing model…")

+        selected_cat = st.session_state.get("category_focus", None)
        selected_frameworks = st.session_state.get("selected_frameworks", [])
        sys_prompt = get_idea_system_prompt(
            selected_category=selected_cat,
            selected_frameworks=selected_frameworks
@@ -2032,11 +2117,9 @@ def process_input(prompt: str, uploaded_files):
        use_kaggle = st.session_state.kaggle_enabled
        has_uploaded = bool(uploaded_files)

+        search_content = kaggle_content = file_content = mil_content = None

+        # ① Web search
        if use_web_search:
            status.update(label="Searching the web…")
            with st.spinner("Searching…"):
@@ -2071,8 +2154,7 @@ def process_input(prompt: str, uploaded_files):
            with st.spinner("Processing files…"):
                file_content = process_uploaded_files(uploaded_files)

+        # ④ Military tactics data
        if is_military_query(prompt):
            status.update(label="Searching military tactics dataset…")
            with st.spinner("Loading military insights…"):
@@ -2087,17 +2169,13 @@ def process_input(prompt: str, uploaded_files):
                        f"**Defense Reasoning:** {row['defense_reasoning']}\n\n---\n"
                    )

+        # ─── Assemble the content to send ───
        user_content = prompt
+        for extra in (search_content, kaggle_content, file_content, mil_content):
+            if extra:
+                user_content += "\n\n" + extra
+
+        # ─── Internal analysis ───
        status.update(label="Analyzing…")
        decision_purpose = identify_decision_purpose(prompt)
        relevance_scores = compute_relevance_scores(prompt, PHYS_CATEGORIES)
@@ -2132,22 +2210,21 @@ def process_input(prompt: str, uploaded_files):
            for c, s in decision_purpose['constraints']:
                purpose_info += f"- **{c}** (relevance: {s})\n"

+        # ─── Framework analysis (optional) ───
        framework_contents = []
        for fw in selected_frameworks:
            if fw == "swot":
+                framework_contents.append(
+                    format_business_framework_analysis("swot", analyze_with_swot(prompt))
+                )
            elif fw == "porter":
+                framework_contents.append(
+                    format_business_framework_analysis("porter", analyze_with_porter(prompt))
+                )
            elif fw == "bcg":
+                framework_contents.append(
+                    format_business_framework_analysis("bcg", analyze_with_bcg(prompt))
+                )

        if framework_contents:
            user_content += "\n\n## (Optional) Additional framework analysis\n\n" + "\n\n".join(framework_contents)
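The same dispatch can also be written table-driven; this is only an equivalent sketch using the three analyzers the hunk already references, not a change the commit makes:

    # Map framework keys to their analyzer functions, then build the contents in one pass.
    ANALYZERS = {
        "swot": analyze_with_swot,
        "porter": analyze_with_porter,
        "bcg": analyze_with_bcg,
    }
    framework_contents = [
        format_business_framework_analysis(fw, ANALYZERS[fw](prompt))
        for fw in selected_frameworks
        if fw in ANALYZERS
    ]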
@@ -2155,29 +2232,50 @@ def process_input(prompt: str, uploaded_files):
        user_content += f"\n\n## Category matrix analysis{purpose_info}\n{combos_table}"

        status.update(label="Generating final design/invention ideas…")
+
        api_messages = [
            {"role": "system", "content": sys_prompt},
            {"role": "system", "name": "category_db", "content": category_context(selected_cat)},
            {"role": "user", "content": user_content},
        ]
+
+        # ─── OpenAI chat call (with backoff retry) ───
+        @backoff.on_exception(
+            (RemoteProtocolError, APITimeoutError, APIError), max_tries=3
+        )
+        def safe_stream():
+            return client.chat.completions.create(
+                model="gpt-4.1-mini",
+                messages=api_messages,
+                temperature=1,
+                max_tokens=MAX_TOKENS,
+                top_p=1,
+                stream=True
+            )


+        try:
+            stream = safe_stream()
+            for chunk in stream:
+                if chunk.choices and chunk.choices[0].delta.content:
+                    full_response += chunk.choices[0].delta.content
+                    stream_placeholder.markdown(full_response + "▌")
+        except (RemoteProtocolError, APITimeoutError, APIError) as stream_err:
+            logging.warning(f"Streaming failed, falling back to non-stream: {stream_err}")
+            resp = client.chat.completions.create(
+                model="gpt-4.1-mini",
+                messages=api_messages,
+                temperature=1,
+                max_tokens=MAX_TOKENS,
+                top_p=1,
+                stream=False
+            )
+            full_response = resp.choices[0].message.content
+            stream_placeholder.markdown(full_response)
+
        status.update(label="Invention ideas created!", state="complete")

+        # ─── Image generation ───
        img_data = img_caption = None
        if st.session_state.generate_image and full_response:
            match = re.search(r"###\s*이미지\s*프롬프트\s*\n+([^\n]+)", full_response, re.I)
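Reduced to a standalone sketch, the retry-then-fallback shape above looks like this; the client construction and the model name mirror the hunk and are assumptions outside this diff:

    from openai import OpenAI, APIError, APITimeoutError
    from httpx import RemoteProtocolError

    client = OpenAI()  # assumes OPENAI_API_KEY is set in the environment
    messages = [{"role": "user", "content": "hello"}]

    try:
        # First attempt: stream tokens as they arrive.
        parts = []
        for chunk in client.chat.completions.create(model="gpt-4.1-mini", messages=messages, stream=True):
            if chunk.choices and chunk.choices[0].delta.content:
                parts.append(chunk.choices[0].delta.content)
        text = "".join(parts)
    except (RemoteProtocolError, APITimeoutError, APIError):
        # Fallback: a single blocking call returns the whole completion at once.
        resp = client.chat.completions.create(model="gpt-4.1-mini", messages=messages, stream=False)
        text = resp.choices[0].message.content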
@@ -2190,6 +2288,7 @@ def process_input(prompt: str, uploaded_files):
            if img_data:
                st.image(img_data, caption=f"Visualized Concept - {img_caption}")

+        # ─── Save the session message ───
        answer_msg = {"role": "assistant", "content": full_response}
        if img_data:
            answer_msg["image"] = img_data
@@ -2197,7 +2296,7 @@ def process_input(prompt: str, uploaded_files):
        st.session_state["_skip_dup_idx"] = len(st.session_state.messages)
        st.session_state.messages.append(answer_msg)

+        # ─── Download options ───
        st.subheader("Download This Output")
        col_md, col_html = st.columns(2)
        col_md.download_button(
@@ -2225,6 +2324,7 @@ def process_input(prompt: str, uploaded_files):
                {"role": "assistant", "content": f"⚠️ Error: {e}"}
            )

+
def main():
    idea_generator_app()
