ginipick committed on
Commit
29204f7
·
verified ·
1 Parent(s): 4611d6c

Update app.py

Files changed (1)
  1. app.py +110 -97
app.py CHANGED
@@ -19,7 +19,7 @@ BRAVE_ENDPOINT = "https://api.search.brave.com/res/v1/web/search"
  IMAGE_API_URL = "http://211.233.58.201:7896"  # Image generation API
  MAX_TOKENS = 7999
 
- # ──────────────────────────────── Physical Transformation Categories (KR & EN) ────────────────
  physical_transformation_categories = {
      "μ„Όμ„œ κΈ°λŠ₯": [
          "μ‹œκ° μ„Όμ„œ/감지", "청각 μ„Όμ„œ/감지", "촉각 μ„Όμ„œ/감지", "미각 μ„Όμ„œ/감지", "후각 μ„Όμ„œ/감지",
@@ -294,6 +294,7 @@ physical_transformation_categories_en = {
          "Data-driven decision making / AI adoption",
          "Convergence of new technologies / Innovative investments"
      ]
  }
 
  # ──────────────────────────────── Logging ────────────────────────────────
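
Later in the diff, category_context serializes the selected category for the model with json.dumps(..., ensure_ascii=False); a tiny illustration of the payload shape, with abbreviated values:

import json

# Abbreviated illustration of the "category_db" system-message payload.
sample = {"μ„Όμ„œ κΈ°λŠ₯": ["μ‹œκ° μ„Όμ„œ/감지", "청각 μ„Όμ„œ/감지"]}
print(json.dumps(sample, ensure_ascii=False))
# -> {"μ„Όμ„œ κΈ°λŠ₯": ["μ‹œκ° μ„Όμ„œ/감지", "청각 μ„Όμ„œ/감지"]}
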
@@ -301,6 +302,7 @@ logging.basicConfig(level=logging.INFO,
                      format="%(asctime)s - %(levelname)s - %(message)s")
 
  # ──────────────────────────────── OpenAI Client ──────────────────────────
  @st.cache_resource
  def get_openai_client():
      """Create an OpenAI client with timeout and retry settings."""
@@ -324,6 +326,8 @@ def get_idea_system_prompt(selected_category: str | None = None) -> str:
          f'이 μΉ΄ν…Œκ³ λ¦¬μ˜ ν•­λͺ©λ“€μ„ 2단계와 3단계 λͺ¨λ‘μ—μ„œ μš°μ„ μ μœΌλ‘œ κ³ λ €ν•˜μ‹­μ‹œμ˜€.\n'
      ) if selected_category else ""
 
      prompt = f"""
  λ°˜λ“œμ‹œ ν•œκΈ€(ν•œκ΅­μ–΄)둜 λ‹΅λ³€ν•˜λΌ. 당신은 ν˜μ‹  μ»¨μ„€ν„΄νŠΈλ‘œμ„œ CCM(크둜슀 μΉ΄ν…Œκ³ λ¦¬ 맀트릭슀) 방법둠을 ν™œμš©ν•˜μ—¬ 창의적 아이디어λ₯Ό λ„μΆœν•©λ‹ˆλ‹€.
 
@@ -635,13 +639,49 @@ def keywords(text: str, top=5):
      cleaned = re.sub(r"[^κ°€-힣a-zA-Z0-9\s]", "", text)
      return " ".join(cleaned.split()[:top])
 
  # ──────────────────────────────── Streamlit UI ────────────────────────────
  def idea_generator_app():
      st.title("Creative Idea Generator")
 
      # Set default session state
      if "ai_model" not in st.session_state:
-         st.session_state.ai_model = "gpt-4.1-mini"
      if "messages" not in st.session_state:
          st.session_state.messages = []
      if "auto_save" not in st.session_state:
@@ -664,7 +704,8 @@ def idea_generator_app():
      if web_search_enabled:
          sb.info("✅ Web search results will be integrated.")
 
-     # Example topics
      example_topics = {
          "example1": "λ„μ‹œ λ¬Ό λΆ€μ‘± 문제 해결을 μœ„ν•œ ν˜μ‹ μ  λ°©μ•ˆ",
          "example2": "노인 λŒλ΄„ μ„œλΉ„μŠ€μ˜ λ””μ§€ν„Έ μ „ν™˜",
@@ -680,6 +721,7 @@ def idea_generator_app():
          index=0  # default "(None)"
      )
 
      sb.subheader("Example Prompts")
      c1, c2, c3 = sb.columns(3)
      if c1.button("λ„μ‹œ λ¬Ό λΆ€μ‘± 문제", key="ex1"):
@@ -797,53 +839,19 @@ def idea_generator_app():
      sb.markdown("---")
      sb.markdown("Created by [Ginigen.com](https://ginigen.com) | [YouTube](https://www.youtube.com/@ginipickaistudio)")
 
  def process_example(topic):
      """Handle example prompts."""
      process_input(topic, [])
 
- # ──────────────────────────────── Helper: record results · download · auto-save ──────────
- def write_output(md_text: str, prompt: str):
-     """
-     • Save the markdown answer to the chat history
-     • Provide download buttons (Markdown / HTML)
-     • Automatic JSON backup
-     """
-     # ① Append to the chat history
-     st.session_state.messages.append({"role": "assistant", "content": md_text})
-
-     # ② Download buttons
-     st.subheader("Download This Output")
-     col_md, col_html = st.columns(2)
-     col_md.download_button(
-         label="Markdown",
-         data=md_text,
-         file_name=f"{prompt[:30]}.md",
-         mime="text/markdown"
-     )
-     col_html.download_button(
-         label="HTML",
-         data=md_to_html(md_text, prompt[:30]),
-         file_name=f"{prompt[:30]}.html",
-         mime="text/html"
-     )
-
-     # ③ Automatic JSON save
-     if st.session_state.auto_save:
-         fn = f"chat_history_auto_{datetime.now():%Y%m%d_%H%M%S}.json"
-         try:
-             with open(fn, "w", encoding="utf-8") as fp:
-                 json.dump(st.session_state.messages, fp, ensure_ascii=False, indent=2)
-         except Exception as e:
-             logging.error(f"Auto-save failed: {e}")
-
- # ──────────────────────────────── process_input ────────────────────────────
  def process_input(prompt: str, uploaded_files):
      """
      1) Send the user input to GPT-4 to generate a creative idea report
      2) Optionally generate an image
-     3) Record, download, and back up the result only once (prevents duplicate output)
      """
-     # Record the user message
      if not any(m["role"] == "user" and m["content"] == prompt
                 for m in st.session_state.messages):
          st.session_state.messages.append({"role": "user", "content": prompt})
@@ -851,47 +859,55 @@ def process_input(prompt: str, uploaded_files):
      with st.chat_message("user"):
          st.markdown(prompt)
 
-     # Initialization for the GPT call
-     use_web_search = st.session_state.web_search_enabled
-     has_uploaded = bool(uploaded_files)
-     full_response = ""
-
      with st.chat_message("assistant"):
-         # Prompt output area (real-time streaming)
          message_placeholder = st.empty()
 
          try:
-             with st.spinner("Preparing to generate ideas..."):
-                 client = get_openai_client()
 
-             # Build the system prompt
              selected_cat = st.session_state.get("category_focus", "(None)")
              if selected_cat == "(None)":
                  selected_cat = None
              sys_prompt = get_idea_system_prompt(selected_category=selected_cat)
 
-             # Category information (JSON format)
              def category_context(sel):
                  if sel:
                      return json.dumps(
                          {sel: physical_transformation_categories[sel]},
-                         ensure_ascii=False
-                     )
-                 return "ALL_CATEGORIES: " + ", ".join(physical_transformation_categories.keys())
 
-             # Merge web search results and file contents
-             user_content = prompt
              if use_web_search:
-                 with st.spinner("Searching the web..."):
                      search_content = do_web_search(keywords(prompt, top=5))
-                 user_content += "\n\n" + search_content
              if has_uploaded:
-                 with st.spinner("Processing uploaded files..."):
                      file_content = process_uploaded_files(uploaded_files)
-                 if file_content:
-                     user_content += "\n\n" + file_content
 
-             # Conversation message array
              api_messages = [
                  {"role": "system", "content": sys_prompt},
                  {"role": "system", "name": "category_db",
@@ -899,44 +915,40 @@ def process_input(prompt: str, uploaded_files):
                  {"role": "user", "content": user_content},
              ]
 
-             # GPT-4 streaming call
-             with st.spinner("Generating ideas..."):
-                 stream = client.chat.completions.create(
-                     model="gpt-4.1-mini",
-                     messages=api_messages,
-                     temperature=1,
-                     max_tokens=MAX_TOKENS,
-                     top_p=1,
-                     stream=True
-                 )
-                 for chunk in stream:
-                     if chunk.choices and chunk.choices[0].delta.get("content"):
-                         token_text = chunk.choices[0].delta["content"]
-                         full_response += token_text
-                         message_placeholder.markdown(full_response + "▌")
-
-             # Final output
              message_placeholder.markdown(full_response)
 
-             # Automatic image generation
              if st.session_state.generate_image and full_response:
-                 # Pattern 1: look for the "### 이미지 ν”„λ‘¬ν”„νŠΈ" heading
-                 ccm_match = re.search(r"###\s*이미지\s*ν”„λ‘¬ν”„νŠΈ\s*\n+([^\n]+)",
-                                       full_response, flags=re.IGNORECASE)
-                 # Pattern 2: legacy formats such as "Image Prompt"
                  legacy_match = None
                  if not ccm_match:
                      legacy_match = re.search(
                          r"\|\s*(?:\*\*)?Image\s+Prompt(?:\*\*)?\s*\|\s*([^|\n]+)",
-                         full_response, flags=re.IGNORECASE
-                     ) or re.search(
-                         r"(?i)Image\s+Prompt\s*[:\-]\s*([^\n]+)",
-                         full_response
-                     )
                  match = ccm_match or legacy_match
                  if match:
-                     raw_prompt = re.sub(r"[\r\n`\"'\\]", " ", match.group(1)).strip()
-                     with st.spinner("Generating idea image..."):
                          img, cap = generate_image(raw_prompt)
                          if img:
                              st.image(img, caption=f"아이디어 μ‹œκ°ν™” – {cap}")
@@ -947,18 +959,19 @@ def process_input(prompt: str, uploaded_files):
                                  "image_caption": f"아이디어 μ‹œκ°ν™” – {cap}"
                              })
 
-             # Record, download, back up the result
              write_output(full_response, prompt)
 
          except Exception as e:
-             logging.error("Error in process_input", exc_info=True)
-             err_msg = f"⚠️ 였λ₯˜κ°€ λ°œμƒν–ˆμŠ΅λ‹ˆλ‹€: {e}"
-             st.error(err_msg)
-             st.session_state.messages.append({"role": "assistant", "content": err_msg})
 
  # ──────────────────────────────── main ────────────────────────────────────
  def main():
      idea_generator_app()
 
  if __name__ == "__main__":
-     main()
 
  IMAGE_API_URL = "http://211.233.58.201:7896"  # Image generation API
  MAX_TOKENS = 7999
 
+ # ──────────────────────────────── Physical Transformation Categories (KR & EN) ─────────────────
  physical_transformation_categories = {
      "μ„Όμ„œ κΈ°λŠ₯": [
          "μ‹œκ° μ„Όμ„œ/감지", "청각 μ„Όμ„œ/감지", "촉각 μ„Όμ„œ/감지", "미각 μ„Όμ„œ/감지", "후각 μ„Όμ„œ/감지",
 
          "Data-driven decision making / AI adoption",
          "Convergence of new technologies / Innovative investments"
      ]
+
  }
 
  # ──────────────────────────────── Logging ────────────────────────────────
 
                      format="%(asctime)s - %(levelname)s - %(message)s")
 
  # ──────────────────────────────── OpenAI Client ──────────────────────────
+
  @st.cache_resource
  def get_openai_client():
      """Create an OpenAI client with timeout and retry settings."""
 
          f'이 μΉ΄ν…Œκ³ λ¦¬μ˜ ν•­λͺ©λ“€μ„ 2단계와 3단계 λͺ¨λ‘μ—μ„œ μš°μ„ μ μœΌλ‘œ κ³ λ €ν•˜μ‹­μ‹œμ˜€.\n'
      ) if selected_category else ""
 
+
+
      prompt = f"""
  λ°˜λ“œμ‹œ ν•œκΈ€(ν•œκ΅­μ–΄)둜 λ‹΅λ³€ν•˜λΌ. 당신은 ν˜μ‹  μ»¨μ„€ν„΄νŠΈλ‘œμ„œ CCM(크둜슀 μΉ΄ν…Œκ³ λ¦¬ 맀트릭슀) 방법둠을 ν™œμš©ν•˜μ—¬ 창의적 아이디어λ₯Ό λ„μΆœν•©λ‹ˆλ‹€.
 
 
      cleaned = re.sub(r"[^κ°€-힣a-zA-Z0-9\s]", "", text)
      return " ".join(cleaned.split()[:top])
 
+ # ──────────────────────────────── Helper: record results · download · auto-save ──────────
+ def write_output(md_text: str, prompt: str):
+     """
+     • Save the markdown answer to the chat history
+     • Provide download buttons (Markdown / HTML)
+     • Automatic JSON backup
+     """
+     # ① Append to the chat history
+     st.session_state.messages.append(
+         {"role": "assistant", "content": md_text})
+
+     # ② Download buttons
+     st.subheader("Download This Output")
+     col_md, col_html = st.columns(2)
+     col_md.download_button(
+         label="Markdown",
+         data=md_text,
+         file_name=f"{prompt[:30]}.md",
+         mime="text/markdown"
+     )
+     col_html.download_button(
+         label="HTML",
+         data=md_to_html(md_text, prompt[:30]),
+         file_name=f"{prompt[:30]}.html",
+         mime="text/html"
+     )
+
+     # ③ Automatic JSON save
+     if st.session_state.auto_save:
+         fn = f"chat_history_auto_{datetime.now():%Y%m%d_%H%M%S}.json"
+         with open(fn, "w", encoding="utf-8") as fp:
+             json.dump(
+                 st.session_state.messages, fp,
+                 ensure_ascii=False, indent=2
+             )
+
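
write_output relies on an md_to_html helper that is not part of this diff; a minimal sketch of what such a helper could look like, assuming the third-party markdown package (the body here is illustrative, not the repo's implementation):

import markdown  # pip install markdown

def md_to_html(md_text: str, title: str = "output") -> str:
    """Wrap rendered Markdown in a standalone, UTF-8 HTML document (sketch)."""
    body = markdown.markdown(md_text, extensions=["tables", "fenced_code"])
    return (
        "<!DOCTYPE html>"
        f"<html><head><meta charset='utf-8'><title>{title}</title></head>"
        f"<body>{body}</body></html>"
    )
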
  # ──────────────────────────────── Streamlit UI ────────────────────────────
  def idea_generator_app():
      st.title("Creative Idea Generator")
 
      # Set default session state
      if "ai_model" not in st.session_state:
+         st.session_state.ai_model = "gpt-4.1-mini"
      if "messages" not in st.session_state:
          st.session_state.messages = []
      if "auto_save" not in st.session_state:
 
      if web_search_enabled:
          sb.info("✅ Web search results will be integrated.")
 
+     # Example topics (originally example blog topics -> now example idea topics)
+
      example_topics = {
          "example1": "λ„μ‹œ λ¬Ό λΆ€μ‘± 문제 해결을 μœ„ν•œ ν˜μ‹ μ  λ°©μ•ˆ",
          "example2": "노인 λŒλ΄„ μ„œλΉ„μŠ€μ˜ λ””μ§€ν„Έ μ „ν™˜",
 
          index=0  # default "(None)"
      )
 
+
      sb.subheader("Example Prompts")
      c1, c2, c3 = sb.columns(3)
      if c1.button("λ„μ‹œ λ¬Ό λΆ€μ‘± 문제", key="ex1"):
 
      sb.markdown("---")
      sb.markdown("Created by [Ginigen.com](https://ginigen.com) | [YouTube](https://www.youtube.com/@ginipickaistudio)")
 
+
  def process_example(topic):
      """Handle example prompts."""
      process_input(topic, [])
 
+ # ──────────────────────────────── process_input (duplicate output removed) ────────────
  def process_input(prompt: str, uploaded_files):
      """
      1) Send the user input to GPT-4 to generate a creative idea report
      2) Optionally generate an image
+     3) Save, download, and back up the result only once
      """
+     # ── 0. Record the user message ──────────────────────────────────────────
      if not any(m["role"] == "user" and m["content"] == prompt
                 for m in st.session_state.messages):
          st.session_state.messages.append({"role": "user", "content": prompt})
 
      with st.chat_message("user"):
          st.markdown(prompt)
 
+     # ── 1. Assistant response area ──────────────────────────────────────
      with st.chat_message("assistant"):
+         placeholder = st.empty()
          message_placeholder = st.empty()
+         full_response = ""
+
+         use_web_search = st.session_state.web_search_enabled
+         has_uploaded = bool(uploaded_files)
 
          try:
+             # 1-A. Initialize the model and status
+             status = st.status("Preparing to generate ideas…")
+             status.update(label="Initializing model…")
+             client = get_openai_client()
 
+             # 1-B. System prompt + category DB
              selected_cat = st.session_state.get("category_focus", "(None)")
              if selected_cat == "(None)":
                  selected_cat = None
              sys_prompt = get_idea_system_prompt(selected_category=selected_cat)
 
              def category_context(sel):
                  if sel:
                      return json.dumps(
                          {sel: physical_transformation_categories[sel]},
+                         ensure_ascii=False)
+                 return "ALL_CATEGORIES: " + ", ".join(
+                     physical_transformation_categories.keys())
 
+             # 1-C. (Optional) web search + file contents
+             search_content = None
              if use_web_search:
+                 status.update(label="Searching the web…")
+                 with st.spinner("Searching…"):
                      search_content = do_web_search(keywords(prompt, top=5))
+
+             file_content = None
              if has_uploaded:
+                 status.update(label="Reading uploaded files…")
+                 with st.spinner("Processing files…"):
                      file_content = process_uploaded_files(uploaded_files)
 
+             # 1-D. Combine into the user message
+             user_content = prompt
+             if search_content:
+                 user_content += "\n\n" + search_content
+             if file_content:
+                 user_content += "\n\n" + file_content
+
              api_messages = [
                  {"role": "system", "content": sys_prompt},
                  {"role": "system", "name": "category_db",
 
                  {"role": "user", "content": user_content},
              ]
 
+             # ── 2. GPT-4 streaming call ────────────────────────────────
+             status.update(label="Generating ideas…")
+             stream = client.chat.completions.create(
+                 model="gpt-4.1-mini",
+                 messages=api_messages,
+                 temperature=1,
+                 max_tokens=MAX_TOKENS,
+                 top_p=1,
+                 stream=True
+             )
+             for chunk in stream:
+                 if chunk.choices and chunk.choices[0].delta.content:
+                     full_response += chunk.choices[0].delta.content
+                     message_placeholder.markdown(full_response + "▌")
              message_placeholder.markdown(full_response)
+             status.update(label="Ideas created!", state="complete")
 
+             # ── 3. Image generation (optional) ─────────────────────────
              if st.session_state.generate_image and full_response:
+                 ccm_match = re.search(
+                     r"###\s*이미지\s*ν”„λ‘¬ν”„νŠΈ\s*\n+([^\n]+)",
+                     full_response, flags=re.IGNORECASE)
                  legacy_match = None
                  if not ccm_match:
                      legacy_match = re.search(
                          r"\|\s*(?:\*\*)?Image\s+Prompt(?:\*\*)?\s*\|\s*([^|\n]+)",
+                         full_response, flags=re.IGNORECASE) or \
+                         re.search(r"(?i)Image\s+Prompt\s*[:\-]\s*([^\n]+)",
+                                   full_response)
                  match = ccm_match or legacy_match
                  if match:
+                     raw_prompt = re.sub(r"[\r\n\"'\\]", " ",
+                                         match.group(1)).strip()
+                     with st.spinner("아이디어 이미지 생성 쀑…"):
                          img, cap = generate_image(raw_prompt)
                          if img:
                              st.image(img, caption=f"아이디어 μ‹œκ°ν™” – {cap}")
 
                                  "image_caption": f"아이디어 μ‹œκ°ν™” – {cap}"
                              })
 
+             # ── 4. Record, download, and back up the result **only once** ────
              write_output(full_response, prompt)
 
          except Exception as e:
+             # On exception: log it and only notify the user
+             logging.error("process_input error", exc_info=True)
+             placeholder.error(f"⚠️ μž‘μ—… 쀑 였λ₯˜κ°€ λ°œμƒν–ˆμŠ΅λ‹ˆλ‹€: {e}")
+             st.session_state.messages.append(
+                 {"role": "assistant", "content": f"⚠️ 였λ₯˜: {e}"})
 
  # ──────────────────────────────── main ────────────────────────────────────
  def main():
      idea_generator_app()
 
  if __name__ == "__main__":
+     main()
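
As a standalone reference, the streaming pattern the new process_input uses (OpenAI Python SDK v1: stream=True, then reading delta.content per chunk) looks like this outside Streamlit; the prompt is illustrative and the model name mirrors the diff:

from openai import OpenAI

client = OpenAI()  # reads OPENAI_API_KEY from the environment

stream = client.chat.completions.create(
    model="gpt-4.1-mini",            # same model name the diff uses
    messages=[{"role": "user", "content": "Give me one creative idea."}],
    stream=True,
)

full_response = ""
for chunk in stream:
    # v1 SDK: each chunk carries a ChoiceDelta; .content can be None on some
    # chunks, which is why the app guards with `if chunk.choices and ...delta.content`.
    if chunk.choices and chunk.choices[0].delta.content:
        full_response += chunk.choices[0].delta.content

print(full_response)
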