ginipick commited on
Commit
c492b2c
·
verified ·
1 Parent(s): ef79581

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +71 -55
app.py CHANGED
@@ -810,14 +810,51 @@ def process_example(topic):
810
 
811
  # ── μˆ˜μ •λœ process_input 전체 ────────────────────────────────────────────
812
 
813
- # ── μˆ˜μ •λœ process_input (쀑볡 좜λ ₯ 제거 + μΉ΄ν…Œκ³ λ¦¬ DB 전달 + 이미지 ν”„λ‘¬ν”„νŠΈ 톡합) ──
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
814
  def process_input(prompt: str, uploaded_files):
815
  """
816
  - 1. 사용자 메시지 기록 및 UI 출력
817
  - 2. 웹검색·파일내용·카테고리 DB를 포함한 GPT 호출
818
  - 3. 최종 응답 스트리밍, 이미지 생성(선택), 결과 저장·다운로드(1회만)
819
  """
820
- # ── μ‚¬μš©μž λ©”μ‹œμ§€ μ €μž₯ ───────────────────────────────────────────────
821
  if not any(m["role"] == "user" and m["content"] == prompt
822
  for m in st.session_state.messages):
823
  st.session_state.messages.append({"role": "user", "content": prompt})
@@ -825,7 +862,7 @@ def process_input(prompt: str, uploaded_files):
825
  with st.chat_message("user"):
826
  st.markdown(prompt)
827
 
828
- # ── μ–΄μ‹œμŠ€ν„΄νŠΈ 응닡 μ˜μ—­ ────────────────────────────────────────────
829
  with st.chat_message("assistant"):
830
  placeholder = st.empty()
831
  message_placeholder = st.empty()
@@ -835,12 +872,12 @@ def process_input(prompt: str, uploaded_files):
835
  has_uploaded = bool(uploaded_files)
836
 
837
  try:
838
- # β‘  λͺ¨λΈ μ΄ˆκΈ°ν™”
839
  status = st.status("Preparing to generate ideas…")
840
  status.update(label="Initializing model…")
841
  client = get_openai_client()
842
 
843
- # β‘‘ μ‹œμŠ€ν…œ ν”„λ‘¬ν”„νŠΈ + μΉ΄ν…Œκ³ λ¦¬ DB
844
  selected_cat = st.session_state.get("category_focus", "(None)")
845
  if selected_cat == "(None)":
846
  selected_cat = None
@@ -848,11 +885,13 @@ def process_input(prompt: str, uploaded_files):
848
 
849
  def category_context(sel):
850
  if sel:
851
- return json.dumps({sel: physical_transformation_categories[sel]},
852
- ensure_ascii=False)
853
- return "ALL_CATEGORIES: " + ", ".join(physical_transformation_categories.keys())
 
 
854
 
855
- # β‘’ (선택) μ›Ή 검색 Β· 파일 처리
856
  search_content = None
857
  if use_web_search:
858
  status.update(label="Searching the web…")
@@ -865,7 +904,7 @@ def process_input(prompt: str, uploaded_files):
865
  with st.spinner("Processing files…"):
866
  file_content = process_uploaded_files(uploaded_files)
867
 
868
- # β‘£ μ‚¬μš©μž λ©”μ‹œμ§€ κ²°ν•©
869
  user_content = prompt
870
  if search_content:
871
  user_content += "\n\n" + search_content
@@ -873,13 +912,13 @@ def process_input(prompt: str, uploaded_files):
873
  user_content += "\n\n" + file_content
874
 
875
  api_messages = [
876
- {"role": "system", "content": sys_prompt},
877
- {"role": "system", "name": "category_db",
878
  "content": category_context(selected_cat)},
879
- {"role": "user", "content": user_content},
880
  ]
881
 
882
- # β‘€ GPT 슀트리밍 호좜
883
  status.update(label="Generating ideas…")
884
  stream = client.chat.completions.create(
885
  model="gpt-4.1-mini",
@@ -896,20 +935,22 @@ def process_input(prompt: str, uploaded_files):
896
  message_placeholder.markdown(full_response)
897
  status.update(label="Ideas created!", state="complete")
898
 
899
- # β‘₯ 이미지 생성 (CCM ν—€λ”© or λ ˆκ±°μ‹œ νŒ¨ν„΄ μžλ™ 탐지)
900
  if st.session_state.generate_image and full_response:
901
- ccm_match = re.search(r"###\s*이미지\s*ν”„λ‘¬ν”„νŠΈ\s*\n+([^\n]+)",
902
- full_response, flags=re.IGNORECASE)
 
903
  legacy_match = None
904
  if not ccm_match:
905
  legacy_match = re.search(
906
  r"\|\s*(?:\*\*)?Image\s+Prompt(?:\*\*)?\s*\|\s*([^|\n]+)",
907
- full_response, flags=re.IGNORECASE) \
908
- or re.search(r"(?i)Image\s+Prompt\s*[:\-]\s*([^\n]+)",
909
- full_response)
910
  match = ccm_match or legacy_match
911
  if match:
912
- raw_prompt = re.sub(r"[\r\n`\"'\\]", " ", match.group(1)).strip()
 
913
  with st.spinner("아이디어 이미지 생성 쀑…"):
914
  img, cap = generate_image(raw_prompt)
915
  if img:
@@ -921,18 +962,15 @@ def process_input(prompt: str, uploaded_files):
921
  "image_caption": f"아이디어 μ‹œκ°ν™” – {cap}"
922
  })
923
 
924
- # ⑦ κ²°κ³Όλ₯Ό ν•œ 번만 μ €μž₯Β·λ‹€μš΄λ‘œλ“œΒ·μžλ™λ°±μ—…
925
  write_output(full_response, prompt)
926
 
927
- # ── μ˜ˆμ™Έ 처리 (쀑볡 좜λ ₯ λ°©μ§€) ──────────────────────────────────
928
  except Exception as e:
929
- err = str(e)
930
- placeholder.error(f"⚠️ Error: {err}")
931
- logging.error(err)
932
- st.session_state.messages.append({
933
- "role": "assistant",
934
- "content": f"⚠️ μž‘μ—… 쀑 였λ₯˜κ°€ λ°œμƒν–ˆμŠ΅λ‹ˆλ‹€: {err}"
935
- })
936
 
937
 
938
  # ---- μΉ΄ν…Œκ³ λ¦¬/ν•˜μœ„ν•­λͺ© μ»¨ν…μŠ€νŠΈ μΆ”κ°€ ---------------------------
@@ -996,28 +1034,6 @@ def process_input(prompt: str, uploaded_files):
996
  "image_caption": f"아이디어 μ‹œκ°ν™” – {cap}"
997
  })
998
 
999
- # ── 헬퍼: κ²°κ³Όλ₯Ό ν•œ 번만 κΈ°λ‘Β·λ‹€μš΄λ‘œλ“œΒ·μ €μž₯ ─────────────────────────────
1000
- def write_output(md_text: str, prompt: str):
1001
- # β‘  μ±„νŒ… 기둝 μ €μž₯
1002
- st.session_state.messages.append({"role": "assistant", "content": md_text})
1003
-
1004
- # β‘‘ λ‹€μš΄λ‘œλ“œ λ²„νŠΌ
1005
- st.subheader("Download This Output")
1006
- col_md, col_html = st.columns(2)
1007
- col_md.download_button("Markdown", md_text,
1008
- file_name=f"{prompt[:30]}.md", mime="text/markdown")
1009
- col_html.download_button("HTML", md_to_html(md_text, prompt[:30]),
1010
- file_name=f"{prompt[:30]}.html", mime="text/html")
1011
-
1012
- # β‘’ JSON μžλ™ μ €μž₯
1013
- if st.session_state.auto_save:
1014
- fn = f"chat_history_auto_{datetime.now():%Y%m%d_%H%M%S}.json"
1015
- with open(fn, "w", encoding="utf-8") as fp:
1016
- json.dump(st.session_state.messages, fp, ensure_ascii=False, indent=2)
1017
-
1018
-
1019
-
1020
-
1021
  except Exception as e:
1022
  err = str(e)
1023
  placeholder.error(f"Error: {err}")
 
810
 
811
  # ── μˆ˜μ •λœ process_input 전체 ────────────────────────────────────────────
812
 
813
  + # ──────────────────────────────── 헬퍼: 결과 기록·다운로드·자동저장 ──────────
814
+ def write_output(md_text: str, prompt: str):
815
+ """
816
  + • 대화 기록에 마크다운 답변 저장
817
  + • 다운로드 버튼(마크다운·HTML) 제공
818
+ β€’ μžλ™ JSON λ°±μ—…
819
+ """
820
  + # ① 채팅 기록에 추가
821
+ st.session_state.messages.append(
822
+ {"role": "assistant", "content": md_text})
823
+
824
  + # ② 다운로드 버튼
825
+ st.subheader("Download This Output")
826
+ col_md, col_html = st.columns(2)
827
+ col_md.download_button(
828
+ label="Markdown",
829
+ data=md_text,
830
+ file_name=f"{prompt[:30]}.md",
831
+ mime="text/markdown"
832
+ )
833
+ col_html.download_button(
834
+ label="HTML",
835
+ data=md_to_html(md_text, prompt[:30]),
836
+ file_name=f"{prompt[:30]}.html",
837
+ mime="text/html"
838
+ )
839
+
840
+ # β‘’ μžλ™ JSON μ €μž₯
841
+ if st.session_state.auto_save:
842
+ fn = f"chat_history_auto_{datetime.now():%Y%m%d_%H%M%S}.json"
843
+ with open(fn, "w", encoding="utf-8") as fp:
844
+ json.dump(
845
+ st.session_state.messages, fp,
846
+ ensure_ascii=False, indent=2
847
+ )
848
+
849
+
850
  + # ──────────────────────────────── process_input (중복 출력 제거) ────────────
851
  def process_input(prompt: str, uploaded_files):
852
  """
853
  + 1) 사용자 입력을 GPT-4로 보내 창의적 아이디어 보고서 생성
854
+ 2) μ„ νƒμ μœΌλ‘œ 이미지 생성
855
  + 3) 결과를 한 번만 저장·다운로드·백업
856
  """
857
  + # ── 0. 사용자 메시지 기록 ──────────────────────────────────────────────
858
  if not any(m["role"] == "user" and m["content"] == prompt
859
  for m in st.session_state.messages):
860
  st.session_state.messages.append({"role": "user", "content": prompt})
 
862
  with st.chat_message("user"):
863
  st.markdown(prompt)
864
 
865
  + # ── 1. 어시스턴트 응답 영역 ──────────────────────────────────────────
866
  with st.chat_message("assistant"):
867
  placeholder = st.empty()
868
  message_placeholder = st.empty()
 
872
  has_uploaded = bool(uploaded_files)
873
 
874
  try:
875
+ # 1-A. λͺ¨λΈΒ·μƒνƒœ μ΄ˆκΈ°ν™”
876
  status = st.status("Preparing to generate ideas…")
877
  status.update(label="Initializing model…")
878
  client = get_openai_client()
879
 
880
  + # 1-B. 시스템 프롬프트 + 카테고리 DB
881
  selected_cat = st.session_state.get("category_focus", "(None)")
882
  if selected_cat == "(None)":
883
  selected_cat = None
 
885
 
886
  def category_context(sel):
887
  if sel:
888
+ return json.dumps(
889
+ {sel: physical_transformation_categories[sel]},
890
+ ensure_ascii=False)
891
+ return "ALL_CATEGORIES: " + ", ".join(
892
+ physical_transformation_categories.keys())
893
 
894
  + # 1-C. (선택) 웹 검색 + 파일 내용
895
  search_content = None
896
  if use_web_search:
897
  status.update(label="Searching the web…")
 
904
  with st.spinner("Processing files…"):
905
  file_content = process_uploaded_files(uploaded_files)
906
 
907
  + # 1-D. 사용자 메시지 결합
908
  user_content = prompt
909
  if search_content:
910
  user_content += "\n\n" + search_content
 
912
  user_content += "\n\n" + file_content
913
 
914
  api_messages = [
915
+ {"role": "system", "content": sys_prompt},
916
+ {"role": "system", "name": "category_db",
917
  "content": category_context(selected_cat)},
918
+ {"role": "user", "content": user_content},
919
  ]
920
 
921
  + # ── 2. GPT-4 스트리밍 호출 ──────────────────────────────────
922
  status.update(label="Generating ideas…")
923
  stream = client.chat.completions.create(
924
  model="gpt-4.1-mini",
 
935
  message_placeholder.markdown(full_response)
936
  status.update(label="Ideas created!", state="complete")
937
 
938
  + # ── 3. 이미지 생성 (선택) ───────────────────────────────────
939
  if st.session_state.generate_image and full_response:
940
+ ccm_match = re.search(
941
+ r"###\s*이미지\s*ν”„λ‘¬ν”„νŠΈ\s*\n+([^\n]+)",
942
+ full_response, flags=re.IGNORECASE)
943
  legacy_match = None
944
  if not ccm_match:
945
  legacy_match = re.search(
946
  r"\|\s*(?:\*\*)?Image\s+Prompt(?:\*\*)?\s*\|\s*([^|\n]+)",
947
+ full_response, flags=re.IGNORECASE) or \
948
+ re.search(r"(?i)Image\s+Prompt\s*[:\-]\s*([^\n]+)",
949
+ full_response)
950
  match = ccm_match or legacy_match
951
  if match:
952
+ raw_prompt = re.sub(r"[\r\n`\"'\\]", " ",
953
+ match.group(1)).strip()
954
  with st.spinner("아이디어 이미지 생성 쀑…"):
955
  img, cap = generate_image(raw_prompt)
956
  if img:
 
962
  "image_caption": f"아이디어 μ‹œκ°ν™” – {cap}"
963
  })
964
 
965
  + # ── 4. 결과를 **한 번만** 기록·다운로드·백업 ────────────────
966
  write_output(full_response, prompt)
967
 
 
968
  except Exception as e:
969
  + # 예외 발생 시: 로그 기록 + 사용자 알림만 수행
970
+ logging.error("process_input error", exc_info=True)
971
+ placeholder.error(f"⚠️ μž‘μ—… 쀑 였λ₯˜κ°€ λ°œμƒν–ˆμŠ΅λ‹ˆλ‹€: {e}")
972
+ st.session_state.messages.append(
973
+ {"role": "assistant", "content": f"⚠️ 였λ₯˜: {e}"})
 
 
974
 
975
 
976
  # ---- μΉ΄ν…Œκ³ λ¦¬/ν•˜μœ„ν•­λͺ© μ»¨ν…μŠ€νŠΈ μΆ”κ°€ ---------------------------
 
1034
  "image_caption": f"아이디어 μ‹œκ°ν™” – {cap}"
1035
  })
1036
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1037
  except Exception as e:
1038
  err = str(e)
1039
  placeholder.error(f"Error: {err}")