ginipick committed
Commit ef79581 · verified · 1 Parent(s): cd508a6

Update app.py

Files changed (1):
  1. app.py +95 -20
app.py CHANGED
@@ -809,8 +809,15 @@ def process_example(topic):
     process_input(topic, [])
 
 # ── Revised process_input (full) ────────────────────────────────────────────
+
+# ── Revised process_input (duplicate output removed + category DB passed + image prompt integrated) ──
 def process_input(prompt: str, uploaded_files):
-    # Save the user message
+    """
+    1. Record the user message and render it in the UI
+    2. Call GPT with the web-search results, file contents, and category DB included
+    3. Stream the final response, optionally generate an image, and save/download the result (once only)
+    """
+    # ── Save the user message ────────────────────────────────────────────────
     if not any(m["role"] == "user" and m["content"] == prompt
                for m in st.session_state.messages):
         st.session_state.messages.append({"role": "user", "content": prompt})
@@ -818,27 +825,34 @@ def process_input(prompt: str, uploaded_files):
     with st.chat_message("user"):
         st.markdown(prompt)
 
+    # ── Assistant response area ──────────────────────────────────────────────
     with st.chat_message("assistant"):
-        placeholder = st.empty()
-        message_placeholder = st.empty()
-        full_response = ""
+        placeholder = st.empty()
+        message_placeholder = st.empty()
+        full_response = ""
 
         use_web_search = st.session_state.web_search_enabled
         has_uploaded = bool(uploaded_files)
 
         try:
+            # ① Initialize the model
             status = st.status("Preparing to generate ideas…")
             status.update(label="Initializing model…")
-
             client = get_openai_client()
 
-            # ① System prompt
+            # ② System prompt + category DB
            selected_cat = st.session_state.get("category_focus", "(None)")
             if selected_cat == "(None)":
                 selected_cat = None
             sys_prompt = get_idea_system_prompt(selected_category=selected_cat)
 
-            # ② (Optional) web search & files
+            def category_context(sel):
+                if sel:
+                    return json.dumps({sel: physical_transformation_categories[sel]},
+                                      ensure_ascii=False)
+                return "ALL_CATEGORIES: " + ", ".join(physical_transformation_categories.keys())
+
+            # ③ (Optional) web search · file processing
             search_content = None
             if use_web_search:
                 status.update(label="Searching the web…")
@@ -851,13 +865,76 @@ def process_input(prompt: str, uploaded_files):
                 with st.spinner("Processing files…"):
                     file_content = process_uploaded_files(uploaded_files)
 
-            # ③ Compose the user message
+            # ④ Combine the user message
             user_content = prompt
             if search_content:
                 user_content += "\n\n" + search_content
             if file_content:
                 user_content += "\n\n" + file_content
 
+            api_messages = [
+                {"role": "system", "content": sys_prompt},
+                {"role": "system", "name": "category_db",
+                 "content": category_context(selected_cat)},
+                {"role": "user", "content": user_content},
+            ]
+
+            # ⑤ Streaming GPT call
+            status.update(label="Generating ideas…")
+            stream = client.chat.completions.create(
+                model="gpt-4.1-mini",
+                messages=api_messages,
+                temperature=1,
+                max_tokens=MAX_TOKENS,
+                top_p=1,
+                stream=True
+            )
+            for chunk in stream:
+                if chunk.choices and chunk.choices[0].delta.content:
+                    full_response += chunk.choices[0].delta.content
+                    message_placeholder.markdown(full_response + "▌")
+            message_placeholder.markdown(full_response)
+            status.update(label="Ideas created!", state="complete")
+
+            # ⑥ Image generation (auto-detect the CCM heading or the legacy pattern)
+            if st.session_state.generate_image and full_response:
+                ccm_match = re.search(r"###\s*이미지\s*프롬프트\s*\n+([^\n]+)",
+                                      full_response, flags=re.IGNORECASE)
+                legacy_match = None
+                if not ccm_match:
+                    legacy_match = re.search(
+                        r"\|\s*(?:\*\*)?Image\s+Prompt(?:\*\*)?\s*\|\s*([^|\n]+)",
+                        full_response, flags=re.IGNORECASE) \
+                        or re.search(r"(?i)Image\s+Prompt\s*[:\-]\s*([^\n]+)",
+                                     full_response)
+                match = ccm_match or legacy_match
+                if match:
+                    raw_prompt = re.sub(r"[\r\n`\"'\\]", " ", match.group(1)).strip()
+                    with st.spinner("아이디어 이미지 생성 중…"):
+                        img, cap = generate_image(raw_prompt)
+                    if img:
+                        st.image(img, caption=f"아이디어 시각화 – {cap}")
+                        st.session_state.messages.append({
+                            "role": "assistant",
+                            "content": "",
+                            "image": img,
+                            "image_caption": f"아이디어 시각화 – {cap}"
+                        })
+
+            # ⑦ Save, download, and auto-back-up the result exactly once
+            write_output(full_response, prompt)
+
+        # ── Exception handling (prevents duplicate output) ───────────────────
+        except Exception as e:
+            err = str(e)
+            placeholder.error(f"⚠️ Error: {err}")
+            logging.error(err)
+            st.session_state.messages.append({
+                "role": "assistant",
+                "content": f"⚠️ 작업 중 오류가 발생했습니다: {err}"
+            })
+
+
             # ---- Add category/sub-item context ---------------------------
             def category_context(sel):
                 if sel:  # when a specific category is selected
@@ -919,28 +996,26 @@ def process_input(prompt: str, uploaded_files):
                             "image_caption": f"아이디어 시각화 – {cap}"
                         })
 
-        # ── New helper: download/save the result and add it to the chat history ──
+        # ── Helper: record, download, and save the result exactly once ───────
         def write_output(md_text: str, prompt: str):
-            """Render the final markdown to the UI only once, then handle file download and JSON auto-save."""
-            # ① Save the answer to the conversation history
+            # ① Save to the chat history
             st.session_state.messages.append({"role": "assistant", "content": md_text})
 
             # ② Download buttons
             st.subheader("Download This Output")
             col_md, col_html = st.columns(2)
-            col_md.download_button(
-                "Markdown", md_text,
-                file_name=f"{prompt[:30]}.md", mime="text/markdown")
-            col_html.download_button(
-                "HTML", md_to_html(md_text, prompt[:30]),
-                file_name=f"{prompt[:30]}.html", mime="text/html")
-
+            col_md.download_button("Markdown", md_text,
+                                   file_name=f"{prompt[:30]}.md", mime="text/markdown")
+            col_html.download_button("HTML", md_to_html(md_text, prompt[:30]),
+                                     file_name=f"{prompt[:30]}.html", mime="text/html")
+
             # ③ JSON auto-save
             if st.session_state.auto_save:
                 fn = f"chat_history_auto_{datetime.now():%Y%m%d_%H%M%S}.json"
                 with open(fn, "w", encoding="utf-8") as fp:
-                    json.dump(st.session_state.messages, fp,
-                              ensure_ascii=False, indent=2)
+                    json.dump(st.session_state.messages, fp, ensure_ascii=False, indent=2)
+
+
 
 
         except Exception as e:
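
As a quick sanity check of the new image-prompt extraction, a minimal standalone sketch (the sample response text and the printed result are hypothetical; the regular expressions are the ones this commit adds to process_input):

import re

# Hypothetical assistant output that uses the CCM-style image-prompt heading.
sample = (
    "## Idea\n"
    "Some idea text.\n"
    "\n"
    "### 이미지 프롬프트\n"
    "a watercolor sketch of a folding bicycle, studio lighting\n"
)

# Same CCM-heading pattern the commit adds; the legacy table/inline patterns
# are only tried when this one does not match.
ccm_match = re.search(r"###\s*이미지\s*프롬프트\s*\n+([^\n]+)", sample, flags=re.IGNORECASE)
if ccm_match:
    raw_prompt = re.sub(r"[\r\n`\"'\\]", " ", ccm_match.group(1)).strip()
    print(raw_prompt)  # a watercolor sketch of a folding bicycle, studio lighting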