ginipick committed on
Commit
cd508a6
·
verified ·
1 Parent(s): 42b0fff

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +50 -50
app.py CHANGED
@@ -808,8 +808,9 @@ def process_example(topic):
808
  """Handle example prompts."""
809
  process_input(topic, [])
810
 
 
811
  def process_input(prompt: str, uploaded_files):
812
- # μ‚¬μš©μžμ˜ λ©”μ‹œμ§€ 기둝
813
  if not any(m["role"] == "user" and m["content"] == prompt
814
  for m in st.session_state.messages):
815
  st.session_state.messages.append({"role": "user", "content": prompt})
@@ -818,9 +819,9 @@ def process_input(prompt: str, uploaded_files):
818
  st.markdown(prompt)
819
 
820
  with st.chat_message("assistant"):
821
- placeholder = st.empty()
822
- message_placeholder = st.empty()
823
- full_response = ""
824
 
825
  use_web_search = st.session_state.web_search_enabled
826
  has_uploaded = bool(uploaded_files)
@@ -831,13 +832,13 @@ def process_input(prompt: str, uploaded_files):
831
 
832
  client = get_openai_client()
833
 
834
- # ── β‘  μ‹œμŠ€ν…œ ν”„λ‘¬ν”„νŠΈ
835
  selected_cat = st.session_state.get("category_focus", "(None)")
836
  if selected_cat == "(None)":
837
  selected_cat = None
838
  sys_prompt = get_idea_system_prompt(selected_category=selected_cat)
839
 
840
- # ── β‘‘ (선택) μ›Ή 검색 & 파일
841
  search_content = None
842
  if use_web_search:
843
  status.update(label="Searching the web…")
@@ -850,27 +851,38 @@ def process_input(prompt: str, uploaded_files):
850
  with st.spinner("Processing files…"):
851
  file_content = process_uploaded_files(uploaded_files)
852
 
853
- # ── β‘’ λ©”μ‹œμ§€ ꡬ성
854
  user_content = prompt
855
  if search_content:
856
  user_content += "\n\n" + search_content
857
  if file_content:
858
  user_content += "\n\n" + file_content
859
 
 
 
 
 
 
 
 
 
 
 
860
  api_messages = [
861
  {"role": "system", "content": sys_prompt},
 
 
862
  {"role": "user", "content": user_content},
863
  ]
 
864
 
865
- # ── β‘£ OpenAI 호좜
866
  status.update(label="Generating ideas…")
867
  stream = client.chat.completions.create(
868
  model="gpt-4.1-mini",
869
  messages=api_messages,
870
- temperature=1,
871
- max_tokens=MAX_TOKENS,
872
- top_p=1,
873
- stream=True
874
  )
875
  for chunk in stream:
876
  if chunk.choices and chunk.choices[0].delta.content:
@@ -879,69 +891,57 @@ def process_input(prompt: str, uploaded_files):
879
  message_placeholder.markdown(full_response)
880
  status.update(label="Ideas created!", state="complete")
881
 
882
- # ── β‘€ 이미지 생성
883
- # ── β‘€ 이미지 생성 (κΈ°μ‘΄ 블둝 μ „λΆ€ ꡐ체) ─────────────────────────────
884
  if st.session_state.generate_image and full_response:
885
-
886
- # 1️⃣ ❢ CCM μŠ€νƒ€μΌ: "### 이미지 ν”„λ‘¬ν”„νŠΈ" ν—€λ”© μ•„λž˜ 쀄
887
  ccm_match = re.search(
888
  r"###\s*이미지\s*ν”„λ‘¬ν”„νŠΈ\s*\n+([^\n]+)",
889
  full_response, flags=re.IGNORECASE)
890
-
891
- # 2️⃣ ❷ 이전 μŠ€νƒ€μΌ: ν…Œμ΄λΈ” or "Image Prompt:"
892
  legacy_match = None
893
  if not ccm_match:
894
  legacy_match = re.search(
895
  r"\|\s*(?:\*\*)?Image\s+Prompt(?:\*\*)?\s*\|\s*([^|\n]+)",
896
- full_response, flags=re.IGNORECASE)
897
- if not legacy_match:
898
- legacy_match = re.search(
899
- r"(?i)Image\s+Prompt\s*[:\-]\s*([^\n]+)",
900
- full_response)
901
-
902
- # 3️⃣ μ΅œμ’… ν”„λ‘¬ν”„νŠΈ μΆ”μΆœ
903
  match = ccm_match or legacy_match
904
  if match:
905
  raw_prompt = re.sub(r"[\r\n`\"'\\]", " ",
906
  match.group(1)).strip()
907
-
908
  with st.spinner("아이디어 이미지 생성 쀑…"):
909
  img, cap = generate_image(raw_prompt)
910
-
911
  if img:
912
  st.image(img, caption=f"아이디어 μ‹œκ°ν™” – {cap}")
913
- # λŒ€ν™” 기둝에 이미지도 μ €μž₯
914
  st.session_state.messages.append({
915
  "role": "assistant",
916
  "content": "",
917
  "image": img,
918
  "image_caption": f"아이디어 μ‹œκ°ν™” – {cap}"
919
  })
920
-
921
 
 
 
 
 
 
922
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
923
 
924
- # ── β‘₯ κ²°κ³Ό μ €μž₯
925
- st.session_state.messages.append(
926
- {"role": "assistant", "content": full_response})
927
-
928
- # ── ⑦ λ‹€μš΄λ‘œλ“œ λ²„νŠΌ
929
- st.subheader("Download This Output")
930
- c1, c2 = st.columns(2)
931
- c1.download_button("Markdown", full_response,
932
- file_name=f"{prompt[:30]}.md",
933
- mime="text/markdown")
934
- c2.download_button("HTML",
935
- md_to_html(full_response, prompt[:30]),
936
- file_name=f"{prompt[:30]}.html",
937
- mime="text/html")
938
-
939
- # ── β‘§ μžλ™ JSON μ €μž₯
940
- if st.session_state.auto_save:
941
- fn = f"chat_history_auto_{datetime.now():%Y%m%d_%H%M%S}.json"
942
- with open(fn, "w", encoding="utf-8") as fp:
943
- json.dump(st.session_state.messages, fp,
944
- ensure_ascii=False, indent=2)
945
 
946
  except Exception as e:
947
  err = str(e)
 
808
  """Handle example prompts."""
809
  process_input(topic, [])
810
 
811
+ # ── μˆ˜μ •λœ process_input 전체 ────────────────────────────────────────────
812
  def process_input(prompt: str, uploaded_files):
813
+ # μ‚¬μš©μž λ©”μ‹œμ§€ μ €μž₯
814
  if not any(m["role"] == "user" and m["content"] == prompt
815
  for m in st.session_state.messages):
816
  st.session_state.messages.append({"role": "user", "content": prompt})
 
819
  st.markdown(prompt)
820
 
821
  with st.chat_message("assistant"):
822
+ placeholder = st.empty()
823
+ message_placeholder = st.empty()
824
+ full_response = ""
825
 
826
  use_web_search = st.session_state.web_search_enabled
827
  has_uploaded = bool(uploaded_files)
 
832
 
833
  client = get_openai_client()
834
 
835
+ # β‘  μ‹œμŠ€ν…œ ν”„λ‘¬ν”„νŠΈ
836
  selected_cat = st.session_state.get("category_focus", "(None)")
837
  if selected_cat == "(None)":
838
  selected_cat = None
839
  sys_prompt = get_idea_system_prompt(selected_category=selected_cat)
840
 
841
+ # β‘‘ (선택) μ›Ή 검색 & 파일
842
  search_content = None
843
  if use_web_search:
844
  status.update(label="Searching the web…")
 
851
  with st.spinner("Processing files…"):
852
  file_content = process_uploaded_files(uploaded_files)
853
 
854
+ # β‘’ μœ μ € λ©”μ‹œμ§€ ꡬ성
855
  user_content = prompt
856
  if search_content:
857
  user_content += "\n\n" + search_content
858
  if file_content:
859
  user_content += "\n\n" + file_content
860
 
861
+ # ---- μΉ΄ν…Œκ³ λ¦¬/ν•˜μœ„ν•­λͺ© μ»¨ν…μŠ€νŠΈ μΆ”κ°€ ---------------------------
862
+ def category_context(sel):
863
+ if sel: # νŠΉμ • μΉ΄ν…Œκ³ λ¦¬ 선택 μ‹œ
864
+ return json.dumps(
865
+ {sel: physical_transformation_categories[sel]},
866
+ ensure_ascii=False)
867
+ # (None) β†’ ν‚€ λͺ©λ‘λ§Œ 전달
868
+ return "ALL_CATEGORIES: " + ", ".join(
869
+ physical_transformation_categories.keys())
870
+
871
  api_messages = [
872
  {"role": "system", "content": sys_prompt},
873
+ {"role": "system", "name": "category_db",
874
+ "content": category_context(selected_cat)},
875
  {"role": "user", "content": user_content},
876
  ]
877
+ # --------------------------------------------------------------
878
 
879
+ # β‘£ OpenAI 슀트리밍 호좜
880
  status.update(label="Generating ideas…")
881
  stream = client.chat.completions.create(
882
  model="gpt-4.1-mini",
883
  messages=api_messages,
884
+ temperature=1, max_tokens=MAX_TOKENS,
885
+ top_p=1, stream=True
 
 
886
  )
887
  for chunk in stream:
888
  if chunk.choices and chunk.choices[0].delta.content:
 
891
  message_placeholder.markdown(full_response)
892
  status.update(label="Ideas created!", state="complete")
893
 
894
+ # β‘€ 이미지 생성 (CCM ν—€λ”© or λ ˆκ±°μ‹œ νŒ¨ν„΄ λͺ¨λ‘ 지원)
 
895
  if st.session_state.generate_image and full_response:
896
+ # CCM ν—€λ”©
 
897
  ccm_match = re.search(
898
  r"###\s*이미지\s*ν”„λ‘¬ν”„νŠΈ\s*\n+([^\n]+)",
899
  full_response, flags=re.IGNORECASE)
 
 
900
  legacy_match = None
901
  if not ccm_match:
902
  legacy_match = re.search(
903
  r"\|\s*(?:\*\*)?Image\s+Prompt(?:\*\*)?\s*\|\s*([^|\n]+)",
904
+ full_response, flags=re.IGNORECASE) \
905
+ or re.search(r"(?i)Image\s+Prompt\s*[:\-]\s*([^\n]+)",
906
+ full_response)
 
 
 
 
907
  match = ccm_match or legacy_match
908
  if match:
909
  raw_prompt = re.sub(r"[\r\n`\"'\\]", " ",
910
  match.group(1)).strip()
 
911
  with st.spinner("아이디어 이미지 생성 쀑…"):
912
  img, cap = generate_image(raw_prompt)
 
913
  if img:
914
  st.image(img, caption=f"아이디어 μ‹œκ°ν™” – {cap}")
 
915
  st.session_state.messages.append({
916
  "role": "assistant",
917
  "content": "",
918
  "image": img,
919
  "image_caption": f"아이디어 μ‹œκ°ν™” – {cap}"
920
  })
 
921
 
922
+ # ── μ‹ κ·œ 헬퍼: κ²°κ³Ό λ‹€μš΄λ‘œλ“œΒ·μ €μž₯Β·μ±„νŒ… 기둝에 μΆ”κ°€ ──────────────────────────
923
+ def write_output(md_text: str, prompt: str):
924
+ """μ΅œμ’… λ§ˆν¬λ‹€μš΄μ„ ν•œ 번만 UI에 μ—°κ²°ν•˜κ³ , 파일 λ‹€μš΄Β·JSON μžλ™μ €μž₯κΉŒμ§€ μˆ˜ν–‰."""
925
+ # β‘  λŒ€ν™” 기둝에 λ‹΅λ³€ μ €μž₯
926
+ st.session_state.messages.append({"role": "assistant", "content": md_text})
927
 
928
+ # β‘‘ λ‹€μš΄λ‘œλ“œ λ²„νŠΌ
929
+ st.subheader("Download This Output")
930
+ col_md, col_html = st.columns(2)
931
+ col_md.download_button(
932
+ "Markdown", md_text,
933
+ file_name=f"{prompt[:30]}.md", mime="text/markdown")
934
+ col_html.download_button(
935
+ "HTML", md_to_html(md_text, prompt[:30]),
936
+ file_name=f"{prompt[:30]}.html", mime="text/html")
937
+
938
+ # β‘’ JSON μžλ™ μ €μž₯
939
+ if st.session_state.auto_save:
940
+ fn = f"chat_history_auto_{datetime.now():%Y%m%d_%H%M%S}.json"
941
+ with open(fn, "w", encoding="utf-8") as fp:
942
+ json.dump(st.session_state.messages, fp,
943
+ ensure_ascii=False, indent=2)
944
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
945
 
946
  except Exception as e:
947
  err = str(e)