ginipick committed
Commit 82507ef · verified · 1 parent: eb59db6

Update app.py

Files changed (1):
    app.py  +23 -31
app.py CHANGED
@@ -774,7 +774,7 @@ def process_example(topic):
     process_input(topic, [])
 
 def process_input(prompt: str, uploaded_files):
-    # Add user's message
+    # Record the user's message
     if not any(m["role"] == "user" and m["content"] == prompt
                for m in st.session_state.messages):
         st.session_state.messages.append({"role": "user", "content": prompt})
@@ -787,8 +787,8 @@ def process_input(prompt: str, uploaded_files):
     message_placeholder = st.empty()
     full_response = ""
 
-    use_web_search = st.session_state.web_search_enabled
-    has_uploaded = bool(uploaded_files)
+    use_web_search = st.session_state.web_search_enabled
+    has_uploaded = bool(uploaded_files)
 
     try:
         status = st.status("Preparing to generate ideas…")
@@ -796,13 +796,13 @@ def process_input(prompt: str, uploaded_files):
 
         client = get_openai_client()
 
-        # ── ① System prompt ──────────────────────────────
+        # ── ① System prompt
         selected_cat = st.session_state.get("category_focus", "(None)")
         if selected_cat == "(None)":
             selected_cat = None
         sys_prompt = get_idea_system_prompt(selected_category=selected_cat)
 
-        # ── ② (Optional) web search & file contents ───────────────────
+        # ── ② (Optional) web search & files
         search_content = None
         if use_web_search:
             status.update(label="Searching the web…")
@@ -815,7 +815,7 @@ def process_input(prompt: str, uploaded_files):
             with st.spinner("Processing files…"):
                 file_content = process_uploaded_files(uploaded_files)
 
-        # ── ③ Build the conversation messages ─────────────────────────────
+        # ── ③ Build the messages
         user_content = prompt
         if search_content:
             user_content += "\n\n" + search_content
@@ -827,37 +827,34 @@ def process_input(prompt: str, uploaded_files):
             {"role": "user", "content": user_content},
         ]
 
-        # ── ④ OpenAI streaming call ────────────────────────
+        # ── ④ OpenAI call
         status.update(label="Generating ideas…")
         stream = client.chat.completions.create(
-            model = "gpt-4.1-mini",
-            messages = api_messages,
-            temperature = 1,
-            max_tokens = MAX_TOKENS,
-            top_p = 1,
-            stream = True
+            model="gpt-4.1-mini",
+            messages=api_messages,
+            temperature=1,
+            max_tokens=MAX_TOKENS,
+            top_p=1,
+            stream=True
         )
         for chunk in stream:
-            if (chunk.choices
-                    and chunk.choices[0].delta.content is not None):
+            if chunk.choices and chunk.choices[0].delta.content:
                 full_response += chunk.choices[0].delta.content
                 message_placeholder.markdown(full_response + "▌")
         message_placeholder.markdown(full_response)
         status.update(label="Ideas created!", state="complete")
 
-        # ── ⑤ Image generation ────────────────────────────────
+        # ── ⑤ Image generation
         if st.session_state.generate_image and full_response:
             idea_sections = re.split(r"(## Idea \d+:)", full_response)
             pairs = [(idea_sections[i].strip(),
                       idea_sections[i+1].strip() if i+1 < len(idea_sections) else "")
                      for i in range(1, len(idea_sections), 2)]
 
-            for idx, (title, text_block) in enumerate(pairs, start=1):
-                # Table form: | **Image Prompt** | prompt |
+            for title, text_block in pairs:
                 table_match = re.search(
                     r"\|\s*\*\*Image\s+Prompt\*\*\s*\|\s*([^\n\|]+)",
                     text_block, flags=re.IGNORECASE)
-                # Fallback form: Image Prompt: …
                 if not table_match:
                     table_match = re.search(
                         r"(?i)Image\s+Prompt\s*[:|-]\s*([^\n]+)", text_block)
@@ -880,10 +877,11 @@ def process_input(prompt: str, uploaded_files):
                     "image_caption": f"{title} – {cap}"
                 })
 
-        # ── ⑥ Save results & download ─────────────────────────
+        # ── ⑥ Save results
         st.session_state.messages.append(
             {"role": "assistant", "content": full_response})
 
+        # ── ⑦ Download buttons
         st.subheader("Download This Output")
         c1, c2 = st.columns(2)
         c1.download_button("Markdown", full_response,
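
For context, the download section this hunk touches boils down to two st.download_button calls in a two-column layout. A rough sketch; the markdown button's file_name/mime and the app's markdown-to-HTML conversion are cut off by the hunk, so the values below are assumptions:

# Sketch of the download UI only; ".md"/"text/markdown" and the trivial HTML
# wrapper are assumptions, not the app's actual values.
import streamlit as st

full_response = "## Idea 1: Example\nGenerated text goes here."
prompt = "example prompt"

st.subheader("Download This Output")
c1, c2 = st.columns(2)
c1.download_button("Markdown", full_response,
                   file_name=f"{prompt[:30]}.md",
                   mime="text/markdown")
c2.download_button("HTML",
                   f"<html><body><pre>{full_response}</pre></body></html>",
                   file_name=f"{prompt[:30]}.html",
                   mime="text/html")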
@@ -894,7 +892,7 @@ def process_input(prompt: str, uploaded_files):
                            file_name=f"{prompt[:30]}.html",
                            mime="text/html")
 
-        # ── ⑦ Auto-save ─────────────────────────────────
+        # ── ⑧ Auto-save to JSON
         if st.session_state.auto_save:
             fn = f"chat_history_auto_{datetime.now():%Y%m%d_%H%M%S}.json"
             with open(fn, "w", encoding="utf-8") as fp:
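
The auto-save branch renamed to ⑧ above writes the chat history to a timestamped JSON file; the hunk ends inside the with block, so the json.dump call in this sketch is an assumption about what follows:

# Sketch of the timestamped auto-save; the json.dump arguments are assumed.
import json
from datetime import datetime

messages = [
    {"role": "user", "content": "topic"},
    {"role": "assistant", "content": "## Idea 1: ..."},
]

fn = f"chat_history_auto_{datetime.now():%Y%m%d_%H%M%S}.json"
with open(fn, "w", encoding="utf-8") as fp:
    json.dump(messages, fp, ensure_ascii=False, indent=2)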
@@ -905,17 +903,11 @@ def process_input(prompt: str, uploaded_files):
         err = str(e)
         placeholder.error(f"Error: {err}")
         logging.error(err)
-        st.session_state.messages.append(
-            {"role": "assistant",
-             "content": f"⚠️ 작업 중 오류가 발생했습니다: {err}"})
-
-    # Save the final text after the three image-generation passes are finished
-    st.session_state.messages.append({"role": "assistant", "content": full_response})
-    answer_entry_saved = True
+        st.session_state.messages.append({
+            "role": "assistant",
+            "content": f"⚠️ 작업 중 오류가 발생했습니다: {err}"
+        })
 
-    if not answer_entry_saved and full_response:
-        # Save only the text if image generation was disabled or failed
-        st.session_state.messages.append({"role": "assistant", "content": full_response})
 
     # Download buttons
     if full_response: