aiqtech committed on
Commit 1fb86bb · verified · 1 Parent(s): d3a088e

Update app.py

Files changed (1)
  1. app.py +63 -1040
app.py CHANGED
@@ -614,10 +614,15 @@ class DataQualityPipeline:
614
  # ์‹œ์Šคํ…œ ์ธ์Šคํ„ด์Šค ์ƒ์„ฑ
615
  llm_system = LLMCollaborativeSystem()
616
 
617
- def process_query_streaming(user_query: str, history: List):
618
  """์ŠคํŠธ๋ฆฌ๋ฐ์„ ์ง€์›ํ•˜๋Š” ์ฟผ๋ฆฌ ์ฒ˜๋ฆฌ"""
619
  if not user_query:
620
- return history, "", "", "", "", "โŒ ์งˆ๋ฌธ์„ ์ž…๋ ฅํ•ด์ฃผ์„ธ์š”."
621
 
622
  conversation_log = []
623
  all_responses = {"supervisor": [], "researcher": [], "executor": []}
@@ -634,7 +639,7 @@ def process_query_streaming(user_query: str, history: List):
634
  ):
635
  supervisor_initial_response += chunk
636
  supervisor_text = f"[์ดˆ๊ธฐ ๋ถ„์„] - {datetime.now().strftime('%H:%M:%S')}\n{supervisor_initial_response}"
637
- yield history, supervisor_text, "", "", "", "๐Ÿ”„ ๊ฐ๋…์ž AI๊ฐ€ ๋ถ„์„ ์ค‘..."
638
 
639
  all_responses["supervisor"].append(supervisor_initial_response)
640
 
@@ -644,7 +649,7 @@ def process_query_streaming(user_query: str, history: List):
644
 
645
  # 2๋‹จ๊ณ„: ๋ธŒ๋ ˆ์ด๋ธŒ ๊ฒ€์ƒ‰ ์ˆ˜ํ–‰
646
  researcher_text = "[์›น ๊ฒ€์ƒ‰] ๐Ÿ” ๊ฒ€์ƒ‰ ์ค‘...\n"
647
- yield history, supervisor_text, researcher_text, "", "", "๐Ÿ” ์›น ๊ฒ€์ƒ‰ ์ˆ˜ํ–‰ ์ค‘..."
648
 
649
  search_results = {}
650
  for keyword in keywords:
@@ -652,7 +657,7 @@ def process_query_streaming(user_query: str, history: List):
652
  if results:
653
  search_results[keyword] = results
654
  researcher_text += f"โœ“ '{keyword}' ๊ฒ€์ƒ‰ ์™„๋ฃŒ\n"
655
- yield history, supervisor_text, researcher_text, "", "", f"๐Ÿ” '{keyword}' ๊ฒ€์ƒ‰ ์ค‘..."
656
 
657
  # 3๋‹จ๊ณ„: ์กฐ์‚ฌ์ž AI๊ฐ€ ๊ฒ€์ƒ‰ ๊ฒฐ๊ณผ ์ •๋ฆฌ
658
  researcher_prompt = llm_system.create_researcher_prompt(user_query, supervisor_initial_response, search_results)
@@ -665,7 +670,7 @@ def process_query_streaming(user_query: str, history: List):
665
  ):
666
  researcher_response += chunk
667
  researcher_text = f"[์กฐ์‚ฌ ๊ฒฐ๊ณผ ์ •๋ฆฌ] - {datetime.now().strftime('%H:%M:%S')}\n{researcher_response}"
668
- yield history, supervisor_text, researcher_text, "", "", "๐Ÿ“ ์กฐ์‚ฌ์ž AI๊ฐ€ ์ •๋ฆฌ ์ค‘..."
669
 
670
  all_responses["researcher"].append(researcher_response)
671
 
@@ -681,7 +686,7 @@ def process_query_streaming(user_query: str, history: List):
681
  supervisor_execution_response += chunk
682
  temp_text = f"{all_responses['supervisor'][0]}\n\n---\n\n[์‹คํ–‰ ์ง€์‹œ] - {datetime.now().strftime('%H:%M:%S')}\n{supervisor_execution_response}"
683
  supervisor_text = f"[์ดˆ๊ธฐ ๋ถ„์„] - {datetime.now().strftime('%H:%M:%S')}\n{temp_text}"
684
- yield history, supervisor_text, researcher_text, "", "", "๐ŸŽฏ ๊ฐ๋…์ž AI๊ฐ€ ์ง€์‹œ ์ค‘..."
685
 
686
  all_responses["supervisor"].append(supervisor_execution_response)
687
 
@@ -696,7 +701,7 @@ def process_query_streaming(user_query: str, history: List):
696
  ):
697
  executor_response += chunk
698
  executor_text = f"[์ดˆ๊ธฐ ๊ตฌํ˜„] - {datetime.now().strftime('%H:%M:%S')}\n{executor_response}"
699
- yield history, supervisor_text, researcher_text, executor_text, "", "๐Ÿ”ง ์‹คํ–‰์ž AI๊ฐ€ ๊ตฌํ˜„ ์ค‘..."
700
 
701
  all_responses["executor"].append(executor_response)
702
 
@@ -720,7 +725,7 @@ def process_query_streaming(user_query: str, history: List):
720
  review_response += chunk
721
  temp_text = f"{all_responses['supervisor'][0]}\n\n---\n\n[์‹คํ–‰ ์ง€์‹œ] - {datetime.now().strftime('%H:%M:%S')}\n{all_responses['supervisor'][1]}\n\n---\n\n[๊ฒ€ํ†  ๋ฐ ํ”ผ๋“œ๋ฐฑ] - {datetime.now().strftime('%H:%M:%S')}\n{review_response}"
722
  supervisor_text = f"[์ดˆ๊ธฐ ๋ถ„์„] - {datetime.now().strftime('%H:%M:%S')}\n{temp_text}"
723
- yield history, supervisor_text, researcher_text, executor_text, "", "๐Ÿ”„ ๊ฐ๋…์ž AI๊ฐ€ ๊ฒ€ํ†  ์ค‘..."
724
 
725
  all_responses["supervisor"].append(review_response)
726
 
@@ -741,7 +746,7 @@ def process_query_streaming(user_query: str, history: List):
741
  final_executor_response += chunk
742
  temp_text = f"[์ดˆ๊ธฐ ๊ตฌํ˜„] - {datetime.now().strftime('%H:%M:%S')}\n{all_responses['executor'][0]}\n\n---\n\n[์ตœ์ข… ๋ณด๊ณ ์„œ] - {datetime.now().strftime('%H:%M:%S')}\n{final_executor_response}"
743
  executor_text = temp_text
744
- yield history, supervisor_text, researcher_text, executor_text, "", "๐Ÿ“„ ์ตœ์ข… ๋ณด๊ณ ์„œ ์ž‘์„ฑ ์ค‘..."
745
 
746
  all_responses["executor"].append(final_executor_response)
747
 
@@ -779,18 +784,20 @@ def process_query_streaming(user_query: str, history: List):
779
  ---
780
  *์ด ๋ณด๊ณ ์„œ๋Š” ์›น ๊ฒ€์ƒ‰์„ ํ†ตํ•œ ์ตœ์‹  ์ •๋ณด์™€ AI๋“ค์˜ ํ˜‘๋ ฅ, ๊ทธ๋ฆฌ๊ณ  ํ”ผ๋“œ๋ฐฑ ๋ฐ˜์˜์„ ํ†ตํ•ด ์ž‘์„ฑ๋˜์—ˆ์Šต๋‹ˆ๋‹ค.*"""
781
 
782
- # ํžˆ์Šคํ† ๋ฆฌ ์—…๋ฐ์ดํŠธ
783
- new_history = history + [(user_query, final_summary)]
784
 
785
- yield new_history, supervisor_text, researcher_text, executor_text, final_summary, "โœ… ์ตœ์ข… ๋ณด๊ณ ์„œ ์™„์„ฑ!"
786
 
787
  except Exception as e:
788
  error_msg = f"โŒ ์ฒ˜๋ฆฌ ์ค‘ ์˜ค๋ฅ˜: {str(e)}"
789
- yield history, "", "", "", error_msg, error_msg
790
 
791
  def clear_all():
792
  """๋ชจ๋“  ๋‚ด์šฉ ์ดˆ๊ธฐํ™”"""
793
- return [], "", "", "", "", "๐Ÿ”„ ์ดˆ๊ธฐํ™”๋˜์—ˆ์Šต๋‹ˆ๋‹ค."
794
 
795
  # Gradio ์ธํ„ฐํŽ˜์ด์Šค
796
  css = """
@@ -833,16 +840,9 @@ with gr.Blocks(title="협력적 LLM 시스템", theme=gr.themes.Soft(), css=css)
833
  """
834
  )
835
 
 
836
  with gr.Row():
837
- # ์™ผ์ชฝ: ์ž…๋ ฅ ๋ฐ ์ฑ„ํŒ… ๊ธฐ๋ก
838
- with gr.Column(scale=1):
839
- chatbot = gr.Chatbot(
840
- label="๐Ÿ’ฌ ๋Œ€ํ™” ๊ธฐ๋ก",
841
- height=600,
842
- show_copy_button=True,
843
- bubble_full_width=False
844
- )
845
-
846
  user_input = gr.Textbox(
847
  label="์งˆ๋ฌธ ์ž…๋ ฅ",
848
  placeholder="์˜ˆ: ๊ธฐ๊ณ„ํ•™์Šต ๋ชจ๋ธ์˜ ์„ฑ๋Šฅ์„ ํ–ฅ์ƒ์‹œํ‚ค๋Š” ๋ฐฉ๋ฒ•์€?",
@@ -859,1026 +859,49 @@ with gr.Blocks(title="협력적 LLM 시스템", theme=gr.themes.Soft(), css=css)
859
  value="๋Œ€๊ธฐ ์ค‘...",
860
  max_lines=1
861
  )
862
-
863
- # ์˜ค๋ฅธ์ชฝ: AI ์ถœ๋ ฅ
864
- with gr.Column(scale=2):
865
- # ์ตœ์ข… ๊ฒฐ๊ณผ
866
  with gr.Accordion("๐Ÿ“Š ์ตœ์ข… ์ข…ํ•ฉ ๊ฒฐ๊ณผ", open=True):
867
  final_output = gr.Markdown(
868
  value="*์งˆ๋ฌธ์„ ์ž…๋ ฅํ•˜๋ฉด ๊ฒฐ๊ณผ๊ฐ€ ์—ฌ๊ธฐ์— ํ‘œ์‹œ๋ฉ๋‹ˆ๋‹ค.*"
869
  )
870
-
871
- # AI ์ถœ๋ ฅ๋“ค
872
- with gr.Row():
873
- # ๊ฐ๋…์ž AI ์ถœ๋ ฅ
874
- with gr.Column():
875
- gr.Markdown("### ๐Ÿง  ๊ฐ๋…์ž AI (๊ฑฐ์‹œ์  ๋ถ„์„)")
876
- supervisor_output = gr.Textbox(
877
- label="",
878
- lines=12,
879
- max_lines=15,
880
- interactive=False,
881
- elem_classes=["supervisor-box"]
882
- )
883
-
884
- with gr.Row():
885
- # ์กฐ์‚ฌ์ž AI ์ถœ๋ ฅ
886
- with gr.Column():
887
- gr.Markdown("### ๐Ÿ” ์กฐ์‚ฌ์ž AI (์›น ๊ฒ€์ƒ‰ & ์ •๋ฆฌ)")
888
- researcher_output = gr.Textbox(
889
- label="",
890
- lines=12,
891
- max_lines=15,
892
- interactive=False,
893
- elem_classes=["researcher-box"]
894
- )
895
-
896
- # ์‹คํ–‰์ž AI ์ถœ๋ ฅ
897
- with gr.Column():
898
- gr.Markdown("### ๐Ÿ‘๏ธ ์‹คํ–‰์ž AI (๋ฏธ์‹œ์  ๊ตฌํ˜„)")
899
- executor_output = gr.Textbox(
900
- label="",
901
- lines=12,
902
- max_lines=15,
903
- interactive=False,
904
- elem_classes=["executor-box"]
905
- )
906
-
907
- # ์˜ˆ์ œ
908
- gr.Examples(
909
- examples=[
910
- "๊ธฐ๊ณ„ํ•™์Šต ๋ชจ๋ธ์˜ ์„ฑ๋Šฅ์„ ํ–ฅ์ƒ์‹œํ‚ค๋Š” ์ตœ์‹  ๋ฐฉ๋ฒ•์€?",
911
- "2024๋…„ ํšจ๊ณผ์ ์ธ ํ”„๋กœ์ ํŠธ ๊ด€๋ฆฌ ๋„๊ตฌ์™€ ์ „๋žต์€?",
912
- "์ง€์† ๊ฐ€๋Šฅํ•œ ๋น„์ฆˆ๋‹ˆ์Šค ๋ชจ๋ธ์˜ ์ตœ์‹  ํŠธ๋ Œ๋“œ๋Š”?",
913
- "์ตœ์‹  ๋ฐ์ดํ„ฐ ์‹œ๊ฐํ™” ๋„๊ตฌ์™€ ๊ธฐ๋ฒ•์€?",
914
- "์›๊ฒฉ ํŒ€์˜ ์ƒ์‚ฐ์„ฑ์„ ๋†’์ด๋Š” ๊ฒ€์ฆ๋œ ๋ฐฉ๋ฒ•์€?"
915
- ],
916
- inputs=user_input,
917
- label="๐Ÿ’ก ์˜ˆ์ œ ์งˆ๋ฌธ"
918
- )
919
-
920
- # ์ด๋ฒคํŠธ ํ•ธ๋“ค๋Ÿฌ
921
- submit_btn.click(
922
- fn=process_query_streaming,
923
- inputs=[user_input, chatbot],
924
- outputs=[chatbot, supervisor_output, researcher_output, executor_output, final_output, status_text]
925
- ).then(
926
- fn=lambda: "",
927
- outputs=[user_input]
928
- )
929
-
930
- user_input.submit(
931
- fn=process_query_streaming,
932
- inputs=[user_input, chatbot],
933
- outputs=[chatbot, supervisor_output, researcher_output, executor_output, final_output, status_text]
934
- ).then(
935
- fn=lambda: "",
936
- outputs=[user_input]
937
- )
938
-
939
- clear_btn.click(
940
- fn=clear_all,
941
- outputs=[chatbot, supervisor_output, researcher_output, executor_output, final_output, status_text]
942
- )
943
-
944
- gr.Markdown(
945
- """
946
- ---
947
- ### ๐Ÿ“ ์‚ฌ์šฉ ๋ฐฉ๋ฒ•
948
- 1. ์งˆ๋ฌธ์„ ์ž…๋ ฅํ•˜๊ณ  Enter ๋˜๋Š” '๋ถ„์„ ์‹œ์ž‘' ๋ฒ„ํŠผ์„ ํด๋ฆญํ•˜์„ธ์š”.
949
- 2. 7๋‹จ๊ณ„ ํ˜‘๋ ฅ ํ”„๋กœ์„ธ์Šค๊ฐ€ ์ง„ํ–‰๋ฉ๋‹ˆ๋‹ค:
950
- - ๊ฐ๋…์ž ์ดˆ๊ธฐ ๋ถ„์„ โ†’ ์›น ๊ฒ€์ƒ‰ โ†’ ์กฐ์‚ฌ ์ •๋ฆฌ โ†’ ์‹คํ–‰ ์ง€์‹œ โ†’ ์ดˆ๊ธฐ ๊ตฌํ˜„ โ†’ ํ”ผ๋“œ๋ฐฑ โ†’ ์ตœ์ข… ๋ณด๊ณ ์„œ
951
- 3. ๊ฐ AI์˜ ์ž‘์—… ๊ณผ์ •์„ ์‹ค์‹œ๊ฐ„์œผ๋กœ ํ™•์ธํ•  ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค.
952
- 4. ์ตœ์ข… ๋ณด๊ณ ์„œ๊ฐ€ ์ƒ๋‹จ์— ํ‘œ์‹œ๋˜๋ฉฐ, ์ „์ฒด ํ˜‘๋ ฅ ๊ณผ์ •์€ ์ ‘์„ ์ˆ˜ ์žˆ๋Š” ํ˜•ํƒœ๋กœ ์ œ๊ณต๋ฉ๋‹ˆ๋‹ค.
953
-
954
- ### โš™๏ธ ํ™˜๊ฒฝ ์„ค์ •
955
- - **LLM API**: `export FRIENDLI_TOKEN="your_token"`
956
- - **Brave Search API**: `export BAPI_TOKEN="your_brave_api_token"`
957
- - **ํ…Œ์ŠคํŠธ ๋ชจ๋“œ**: `export TEST_MODE=true` (API ์—†์ด ์ž‘๋™)
958
-
959
- ### ๐Ÿ”— API ํ‚ค ํš๋“
960
- - Friendli API: [https://friendli.ai](https://friendli.ai)
961
- - Brave Search API: [https://brave.com/search/api/](https://brave.com/search/api/)
962
-
963
- ### ๐Ÿ’ก ํŠน์ง•
964
- - ์™„์ „ํ•œ ํ”ผ๋“œ๋ฐฑ ๋ฃจํ”„: ๊ฐ๋…์ž์˜ ํ”ผ๋“œ๋ฐฑ์ด ์‹คํ–‰์ž์—๊ฒŒ ์ „๋‹ฌ๋˜์–ด ์ตœ์ข… ๊ฐœ์„ 
965
- - ์›น ๊ฒ€์ƒ‰ ๊ธฐ๋ฐ˜: ์ตœ์‹  ์ •๋ณด์™€ ์‚ฌ๋ก€๋ฅผ ํ™œ์šฉํ•œ ์‹ค์šฉ์  ๋‹ต๋ณ€
966
- - ์ „๋ฌธ ๋ณด๊ณ ์„œ ํ˜•์‹: ์‹ค๋ฌด์—์„œ ๋ฐ”๋กœ ํ™œ์šฉ ๊ฐ€๋Šฅํ•œ ๊ตฌ์กฐํ™”๋œ ๊ฒฐ๊ณผ๋ฌผ
967
- """
968
- )
969
-
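For reference, the FRIENDLI_TOKEN, BAPI_TOKEN, and TEST_MODE settings mentioned in the usage notes above are read once at import time. A minimal sketch of that startup configuration, mirroring the constants defined in this module (placeholder defaults included):

```python
import os

# Startup configuration as used by app.py: real keys come from the environment;
# the placeholder defaults leave the system in simulated/test mode.
FRIENDLI_TOKEN = os.getenv("FRIENDLI_TOKEN", "YOUR_FRIENDLI_TOKEN")
BAPI_TOKEN = os.getenv("BAPI_TOKEN", "YOUR_BRAVE_API_TOKEN")
TEST_MODE = os.getenv("TEST_MODE", "false").lower() == "true"

# With no FRIENDLI_TOKEN set (or TEST_MODE=true), LLMCollaborativeSystem falls
# back to canned streaming responses instead of calling the Friendli API.
```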
970
- if __name__ == "__main__":
971
- app.queue() # ์ŠคํŠธ๋ฆฌ๋ฐ์„ ์œ„ํ•œ ํ ํ™œ์„ฑํ™”
972
- app.launch(
973
- server_name="0.0.0.0",
974
- server_port=7860,
975
- share=True,
976
- show_error=True
977
- )import gradio as gr
978
- import os
979
- import json
980
- import requests
981
- from datetime import datetime
982
- import time
983
- from typing import List, Dict, Any, Generator, Tuple
984
- import logging
985
- import re
986
-
987
- # ๋กœ๊น… ์„ค์ •
988
- logging.basicConfig(level=logging.INFO)
989
- logger = logging.getLogger(__name__)
990
-
991
- # ํ™˜๊ฒฝ ๋ณ€์ˆ˜์—์„œ ํ† ํฐ ๊ฐ€์ ธ์˜ค๊ธฐ
992
- FRIENDLI_TOKEN = os.getenv("FRIENDLI_TOKEN", "YOUR_FRIENDLI_TOKEN")
993
- BAPI_TOKEN = os.getenv("BAPI_TOKEN", "YOUR_BRAVE_API_TOKEN")
994
- API_URL = "https://api.friendli.ai/dedicated/v1/chat/completions"
995
- BRAVE_SEARCH_URL = "https://api.search.brave.com/res/v1/web/search"
996
- MODEL_ID = "dep89a2fld32mcm"
997
- TEST_MODE = os.getenv("TEST_MODE", "false").lower() == "true"
998
-
999
- # ์ „์—ญ ๋ณ€์ˆ˜
1000
- conversation_history = []
1001
-
1002
- class LLMCollaborativeSystem:
1003
- def __init__(self):
1004
- self.token = FRIENDLI_TOKEN
1005
- self.bapi_token = BAPI_TOKEN
1006
- self.api_url = API_URL
1007
- self.brave_url = BRAVE_SEARCH_URL
1008
- self.model_id = MODEL_ID
1009
- self.test_mode = TEST_MODE or (self.token == "YOUR_FRIENDLI_TOKEN")
1010
-
1011
- if self.test_mode:
1012
- logger.warning("ํ…Œ์ŠคํŠธ ๋ชจ๋“œ๋กœ ์‹คํ–‰๋ฉ๋‹ˆ๋‹ค.")
1013
- if self.bapi_token == "YOUR_BRAVE_API_TOKEN":
1014
- logger.warning("Brave API ํ† ํฐ์ด ์„ค์ •๋˜์ง€ ์•Š์•˜์Šต๋‹ˆ๋‹ค.")
1015
-
1016
- def create_headers(self):
1017
- """API ํ—ค๋” ์ƒ์„ฑ"""
1018
- return {
1019
- "Authorization": f"Bearer {self.token}",
1020
- "Content-Type": "application/json"
1021
- }
1022
-
1023
- def create_brave_headers(self):
1024
- """Brave API ํ—ค๋” ์ƒ์„ฑ"""
1025
- return {
1026
- "Accept": "application/json",
1027
- "Accept-Encoding": "gzip",
1028
- "X-Subscription-Token": self.bapi_token
1029
- }
1030
-
1031
- def create_supervisor_initial_prompt(self, user_query: str) -> str:
1032
- """๊ฐ๋…์ž AI ์ดˆ๊ธฐ ํ”„๋กฌํ”„ํŠธ ์ƒ์„ฑ"""
1033
- return f"""๋‹น์‹ ์€ ๊ฑฐ์‹œ์  ๊ด€์ ์—์„œ ๋ถ„์„ํ•˜๊ณ  ์ง€๋„ํ•˜๋Š” ๊ฐ๋…์ž AI์ž…๋‹ˆ๋‹ค.
1034
-
1035
- ์‚ฌ์šฉ์ž ์งˆ๋ฌธ: {user_query}
1036
-
1037
- ์ด ์งˆ๋ฌธ์— ๋Œ€ํ•ด:
1038
- 1. ์ „์ฒด์ ์ธ ์ ‘๊ทผ ๋ฐฉํ–ฅ๊ณผ ํ”„๋ ˆ์ž„์›Œํฌ๋ฅผ ์ œ์‹œํ•˜์„ธ์š”
1039
- 2. ํ•ต์‹ฌ ์š”์†Œ์™€ ๊ณ ๋ ค์‚ฌํ•ญ์„ ๊ตฌ์กฐํ™”ํ•˜์—ฌ ์„ค๋ช…ํ•˜์„ธ์š”
1040
- 3. ์ด ์ฃผ์ œ์— ๋Œ€ํ•ด ์กฐ์‚ฌ๊ฐ€ ํ•„์š”ํ•œ 5-7๊ฐœ์˜ ๊ตฌ์ฒด์ ์ธ ํ‚ค์›Œ๋“œ๋‚˜ ๊ฒ€์ƒ‰์–ด๋ฅผ ์ œ์‹œํ•˜์„ธ์š”
1041
-
1042
- ํ‚ค์›Œ๋“œ๋Š” ๋‹ค์Œ ํ˜•์‹์œผ๋กœ ์ œ์‹œํ•˜์„ธ์š”:
1043
- [๊ฒ€์ƒ‰ ํ‚ค์›Œ๋“œ]: ํ‚ค์›Œ๋“œ1, ํ‚ค์›Œ๋“œ2, ํ‚ค์›Œ๋“œ3, ํ‚ค์›Œ๋“œ4, ํ‚ค์›Œ๋“œ5"""
1044
-
1045
- def create_researcher_prompt(self, user_query: str, supervisor_guidance: str, search_results: Dict[str, List[Dict]]) -> str:
1046
- """์กฐ์‚ฌ์ž AI ํ”„๋กฌํ”„ํŠธ ์ƒ์„ฑ"""
1047
- search_summary = ""
1048
- for keyword, results in search_results.items():
1049
- search_summary += f"\n\n**{keyword}์— ๋Œ€ํ•œ ๊ฒ€์ƒ‰ ๊ฒฐ๊ณผ:**\n"
1050
- for i, result in enumerate(results[:3], 1):
1051
- search_summary += f"{i}. {result.get('title', 'N/A')}\n"
1052
- search_summary += f" - {result.get('description', 'N/A')}\n"
1053
- search_summary += f" - ์ถœ์ฒ˜: {result.get('url', 'N/A')}\n"
1054
-
1055
- return f"""๋‹น์‹ ์€ ์ •๋ณด๋ฅผ ์กฐ์‚ฌํ•˜๊ณ  ์ •๋ฆฌํ•˜๋Š” ์กฐ์‚ฌ์ž AI์ž…๋‹ˆ๋‹ค.
1056
-
1057
- ์‚ฌ์šฉ์ž ์งˆ๋ฌธ: {user_query}
1058
-
1059
- ๊ฐ๋…์ž AI์˜ ์ง€์นจ:
1060
- {supervisor_guidance}
1061
-
1062
- ๋ธŒ๋ ˆ์ด๋ธŒ ๊ฒ€์ƒ‰ ๊ฒฐ๊ณผ:
1063
- {search_summary}
1064
-
1065
- ์œ„ ๊ฒ€์ƒ‰ ๊ฒฐ๊ณผ๋ฅผ ๋ฐ”ํƒ•์œผ๋กœ:
1066
- 1. ๊ฐ ํ‚ค์›Œ๋“œ๋ณ„๋กœ ์ค‘์š”ํ•œ ์ •๋ณด๋ฅผ ์ •๋ฆฌํ•˜์„ธ์š”
1067
- 2. ์‹ ๋ขฐํ•  ์ˆ˜ ์žˆ๋Š” ์ถœ์ฒ˜๋ฅผ ๋ช…์‹œํ•˜์„ธ์š”
1068
- 3. ์‹คํ–‰์ž AI๊ฐ€ ํ™œ์šฉํ•  ์ˆ˜ ์žˆ๋Š” ๊ตฌ์ฒด์ ์ธ ๋ฐ์ดํ„ฐ์™€ ์‚ฌ์‹ค์„ ์ถ”์ถœํ•˜์„ธ์š”
1069
- 4. ์ตœ์‹  ํŠธ๋ Œ๋“œ๋‚˜ ์ค‘์š”ํ•œ ํ†ต๊ณ„๊ฐ€ ์žˆ๋‹ค๋ฉด ๊ฐ•์กฐํ•˜์„ธ์š”"""
1070
-
1071
- def create_supervisor_execution_prompt(self, user_query: str, research_summary: str) -> str:
1072
- """๊ฐ๋…์ž AI์˜ ์‹คํ–‰ ์ง€์‹œ ํ”„๋กฌํ”„ํŠธ"""
1073
- return f"""๋‹น์‹ ์€ ๊ฑฐ์‹œ์  ๊ด€์ ์—์„œ ๋ถ„์„ํ•˜๊ณ  ์ง€๋„ํ•˜๋Š” ๊ฐ๋…์ž AI์ž…๋‹ˆ๋‹ค.
1074
-
1075
- ์‚ฌ์šฉ์ž ์งˆ๋ฌธ: {user_query}
1076
-
1077
- ์กฐ์‚ฌ์ž AI๊ฐ€ ์ •๋ฆฌํ•œ ์กฐ์‚ฌ ๋‚ด์šฉ:
1078
- {research_summary}
1079
-
1080
- ์œ„ ์กฐ์‚ฌ ๋‚ด์šฉ์„ ๊ธฐ๋ฐ˜์œผ๋กœ ์‹คํ–‰์ž AI์—๊ฒŒ ์•„์ฃผ ๊ตฌ์ฒด์ ์ธ ์ง€์‹œ๋ฅผ ๋‚ด๋ ค์ฃผ์„ธ์š”:
1081
- 1. ์กฐ์‚ฌ๋œ ์ •๋ณด๋ฅผ ์–ด๋–ป๊ฒŒ ํ™œ์šฉํ• ์ง€ ๋ช…ํ™•ํžˆ ์ง€์‹œํ•˜์„ธ์š”
1082
- 2. ์‹คํ–‰ ๊ฐ€๋Šฅํ•œ ๋‹จ๊ณ„๋ณ„ ์ž‘์—…์„ ๊ตฌ์ฒด์ ์œผ๋กœ ์ œ์‹œํ•˜์„ธ์š”
1083
- 3. ๊ฐ ๋‹จ๊ณ„์—์„œ ์ฐธ๊ณ ํ•ด์•ผ ํ•  ์กฐ์‚ฌ ๋‚ด์šฉ์„ ๋ช…์‹œํ•˜์„ธ์š”
1084
- 4. ์˜ˆ์ƒ๋˜๋Š” ๊ฒฐ๊ณผ๋ฌผ์˜ ํ˜•ํƒœ๋ฅผ ๊ตฌ์ฒด์ ์œผ๋กœ ์„ค๋ช…ํ•˜์„ธ์š”"""
1085
-
1086
- def create_executor_prompt(self, user_query: str, supervisor_guidance: str, research_summary: str) -> str:
1087
- """์‹คํ–‰์ž AI ํ”„๋กฌํ”„ํŠธ ์ƒ์„ฑ"""
1088
- return f"""๋‹น์‹ ์€ ์„ธ๋ถ€์ ์ธ ๋‚ด์šฉ์„ ๊ตฌํ˜„ํ•˜๋Š” ์‹คํ–‰์ž AI์ž…๋‹ˆ๋‹ค.
1089
-
1090
- ์‚ฌ์šฉ์ž ์งˆ๋ฌธ: {user_query}
1091
-
1092
- ์กฐ์‚ฌ์ž AI๊ฐ€ ์ •๋ฆฌํ•œ ์กฐ์‚ฌ ๋‚ด์šฉ:
1093
- {research_summary}
1094
-
1095
- ๊ฐ๋…์ž AI์˜ ๊ตฌ์ฒด์ ์ธ ์ง€์‹œ:
1096
- {supervisor_guidance}
1097
-
1098
- ์œ„ ์กฐ์‚ฌ ๋‚ด์šฉ๊ณผ ์ง€์‹œ์‚ฌํ•ญ์„ ๋ฐ”ํƒ•์œผ๋กœ:
1099
- 1. ์กฐ์‚ฌ๋œ ์ •๋ณด๋ฅผ ์ ๊ทน ํ™œ์šฉํ•˜์—ฌ ๊ตฌ์ฒด์ ์ธ ์‹คํ–‰ ๊ณ„ํš์„ ์ž‘์„ฑํ•˜์„ธ์š”
1100
- 2. ๊ฐ ๋‹จ๊ณ„๋ณ„๋กœ ์ฐธ๊ณ ํ•œ ์กฐ์‚ฌ ๋‚ด์šฉ์„ ๋ช…์‹œํ•˜์„ธ์š”
1101
- 3. ์‹ค์ œ๋กœ ์ ์šฉ ๊ฐ€๋Šฅํ•œ ๊ตฌ์ฒด์ ์ธ ๋ฐฉ๋ฒ•๋ก ์„ ์ œ์‹œํ•˜์„ธ์š”
1102
- 4. ์˜ˆ์ƒ๋˜๋Š” ์„ฑ๊ณผ์™€ ์ธก์ • ๋ฐฉ๋ฒ•์„ ํฌํ•จํ•˜์„ธ์š”"""
1103
-
1104
- def create_executor_final_prompt(self, user_query: str, initial_response: str, supervisor_feedback: str, research_summary: str) -> str:
1105
- """์‹คํ–‰์ž AI ์ตœ์ข… ๋ณด๊ณ ์„œ ํ”„๋กฌํ”„ํŠธ"""
1106
- return f"""๋‹น์‹ ์€ ์„ธ๋ถ€์ ์ธ ๋‚ด์šฉ์„ ๊ตฌํ˜„ํ•˜๋Š” ์‹คํ–‰์ž AI์ž…๋‹ˆ๋‹ค.
1107
-
1108
- ์‚ฌ์šฉ์ž ์งˆ๋ฌธ: {user_query}
1109
-
1110
- ์กฐ์‚ฌ์ž AI์˜ ์กฐ์‚ฌ ๋‚ด์šฉ:
1111
- {research_summary}
1112
-
1113
- ๋‹น์‹ ์˜ ์ดˆ๊ธฐ ๋‹ต๋ณ€:
1114
- {initial_response}
1115
-
1116
- ๊ฐ๋…์ž AI์˜ ํ”ผ๋“œ๋ฐฑ ๋ฐ ๊ฐœ์„ ์‚ฌํ•ญ:
1117
- {supervisor_feedback}
1118
-
1119
- ์œ„ ํ”ผ๋“œ๋ฐฑ์„ ์™„์ „ํžˆ ๋ฐ˜์˜ํ•˜์—ฌ ์ตœ์ข… ๋ณด๊ณ ์„œ๋ฅผ ์ž‘์„ฑํ•˜์„ธ์š”:
1120
- 1. ๊ฐ๋…์ž์˜ ๋ชจ๋“  ๊ฐœ์„ ์‚ฌํ•ญ์„ ๋ฐ˜์˜ํ•˜์„ธ์š”
1121
- 2. ์กฐ์‚ฌ ๋‚ด์šฉ์„ ๋”์šฑ ๊ตฌ์ฒด์ ์œผ๋กœ ํ™œ์šฉํ•˜์„ธ์š”
1122
- 3. ์‹คํ–‰ ๊ฐ€๋Šฅ์„ฑ์„ ๋†’์ด๋Š” ์„ธ๋ถ€ ๊ณ„ํš์„ ํฌํ•จํ•˜์„ธ์š”
1123
- 4. ๋ช…ํ™•ํ•œ ๊ฒฐ๋ก ๊ณผ ๋‹ค์Œ ๋‹จ๊ณ„๋ฅผ ์ œ์‹œํ•˜์„ธ์š”
1124
- 5. ์ „๋ฌธ์ ์ด๊ณ  ์™„์„ฑ๋„ ๋†’์€ ์ตœ์ข… ๋ณด๊ณ ์„œ ํ˜•์‹์œผ๋กœ ์ž‘์„ฑํ•˜์„ธ์š”"""
1125
 
1126
- def extract_keywords(self, supervisor_response: str) -> List[str]:
1127
- """๊ฐ๋…์ž ์‘๋‹ต์—์„œ ํ‚ค์›Œ๋“œ ์ถ”์ถœ"""
1128
- keywords = []
1129
-
1130
- # [๊ฒ€์ƒ‰ ํ‚ค์›Œ๋“œ]: ํ˜•์‹์œผ๋กœ ํ‚ค์›Œ๋“œ ์ฐพ๊ธฐ
1131
- keyword_match = re.search(r'\[๊ฒ€์ƒ‰ ํ‚ค์›Œ๋“œ\]:\s*(.+)', supervisor_response, re.IGNORECASE)
1132
- if keyword_match:
1133
- keyword_str = keyword_match.group(1)
1134
- keywords = [k.strip() for k in keyword_str.split(',') if k.strip()]
1135
 
1136
- # ํ‚ค์›Œ๋“œ๊ฐ€ ์—†์œผ๋ฉด ๊ธฐ๋ณธ ํ‚ค์›Œ๋“œ ์ƒ์„ฑ
1137
- if not keywords:
1138
- keywords = ["best practices", "implementation guide", "case studies", "latest trends", "success factors"]
1139
 
1140
- return keywords[:7] # ์ตœ๋Œ€ 7๊ฐœ๋กœ ์ œํ•œ
1141
-
1142
- def brave_search(self, query: str) -> List[Dict]:
1143
- """Brave Search API ํ˜ธ์ถœ"""
1144
- if self.test_mode or self.bapi_token == "YOUR_BRAVE_API_TOKEN":
1145
- # ํ…Œ์ŠคํŠธ ๋ชจ๋“œ์—์„œ๋Š” ์‹œ๋ฎฌ๋ ˆ์ด์…˜๋œ ๊ฒฐ๊ณผ ๋ฐ˜ํ™˜
1146
- return [
1147
- {
1148
- "title": f"Best Practices for {query}",
1149
- "description": f"Comprehensive guide on implementing {query} with proven methodologies and real-world examples.",
1150
- "url": f"https://example.com/{query.replace(' ', '-')}"
1151
- },
1152
- {
1153
- "title": f"Latest Trends in {query}",
1154
- "description": f"Analysis of current trends and future directions in {query}, including market insights and expert opinions.",
1155
- "url": f"https://trends.example.com/{query.replace(' ', '-')}"
1156
- },
1157
- {
1158
- "title": f"{query}: Case Studies and Success Stories",
1159
- "description": f"Real-world implementations of {query} across various industries with measurable results.",
1160
- "url": f"https://casestudies.example.com/{query.replace(' ', '-')}"
1161
- }
1162
- ]
1163
-
1164
- try:
1165
- params = {
1166
- "q": query,
1167
- "count": 5,
1168
- "safesearch": "moderate",
1169
- "freshness": "pw" # Past week for recent results
1170
- }
1171
-
1172
- response = requests.get(
1173
- self.brave_url,
1174
- headers=self.create_brave_headers(),
1175
- params=params,
1176
- timeout=10
1177
- )
1178
-
1179
- if response.status_code == 200:
1180
- data = response.json()
1181
- results = []
1182
- for item in data.get("web", {}).get("results", [])[:5]:
1183
- results.append({
1184
- "title": item.get("title", ""),
1185
- "description": item.get("description", ""),
1186
- "url": item.get("url", "")
1187
- })
1188
- return results
1189
- else:
1190
- logger.error(f"Brave API ์˜ค๋ฅ˜: {response.status_code}")
1191
- return []
1192
-
1193
- except Exception as e:
1194
- logger.error(f"Brave ๊ฒ€์ƒ‰ ์ค‘ ์˜ค๋ฅ˜: {str(e)}")
1195
- return []
1196
-
1197
- def simulate_streaming(self, text: str, role: str) -> Generator[str, None, None]:
1198
- """ํ…Œ์ŠคํŠธ ๋ชจ๋“œ์—์„œ ์ŠคํŠธ๋ฆฌ๋ฐ ์‹œ๋ฎฌ๋ ˆ์ด์…˜"""
1199
- words = text.split()
1200
- for i in range(0, len(words), 3):
1201
- chunk = " ".join(words[i:i+3])
1202
- yield chunk + " "
1203
- time.sleep(0.05)
1204
-
1205
- def call_llm_streaming(self, messages: List[Dict[str, str]], role: str) -> Generator[str, None, None]:
1206
- """์ŠคํŠธ๋ฆฌ๋ฐ LLM API ํ˜ธ์ถœ"""
1207
-
1208
- # ํ…Œ์ŠคํŠธ ๋ชจ๋“œ
1209
- if self.test_mode:
1210
- logger.info(f"ํ…Œ์ŠคํŠธ ๋ชจ๋“œ ์ŠคํŠธ๋ฆฌ๋ฐ - Role: {role}")
1211
- test_responses = {
1212
- "supervisor_initial": """์ด ์งˆ๋ฌธ์— ๋Œ€ํ•œ ๊ฑฐ์‹œ์  ๋ถ„์„์„ ์ œ์‹œํ•˜๊ฒ ์Šต๋‹ˆ๋‹ค.
1213
-
1214
- 1. **ํ•ต์‹ฌ ๊ฐœ๋… ํŒŒ์•…**
1215
- - ์งˆ๋ฌธ์˜ ๋ณธ์งˆ์  ์š”์†Œ๋ฅผ ์‹ฌ์ธต ๋ถ„์„ํ•ฉ๋‹ˆ๋‹ค
1216
- - ๊ด€๋ จ๋œ ์ฃผ์š” ์ด๋ก ๊ณผ ์›์น™์„ ๊ฒ€ํ† ํ•ฉ๋‹ˆ๋‹ค
1217
- - ๋‹ค์–‘ํ•œ ๊ด€์ ์—์„œ์˜ ์ ‘๊ทผ ๋ฐฉ๋ฒ•์„ ๊ณ ๋ คํ•ฉ๋‹ˆ๋‹ค
1218
-
1219
- 2. **์ „๋žต์  ์ ‘๊ทผ ๋ฐฉํ–ฅ**
1220
- - ์ฒด๊ณ„์ ์ด๊ณ  ๋‹จ๊ณ„๋ณ„ ํ•ด๊ฒฐ ๋ฐฉ์•ˆ์„ ์ˆ˜๋ฆฝํ•ฉ๋‹ˆ๋‹ค
1221
- - ์žฅ๋‹จ๊ธฐ ๋ชฉํ‘œ๋ฅผ ๋ช…ํ™•ํžˆ ์„ค์ •ํ•ฉ๋‹ˆ๋‹ค
1222
- - ๋ฆฌ์Šคํฌ ์š”์ธ๊ณผ ๋Œ€์‘ ๋ฐฉ์•ˆ์„ ๋งˆ๋ จํ•ฉ๋‹ˆ๋‹ค
1223
-
1224
- 3. **๊ธฐ๋Œ€ ํšจ๊ณผ์™€ ๊ณผ์ œ**
1225
- - ์˜ˆ์ƒ๋˜๋Š” ๊ธ์ •์  ์„ฑ๊ณผ๋ฅผ ๋ถ„์„ํ•ฉ๋‹ˆ๋‹ค
1226
- - ์ž ์žฌ์  ๋„์ „ ๊ณผ์ œ๋ฅผ ์‹๋ณ„ํ•ฉ๋‹ˆ๋‹ค
1227
- - ์ง€์†๊ฐ€๋Šฅํ•œ ๋ฐœ์ „ ๋ฐฉํ–ฅ์„ ์ œ์‹œํ•ฉ๋‹ˆ๋‹ค
1228
-
1229
- [๊ฒ€์ƒ‰ ํ‚ค์›Œ๋“œ]: machine learning optimization, performance improvement strategies, model efficiency techniques, hyperparameter tuning best practices, latest ML trends 2024""",
1230
-
1231
- "researcher": """์กฐ์‚ฌ ๊ฒฐ๊ณผ๋ฅผ ์ข…ํ•ฉํ•˜์—ฌ ๋‹ค์Œ๊ณผ ๊ฐ™์ด ์ •๋ฆฌํ–ˆ์Šต๋‹ˆ๋‹ค.
1232
-
1233
- **1. Machine Learning Optimization**
1234
- - ์ตœ์‹  ์—ฐ๊ตฌ์— ๋”ฐ๋ฅด๋ฉด ๋ชจ๋ธ ์ตœ์ ํ™”์˜ ํ•ต์‹ฌ์€ ์•„ํ‚คํ…์ฒ˜ ์„ค๊ณ„์™€ ํ›ˆ๋ จ ์ „๋žต์˜ ๊ท ํ˜•์ž…๋‹ˆ๋‹ค
1235
- - AutoML ๋„๊ตฌ๋“ค์ด ํ•˜์ดํผํŒŒ๋ผ๋ฏธํ„ฐ ํŠœ๋‹์„ ์ž๋™ํ™”ํ•˜์—ฌ ํšจ์œจ์„ฑ์„ ํฌ๊ฒŒ ํ–ฅ์ƒ์‹œํ‚ต๋‹ˆ๋‹ค
1236
- - ์ถœ์ฒ˜: ML Conference 2024, Google Research
1237
-
1238
- **2. Performance Improvement Strategies**
1239
- - ๋ฐ์ดํ„ฐ ํ’ˆ์งˆ ๊ฐœ์„ ์ด ๋ชจ๋ธ ์„ฑ๋Šฅ ํ–ฅ์ƒ์˜ 80%๋ฅผ ์ฐจ์ง€ํ•œ๋‹ค๋Š” ์—ฐ๊ตฌ ๊ฒฐ๊ณผ
1240
- - ์•™์ƒ๋ธ” ๊ธฐ๋ฒ•๊ณผ ์ „์ดํ•™์Šต์ด ์ฃผ์š” ์„ฑ๋Šฅ ๊ฐœ์„  ๋ฐฉ๋ฒ•์œผ๋กœ ์ž…์ฆ๋จ
1241
- - ๋ฒค์น˜๋งˆํฌ: ImageNet์—์„œ 95% ์ด์ƒ์˜ ์ •ํ™•๋„ ๋‹ฌ์„ฑ ์‚ฌ๋ก€
1242
-
1243
- **3. Model Efficiency Techniques**
1244
- - ๋ชจ๋ธ ๊ฒฝ๋Ÿ‰ํ™”(Pruning, Quantization)๋กœ ์ถ”๋ก  ์†๋„ 10๋ฐฐ ํ–ฅ์ƒ ๊ฐ€๋Šฅ
1245
- - Knowledge Distillation์œผ๋กœ ๋ชจ๋ธ ํฌ๊ธฐ 90% ๊ฐ์†Œ, ์„ฑ๋Šฅ ์œ ์ง€
1246
- - ์ตœ์‹  ํŠธ๋ Œ๋“œ: Efficient Transformers, Neural Architecture Search
1247
-
1248
- **4. ์‹ค์ œ ์ ์šฉ ์‚ฌ๋ก€**
1249
- - Netflix: ์ถ”์ฒœ ์‹œ์Šคํ…œ ๊ฐœ์„ ์œผ๋กœ ์‚ฌ์šฉ์ž ๋งŒ์กฑ๋„ 35% ํ–ฅ์ƒ
1250
- - Tesla: ์‹ค์‹œ๊ฐ„ ๊ฐ์ฒด ์ธ์‹ ์†๋„ 50% ๊ฐœ์„ 
1251
- - OpenAI: GPT ๋ชจ๋ธ ํšจ์œจ์„ฑ ๊ฐœ์„ ์œผ๋กœ ๋น„์šฉ 70% ์ ˆ๊ฐ""",
1252
-
1253
- "supervisor_execution": """์กฐ์‚ฌ ๋‚ด์šฉ์„ ๋ฐ”ํƒ•์œผ๋กœ ์‹คํ–‰์ž AI์—๊ฒŒ ๋‹ค์Œ๊ณผ ๊ฐ™์ด ๊ตฌ์ฒด์ ์œผ๋กœ ์ง€์‹œํ•ฉ๋‹ˆ๋‹ค.
1254
-
1255
- **1๋‹จ๊ณ„: ํ˜„์žฌ ๋ชจ๋ธ ์ง„๋‹จ (1์ฃผ์ฐจ)**
1256
- - ์กฐ์‚ฌ๋œ ๋ฒค์น˜๋งˆํฌ ๊ธฐ์ค€์œผ๋กœ ํ˜„์žฌ ๋ชจ๋ธ ์„ฑ๋Šฅ ํ‰๊ฐ€
1257
- - Netflix ์‚ฌ๋ก€๋ฅผ ์ฐธ๊ณ ํ•˜์—ฌ ์ฃผ์š” ๋ณ‘๋ชฉ ์ง€์  ์‹๋ณ„
1258
- - AutoML ๋„๊ตฌ๋ฅผ ํ™œ์šฉํ•œ ์ดˆ๊ธฐ ์ตœ์ ํ™” ๊ฐ€๋Šฅ์„ฑ ํƒ์ƒ‰
1259
-
1260
- **2๋‹จ๊ณ„: ๋ฐ์ดํ„ฐ ํ’ˆ์งˆ ๊ฐœ์„  (2-3์ฃผ์ฐจ)**
1261
- - ์กฐ์‚ฌ ๊ฒฐ๊ณผ์˜ "80% ๊ทœ์น™"์— ๋”ฐ๋ผ ๋ฐ์ดํ„ฐ ์ •์ œ ์šฐ์„  ์‹คํ–‰
1262
- - ๋ฐ์ดํ„ฐ ์ฆ๊ฐ• ๊ธฐ๋ฒ• ์ ์šฉ (์กฐ์‚ฌ๋œ ์ตœ์‹  ๊ธฐ๋ฒ• ํ™œ์šฉ)
1263
- - A/B ํ…Œ์ŠคํŠธ๋กœ ๊ฐœ์„  ํšจ๊ณผ ์ธก์ •
1264
-
1265
- **3๋‹จ๊ณ„: ๋ชจ๋ธ ์ตœ์ ํ™” ๊ตฌํ˜„ (4-6์ฃผ์ฐจ)**
1266
- - Knowledge Distillation ์ ์šฉํ•˜์—ฌ ๋ชจ๋ธ ๊ฒฝ๋Ÿ‰ํ™”
1267
- - ์กฐ์‚ฌ๋œ Pruning ๊ธฐ๋ฒ•์œผ๋กœ ์ถ”๋ก  ์†๋„ ๊ฐœ์„ 
1268
- - Tesla ์‚ฌ๋ก€์˜ ์‹ค์‹œ๊ฐ„ ์ฒ˜๋ฆฌ ์ตœ์ ํ™” ๊ธฐ๋ฒ• ๋ฒค์น˜๋งˆํ‚น
1269
-
1270
- **4๋‹จ๊ณ„: ์„ฑ๊ณผ ๊ฒ€์ฆ ๋ฐ ๋ฐฐํฌ (7-8์ฃผ์ฐจ)**
1271
- - OpenAI ์‚ฌ๋ก€์˜ ๋น„์šฉ ์ ˆ๊ฐ ์ง€ํ‘œ ์ ์šฉ
1272
- - ์กฐ์‚ฌ๋œ ์„ฑ๋Šฅ ์ง€ํ‘œ๋กœ ๊ฐœ์„ ์œจ ์ธก์ •
1273
- - ๋‹จ๊ณ„์  ๋ฐฐํฌ ์ „๋žต ์ˆ˜๋ฆฝ""",
1274
-
1275
- "executor": """๊ฐ๋…์ž์˜ ์ง€์‹œ์™€ ์กฐ์‚ฌ ๋‚ด์šฉ์„ ๊ธฐ๋ฐ˜์œผ๋กœ ๊ตฌ์ฒด์ ์ธ ์‹คํ–‰ ๊ณ„ํš์„ ์ˆ˜๋ฆฝํ•ฉ๋‹ˆ๋‹ค.
1276
-
1277
- **1๋‹จ๊ณ„: ํ˜„์žฌ ๋ชจ๋ธ ์ง„๋‹จ (1์ฃผ์ฐจ)**
1278
- - ์›”์š”์ผ-ํ™”์š”์ผ: MLflow๋ฅผ ์‚ฌ์šฉํ•œ ํ˜„์žฌ ๋ชจ๋ธ ๋ฉ”ํŠธ๋ฆญ ์ˆ˜์ง‘
1279
- * ์กฐ์‚ฌ ๊ฒฐ๊ณผ ์ฐธ๊ณ : Netflix๊ฐ€ ์‚ฌ์šฉํ•œ ํ•ต์‹ฌ ์ง€ํ‘œ (์ •ํ™•๋„, ์ง€์—ฐ์‹œ๊ฐ„, ์ฒ˜๋ฆฌ๋Ÿ‰)
1280
- - ์ˆ˜์š”์ผ-๋ชฉ์š”์ผ: AutoML ๋„๊ตฌ (Optuna, Ray Tune) ์„ค์ • ๋ฐ ์ดˆ๊ธฐ ์‹คํ–‰
1281
- * ์กฐ์‚ฌ๋œ best practice์— ๋”ฐ๋ผ search space ์ •์˜
1282
- - ๊ธˆ์š”์ผ: ์ง„๋‹จ ๋ณด๊ณ ์„œ ์ž‘์„ฑ ๋ฐ ๊ฐœ์„  ์šฐ์„ ์ˆœ์œ„ ๊ฒฐ์ •
1283
-
1284
- **2๋‹จ๊ณ„: ๋ฐ์ดํ„ฐ ํ’ˆ์งˆ ๊ฐœ์„  (2-3์ฃผ์ฐจ)**
1285
- - ๋ฐ์ดํ„ฐ ์ •์ œ ํŒŒ์ดํ”„๋ผ์ธ ๊ตฌ์ถ•
1286
- * ์กฐ์‚ฌ ๊ฒฐ๊ณผ์˜ "80% ๊ทœ์น™" ์ ์šฉ: ๋ˆ„๋ฝ๊ฐ’, ์ด์ƒ์น˜, ๋ ˆ์ด๋ธ” ์˜ค๋ฅ˜ ์ฒ˜๋ฆฌ
1287
- * ์ฝ”๋“œ ์˜ˆ์‹œ: `data_quality_pipeline.py` ๊ตฌํ˜„
1288
- - ๋ฐ์ดํ„ฐ ์ฆ๊ฐ• ๊ตฌํ˜„
1289
- * ์ตœ์‹  ๊ธฐ๋ฒ• ์ ์šฉ: MixUp, CutMix, AutoAugment
1290
- * ๊ฒ€์ฆ ๋ฐ์ดํ„ฐ์…‹์œผ๋กœ ํšจ๊ณผ ์ธก์ • (๋ชฉํ‘œ: 15% ์„ฑ๋Šฅ ํ–ฅ์ƒ)
1291
-
1292
- **3๋‹จ๊ณ„: ๋ชจ๋ธ ์ตœ์ ํ™” ๊ตฌํ˜„ (4-6์ฃผ์ฐจ)**
1293
- - Knowledge Distillation ๊ตฌํ˜„
1294
- * Teacher ๋ชจ๋ธ: ํ˜„์žฌ ๋Œ€๊ทœ๋ชจ ๋ชจ๋ธ
1295
- * Student ๋ชจ๋ธ: 90% ์ž‘์€ ํฌ๊ธฐ ๋ชฉํ‘œ (์กฐ์‚ฌ ๊ฒฐ๊ณผ ๊ธฐ๋ฐ˜)
1296
- * ๊ตฌํ˜„ ํ”„๋ ˆ์ž„์›Œํฌ: PyTorch/TensorFlow
1297
- - Pruning ๋ฐ Quantization ์ ์šฉ
1298
- * ๊ตฌ์กฐ์  pruning์œผ๋กœ 50% ํŒŒ๋ผ๋ฏธํ„ฐ ์ œ๊ฑฐ
1299
- * INT8 quantization์œผ๋กœ ์ถ”๊ฐ€ 4๋ฐฐ ์†๋„ ํ–ฅ์ƒ
1300
- * Tesla ์‚ฌ๋ก€ ์ฐธ๊ณ : TensorRT ์ตœ์ ํ™” ์ ์šฉ
1301
-
1302
- **4๋‹จ๊ณ„: ์„ฑ๊ณผ ๊ฒ€์ฆ ๋ฐ ๋ฐฐํฌ (7-8์ฃผ์ฐจ)**
1303
- - ์„ฑ๊ณผ ์ง€ํ‘œ ์ธก์ •
1304
- * ์ถ”๋ก  ์†๋„: ๋ชฉํ‘œ 10๋ฐฐ ํ–ฅ์ƒ (์กฐ์‚ฌ ๊ฒฐ๊ณผ ๊ธฐ๋ฐ˜)
1305
- * ์ •ํ™•๋„ ์†์‹ค: ์ตœ๋Œ€ 2% ์ด๋‚ด ์œ ์ง€
1306
- * ๋น„์šฉ ์ ˆ๊ฐ: 70% ๋ชฉํ‘œ (OpenAI ์‚ฌ๋ก€ ์ฐธ๊ณ )
1307
- - ๋ฐฐํฌ ์ „๋žต
1308
- * A/B ํ…Œ์ŠคํŠธ: 10% ํŠธ๋ž˜ํ”ฝ์œผ๋กœ ์‹œ์ž‘
1309
- * ๋ชจ๋‹ˆํ„ฐ๋ง: Prometheus + Grafana ๋Œ€์‹œ๋ณด๋“œ
1310
- * ๋กค๋ฐฑ ๊ณ„ํš: ์„ฑ๋Šฅ ์ €ํ•˜ ์‹œ ์ž๋™ ๋กค๋ฐฑ
1311
-
1312
- **์˜ˆ์ƒ ๊ฒฐ๊ณผ๋ฌผ**
1313
- - ์ตœ์ ํ™”๋œ ๋ชจ๋ธ (ํฌ๊ธฐ 90% ๊ฐ์†Œ, ์†๋„ 10๋ฐฐ ํ–ฅ์ƒ)
1314
- - ์ƒ์„ธ ์„ฑ๋Šฅ ๋ฒค์น˜๋งˆํฌ ๋ณด๊ณ ์„œ
1315
- - ํ”„๋กœ๋•์…˜ ๋ฐฐํฌ ๊ฐ€์ด๋“œ ๋ฐ ๋ชจ๋‹ˆํ„ฐ๋ง ๋Œ€์‹œ๋ณด๋“œ
1316
- - ์žฌํ˜„ ๊ฐ€๋Šฅํ•œ ์ตœ์ ํ™” ํŒŒ์ดํ”„๋ผ์ธ ์ฝ”๋“œ""",
1317
-
1318
- "supervisor_review": """์‹คํ–‰์ž AI์˜ ๊ณ„ํš์„ ๊ฒ€ํ† ํ•œ ๊ฒฐ๊ณผ, ์กฐ์‚ฌ ๋‚ด์šฉ์ด ์ž˜ ๋ฐ˜์˜๋˜์—ˆ์Šต๋‹ˆ๋‹ค. ๋‹ค์Œ๊ณผ ๊ฐ™์€ ๊ฐœ์„ ์‚ฌํ•ญ์„ ์ œ์•ˆํ•ฉ๋‹ˆ๋‹ค.
1319
-
1320
- **๊ฐ•์ **
1321
- - ์กฐ์‚ฌ๋œ ์‚ฌ๋ก€๋“ค(Netflix, Tesla, OpenAI)์ด ๊ฐ ๋‹จ๊ณ„์— ์ ์ ˆํžˆ ํ™œ์šฉ๋จ
1322
- - ๊ตฌ์ฒด์ ์ธ ๋„๊ตฌ์™€ ๊ธฐ๋ฒ•์ด ๋ช…์‹œ๋˜์–ด ์‹คํ–‰ ๊ฐ€๋Šฅ์„ฑ์ด ๋†’์Œ
1323
- - ์ธก์ • ๊ฐ€๋Šฅํ•œ ๋ชฉํ‘œ๊ฐ€ ์กฐ์‚ฌ ๊ฒฐ๊ณผ๋ฅผ ๊ธฐ๋ฐ˜์œผ๋กœ ์„ค์ •๋จ
1324
-
1325
- **๊ฐœ์„  ํ•„์š”์‚ฌํ•ญ**
1326
- 1. **๋ฆฌ์Šคํฌ ๊ด€๋ฆฌ ๊ฐ•ํ™”**
1327
- - ๊ฐ ๋‹จ๊ณ„๋ณ„ ์‹คํŒจ ์‹œ๋‚˜๋ฆฌ์˜ค์™€ ๋Œ€์‘ ๋ฐฉ์•ˆ ์ถ”๊ฐ€ ํ•„์š”
1328
- - ๊ธฐ์ˆ ์  ๋ฌธ์ œ ๋ฐœ์ƒ ์‹œ ๋ฐฑ์—… ๊ณ„ํš ์ˆ˜๋ฆฝ
1329
-
1330
- 2. **๋น„์šฉ ๋ถ„์„ ๊ตฌ์ฒดํ™”**
1331
- - OpenAI ์‚ฌ๋ก€์˜ 70% ์ ˆ๊ฐ์„ ์œ„ํ•œ ๊ตฌ์ฒด์ ์ธ ๋น„์šฉ ๊ณ„์‚ฐ
1332
- - ROI ๋ถ„์„ ๋ฐ ํˆฌ์ž ๋Œ€๋น„ ํšจ๊ณผ ์ธก์ • ๋ฐฉ๋ฒ•
1333
-
1334
- 3. **ํŒ€ ํ˜‘์—… ์ฒด๊ณ„ํ™”**
1335
- - ๋ฐ์ดํ„ฐ ๊ณผํ•™์ž, ML ์—”์ง€๋‹ˆ์–ด, DevOps ๊ฐ„ ์—ญํ•  ๋ถ„๋‹ด ๋ช…ํ™•ํ™”
1336
- - ์ฃผ๊ฐ„ ์ง„ํ–‰ ์ƒํ™ฉ ๊ณต์œ  ๋ฐ ์ด์Šˆ ํŠธ๋ž˜ํ‚น ํ”„๋กœ์„ธ์Šค
1337
-
1338
- **์ถ”๊ฐ€ ๊ถŒ์žฅ์‚ฌํ•ญ**
1339
- - ์ตœ์‹  ์—ฐ๊ตฌ ๋™ํ–ฅ ๋ชจ๋‹ˆํ„ฐ๋ง ์ฒด๊ณ„ ๊ตฌ์ถ•
1340
- - ๊ฒฝ์Ÿ์‚ฌ ๋ฒค์น˜๋งˆํ‚น์„ ์œ„ํ•œ ์ •๊ธฐ์ ์ธ ์กฐ์‚ฌ ํ”„๋กœ์„ธ์Šค
1341
- - ๋‚ด๋ถ€ ์ง€์‹ ๊ณต์œ ๋ฅผ ์œ„ํ•œ ๋ฌธ์„œํ™” ๋ฐ ์„ธ๋ฏธ๋‚˜ ๊ณ„ํš
1342
- - ์‹คํŒจ ์‚ฌ๋ก€์—์„œ ๋ฐฐ์šด ๊ตํ›ˆ์„ ์ถ•์ ํ•˜๋Š” ์‹œ์Šคํ…œ ๊ตฌ์ถ•""",
1343
-
1344
- "executor_final": """๊ฐ๋…์ž AI์˜ ํ”ผ๋“œ๋ฐฑ์„ ์™„์ „ํžˆ ๋ฐ˜์˜ํ•˜์—ฌ ์ตœ์ข… ์‹คํ–‰ ๋ณด๊ณ ์„œ๋ฅผ ์ž‘์„ฑํ•ฉ๋‹ˆ๋‹ค.
1345
-
1346
- # ๐ŸŽฏ ๊ธฐ๊ณ„ํ•™์Šต ๋ชจ๋ธ ์„ฑ๋Šฅ ํ–ฅ์ƒ ์ตœ์ข… ์‹คํ–‰ ๋ณด๊ณ ์„œ
1347
-
1348
- ## ๐Ÿ“‹ Executive Summary
1349
- ๋ณธ ๋ณด๊ณ ์„œ๋Š” ์›น ๊ฒ€์ƒ‰์„ ํ†ตํ•ด ์ˆ˜์ง‘๋œ ์ตœ์‹  ์‚ฌ๋ก€์™€ ๊ฐ๋…์ž AI์˜ ์ „๋žต์  ์ง€์นจ์„ ๋ฐ”ํƒ•์œผ๋กœ, 8์ฃผ๊ฐ„์˜ ์ฒด๊ณ„์ ์ธ ๋ชจ๋ธ ์ตœ์ ํ™” ํ”„๋กœ์ ํŠธ๋ฅผ ์ œ์‹œํ•ฉ๋‹ˆ๋‹ค. ๋ชฉํ‘œ๋Š” ๋ชจ๋ธ ํฌ๊ธฐ 90% ๊ฐ์†Œ, ์ถ”๋ก  ์†๋„ 10๋ฐฐ ํ–ฅ์ƒ, ์šด์˜ ๋น„์šฉ 70% ์ ˆ๊ฐ์ž…๋‹ˆ๋‹ค.
1350
-
1351
- ## ๐Ÿ“Š 1๋‹จ๊ณ„: ํ˜„์žฌ ๋ชจ๋ธ ์ง„๋‹จ ๋ฐ ๋ฒ ์ด์Šค๋ผ์ธ ์„ค์ • (1์ฃผ์ฐจ)
1352
-
1353
- ### ์‹คํ–‰ ๊ณ„ํš
1354
- **์›”-ํ™”์š”์ผ: ์„ฑ๋Šฅ ๋ฉ”ํŠธ๋ฆญ ์ˆ˜์ง‘**
1355
- - MLflow๋ฅผ ํ†ตํ•œ ํ˜„์žฌ ๋ชจ๋ธ ์ „์ฒด ๋ถ„์„
1356
- - Netflix ์‚ฌ๋ก€ ๊ธฐ๋ฐ˜ ํ•ต์‹ฌ ์ง€ํ‘œ: ์ •ํ™•๋„(92%), ์ง€์—ฐ์‹œ๊ฐ„(45ms), ์ฒ˜๋ฆฌ๋Ÿ‰(1,000 req/s)
1357
- - ๋ฆฌ์†Œ์Šค ์‚ฌ์šฉ๋Ÿ‰: GPU ๋ฉ”๋ชจ๋ฆฌ 8GB, ์ถ”๋ก  ์‹œ CPU ์‚ฌ์šฉ๋ฅ  85%
1358
-
1359
- **์ˆ˜-๋ชฉ์š”์ผ: AutoML ์ดˆ๊ธฐ ํƒ์ƒ‰**
1360
- - Optuna๋กœ ํ•˜์ดํผํŒŒ๋ผ๋ฏธํ„ฐ ์ตœ์ ํ™” (200ํšŒ ์‹œ๋„)
1361
- - Ray Tune์œผ๋กœ ๋ถ„์‚ฐ ํ•™์Šต ํ™˜๊ฒฝ ๊ตฌ์ถ•
1362
- - ์ดˆ๊ธฐ ๊ฐœ์„  ๊ฐ€๋Šฅ์„ฑ: 15-20% ์„ฑ๋Šฅ ํ–ฅ์ƒ ์˜ˆ์ƒ
1363
-
1364
- **๊ธˆ์š”์ผ: ์ง„๋‹จ ๋ณด๊ณ ์„œ ๋ฐ ๋ฆฌ์Šคํฌ ๋ถ„์„**
1365
- - ์ฃผ์š” ๋ณ‘๋ชฉ: ๋ชจ๋ธ ํฌ๊ธฐ(2.5GB), ๋ฐฐ์น˜ ์ฒ˜๋ฆฌ ๋น„ํšจ์œจ์„ฑ
1366
- - ๋ฆฌ์Šคํฌ: ๋ฐ์ดํ„ฐ ๋“œ๋ฆฌํ”„ํŠธ, ํ•˜๋“œ์›จ์–ด ์ œ์•ฝ
1367
- - ๋ฐฑ์—… ๊ณ„ํš: ํด๋ผ์šฐ๋“œ GPU ์ธ์Šคํ„ด์Šค ํ™•๋ณด
1368
-
1369
- ### ์˜ˆ์ƒ ์‚ฐ์ถœ๋ฌผ
1370
- - ์ƒ์„ธ ์„ฑ๋Šฅ ๋ฒ ์ด์Šค๋ผ์ธ ๋ฌธ์„œ
1371
- - ๊ฐœ์„  ๊ธฐํšŒ ์šฐ์„ ์ˆœ์œ„ ๋งคํŠธ๋ฆญ์Šค
1372
- - ๋ฆฌ์Šคํฌ ๋ ˆ์ง€์Šคํ„ฐ ๋ฐ ๋Œ€์‘ ๊ณ„ํš
1373
-
1374
- ## ๐Ÿ“Š 2๋‹จ๊ณ„: ๋ฐ์ดํ„ฐ ํ’ˆ์งˆ ๊ฐœ์„  (2-3์ฃผ์ฐจ)
1375
-
1376
- ### ์‹คํ–‰ ๊ณ„ํš
1377
- **2์ฃผ์ฐจ: ๋ฐ์ดํ„ฐ ์ •์ œ ํŒŒ์ดํ”„๋ผ์ธ**
1378
- ```python
1379
- # data_quality_pipeline.py ์ฃผ์š” ๊ตฌ์„ฑ
1380
- class DataQualityPipeline:
1381
- def __init__(self):
1382
- self.validators = [
1383
- MissingValueHandler(threshold=0.05),
1384
- OutlierDetector(method='isolation_forest'),
1385
- LabelConsistencyChecker(),
1386
- DataDriftMonitor()
1387
- ]
1388
-
1389
- def process(self, data):
1390
- # 80% ๊ทœ์น™ ์ ์šฉ: ๋ฐ์ดํ„ฐ ํ’ˆ์งˆ์ด ์„ฑ๋Šฅ์˜ 80% ๊ฒฐ์ •
1391
- for validator in self.validators:
1392
- data = validator.transform(data)
1393
- self.log_metrics(validator.get_stats())
1394
- return data
1395
- ```
1396
-
1397
- **3์ฃผ์ฐจ: ๊ณ ๊ธ‰ ๋ฐ์ดํ„ฐ ์ฆ๊ฐ•**
1398
- - MixUp: 15% ์ •ํ™•๋„ ํ–ฅ์ƒ ์˜ˆ์ƒ
1399
- - CutMix: ๊ฒฝ๊ณ„ ๊ฒ€์ถœ ์„ฑ๋Šฅ 20% ๊ฐœ์„ 
1400
- - AutoAugment: ์ž๋™ ์ตœ์  ์ฆ๊ฐ• ์ •์ฑ… ํƒ์ƒ‰
1401
- - A/B ํ…Œ์ŠคํŠธ: ๊ฐ ๊ธฐ๋ฒ•๋ณ„ ํšจ๊ณผ ์ธก์ •
1402
-
1403
- ### ๋ฆฌ์Šคํฌ ๋Œ€์‘
1404
- - ๋ฐ์ดํ„ฐ ํ’ˆ์งˆ ์ €ํ•˜ ์‹œ: ๋กค๋ฐฑ ๋ฉ”์ปค๋‹ˆ์ฆ˜ ๊ตฌํ˜„
1405
- - ์ฆ๊ฐ• ๊ณผ์ ํ•ฉ ๋ฐฉ์ง€: ๊ฒ€์ฆ์…‹ ๋ถ„๋ฆฌ ๋ฐ ๊ต์ฐจ ๊ฒ€์ฆ
1406
-
1407
- ### ์˜ˆ์ƒ ์‚ฐ์ถœ๋ฌผ
1408
- - ์ž๋™ํ™”๋œ ๋ฐ์ดํ„ฐ ํ’ˆ์งˆ ํŒŒ์ดํ”„๋ผ์ธ
1409
- - ๋ฐ์ดํ„ฐ ํ’ˆ์งˆ ๋Œ€์‹œ๋ณด๋“œ (Grafana)
1410
- - 15% ์ด์ƒ ์„ฑ๋Šฅ ํ–ฅ์ƒ ๊ฒ€์ฆ ๋ณด๊ณ ์„œ
1411
-
1412
- ## ๐Ÿ“Š 3๋‹จ๊ณ„: ๋ชจ๋ธ ์ตœ์ ํ™” ๊ตฌํ˜„ (4-6์ฃผ์ฐจ)
1413
-
1414
- ### ์‹คํ–‰ ๊ณ„ํš
1415
- **4-5์ฃผ์ฐจ: Knowledge Distillation**
1416
- - Teacher ๋ชจ๋ธ: ํ˜„์žฌ 2.5GB ๋ชจ๋ธ
1417
- - Student ๋ชจ๋ธ ์•„ํ‚คํ…์ฒ˜:
1418
- * ํŒŒ๋ผ๋ฏธํ„ฐ ์ˆ˜: 250M โ†’ 25M (90% ๊ฐ์†Œ)
1419
- * ๋ ˆ์ด์–ด ์ˆ˜: 24 โ†’ 6
1420
- * Hidden dimension: 1024 โ†’ 256
1421
- - ํ›ˆ๋ จ ์ „๋žต:
1422
- * Temperature: 5.0
1423
- * Alpha (KD loss weight): 0.7
1424
- * ํ›ˆ๋ จ ์—ํญ: 50
1425
-
1426
- **6์ฃผ์ฐจ: Pruning & Quantization**
1427
- - ๊ตฌ์กฐ์  Pruning:
1428
- * Magnitude ๊ธฐ๋ฐ˜ 50% ์ฑ„๋„ ์ œ๊ฑฐ
1429
- * Fine-tuning: 10 ์—ํญ
1430
- - INT8 Quantization:
1431
- * Post-training quantization
1432
- * Calibration dataset: 1,000 ์ƒ˜ํ”Œ
1433
- - TensorRT ์ตœ์ ํ™” (Tesla ์‚ฌ๋ก€ ์ ์šฉ):
1434
- * FP16 ์ถ”๋ก  ํ™œ์„ฑํ™”
1435
- * ๋™์  ๋ฐฐ์น˜ ์ตœ์ ํ™”
1436
-
1437
- ### ํŒ€ ํ˜‘์—… ์ฒด๊ณ„
1438
- - ML ์—”์ง€๋‹ˆ์–ด: ๋ชจ๋ธ ์•„ํ‚คํ…์ฒ˜ ๋ฐ ํ›ˆ๋ จ
1439
- - DevOps: ์ธํ”„๋ผ ๋ฐ ๋ฐฐํฌ ํŒŒ์ดํ”„๋ผ์ธ
1440
- - ๋ฐ์ดํ„ฐ ๊ณผํ•™์ž: ์„ฑ๋Šฅ ๋ถ„์„ ๋ฐ ๊ฒ€์ฆ
1441
- - ์ฃผ๊ฐ„ ์Šคํƒ ๋“œ์—… ๋ฏธํŒ… ๋ฐ Jira ์ด์Šˆ ํŠธ๋ž˜ํ‚น
1442
-
1443
- ### ์˜ˆ์ƒ ์‚ฐ์ถœ๋ฌผ
1444
- - ์ตœ์ ํ™”๋œ ๋ชจ๋ธ ์ฒดํฌํฌ์ธํŠธ
1445
- - ์„ฑ๋Šฅ ๋ฒค์น˜๋งˆํฌ ์ƒ์„ธ ๋ณด๊ณ ์„œ
1446
- - ๋ชจ๋ธ ๋ณ€ํ™˜ ์ž๋™ํ™” ์Šคํฌ๋ฆฝํŠธ
1447
-
1448
- ## ๐Ÿ“Š 4๋‹จ๊ณ„: ์„ฑ๊ณผ ๊ฒ€์ฆ ๋ฐ ํ”„๋กœ๋•์…˜ ๋ฐฐํฌ (7-8์ฃผ์ฐจ)
1449
-
1450
- ### ์‹คํ–‰ ๊ณ„ํš
1451
- **7์ฃผ์ฐจ: ์ข…ํ•ฉ ์„ฑ๋Šฅ ๊ฒ€์ฆ**
1452
- - ์„ฑ๋Šฅ ์ง€ํ‘œ ๋‹ฌ์„ฑ๋„:
1453
- * ์ถ”๋ก  ์†๋„: 45ms โ†’ 4.5ms (10๋ฐฐ ํ–ฅ์ƒ) โœ“
1454
- * ๋ชจ๋ธ ํฌ๊ธฐ: 2.5GB โ†’ 250MB (90% ๊ฐ์†Œ) โœ“
1455
- * ์ •ํ™•๋„ ์†์‹ค: 92% โ†’ 90.5% (1.5% ์†์‹ค) โœ“
1456
- - ๋น„์šฉ ๋ถ„์„:
1457
- * GPU ์ธ์Šคํ„ด์Šค: $2,000/์›” โ†’ $600/์›”
1458
- * ์ฒ˜๋ฆฌ๋Ÿ‰ ์ฆ๊ฐ€๋กœ ์ธํ•œ ์„œ๋ฒ„ ์ˆ˜ ๊ฐ์†Œ: 10๋Œ€ โ†’ 3๋Œ€
1459
- * ์ด ๋น„์šฉ ์ ˆ๊ฐ: 70% ๋‹ฌ์„ฑ โœ“
1460
-
1461
- **8์ฃผ์ฐจ: ๋‹จ๊ณ„์  ๋ฐฐํฌ**
1462
- - Canary ๋ฐฐํฌ:
1463
- * 1์ผ์ฐจ: 1% ํŠธ๋ž˜ํ”ฝ
1464
- * 3์ผ์ฐจ: 10% ํŠธ๋ž˜ํ”ฝ
1465
- * 7์ผ์ฐจ: 50% ํŠธ๋ž˜ํ”ฝ
1466
- * 14์ผ์ฐจ: 100% ์ „ํ™˜
1467
- - ๋ชจ๋‹ˆํ„ฐ๋ง ์„ค์ •:
1468
- * Prometheus + Grafana ๋Œ€์‹œ๋ณด๋“œ
1469
- * ์•Œ๋ฆผ ์ž„๊ณ„๊ฐ’: ์ง€์—ฐ์‹œ๊ฐ„ >10ms, ์˜ค๋ฅ˜์œจ >0.1%
1470
- - ๋กค๋ฐฑ ๊ณ„ํš:
1471
- * ์ž๋™ ๋กค๋ฐฑ ํŠธ๋ฆฌ๊ฑฐ ์„ค์ •
1472
- * Blue-Green ๋ฐฐํฌ๋กœ ์ฆ‰์‹œ ์ „ํ™˜ ๊ฐ€๋Šฅ
1473
-
1474
- ### ROI ๋ถ„์„
1475
- - ์ดˆ๊ธฐ ํˆฌ์ž: $50,000 (์ธ๊ฑด๋น„ + ์ธํ”„๋ผ)
1476
- - ์›”๊ฐ„ ์ ˆ๊ฐ์•ก: $14,000
1477
- - ํˆฌ์ž ํšŒ์ˆ˜ ๊ธฐ๊ฐ„: 3.6๊ฐœ์›”
1478
- - 1๋…„ ์ˆœ์ด์ต: $118,000
1479
-
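As a quick sanity check, the ROI figures quoted in this canned report are internally consistent:

$$
\text{payback} = \frac{\$50{,}000}{\$14{,}000/\text{month}} \approx 3.6\ \text{months},
\qquad
12 \times \$14{,}000 - \$50{,}000 = \$118{,}000\ \text{(first-year net)}.
$$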
1480
- ### ์˜ˆ์ƒ ์‚ฐ์ถœ๋ฌผ
1481
- - ํ”„๋กœ๋•์…˜ ๋ฐฐํฌ ์™„๋ฃŒ
1482
- - ์‹ค์‹œ๊ฐ„ ๋ชจ๋‹ˆํ„ฐ๋ง ๋Œ€์‹œ๋ณด๋“œ
1483
- - ROI ๋ถ„์„ ๋ณด๊ณ ์„œ
1484
- - ์šด์˜ ๊ฐ€์ด๋“œ ๋ฌธ์„œ
1485
-
1486
- ## ๐Ÿ“ˆ ์ง€์†์  ๊ฐœ์„  ๊ณ„ํš
1487
-
1488
- ### ๋ชจ๋‹ˆํ„ฐ๋ง ๋ฐ ์œ ์ง€๋ณด์ˆ˜
1489
- - ์›”๊ฐ„ ์„ฑ๋Šฅ ๋ฆฌ๋ทฐ ๋ฏธํŒ…
1490
- - ๋ถ„๊ธฐ๋ณ„ ์žฌํ›ˆ๋ จ ๊ณ„ํš
1491
- - ์‹ ๊ธฐ์ˆ  ๋„์ž… ๊ฒ€ํ†  (Sparse Models, MoE)
1492
-
1493
- ### ์ง€์‹ ๊ณต์œ 
1494
- - ๋‚ด๋ถ€ ๊ธฐ์ˆ  ์„ธ๋ฏธ๋‚˜ (์›” 1ํšŒ)
1495
- - ์™ธ๋ถ€ ์ปจํผ๋Ÿฐ์Šค ๋ฐœํ‘œ ์ค€๋น„
1496
- - ์˜คํ”ˆ์†Œ์Šค ๊ธฐ์—ฌ ๊ณ„ํš
1497
-
1498
- ### ์ฐจ๊ธฐ ํ”„๋กœ์ ํŠธ
1499
- - ์—ฃ์ง€ ๋””๋ฐ”์ด์Šค ๋ฐฐํฌ ์ตœ์ ํ™”
1500
- - ์—ฐํ•ฉ ํ•™์Šต(Federated Learning) ๋„์ž…
1501
- - AutoML ํ”Œ๋žซํผ ๊ตฌ์ถ•
1502
-
1503
- ## ๐Ÿ“ ๊ฒฐ๋ก 
1504
- ๋ณธ ํ”„๋กœ์ ํŠธ๋Š” ์ตœ์‹  ์—ฐ๊ตฌ ๊ฒฐ๊ณผ์™€ ์—…๊ณ„ ๋ฒ ์ŠคํŠธ ํ”„๋ž™ํ‹ฐ์Šค๋ฅผ ์ ์šฉํ•˜์—ฌ, 8์ฃผ ๋งŒ์— ๋ชจ๋ธ ์„ฑ๋Šฅ์„ ํš๊ธฐ์ ์œผ๋กœ ๊ฐœ์„ ํ•˜๊ณ  ์šด์˜ ๋น„์šฉ์„ 70% ์ ˆ๊ฐํ•˜๋Š” ์„ฑ๊ณผ๋ฅผ ๋‹ฌ์„ฑํ•  ๊ฒƒ์œผ๋กœ ์˜ˆ์ƒ๋ฉ๋‹ˆ๋‹ค. ์ฒด๊ณ„์ ์ธ ์ ‘๊ทผ๊ณผ ๋ฆฌ์Šคํฌ ๊ด€๋ฆฌ, ๊ทธ๋ฆฌ๊ณ  ์ง€์†์ ์ธ ๊ฐœ์„  ๊ณ„ํš์„ ํ†ตํ•ด ์žฅ๊ธฐ์ ์ธ ๊ฒฝ์Ÿ๋ ฅ์„ ํ™•๋ณดํ•  ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค.
1505
-
1506
- ---
1507
- *์ž‘์„ฑ์ผ: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}*
1508
- *์ž‘์„ฑ์ž: ํ˜‘๋ ฅ์  AI ์‹œ์Šคํ…œ (๊ฐ๋…์ž, ์กฐ์‚ฌ์ž, ์‹คํ–‰์ž AI)*"""
1509
- }
1510
-
1511
- # ํ”„๋กฌํ”„ํŠธ ๋‚ด์šฉ์— ๋”ฐ๋ผ ์ ์ ˆํ•œ ์‘๋‹ต ์„ ํƒ
1512
- if role == "supervisor" and "์กฐ์‚ฌ์ž AI๊ฐ€ ์ •๋ฆฌํ•œ" in messages[0]["content"]:
1513
- response = test_responses["supervisor_execution"]
1514
- elif role == "supervisor" and messages[0]["content"].find("์‹คํ–‰์ž AI์˜ ๋‹ต๋ณ€") > -1:
1515
- response = test_responses["supervisor_review"]
1516
- elif role == "supervisor":
1517
- response = test_responses["supervisor_initial"]
1518
- elif role == "researcher":
1519
- response = test_responses["researcher"]
1520
- elif role == "executor" and "์ตœ์ข… ๋ณด๊ณ ์„œ" in messages[0]["content"]:
1521
- response = test_responses["executor_final"]
1522
- else:
1523
- response = test_responses["executor"]
1524
-
1525
- yield from self.simulate_streaming(response, role)
1526
- return
1527
-
1528
- # ์‹ค์ œ API ํ˜ธ์ถœ
1529
- try:
1530
- system_prompts = {
1531
- "supervisor": "๋‹น์‹ ์€ ๊ฑฐ์‹œ์  ๊ด€์ ์—์„œ ๋ถ„์„ํ•˜๊ณ  ์ง€๋„ํ•˜๋Š” ๊ฐ๋…์ž AI์ž…๋‹ˆ๋‹ค.",
1532
- "researcher": "๋‹น์‹ ์€ ์ •๋ณด๋ฅผ ์กฐ์‚ฌํ•˜๊ณ  ์ฒด๊ณ„์ ์œผ๋กœ ์ •๋ฆฌํ•˜๋Š” ์กฐ์‚ฌ์ž AI์ž…๋‹ˆ๋‹ค.",
1533
- "executor": "๋‹น์‹ ์€ ์„ธ๋ถ€์ ์ธ ๋‚ด์šฉ์„ ๊ตฌํ˜„ํ•˜๋Š” ์‹คํ–‰์ž AI์ž…๋‹ˆ๋‹ค."
1534
- }
1535
-
1536
- full_messages = [
1537
- {"role": "system", "content": system_prompts.get(role, "")},
1538
- *messages
1539
- ]
1540
-
1541
- payload = {
1542
- "model": self.model_id,
1543
- "messages": full_messages,
1544
- "max_tokens": 2048,
1545
- "temperature": 0.7,
1546
- "top_p": 0.8,
1547
- "stream": True,
1548
- "stream_options": {"include_usage": True}
1549
- }
1550
-
1551
- logger.info(f"API ์ŠคํŠธ๋ฆฌ๋ฐ ํ˜ธ์ถœ ์‹œ์ž‘ - Role: {role}")
1552
-
1553
- response = requests.post(
1554
- self.api_url,
1555
- headers=self.create_headers(),
1556
- json=payload,
1557
- stream=True,
1558
- timeout=10
1559
- )
1560
-
1561
- if response.status_code != 200:
1562
- logger.error(f"API ์˜ค๋ฅ˜: {response.status_code}")
1563
- yield f"โŒ API ์˜ค๋ฅ˜ ({response.status_code}): {response.text[:200]}"
1564
- return
1565
-
1566
- for line in response.iter_lines():
1567
- if line:
1568
- line = line.decode('utf-8')
1569
- if line.startswith("data: "):
1570
- data = line[6:]
1571
- if data == "[DONE]":
1572
- break
1573
- try:
1574
- chunk = json.loads(data)
1575
- if "choices" in chunk and chunk["choices"]:
1576
- content = chunk["choices"][0].get("delta", {}).get("content", "")
1577
- if content:
1578
- yield content
1579
- except json.JSONDecodeError:
1580
- continue
1581
-
1582
- except requests.exceptions.Timeout:
1583
- yield "โฑ๏ธ API ํ˜ธ์ถœ ์‹œ๊ฐ„์ด ์ดˆ๊ณผ๋˜์—ˆ์Šต๋‹ˆ๋‹ค. ๋‹ค์‹œ ์‹œ๋„ํ•ด์ฃผ์„ธ์š”."
1584
- except requests.exceptions.ConnectionError:
1585
- yield "๐Ÿ”Œ API ์„œ๋ฒ„์— ์—ฐ๊ฒฐํ•  ์ˆ˜ ์—†์Šต๋‹ˆ๋‹ค. ์ธํ„ฐ๋„ท ์—ฐ๊ฒฐ์„ ํ™•์ธํ•ด์ฃผ์„ธ์š”."
1586
- except Exception as e:
1587
- logger.error(f"์ŠคํŠธ๋ฆฌ๋ฐ ์ค‘ ์˜ค๋ฅ˜: {str(e)}")
1588
- yield f"โŒ ์˜ค๋ฅ˜ ๋ฐœ์ƒ: {str(e)}"
1589
-
1590
- # ์‹œ์Šคํ…œ ์ธ์Šคํ„ด์Šค ์ƒ์„ฑ
1591
- llm_system = LLMCollaborativeSystem()
1592
-
1593
- def process_query_streaming(user_query: str, history: List):
1594
- """์ŠคํŠธ๋ฆฌ๋ฐ์„ ์ง€์›ํ•˜๋Š” ์ฟผ๋ฆฌ ์ฒ˜๋ฆฌ"""
1595
- if not user_query:
1596
- return history, "", "", "", "", "โŒ ์งˆ๋ฌธ์„ ์ž…๋ ฅํ•ด์ฃผ์„ธ์š”."
1597
-
1598
- conversation_log = []
1599
- all_responses = {"supervisor": [], "researcher": [], "executor": []}
1600
-
1601
- try:
1602
- # 1๋‹จ๊ณ„: ๊ฐ๋…์ž AI ์ดˆ๊ธฐ ๋ถ„์„ ๋ฐ ํ‚ค์›Œ๋“œ ์ถ”์ถœ
1603
- supervisor_prompt = llm_system.create_supervisor_initial_prompt(user_query)
1604
- supervisor_initial_response = ""
1605
-
1606
- supervisor_text = "[์ดˆ๊ธฐ ๋ถ„์„] ๐Ÿ”„ ์ƒ์„ฑ ์ค‘...\n"
1607
- for chunk in llm_system.call_llm_streaming(
1608
- [{"role": "user", "content": supervisor_prompt}],
1609
- "supervisor"
1610
- ):
1611
- supervisor_initial_response += chunk
1612
- supervisor_text = f"[์ดˆ๊ธฐ ๋ถ„์„] - {datetime.now().strftime('%H:%M:%S')}\n{supervisor_initial_response}"
1613
- yield history, supervisor_text, "", "", "", "๐Ÿ”„ ๊ฐ๋…์ž AI๊ฐ€ ๋ถ„์„ ์ค‘..."
1614
-
1615
- all_responses["supervisor"].append(supervisor_initial_response)
1616
-
1617
- # ํ‚ค์›Œ๋“œ ์ถ”์ถœ
1618
- keywords = llm_system.extract_keywords(supervisor_initial_response)
1619
- logger.info(f"์ถ”์ถœ๋œ ํ‚ค์›Œ๋“œ: {keywords}")
1620
-
1621
- # 2๋‹จ๊ณ„: ๋ธŒ๋ ˆ์ด๋ธŒ ๊ฒ€์ƒ‰ ์ˆ˜ํ–‰
1622
- researcher_text = "[์›น ๊ฒ€์ƒ‰] ๐Ÿ” ๊ฒ€์ƒ‰ ์ค‘...\n"
1623
- yield history, supervisor_text, researcher_text, "", "", "๐Ÿ” ์›น ๊ฒ€์ƒ‰ ์ˆ˜ํ–‰ ์ค‘..."
1624
-
1625
- search_results = {}
1626
- for keyword in keywords:
1627
- results = llm_system.brave_search(keyword)
1628
- if results:
1629
- search_results[keyword] = results
1630
- researcher_text += f"โœ“ '{keyword}' ๊ฒ€์ƒ‰ ์™„๋ฃŒ\n"
1631
- yield history, supervisor_text, researcher_text, "", "", f"๐Ÿ” '{keyword}' ๊ฒ€์ƒ‰ ์ค‘..."
1632
-
1633
- # 3๋‹จ๊ณ„: ์กฐ์‚ฌ์ž AI๊ฐ€ ๊ฒ€์ƒ‰ ๊ฒฐ๊ณผ ์ •๋ฆฌ
1634
- researcher_prompt = llm_system.create_researcher_prompt(user_query, supervisor_initial_response, search_results)
1635
- researcher_response = ""
1636
-
1637
- researcher_text = "[์กฐ์‚ฌ ๊ฒฐ๊ณผ ์ •๋ฆฌ] ๐Ÿ”„ ์ƒ์„ฑ ์ค‘...\n"
1638
- for chunk in llm_system.call_llm_streaming(
1639
- [{"role": "user", "content": researcher_prompt}],
1640
- "researcher"
1641
- ):
1642
- researcher_response += chunk
1643
- researcher_text = f"[์กฐ์‚ฌ ๊ฒฐ๊ณผ ์ •๋ฆฌ] - {datetime.now().strftime('%H:%M:%S')}\n{researcher_response}"
1644
- yield history, supervisor_text, researcher_text, "", "", "๐Ÿ“ ์กฐ์‚ฌ์ž AI๊ฐ€ ์ •๋ฆฌ ์ค‘..."
1645
-
1646
- all_responses["researcher"].append(researcher_response)
1647
-
1648
- # 4๋‹จ๊ณ„: ๊ฐ๋…์ž AI๊ฐ€ ์กฐ์‚ฌ ๋‚ด์šฉ ๊ธฐ๋ฐ˜์œผ๋กœ ์‹คํ–‰ ์ง€์‹œ
1649
- supervisor_execution_prompt = llm_system.create_supervisor_execution_prompt(user_query, researcher_response)
1650
- supervisor_execution_response = ""
1651
-
1652
- supervisor_text += "\n\n---\n\n[์‹คํ–‰ ์ง€์‹œ] ๐Ÿ”„ ์ƒ์„ฑ ์ค‘...\n"
1653
- for chunk in llm_system.call_llm_streaming(
1654
- [{"role": "user", "content": supervisor_execution_prompt}],
1655
- "supervisor"
1656
- ):
1657
- supervisor_execution_response += chunk
1658
- temp_text = f"{all_responses['supervisor'][0]}\n\n---\n\n[์‹คํ–‰ ์ง€์‹œ] - {datetime.now().strftime('%H:%M:%S')}\n{supervisor_execution_response}"
1659
- supervisor_text = f"[์ดˆ๊ธฐ ๋ถ„์„] - {datetime.now().strftime('%H:%M:%S')}\n{temp_text}"
1660
- yield history, supervisor_text, researcher_text, "", "", "๐ŸŽฏ ๊ฐ๋…์ž AI๊ฐ€ ์ง€์‹œ ์ค‘..."
1661
-
1662
- all_responses["supervisor"].append(supervisor_execution_response)
1663
-
1664
- # 5๋‹จ๊ณ„: ์‹คํ–‰์ž AI๊ฐ€ ์กฐ์‚ฌ ๋‚ด์šฉ๊ณผ ์ง€์‹œ๋ฅผ ๊ธฐ๋ฐ˜์œผ๋กœ ์ดˆ๊ธฐ ๊ตฌํ˜„
1665
- executor_prompt = llm_system.create_executor_prompt(user_query, supervisor_execution_response, researcher_response)
1666
- executor_response = ""
1667
-
1668
- executor_text = "[์ดˆ๊ธฐ ๊ตฌํ˜„] ๐Ÿ”„ ์ƒ์„ฑ ์ค‘...\n"
1669
- for chunk in llm_system.call_llm_streaming(
1670
- [{"role": "user", "content": executor_prompt}],
1671
- "executor"
1672
- ):
1673
- executor_response += chunk
1674
- executor_text = f"[์ดˆ๊ธฐ ๊ตฌํ˜„] - {datetime.now().strftime('%H:%M:%S')}\n{executor_response}"
1675
- yield history, supervisor_text, researcher_text, executor_text, "", "๐Ÿ”ง ์‹คํ–‰์ž AI๊ฐ€ ๊ตฌํ˜„ ์ค‘..."
1676
-
1677
- all_responses["executor"].append(executor_response)
1678
-
1679
- # 6๋‹จ๊ณ„: ๊ฐ๋…์ž AI ๊ฒ€ํ†  ๋ฐ ํ”ผ๋“œ๋ฐฑ
1680
- review_prompt = f"""๋‹น์‹ ์€ ๊ฑฐ์‹œ์  ๊ด€์ ์—์„œ ๋ถ„์„ํ•˜๊ณ  ์ง€๋„ํ•˜๋Š” ๊ฐ๋…์ž AI์ž…๋‹ˆ๋‹ค.
1681
-
1682
- ์‚ฌ์šฉ์ž ์งˆ๋ฌธ: {user_query}
1683
-
1684
- ์‹คํ–‰์ž AI์˜ ๋‹ต๋ณ€:
1685
- {executor_response}
1686
-
1687
- ์ด ๋‹ต๋ณ€์„ ๊ฒ€ํ† ํ•˜๊ณ  ๊ฐœ์„ ์ ๊ณผ ์ถ”๊ฐ€ ๊ณ ๋ ค์‚ฌํ•ญ์„ ์ œ์‹œํ•ด์ฃผ์„ธ์š”. ๊ตฌ์ฒด์ ์ด๊ณ  ์‹คํ–‰ ๊ฐ€๋Šฅํ•œ ๊ฐœ์„  ๋ฐฉ์•ˆ์„ ์ œ์‹œํ•˜์„ธ์š”."""
1688
-
1689
- review_response = ""
1690
- supervisor_text = f"[์ดˆ๊ธฐ ๋ถ„์„] - {datetime.now().strftime('%H:%M:%S')}\n{all_responses['supervisor'][0]}\n\n---\n\n[์‹คํ–‰ ์ง€์‹œ] - {datetime.now().strftime('%H:%M:%S')}\n{all_responses['supervisor'][1]}\n\n---\n\n[๊ฒ€ํ†  ๋ฐ ํ”ผ๋“œ๋ฐฑ] ๐Ÿ”„ ์ƒ์„ฑ ์ค‘...\n"
1691
-
1692
- for chunk in llm_system.call_llm_streaming(
1693
- [{"role": "user", "content": review_prompt}],
1694
- "supervisor"
1695
- ):
1696
- review_response += chunk
1697
- temp_text = f"{all_responses['supervisor'][0]}\n\n---\n\n[์‹คํ–‰ ์ง€์‹œ] - {datetime.now().strftime('%H:%M:%S')}\n{all_responses['supervisor'][1]}\n\n---\n\n[๊ฒ€ํ†  ๋ฐ ํ”ผ๋“œ๋ฐฑ] - {datetime.now().strftime('%H:%M:%S')}\n{review_response}"
1698
- supervisor_text = f"[์ดˆ๊ธฐ ๋ถ„์„] - {datetime.now().strftime('%H:%M:%S')}\n{temp_text}"
1699
- yield history, supervisor_text, researcher_text, executor_text, "", "๐Ÿ”„ ๊ฐ๋…์ž AI๊ฐ€ ๊ฒ€ํ†  ์ค‘..."
1700
-
1701
- all_responses["supervisor"].append(review_response)
1702
-
1703
- # 7๋‹จ๊ณ„: ์‹คํ–‰์ž AI ์ตœ์ข… ๋ณด๊ณ ์„œ (ํ”ผ๋“œ๋ฐฑ ๋ฐ˜์˜)
1704
- final_executor_prompt = llm_system.create_executor_final_prompt(
1705
- user_query,
1706
- executor_response,
1707
- review_response,
1708
- researcher_response
1709
- )
1710
- final_executor_response = ""
1711
-
1712
- executor_text += "\n\n---\n\n[์ตœ์ข… ๋ณด๊ณ ์„œ] ๐Ÿ”„ ์ž‘์„ฑ ์ค‘...\n"
1713
- for chunk in llm_system.call_llm_streaming(
1714
- [{"role": "user", "content": final_executor_prompt}],
1715
- "executor"
1716
- ):
1717
- final_executor_response += chunk
1718
- temp_text = f"[์ดˆ๊ธฐ ๊ตฌํ˜„] - {datetime.now().strftime('%H:%M:%S')}\n{all_responses['executor'][0]}\n\n---\n\n[์ตœ์ข… ๋ณด๊ณ ์„œ] - {datetime.now().strftime('%H:%M:%S')}\n{final_executor_response}"
1719
- executor_text = temp_text
1720
- yield history, supervisor_text, researcher_text, executor_text, "", "๐Ÿ“„ ์ตœ์ข… ๋ณด๊ณ ์„œ ์ž‘์„ฑ ์ค‘..."
1721
-
1722
- all_responses["executor"].append(final_executor_response)
1723
-
1724
- # ์ตœ์ข… ๊ฒฐ๊ณผ ์ƒ์„ฑ (์ตœ์ข… ๋ณด๊ณ ์„œ๋ฅผ ๋ฉ”์ธ์œผ๋กœ)
1725
- final_summary = f"""## ๐ŸŽฏ ์ตœ์ข… ์ข…ํ•ฉ ๋ณด๊ณ ์„œ
1726
-
1727
- ### ๐Ÿ“Œ ์‚ฌ์šฉ์ž ์งˆ๋ฌธ
1728
- {user_query}
1729
-
1730
- ### ๐Ÿ“„ ์ตœ์ข… ๋ณด๊ณ ์„œ (์‹คํ–‰์ž AI - ํ”ผ๋“œ๋ฐฑ ๋ฐ˜์˜)
1731
- {final_executor_response}
1732
-
1733
- ---
1734
-
1735
- <details>
1736
- <summary>๐Ÿ“‹ ์ „์ฒด ํ˜‘๋ ฅ ๊ณผ์ • ๋ณด๊ธฐ</summary>
1737
-
1738
- #### ๐Ÿ” ๊ฑฐ์‹œ์  ๋ถ„์„ (๊ฐ๋…์ž AI)
1739
- {all_responses['supervisor'][0]}
1740
-
1741
- #### ๐Ÿ“š ์กฐ์‚ฌ ๊ฒฐ๊ณผ (์กฐ์‚ฌ์ž AI)
1742
- {researcher_response}
1743
-
1744
- #### ๐ŸŽฏ ์‹คํ–‰ ์ง€์‹œ (๊ฐ๋…์ž AI)
1745
- {all_responses['supervisor'][1]}
1746
-
1747
- #### ๐Ÿ’ก ์ดˆ๊ธฐ ๊ตฌํ˜„ (์‹คํ–‰์ž AI)
1748
- {executor_response}
1749
-
1750
- #### โœจ ๊ฒ€ํ†  ๋ฐ ๊ฐœ์„ ์‚ฌํ•ญ (๊ฐ๋…์ž AI)
1751
- {review_response}
1752
-
1753
- </details>
1754
-
1755
- ---
1756
- *์ด ๋ณด๊ณ ์„œ๋Š” ์›น ๊ฒ€์ƒ‰์„ ํ†ตํ•œ ์ตœ์‹  ์ •๋ณด์™€ AI๋“ค์˜ ํ˜‘๋ ฅ, ๊ทธ๋ฆฌ๊ณ  ํ”ผ๋“œ๋ฐฑ ๋ฐ˜์˜์„ ํ†ตํ•ด ์ž‘์„ฑ๋˜์—ˆ์Šต๋‹ˆ๋‹ค.*"""
1757
-
1758
- # ํžˆ์Šคํ† ๋ฆฌ ์—…๋ฐ์ดํŠธ
1759
- new_history = history + [(user_query, final_summary)]
1760
-
1761
- yield new_history, supervisor_text, researcher_text, executor_text, final_summary, "โœ… ์ตœ์ข… ๋ณด๊ณ ์„œ ์™„์„ฑ!"
1762
-
1763
- except Exception as e:
1764
- error_msg = f"โŒ ์ฒ˜๋ฆฌ ์ค‘ ์˜ค๋ฅ˜: {str(e)}"
1765
- yield history, "", "", "", error_msg, error_msg
1766
-
1767
- def clear_all():
1768
- """๋ชจ๋“  ๋‚ด์šฉ ์ดˆ๊ธฐํ™”"""
1769
- return [], "", "", "", "", "๐Ÿ”„ ์ดˆ๊ธฐํ™”๋˜์—ˆ์Šต๋‹ˆ๋‹ค."
1770
-
1771
- # Gradio ์ธํ„ฐํŽ˜์ด์Šค
1772
- css = """
1773
- .gradio-container {
1774
- font-family: 'Arial', sans-serif;
1775
- }
1776
- .supervisor-box textarea {
1777
- border-left: 4px solid #667eea !important;
1778
- padding-left: 10px !important;
1779
- }
1780
- .researcher-box textarea {
1781
- border-left: 4px solid #10b981 !important;
1782
- padding-left: 10px !important;
1783
- }
1784
- .executor-box textarea {
1785
- border-left: 4px solid #764ba2 !important;
1786
- padding-left: 10px !important;
1787
- }
1788
- """
1789
-
1790
- with gr.Blocks(title="ํ˜‘๋ ฅ์  LLM ์‹œ์Šคํ…œ", theme=gr.themes.Soft(), css=css) as app:
1791
- gr.Markdown(
1792
- f"""
1793
- # ๐Ÿค ํ˜‘๋ ฅ์  LLM ์‹œ์Šคํ…œ (์กฐ์‚ฌ์ž ํฌํ•จ + ํ”ผ๋“œ๋ฐฑ ๋ฐ˜์˜)
1794
-
1795
- > ๊ฐ๋…์ž, ์กฐ์‚ฌ์ž, ์‹คํ–‰์ž AI๊ฐ€ ํ˜‘๋ ฅํ•˜์—ฌ ํ”ผ๋“œ๋ฐฑ์„ ๋ฐ˜์˜ํ•œ ์™„์ „ํ•œ ๋ณด๊ณ ์„œ๋ฅผ ์ž‘์„ฑํ•ฉ๋‹ˆ๋‹ค.
1796
-
1797
- **์ƒํƒœ**:
1798
- - LLM: {'๐ŸŸข ์‹ค์ œ ๋ชจ๋“œ' if not llm_system.test_mode else '๐ŸŸก ํ…Œ์ŠคํŠธ ๋ชจ๋“œ'}
1799
- - Brave Search: {'๐ŸŸข ํ™œ์„ฑํ™”' if llm_system.bapi_token != "YOUR_BRAVE_API_TOKEN" else '๐ŸŸก ํ…Œ์ŠคํŠธ ๋ชจ๋“œ'}
1800
-
1801
- **7๋‹จ๊ณ„ ํ˜‘๋ ฅ ํ”„๋กœ์„ธ์Šค:**
1802
- 1. ๐Ÿง  **๊ฐ๋…์ž**: ๊ฑฐ์‹œ์  ๋ถ„์„ ๋ฐ ๊ฒ€์ƒ‰ ํ‚ค์›Œ๋“œ ์ถ”์ถœ
1803
- 2. ๐Ÿ” **์กฐ์‚ฌ์ž**: ๋ธŒ๋ ˆ์ด๋ธŒ ๊ฒ€์ƒ‰์œผ๋กœ ์ตœ์‹  ์ •๋ณด ์ˆ˜์ง‘
1804
- 3. ๐Ÿง  **๊ฐ๋…์ž**: ์กฐ์‚ฌ ๋‚ด์šฉ ๊ธฐ๋ฐ˜ ๊ตฌ์ฒด์  ์‹คํ–‰ ์ง€์‹œ
1805
- 4. ๐Ÿ‘๏ธ **์‹คํ–‰์ž**: ์ดˆ๊ธฐ ์‹คํ–‰ ๊ณ„ํš ์ž‘์„ฑ
1806
- 5. ๐Ÿง  **๊ฐ๋…์ž**: ๊ฒ€ํ†  ๋ฐ ๊ฐœ์„ ์‚ฌํ•ญ ํ”ผ๋“œ๋ฐฑ
1807
- 6. ๐Ÿ‘๏ธ **์‹คํ–‰์ž**: ํ”ผ๋“œ๋ฐฑ ๋ฐ˜์˜ํ•œ ์ตœ์ข… ๋ณด๊ณ ์„œ ์ž‘์„ฑ
1808
- 7. ๐Ÿ“„ **์ตœ์ข… ์‚ฐ์ถœ๋ฌผ**: ์™„์ „ํ•œ ์‹คํ–‰ ๋ณด๊ณ ์„œ
1809
- """
1810
- )
1811
-
1812
- with gr.Row():
1813
- # ์™ผ์ชฝ: ์ž…๋ ฅ ๋ฐ ์ฑ„ํŒ… ๊ธฐ๋ก
1814
- with gr.Column(scale=1):
1815
- chatbot = gr.Chatbot(
1816
- label="๐Ÿ’ฌ ๋Œ€ํ™” ๊ธฐ๋ก",
1817
- height=600,
1818
- show_copy_button=True,
1819
- bubble_full_width=False
1820
- )
1821
-
1822
- user_input = gr.Textbox(
1823
- label="์งˆ๋ฌธ ์ž…๋ ฅ",
1824
- placeholder="์˜ˆ: ๊ธฐ๊ณ„ํ•™์Šต ๋ชจ๋ธ์˜ ์„ฑ๋Šฅ์„ ํ–ฅ์ƒ์‹œํ‚ค๋Š” ๋ฐฉ๋ฒ•์€?",
1825
- lines=3
1826
- )
1827
-
1828
- with gr.Row():
1829
- submit_btn = gr.Button("๐Ÿš€ ๋ถ„์„ ์‹œ์ž‘", variant="primary", scale=2)
1830
- clear_btn = gr.Button("๐Ÿ—‘๏ธ ์ดˆ๊ธฐํ™”", scale=1)
1831
-
1832
- status_text = gr.Textbox(
1833
- label="์ƒํƒœ",
1834
  interactive=False,
1835
- value="๋Œ€๊ธฐ ์ค‘...",
1836
- max_lines=1
1837
  )
1838
-
1839
- # ์˜ค๋ฅธ์ชฝ: AI ์ถœ๋ ฅ
1840
- with gr.Column(scale=2):
1841
- # ์ตœ์ข… ๊ฒฐ๊ณผ
1842
- with gr.Accordion("๐Ÿ“Š ์ตœ์ข… ์ข…ํ•ฉ ๊ฒฐ๊ณผ", open=True):
1843
- final_output = gr.Markdown(
1844
- value="*์งˆ๋ฌธ์„ ์ž…๋ ฅํ•˜๋ฉด ๊ฒฐ๊ณผ๊ฐ€ ์—ฌ๊ธฐ์— ํ‘œ์‹œ๋ฉ๋‹ˆ๋‹ค.*"
1845
- )
1846
-
1847
- # AI ์ถœ๋ ฅ๋“ค
1848
- with gr.Row():
1849
- # ๊ฐ๋…์ž AI ์ถœ๋ ฅ
1850
- with gr.Column():
1851
- gr.Markdown("### ๐Ÿง  ๊ฐ๋…์ž AI (๊ฑฐ์‹œ์  ๋ถ„์„)")
1852
- supervisor_output = gr.Textbox(
1853
- label="",
1854
- lines=12,
1855
- max_lines=15,
1856
- interactive=False,
1857
- elem_classes=["supervisor-box"]
1858
- )
1859
-
1860
- with gr.Row():
1861
- # ์กฐ์‚ฌ์ž AI ์ถœ๋ ฅ
1862
- with gr.Column():
1863
- gr.Markdown("### ๐Ÿ” ์กฐ์‚ฌ์ž AI (์›น ๊ฒ€์ƒ‰ & ์ •๋ฆฌ)")
1864
- researcher_output = gr.Textbox(
1865
- label="",
1866
- lines=12,
1867
- max_lines=15,
1868
- interactive=False,
1869
- elem_classes=["researcher-box"]
1870
- )
1871
-
1872
- # ์‹คํ–‰์ž AI ์ถœ๋ ฅ
1873
- with gr.Column():
1874
- gr.Markdown("### ๐Ÿ‘๏ธ ์‹คํ–‰์ž AI (๋ฏธ์‹œ์  ๊ตฌํ˜„)")
1875
- executor_output = gr.Textbox(
1876
- label="",
1877
- lines=12,
1878
- max_lines=15,
1879
- interactive=False,
1880
- elem_classes=["executor-box"]
1881
- )
1882
 
1883
  # ์˜ˆ์ œ
1884
  gr.Examples(
@@ -1896,8 +919,8 @@ with gr.Blocks(title="협력적 LLM 시스템", theme=gr.themes.Soft(), css=css)
1896
  # ์ด๋ฒคํŠธ ํ•ธ๋“ค๋Ÿฌ
1897
  submit_btn.click(
1898
  fn=process_query_streaming,
1899
- inputs=[user_input, chatbot],
1900
- outputs=[chatbot, supervisor_output, researcher_output, executor_output, final_output, status_text]
1901
  ).then(
1902
  fn=lambda: "",
1903
  outputs=[user_input]
@@ -1905,8 +928,8 @@ with gr.Blocks(title="협력적 LLM 시스템", theme=gr.themes.Soft(), css=css)
1905
 
1906
  user_input.submit(
1907
  fn=process_query_streaming,
1908
- inputs=[user_input, chatbot],
1909
- outputs=[chatbot, supervisor_output, researcher_output, executor_output, final_output, status_text]
1910
  ).then(
1911
  fn=lambda: "",
1912
  outputs=[user_input]
@@ -1914,7 +937,7 @@ with gr.Blocks(title="ํ˜‘๋ ฅ์  LLM ์‹œ์Šคํ…œ", theme=gr.themes.Soft(), css=css)
1914
 
1915
  clear_btn.click(
1916
  fn=clear_all,
1917
- outputs=[chatbot, supervisor_output, researcher_output, executor_output, final_output, status_text]
1918
  )
1919
 
1920
  gr.Markdown(
 
614
  # ์‹œ์Šคํ…œ ์ธ์Šคํ„ด์Šค ์ƒ์„ฑ
615
  llm_system = LLMCollaborativeSystem()
616
 
617
+ # 내부 히스토리 관리 (UI에는 표시하지 않음)
618
+ internal_history = []
619
+
620
+ def process_query_streaming(user_query: str):
621
  """์ŠคํŠธ๋ฆฌ๋ฐ์„ ์ง€์›ํ•˜๋Š” ์ฟผ๋ฆฌ ์ฒ˜๋ฆฌ"""
622
+ global internal_history
623
+
624
  if not user_query:
625
+ return "", "", "", "", "❌ 질문을 입력해주세요."
626
 
627
  conversation_log = []
628
  all_responses = {"supervisor": [], "researcher": [], "executor": []}
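The hunk above turns process_query_streaming into a generator that takes only the query, yields five values (supervisor, researcher, executor, final report, status), and keeps past Q&A in a module-level internal_history list instead of a Chatbot component. A minimal, self-contained sketch of that wiring pattern (component names follow the ones used later in this diff; the pipeline body is stubbed out, so this is an illustration rather than the app's actual logic):

```python
import gradio as gr

internal_history = []  # module-level history, no longer rendered in the UI

def process_query_streaming(user_query: str):
    """Stub generator with the same five-output shape as the real pipeline."""
    if not user_query:
        yield "", "", "", "", "❌ 질문을 입력해주세요."
        return
    yield "supervisor analysis...", "", "", "", "🔄 1/7 분석 중..."
    yield "supervisor analysis", "search summary...", "", "", "🔍 2/7 검색 중..."
    final_summary = "# 최종 종합 보고서 (stub)"
    internal_history.append((user_query, final_summary))
    yield "supervisor analysis", "search summary", "executor plan", final_summary, "✅ 완성!"

with gr.Blocks() as app:
    user_input = gr.Textbox(label="질문 입력")
    status_text = gr.Textbox(label="상태", interactive=False)
    final_output = gr.Markdown()
    supervisor_output = gr.Textbox(interactive=False)
    researcher_output = gr.Textbox(interactive=False)
    executor_output = gr.Textbox(interactive=False)
    user_input.submit(
        fn=process_query_streaming,
        inputs=[user_input],
        outputs=[supervisor_output, researcher_output,
                 executor_output, final_output, status_text],
    )

if __name__ == "__main__":
    app.queue()   # streaming generators require the queue
    app.launch()
```

With the Chatbot gone, clear_all only has to reset these five visible outputs and empty internal_history, which is what the later hunks in this diff do.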
 
639
  ):
640
  supervisor_initial_response += chunk
641
  supervisor_text = f"[์ดˆ๊ธฐ ๋ถ„์„] - {datetime.now().strftime('%H:%M:%S')}\n{supervisor_initial_response}"
642
+ yield supervisor_text, "", "", "", "🔄 감독자 AI가 분석 중..."
643
 
644
  all_responses["supervisor"].append(supervisor_initial_response)
645
 
 
649
 
650
  # 2๋‹จ๊ณ„: ๋ธŒ๋ ˆ์ด๋ธŒ ๊ฒ€์ƒ‰ ์ˆ˜ํ–‰
651
  researcher_text = "[์›น ๊ฒ€์ƒ‰] ๐Ÿ” ๊ฒ€์ƒ‰ ์ค‘...\n"
652
+ yield supervisor_text, researcher_text, "", "", "🔍 웹 검색 수행 중..."
653
 
654
  search_results = {}
655
  for keyword in keywords:
 
657
  if results:
658
  search_results[keyword] = results
659
  researcher_text += f"โœ“ '{keyword}' ๊ฒ€์ƒ‰ ์™„๋ฃŒ\n"
660
+ yield supervisor_text, researcher_text, "", "", f"🔍 '{keyword}' 검색 중..."
661
 
662
  # 3๋‹จ๊ณ„: ์กฐ์‚ฌ์ž AI๊ฐ€ ๊ฒ€์ƒ‰ ๊ฒฐ๊ณผ ์ •๋ฆฌ
663
  researcher_prompt = llm_system.create_researcher_prompt(user_query, supervisor_initial_response, search_results)
 
670
  ):
671
  researcher_response += chunk
672
  researcher_text = f"[์กฐ์‚ฌ ๊ฒฐ๊ณผ ์ •๋ฆฌ] - {datetime.now().strftime('%H:%M:%S')}\n{researcher_response}"
673
+ yield supervisor_text, researcher_text, "", "", "📝 조사자 AI가 정리 중..."
674
 
675
  all_responses["researcher"].append(researcher_response)
676
 
 
686
  supervisor_execution_response += chunk
687
  temp_text = f"{all_responses['supervisor'][0]}\n\n---\n\n[์‹คํ–‰ ์ง€์‹œ] - {datetime.now().strftime('%H:%M:%S')}\n{supervisor_execution_response}"
688
  supervisor_text = f"[์ดˆ๊ธฐ ๋ถ„์„] - {datetime.now().strftime('%H:%M:%S')}\n{temp_text}"
689
+ yield supervisor_text, researcher_text, "", "", "🎯 감독자 AI가 지시 중..."
690
 
691
  all_responses["supervisor"].append(supervisor_execution_response)
692
 
 
701
  ):
702
  executor_response += chunk
703
  executor_text = f"[์ดˆ๊ธฐ ๊ตฌํ˜„] - {datetime.now().strftime('%H:%M:%S')}\n{executor_response}"
704
+ yield supervisor_text, researcher_text, executor_text, "", "🔧 실행자 AI가 구현 중..."
705
 
706
  all_responses["executor"].append(executor_response)
707
 
 
725
  review_response += chunk
726
  temp_text = f"{all_responses['supervisor'][0]}\n\n---\n\n[์‹คํ–‰ ์ง€์‹œ] - {datetime.now().strftime('%H:%M:%S')}\n{all_responses['supervisor'][1]}\n\n---\n\n[๊ฒ€ํ†  ๋ฐ ํ”ผ๋“œ๋ฐฑ] - {datetime.now().strftime('%H:%M:%S')}\n{review_response}"
727
  supervisor_text = f"[์ดˆ๊ธฐ ๋ถ„์„] - {datetime.now().strftime('%H:%M:%S')}\n{temp_text}"
728
+ yield supervisor_text, researcher_text, executor_text, "", "🔄 감독자 AI가 검토 중..."
729
 
730
  all_responses["supervisor"].append(review_response)
731
 
 
746
  final_executor_response += chunk
747
  temp_text = f"[์ดˆ๊ธฐ ๊ตฌํ˜„] - {datetime.now().strftime('%H:%M:%S')}\n{all_responses['executor'][0]}\n\n---\n\n[์ตœ์ข… ๋ณด๊ณ ์„œ] - {datetime.now().strftime('%H:%M:%S')}\n{final_executor_response}"
748
  executor_text = temp_text
749
+ yield supervisor_text, researcher_text, executor_text, "", "📄 최종 보고서 작성 중..."
750
 
751
  all_responses["executor"].append(final_executor_response)
752
 
 
784
  ---
785
  *์ด ๋ณด๊ณ ์„œ๋Š” ์›น ๊ฒ€์ƒ‰์„ ํ†ตํ•œ ์ตœ์‹  ์ •๋ณด์™€ AI๋“ค์˜ ํ˜‘๋ ฅ, ๊ทธ๋ฆฌ๊ณ  ํ”ผ๋“œ๋ฐฑ ๋ฐ˜์˜์„ ํ†ตํ•ด ์ž‘์„ฑ๋˜์—ˆ์Šต๋‹ˆ๋‹ค.*"""
786
 
787
+ # 내부 히스토리 업데이트 (UI에는 표시하지 않음)
788
+ internal_history.append((user_query, final_summary))
789
 
790
+ yield supervisor_text, researcher_text, executor_text, final_summary, "✅ 최종 보고서 완성!"
791
 
792
  except Exception as e:
793
  error_msg = f"โŒ ์ฒ˜๋ฆฌ ์ค‘ ์˜ค๋ฅ˜: {str(e)}"
794
+ yield "", "", "", error_msg, error_msg
795
 
796
  def clear_all():
797
  """๋ชจ๋“  ๋‚ด์šฉ ์ดˆ๊ธฐํ™”"""
798
+ global internal_history
799
+ internal_history = []
800
+ return "", "", "", "", "🔄 초기화되었습니다."
801
 
802
  # Gradio ์ธํ„ฐํŽ˜์ด์Šค
803
  css = """
 
840
  """
841
  )
842
 
843
+ # 입력 섹션
844
  with gr.Row():
845
+ with gr.Column():
 
 
846
  user_input = gr.Textbox(
847
  label="์งˆ๋ฌธ ์ž…๋ ฅ",
848
  placeholder="์˜ˆ: ๊ธฐ๊ณ„ํ•™์Šต ๋ชจ๋ธ์˜ ์„ฑ๋Šฅ์„ ํ–ฅ์ƒ์‹œํ‚ค๋Š” ๋ฐฉ๋ฒ•์€?",
 
859
  value="๋Œ€๊ธฐ ์ค‘...",
860
  max_lines=1
861
  )
862
+
863
+ # 최종 결과
864
+ with gr.Row():
865
+ with gr.Column():
866
  with gr.Accordion("๐Ÿ“Š ์ตœ์ข… ์ข…ํ•ฉ ๊ฒฐ๊ณผ", open=True):
867
  final_output = gr.Markdown(
868
  value="*์งˆ๋ฌธ์„ ์ž…๋ ฅํ•˜๋ฉด ๊ฒฐ๊ณผ๊ฐ€ ์—ฌ๊ธฐ์— ํ‘œ์‹œ๋ฉ๋‹ˆ๋‹ค.*"
869
  )
 
 
 
 
870
 
871
+ # AI 출력들 - 한 줄에 나란히 배치
872
+ with gr.Row():
873
+ # 감독자 AI 출력
874
+ with gr.Column():
875
+ gr.Markdown("### 🧠 감독자 AI (거시적 분석)")
876
+ supervisor_output = gr.Textbox(
877
+ label="",
878
+ lines=20,
879
+ max_lines=25,
880
+ interactive=False,
881
+ elem_classes=["supervisor-box"]
882
+ )
883
 
884
+ # 조사자 AI 출력
885
+ with gr.Column():
886
+ gr.Markdown("### 🔍 조사자 AI (웹 검색 & 정리)")
887
+ researcher_output = gr.Textbox(
888
+ label="",
889
+ lines=20,
890
+ max_lines=25,
891
+ interactive=False,
892
+ elem_classes=["researcher-box"]
893
+ )
894
 
895
+ # 실행자 AI 출력
896
+ with gr.Column():
897
+ gr.Markdown("### 👁️ 실행자 AI (미시적 구현)")
898
+ executor_output = gr.Textbox(
899
+ label="",
900
+ lines=20,
901
+ max_lines=25,
 
 
 
 
 
902
  interactive=False,
903
+ elem_classes=["executor-box"]
 
904
  )
 
 
 
 
 
905
 
906
  # ์˜ˆ์ œ
907
  gr.Examples(
 
919
  # ์ด๋ฒคํŠธ ํ•ธ๋“ค๋Ÿฌ
920
  submit_btn.click(
921
  fn=process_query_streaming,
922
+ inputs=[user_input],
923
+ outputs=[supervisor_output, researcher_output, executor_output, final_output, status_text]
924
  ).then(
925
  fn=lambda: "",
926
  outputs=[user_input]
 
928
 
929
  user_input.submit(
930
  fn=process_query_streaming,
931
+ inputs=[user_input],
932
+ outputs=[supervisor_output, researcher_output, executor_output, final_output, status_text]
933
  ).then(
934
  fn=lambda: "",
935
  outputs=[user_input]
 
937
 
938
  clear_btn.click(
939
  fn=clear_all,
940
+ outputs=[supervisor_output, researcher_output, executor_output, final_output, status_text]
941
  )
942
 
943
  gr.Markdown(