aiqtech committed on
Commit 6b8ef5e · verified · 1 Parent(s): 35872f2

Update app.py

Files changed (1):
  1. app.py +454 -1238

app.py CHANGED
@@ -38,7 +38,9 @@ TEST_MODE = os.getenv("TEST_MODE", "false").lower() == "true"
 # Global variables
 conversation_history = []
 
-class LLMCollaborativeSystem:
     def __init__(self):
         self.token = FRIENDLI_TOKEN
         self.bapi_token = BAPI_TOKEN
@@ -50,15 +52,55 @@ class LLMCollaborativeSystem:
         self.use_gemini = False
         self.gemini_client = None
 
         if self.test_mode:
             logger.warning("Running in test mode.")
-        if self.bapi_token == "YOUR_BRAVE_API_TOKEN":
-            logger.warning("The Brave API token is not set.")
-        if self.gemini_api_key == "YOUR_GEMINI_API_KEY":
-            logger.warning("The Gemini API token is not set.")
 
     def set_llm_mode(self, mode: str):
-        """Set the LLM mode (default or commercial)"""
         if mode == "commercial" and GEMINI_AVAILABLE and self.gemini_api_key != "YOUR_GEMINI_API_KEY":
             self.use_gemini = True
             if not self.gemini_client:
@@ -67,7 +109,7 @@ class LLMCollaborativeSystem:
         else:
             self.use_gemini = False
             logger.info("Switched to the default LLM mode.")
-
     def create_headers(self):
         """Create API headers"""
         return {
@@ -83,323 +125,229 @@ class LLMCollaborativeSystem:
             "X-Subscription-Token": self.bapi_token
         }
 
-    def create_supervisor_initial_prompt(self, user_query: str) -> str:
-        """Create the supervisor AI's initial prompt"""
-        return f"""You are a supervisor AI that analyzes and guides from a macro perspective.
 
 User question: {user_query}
 
-For this question:
-1. Present the overall approach and framework
-2. Explain the key elements and considerations in a structured way
-3. Suggest 5-7 specific keywords or search terms that need to be researched for this topic
 
-Present the keywords in the following format:
-[검색 키워드]: keyword1, keyword2, keyword3, keyword4, keyword5"""
 
-    def create_researcher_prompt(self, user_query: str, supervisor_guidance: str, search_results: Dict[str, List[Dict]]) -> str:
-        """Create the researcher AI prompt"""
-        search_summary = ""
-        all_results = []
-
-        for keyword, results in search_results.items():
-            search_summary += f"\n\n**Search results for {keyword}:**\n"
-            for i, result in enumerate(results[:10], 1):  # show only the top 10
-                search_summary += f"{i}. {result.get('title', 'N/A')} (credibility: {result.get('credibility_score', 0):.2f})\n"
-                search_summary += f"   - {result.get('description', 'N/A')}\n"
-                search_summary += f"   - Source: {result.get('url', 'N/A')}\n"
-                if result.get('published'):
-                    search_summary += f"   - Published: {result.get('published')}\n"
-
-            all_results.extend(results)
-
-        # Detect contradictions
-        contradictions = self.detect_contradictions(all_results)
-        contradiction_text = ""
-        if contradictions:
-            contradiction_text = "\n\n**Contradictions found in the information:**\n"
-            for cont in contradictions[:3]:  # show at most 3
-                contradiction_text += f"- {cont['type']}: {cont['source1']} vs {cont['source2']}\n"
-
-        return f"""You are a researcher AI that investigates and organizes information.
 
 User question: {user_query}
 
-Supervisor AI's guidance:
-{supervisor_guidance}
 
-Brave search results (with credibility scores):
-{search_summary}
-{contradiction_text}
 
-Based on the search results above:
-1. Organize the important information for each keyword
-2. Refer primarily to trustworthy sources (credibility 0.7 or higher)
-3. Clearly cite sources so the executor AI can verify them
-4. If the information is contradictory, present both perspectives
-5. Highlight any recent trends or important statistics
-6. Include low-credibility information with an explicit caution"""
 
143
- def create_supervisor_execution_prompt(self, user_query: str, research_summary: str) -> str:
144
- """๊ฐ๋…์ž AI์˜ ์‹คํ–‰ ์ง€์‹œ ํ”„๋กฌํ”„ํŠธ"""
145
- return f"""๋‹น์‹ ์€ ๊ฑฐ์‹œ์  ๊ด€์ ์—์„œ ๋ถ„์„ํ•˜๊ณ  ์ง€๋„ํ•˜๋Š” ๊ฐ๋…์ž AI์ž…๋‹ˆ๋‹ค.
 
146
 
147
  ์‚ฌ์šฉ์ž ์งˆ๋ฌธ: {user_query}
148
 
149
- ์กฐ์‚ฌ์ž AI๊ฐ€ ์ •๋ฆฌํ•œ ์กฐ์‚ฌ ๋‚ด์šฉ:
150
- {research_summary}
 
 
 
 
 
 
151
 
152
- ์œ„ ์กฐ์‚ฌ ๋‚ด์šฉ์„ ๊ธฐ๋ฐ˜์œผ๋กœ ์‹คํ–‰์ž AI์—๊ฒŒ ์•„์ฃผ ๊ตฌ์ฒด์ ์ธ ์ง€์‹œ๋ฅผ ๋‚ด๋ ค์ฃผ์„ธ์š”:
153
- 1. ์กฐ์‚ฌ๋œ ์ •๋ณด๋ฅผ ์–ด๋–ป๊ฒŒ ํ™œ์šฉํ• ์ง€ ๋ช…ํ™•ํžˆ ์ง€์‹œํ•˜์„ธ์š”
154
- 2. ์‹คํ–‰ ๊ฐ€๋Šฅํ•œ ๋‹จ๊ณ„๋ณ„ ์ž‘์—…์„ ๊ตฌ์ฒด์ ์œผ๋กœ ์ œ์‹œํ•˜์„ธ์š”
155
- 3. ๊ฐ ๋‹จ๊ณ„์—์„œ ์ฐธ๊ณ ํ•ด์•ผ ํ•  ์กฐ์‚ฌ ๋‚ด์šฉ์„ ๋ช…์‹œํ•˜์„ธ์š”
156
- 4. ์˜ˆ์ƒ๋˜๋Š” ๊ฒฐ๊ณผ๋ฌผ์˜ ํ˜•ํƒœ๋ฅผ ๊ตฌ์ฒด์ ์œผ๋กœ ์„ค๋ช…ํ•˜์„ธ์š”"""
 
157
 
158
- def create_executor_prompt(self, user_query: str, supervisor_guidance: str, research_summary: str) -> str:
159
- """์‹คํ–‰์ž AI ํ”„๋กฌํ”„ํŠธ ์ƒ์„ฑ"""
160
- return f"""๋‹น์‹ ์€ ์„ธ๋ถ€์ ์ธ ๋‚ด์šฉ์„ ๊ตฌํ˜„ํ•˜๋Š” ์‹คํ–‰์ž AI์ž…๋‹ˆ๋‹ค.
 
161
 
162
  ์‚ฌ์šฉ์ž ์งˆ๋ฌธ: {user_query}
163
 
164
- ์กฐ์‚ฌ์ž AI๊ฐ€ ์ •๋ฆฌํ•œ ์กฐ์‚ฌ ๋‚ด์šฉ:
165
- {research_summary}
166
 
167
- ๊ฐ๋…์ž AI์˜ ๊ตฌ์ฒด์ ์ธ ์ง€์‹œ:
168
- {supervisor_guidance}
169
 
170
- ์œ„ ์กฐ์‚ฌ ๋‚ด์šฉ๊ณผ ์ง€์‹œ์‚ฌํ•ญ์„ ๋ฐ”ํƒ•์œผ๋กœ:
171
- 1. ์กฐ์‚ฌ๋œ ์ •๋ณด๋ฅผ ์ ๊ทน ํ™œ์šฉํ•˜์—ฌ ๊ตฌ์ฒด์ ์ธ ์‹คํ–‰ ๊ณ„ํš์„ ์ž‘์„ฑํ•˜์„ธ์š”
172
- 2. ๊ฐ ๋‹จ๊ณ„๋ณ„๋กœ ์ฐธ๊ณ ํ•œ ์กฐ์‚ฌ ๋‚ด์šฉ์„ ๋ช…์‹œํ•˜์„ธ์š”
173
- 3. ์‹ค์ œ๋กœ ์ ์šฉ ๊ฐ€๋Šฅํ•œ ๊ตฌ์ฒด์ ์ธ ๋ฐฉ๋ฒ•๋ก ์„ ์ œ์‹œํ•˜์„ธ์š”
174
- 4. ์˜ˆ์ƒ๋˜๋Š” ์„ฑ๊ณผ์™€ ์ธก์ • ๋ฐฉ๋ฒ•์„ ํฌํ•จํ•˜์„ธ์š”"""
 
175
 
176
- def create_executor_final_prompt(self, user_query: str, initial_response: str, supervisor_feedback: str, research_summary: str) -> str:
177
- """์‹คํ–‰์ž AI ์ตœ์ข… ๋ณด๊ณ ์„œ ํ”„๋กฌํ”„ํŠธ"""
178
- return f"""๋‹น์‹ ์€ ์„ธ๋ถ€์ ์ธ ๋‚ด์šฉ์„ ๊ตฌํ˜„ํ•˜๋Š” ์‹คํ–‰์ž AI์ž…๋‹ˆ๋‹ค.
 
179
 
180
  ์‚ฌ์šฉ์ž ์งˆ๋ฌธ: {user_query}
181
 
182
- ์กฐ์‚ฌ์ž AI์˜ ์กฐ์‚ฌ ๋‚ด์šฉ:
183
- {research_summary}
184
 
185
- ๋‹น์‹ ์˜ ์ดˆ๊ธฐ ๋‹ต๋ณ€:
186
- {initial_response}
187
 
188
- ๊ฐ๋…์ž AI์˜ ํ”ผ๋“œ๋ฐฑ ๋ฐ ๊ฐœ์„ ์‚ฌํ•ญ:
189
- {supervisor_feedback}
190
 
191
- ์œ„ ํ”ผ๋“œ๋ฐฑ์„ ์™„์ „ํžˆ ๋ฐ˜์˜ํ•˜์—ฌ ์ตœ์ข… ๋ณด๊ณ ์„œ๋ฅผ ์ž‘์„ฑํ•˜์„ธ์š”:
192
- 1. ๊ฐ๋…์ž์˜ ๋ชจ๋“  ๊ฐœ์„ ์‚ฌํ•ญ์„ ๋ฐ˜์˜ํ•˜์„ธ์š”
193
- 2. ์กฐ์‚ฌ ๋‚ด์šฉ์„ ๋”์šฑ ๊ตฌ์ฒด์ ์œผ๋กœ ํ™œ์šฉํ•˜์„ธ์š”
194
- 3. ์‹คํ–‰ ๊ฐ€๋Šฅ์„ฑ์„ ๋†’์ด๋Š” ์„ธ๋ถ€ ๊ณ„ํš์„ ํฌํ•จํ•˜์„ธ์š”
195
- 4. ๋ช…ํ™•ํ•œ ๊ฒฐ๋ก ๊ณผ ๋‹ค์Œ ๋‹จ๊ณ„๋ฅผ ์ œ์‹œํ•˜์„ธ์š”
196
- 5. ์ „๋ฌธ์ ์ด๊ณ  ์™„์„ฑ๋„ ๋†’์€ ์ตœ์ข… ๋ณด๊ณ ์„œ ํ˜•์‹์œผ๋กœ ์ž‘์„ฑํ•˜์„ธ์š”"""
197
 
198
- def create_evaluator_prompt(self, user_query: str, supervisor_responses: List[str], researcher_response: str, executor_responses: List[str], evaluator_responses: List[str] = None) -> str:
199
- """ํ‰๊ฐ€์ž AI ํ”„๋กฌํ”„ํŠธ ์ƒ์„ฑ"""
200
- evaluator_history = ""
201
- if evaluator_responses and len(evaluator_responses) > 0:
202
- evaluator_history = f"""
203
- ํ‰๊ฐ€์ž AI์˜ ์ด์ „ ํ‰๊ฐ€๋“ค:
204
- - ์กฐ์‚ฌ ๊ฒฐ๊ณผ ํ‰๊ฐ€: {evaluator_responses[0] if len(evaluator_responses) > 0 else 'N/A'}
205
- - ์ดˆ๊ธฐ ๊ตฌํ˜„ ํ‰๊ฐ€: {evaluator_responses[1] if len(evaluator_responses) > 1 else 'N/A'}
206
- """
207
-
208
- return f"""๋‹น์‹ ์€ ์ „์ฒด ํ˜‘๋ ฅ ๊ณผ์ •๊ณผ ๊ฒฐ๊ณผ๋ฅผ ํ‰๊ฐ€ํ•˜๋Š” ํ‰๊ฐ€์ž AI์ž…๋‹ˆ๋‹ค.
209
 
210
  ์‚ฌ์šฉ์ž ์งˆ๋ฌธ: {user_query}
211
 
212
- ๊ฐ๋…์ž AI์˜ ๋ถ„์„ ๋ฐ ์ง€์‹œ:
213
- - ์ดˆ๊ธฐ ๋ถ„์„: {supervisor_responses[0]}
214
- - ์‹คํ–‰ ์ง€์‹œ: {supervisor_responses[1] if len(supervisor_responses) > 1 else 'N/A'}
215
- - ๊ฐœ์„  ์ง€์‹œ: {supervisor_responses[2] if len(supervisor_responses) > 2 else 'N/A'}
 
216
 
217
- ์กฐ์‚ฌ์ž AI์˜ ์กฐ์‚ฌ ๊ฒฐ๊ณผ:
218
- {researcher_response}
219
 
220
- ์‹คํ–‰์ž AI์˜ ๊ตฌํ˜„:
221
- - ์ดˆ๊ธฐ ๊ตฌํ˜„: {executor_responses[0]}
222
- - ์ตœ์ข… ๋ณด๊ณ ์„œ: {executor_responses[1] if len(executor_responses) > 1 else 'N/A'}
223
- {evaluator_history}
224
- ์œ„ ์ „์ฒด ๊ณผ์ •์„ ํ‰๊ฐ€ํ•˜์—ฌ:
225
- 1. **ํ’ˆ์งˆ ํ‰๊ฐ€**: ๊ฐ AI์˜ ๋‹ต๋ณ€ ํ’ˆ์งˆ๊ณผ ์—ญํ•  ์ˆ˜ํ–‰๋„๋ฅผ ํ‰๊ฐ€ํ•˜์„ธ์š” (10์  ๋งŒ์ )
226
- 2. **ํ˜‘๋ ฅ ํšจ๊ณผ์„ฑ**: AI ๊ฐ„ ํ˜‘๋ ฅ์ด ์–ผ๋งˆ๋‚˜ ํšจ๊ณผ์ ์ด์—ˆ๋Š”์ง€ ํ‰๊ฐ€ํ•˜์„ธ์š”
227
- 3. **์ •๋ณด ํ™œ์šฉ๋„**: ์›น ๊ฒ€์ƒ‰ ์ •๋ณด๊ฐ€ ์–ผ๋งˆ๋‚˜ ์ž˜ ํ™œ์šฉ๋˜์—ˆ๋Š”์ง€ ํ‰๊ฐ€ํ•˜์„ธ์š”
228
- 4. **๊ฐœ์„ ์ **: ํ–ฅํ›„ ๊ฐœ์„ ์ด ํ•„์š”ํ•œ ๋ถ€๋ถ„์„ ๊ตฌ์ฒด์ ์œผ๋กœ ์ œ์‹œํ•˜์„ธ์š”
229
- 5. **์ตœ์ข… ํ‰์ **: ์ „์ฒด ํ”„๋กœ์„ธ์Šค์— ๋Œ€ํ•œ ์ข…ํ•ฉ ํ‰๊ฐ€๋ฅผ ์ œ์‹œํ•˜์„ธ์š”
230
 
231
- ํ‰๊ฐ€๋Š” ๊ตฌ์ฒด์ ์ด๊ณ  ๊ฑด์„ค์ ์œผ๋กœ ์ž‘์„ฑํ•˜์„ธ์š”."""
232
 
233
-    def extract_keywords(self, supervisor_response: str) -> List[str]:
-        """Extract keywords from the supervisor's response"""
         keywords = []
 
-        # Find keywords given in the "[검색 키워드]:" format
-        keyword_match = re.search(r'\[검색 키워드\]:\s*(.+)', supervisor_response, re.IGNORECASE)
         if keyword_match:
             keyword_str = keyword_match.group(1)
             keywords = [k.strip() for k in keyword_str.split(',') if k.strip()]
 
-        # Fall back to default keywords if none were found
         if not keywords:
-            keywords = ["best practices", "implementation guide", "case studies", "latest trends", "success factors"]
 
-        return keywords[:7]  # limit to at most 7
-
249
- def generate_synonyms(self, keyword: str) -> List[str]:
250
- """ํ‚ค์›Œ๋“œ์˜ ๋™์˜์–ด/์œ ์‚ฌ์–ด ์ƒ์„ฑ"""
251
- synonyms = {
252
- "optimization": ["improvement", "enhancement", "efficiency", "tuning"],
253
- "performance": ["speed", "efficiency", "throughput", "latency"],
254
- "strategy": ["approach", "method", "technique", "plan"],
255
- "implementation": ["deployment", "execution", "development", "integration"],
256
- "analysis": ["evaluation", "assessment", "study", "research"],
257
- "management": ["administration", "governance", "control", "supervision"],
258
- "best practices": ["proven methods", "industry standards", "guidelines", "recommendations"],
259
- "trends": ["developments", "innovations", "emerging", "future"],
260
- "machine learning": ["ML", "AI", "deep learning", "neural networks"],
261
- "ํ”„๋กœ์ ํŠธ": ["project", "์‚ฌ์—…", "์—…๋ฌด", "์ž‘์—…"]
262
- }
263
-
264
- # ํ‚ค์›Œ๋“œ ์ •๊ทœํ™”
265
- keyword_lower = keyword.lower()
266
-
267
- # ์ง์ ‘ ๋งค์นญ๋˜๋Š” ๋™์˜์–ด๊ฐ€ ์žˆ์œผ๋ฉด ๋ฐ˜ํ™˜
268
- if keyword_lower in synonyms:
269
- return synonyms[keyword_lower][:2] # ์ตœ๋Œ€ 2๊ฐœ
270
-
271
- # ๋ถ€๋ถ„ ๋งค์นญ ํ™•์ธ
272
- for key, values in synonyms.items():
273
- if key in keyword_lower or keyword_lower in key:
274
- return values[:2]
275
-
276
- # ๋™์˜์–ด๊ฐ€ ์—†์œผ๋ฉด ๋นˆ ๋ฆฌ์ŠคํŠธ
277
- return []
278
 
279
     def calculate_credibility_score(self, result: Dict) -> float:
-        """Calculate a credibility score (0-1) for a search result"""
-        score = 0.5  # base score
-
         url = result.get('url', '')
-        title = result.get('title', '')
-        description = result.get('description', '')
 
-        # URL-based score
-        trusted_domains = [
-            '.edu', '.gov', '.org', 'wikipedia.org', 'nature.com',
-            'sciencedirect.com', 'ieee.org', 'acm.org', 'springer.com',
-            'harvard.edu', 'mit.edu', 'stanford.edu', 'github.com'
-        ]
 
         for domain in trusted_domains:
             if domain in url:
                 score += 0.2
                 break
 
-        # Whether HTTPS is used
         if url.startswith('https://'):
             score += 0.1
 
-        # Length of title and description (very short text lowers credibility)
-        if len(title) > 20:
             score += 0.05
-        if len(description) > 50:
             score += 0.05
 
-        # Check for ad/spam keywords
-        spam_keywords = ['buy now', 'sale', 'discount', 'click here', '100% free']
-        if any(spam in (title + description).lower() for spam in spam_keywords):
             score -= 0.3
 
-        # Bonus if date information is present
-        if any(year in description for year in ['2024', '2023', '2022']):
-            score += 0.1
-
-        return max(0, min(1, score))  # clamp to the 0-1 range
-
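Note: as a reading aid for the hunk above, here is a minimal, self-contained sketch of the credibility heuristic as it stood before this commit, reconstructed only from the removed lines (base 0.5, trusted-domain and HTTPS bonuses, length bonuses, spam penalty, recency bonus). Whatever scoring logic this commit adds in its place is not visible in the diff and is not represented here.

```python
# Reference sketch of the pre-commit heuristic, assuming the removed lines above are the whole method body.
from typing import Dict

TRUSTED_DOMAINS = [
    '.edu', '.gov', '.org', 'wikipedia.org', 'nature.com',
    'sciencedirect.com', 'ieee.org', 'acm.org', 'springer.com',
    'harvard.edu', 'mit.edu', 'stanford.edu', 'github.com',
]
SPAM_KEYWORDS = ['buy now', 'sale', 'discount', 'click here', '100% free']

def credibility_score(result: Dict) -> float:
    url = result.get('url', '')
    title = result.get('title', '')
    description = result.get('description', '')
    score = 0.5  # base score
    if any(domain in url for domain in TRUSTED_DOMAINS):
        score += 0.2   # trusted-domain bonus (applied at most once)
    if url.startswith('https://'):
        score += 0.1   # HTTPS bonus
    if len(title) > 20:
        score += 0.05  # non-trivial title
    if len(description) > 50:
        score += 0.05  # non-trivial description
    if any(spam in (title + description).lower() for spam in SPAM_KEYWORDS):
        score -= 0.3   # ad/spam penalty
    if any(year in description for year in ['2024', '2023', '2022']):
        score += 0.1   # recency bonus
    return max(0.0, min(1.0, score))

# Example: a trusted HTTPS source with a dated description scores 0.5 + 0.2 + 0.1 + 0.1 = 0.9,
# plus up to 0.1 in length bonuses before clamping.
```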
- def fetch_url_content(self, url: str, max_length: int = 2000) -> str:
321
- """URL์—์„œ ์ฝ˜ํ…์ธ  ์ถ”์ถœ"""
322
- try:
323
- # User-Agent ์„ค์ •
324
- headers = {
325
- 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36'
326
- }
327
-
328
- req = urllib.request.Request(url, headers=headers)
329
-
330
- with urllib.request.urlopen(req, timeout=5) as response:
331
- html = response.read().decode('utf-8', errors='ignore')
332
-
333
- soup = BeautifulSoup(html, 'html.parser')
334
-
335
- # ์Šคํฌ๋ฆฝํŠธ์™€ ์Šคํƒ€์ผ ์ œ๊ฑฐ
336
- for script in soup(["script", "style"]):
337
- script.decompose()
338
-
339
- # ๋ณธ๋ฌธ ํ…์ŠคํŠธ ์ถ”์ถœ
340
- text = soup.get_text()
341
-
342
- # ๊ณต๋ฐฑ ์ •๋ฆฌ
343
- lines = (line.strip() for line in text.splitlines())
344
- chunks = (phrase.strip() for line in lines for phrase in line.split(" "))
345
- text = ' '.join(chunk for chunk in chunks if chunk)
346
-
347
- # ๊ธธ์ด ์ œํ•œ
348
- if len(text) > max_length:
349
- text = text[:max_length] + "..."
350
-
351
- return text
352
-
353
- except Exception as e:
354
- logger.error(f"URL ์ฝ˜ํ…์ธ  ๊ฐ€์ ธ์˜ค๊ธฐ ์‹คํŒจ {url}: {str(e)}")
355
- return ""
356
-
357
- def detect_contradictions(self, results: List[Dict]) -> List[Dict]:
358
- """๊ฒ€์ƒ‰ ๊ฒฐ๊ณผ ๊ฐ„ ๋ชจ์ˆœ ๊ฐ์ง€"""
359
- contradictions = []
360
-
361
- # ๊ฐ„๋‹จํ•œ ๋ชจ์ˆœ ๊ฐ์ง€ ํŒจํ„ด
362
- opposite_pairs = [
363
- ("increase", "decrease"),
364
- ("improve", "worsen"),
365
- ("effective", "ineffective"),
366
- ("success", "failure"),
367
- ("benefit", "harm"),
368
- ("positive", "negative"),
369
- ("growth", "decline")
370
- ]
371
-
372
- # ๊ฒฐ๊ณผ๋“ค์„ ๋น„๊ต
373
- for i in range(len(results)):
374
- for j in range(i + 1, len(results)):
375
- desc1 = results[i].get('description', '').lower()
376
- desc2 = results[j].get('description', '').lower()
377
-
378
- # ๋ฐ˜๋Œ€ ๊ฐœ๋…์ด ํฌํ•จ๋˜์–ด ์žˆ๋Š”์ง€ ํ™•์ธ
379
- for word1, word2 in opposite_pairs:
380
- if (word1 in desc1 and word2 in desc2) or (word2 in desc1 and word1 in desc2):
381
- # ๊ฐ™์€ ์ฃผ์ œ์— ๋Œ€ํ•ด ๋ฐ˜๋Œ€ ์˜๊ฒฌ์ธ์ง€ ํ™•์ธ
382
- common_words = set(desc1.split()) & set(desc2.split())
383
- if len(common_words) > 5: # ๊ณตํ†ต ๋‹จ์–ด๊ฐ€ 5๊ฐœ ์ด์ƒ์ด๋ฉด ๊ฐ™์€ ์ฃผ์ œ๋กœ ๊ฐ„์ฃผ
384
- contradictions.append({
385
- 'source1': results[i]['url'],
386
- 'source2': results[j]['url'],
387
- 'type': f"{word1} vs {word2}",
388
- 'desc1': results[i]['description'][:100],
389
- 'desc2': results[j]['description'][:100]
390
- })
391
-
392
- return contradictions
393
 
394
     def brave_search(self, query: str) -> List[Dict]:
         """Call the Brave Search API"""
         if self.test_mode or self.bapi_token == "YOUR_BRAVE_API_TOKEN":
-            # Return simulated results in test mode
             test_results = []
             for i in range(5):
                 test_results.append({
-                    "title": f"Best Practices for {query} - Source {i+1}",
-                    "description": f"Comprehensive guide on implementing {query} with proven methodologies and real-world examples from industry leaders.",
                     "url": f"https://example{i+1}.com/{query.replace(' ', '-')}",
                     "credibility_score": 0.7 + (i * 0.05)
                 })
@@ -408,9 +356,8 @@ class LLMCollaborativeSystem:
         try:
             params = {
                 "q": query,
-                "count": 20,  # increased to 20
-                "safesearch": "moderate",
-                "freshness": "pw"  # past week, for recent results
             }
 
             response = requests.get(
@@ -423,18 +370,15 @@ class LLMCollaborativeSystem:
             if response.status_code == 200:
                 data = response.json()
                 results = []
-                for item in data.get("web", {}).get("results", [])[:20]:
                     result = {
                         "title": item.get("title", ""),
                         "description": item.get("description", ""),
                         "url": item.get("url", ""),
-                        "published": item.get("published", "")
                     }
-                    # Calculate the credibility score
-                    result["credibility_score"] = self.calculate_credibility_score(result)
                     results.append(result)
 
-                # Sort by credibility score
                 results.sort(key=lambda x: x['credibility_score'], reverse=True)
                 return results
             else:
@@ -445,14 +389,6 @@ class LLMCollaborativeSystem:
             logger.error(f"Error during Brave search: {str(e)}")
             return []
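Note: the hunks above strip the extra request parameters, leaving only the query string visible. Below is a hedged, standalone sketch of an equivalent Brave web-search call. The endpoint URL and the Accept header are assumptions (self.api_url and the rest of create_brave_headers are defined outside this diff); the q parameter and the X-Subscription-Token header come from the surrounding context lines.

```python
# Minimal sketch, assuming the standard Brave web-search endpoint; adjust if self.api_url differs.
import requests

BRAVE_SEARCH_URL = "https://api.search.brave.com/res/v1/web/search"  # assumption

def simple_brave_search(query: str, token: str) -> list[dict]:
    response = requests.get(
        BRAVE_SEARCH_URL,
        headers={"Accept": "application/json", "X-Subscription-Token": token},
        # The removed lines dropped count/safesearch/freshness; any params added by this commit are not visible here.
        params={"q": query},
        timeout=10,
    )
    response.raise_for_status()
    data = response.json()
    # Same result shape the diff keeps: title/description/url per web result.
    return [
        {
            "title": item.get("title", ""),
            "description": item.get("description", ""),
            "url": item.get("url", ""),
        }
        for item in data.get("web", {}).get("results", [])
    ]
```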
 
448
-    def simulate_streaming(self, text: str, role: str) -> Generator[str, None, None]:
-        """Simulate streaming in test mode"""
-        words = text.split()
-        for i in range(0, len(words), 3):
-            chunk = " ".join(words[i:i+3])
-            yield chunk + " "
-            time.sleep(0.05)
-
     def call_gemini_streaming(self, messages: List[Dict[str, str]], role: str) -> Generator[str, None, None]:
         """Streaming call to the Gemini API"""
         if not self.gemini_client:
@@ -460,28 +396,7 @@ class LLMCollaborativeSystem:
             return
 
         try:
-            # Set up the system prompts
-            system_prompts = {
-                "supervisor": "You are a supervisor AI that analyzes and guides from a macro perspective.",
-                "researcher": "You are a researcher AI that investigates information and organizes it systematically.",
-                "executor": "You are an executor AI that implements the details.",
-                "evaluator": "You are an evaluator AI that evaluates the entire collaboration process and its results."
-            }
-
-            # Build the contents in Gemini format
             contents = []
-
-            # Add the system prompt as the first user message
-            contents.append(types.Content(
-                role="user",
-                parts=[types.Part.from_text(text=system_prompts.get(role, ""))]
-            ))
-            contents.append(types.Content(
-                role="model",
-                parts=[types.Part.from_text(text="Understood. I will perform my role.")]
-            ))
-
-            # Add the user messages
             for msg in messages:
                 if msg["role"] == "user":
                     contents.append(types.Content(
@@ -489,7 +404,6 @@ class LLMCollaborativeSystem:
                         parts=[types.Part.from_text(text=msg["content"])]
                     ))
 
-            # Configure GenerateContentConfig
             generate_content_config = types.GenerateContentConfig(
                 temperature=0.7,
                 top_p=0.8,
@@ -497,7 +411,6 @@ class LLMCollaborativeSystem:
                 response_mime_type="text/plain"
             )
 
-            # Streaming generation
             for chunk in self.gemini_client.models.generate_content_stream(
                 model="gemini-2.5-pro",
                 contents=contents,
@@ -512,453 +425,31 @@ class LLMCollaborativeSystem:
 
     def call_llm_streaming(self, messages: List[Dict[str, str]], role: str) -> Generator[str, None, None]:
         """Streaming LLM API call"""
-
-        # Gemini mode
         if self.use_gemini:
             yield from self.call_gemini_streaming(messages, role)
             return
 
-        # Test mode
         if self.test_mode:
-            logger.info(f"Test-mode streaming - Role: {role}")
524
- test_responses = {
525
- "supervisor_initial": """์ด ์งˆ๋ฌธ์— ๋Œ€ํ•œ ๊ฑฐ์‹œ์  ๋ถ„์„์„ ์ œ์‹œํ•˜๊ฒ ์Šต๋‹ˆ๋‹ค.
526
-
527
- 1. **ํ•ต์‹ฌ ๊ฐœ๋… ํŒŒ์•…**
528
- - ์งˆ๋ฌธ์˜ ๋ณธ์งˆ์  ์š”์†Œ๋ฅผ ์‹ฌ์ธต ๋ถ„์„ํ•ฉ๋‹ˆ๋‹ค
529
- - ๊ด€๋ จ๋œ ์ฃผ์š” ์ด๋ก ๊ณผ ์›์น™์„ ๊ฒ€ํ† ํ•ฉ๋‹ˆ๋‹ค
530
- - ๋‹ค์–‘ํ•œ ๊ด€์ ์—์„œ์˜ ์ ‘๊ทผ ๋ฐฉ๋ฒ•์„ ๊ณ ๋ คํ•ฉ๋‹ˆ๋‹ค
531
-
532
- 2. **์ „๋žต์  ์ ‘๊ทผ ๋ฐฉํ–ฅ**
533
- - ์ฒด๊ณ„์ ์ด๊ณ  ๋‹จ๊ณ„๋ณ„ ํ•ด๊ฒฐ ๋ฐฉ์•ˆ์„ ์ˆ˜๋ฆฝํ•ฉ๋‹ˆ๋‹ค
534
- - ์žฅ๋‹จ๊ธฐ ๋ชฉํ‘œ๋ฅผ ๋ช…ํ™•ํžˆ ์„ค์ •ํ•ฉ๋‹ˆ๋‹ค
535
- - ๋ฆฌ์Šคํฌ ์š”์ธ๊ณผ ๋Œ€์‘ ๋ฐฉ์•ˆ์„ ๋งˆ๋ จํ•ฉ๋‹ˆ๋‹ค
536
-
537
- 3. **๊ธฐ๋Œ€ ํšจ๊ณผ์™€ ๊ณผ์ œ**
538
- - ์˜ˆ์ƒ๋˜๋Š” ๊ธ์ •์  ์„ฑ๊ณผ๋ฅผ ๋ถ„์„ํ•ฉ๋‹ˆ๋‹ค
539
- - ์ž ์žฌ์  ๋„์ „ ๊ณผ์ œ๋ฅผ ์‹๋ณ„ํ•ฉ๋‹ˆ๋‹ค
540
- - ์ง€์†๊ฐ€๋Šฅํ•œ ๋ฐœ์ „ ๋ฐฉํ–ฅ์„ ์ œ์‹œํ•ฉ๋‹ˆ๋‹ค
541
-
542
- [๊ฒ€์ƒ‰ ํ‚ค์›Œ๋“œ]: machine learning optimization, performance improvement strategies, model efficiency techniques, hyperparameter tuning best practices, latest ML trends 2024""",
543
-
544
- "researcher": """์กฐ์‚ฌ ๊ฒฐ๊ณผ๋ฅผ ์ข…ํ•ฉํ•˜์—ฌ ๋‹ค์Œ๊ณผ ๊ฐ™์ด ์ •๋ฆฌํ–ˆ์Šต๋‹ˆ๋‹ค.
545
-
546
- **1. Machine Learning Optimization (์‹ ๋ขฐ๋„ ๋†’์Œ)**
547
- - ์ตœ์‹  ์—ฐ๊ตฌ์— ๋”ฐ๋ฅด๋ฉด ๋ชจ๋ธ ์ตœ์ ํ™”์˜ ํ•ต์‹ฌ์€ ์•„ํ‚คํ…์ฒ˜ ์„ค๊ณ„์™€ ํ›ˆ๋ จ ์ „๋žต์˜ ๊ท ํ˜•์ž…๋‹ˆ๋‹ค (์‹ ๋ขฐ๋„: 0.85)
548
- - AutoML ๋„๊ตฌ๋“ค์ด ํ•˜์ดํผํŒŒ๋ผ๋ฏธํ„ฐ ํŠœ๋‹์„ ์ž๋™ํ™”ํ•˜์—ฌ ํšจ์œจ์„ฑ์„ ํฌ๊ฒŒ ํ–ฅ์ƒ์‹œํ‚ต๋‹ˆ๋‹ค (์‹ ๋ขฐ๋„: 0.82)
549
- - ์ถœ์ฒ˜: ML Conference 2024 (https://mlconf2024.org), Google Research (https://research.google)
550
-
551
- **2. Performance Improvement Strategies (์‹ ๋ขฐ๋„ ๋†’์Œ)**
552
- - ๋ฐ์ดํ„ฐ ํ’ˆ์งˆ ๊ฐœ์„ ์ด ๋ชจ๋ธ ์„ฑ๋Šฅ ํ–ฅ์ƒ์˜ 80%๋ฅผ ์ฐจ์ง€ํ•œ๋‹ค๋Š” ์—ฐ๊ตฌ ๊ฒฐ๊ณผ (์‹ ๋ขฐ๋„: 0.90)
553
- - ์•™์ƒ๋ธ” ๊ธฐ๋ฒ•๊ณผ ์ „์ดํ•™์Šต์ด ์ฃผ์š” ์„ฑ๋Šฅ ๊ฐœ์„  ๋ฐฉ๋ฒ•์œผ๋กœ ์ž…์ฆ๋จ (์‹ ๋ขฐ๋„: 0.78)
554
- - ์ถœ์ฒ˜: Stanford AI Lab (https://ai.stanford.edu), MIT CSAIL (https://csail.mit.edu)
555
-
556
- **3. Model Efficiency Techniques (์‹ ๋ขฐ๋„ ์ค‘๊ฐ„)**
557
- - ๋ชจ๋ธ ๊ฒฝ๋Ÿ‰ํ™”(Pruning, Quantization)๋กœ ์ถ”๋ก  ์†๋„ 10๋ฐฐ ํ–ฅ์ƒ ๊ฐ€๋Šฅ (์‹ ๋ขฐ๋„: 0.75)
558
- - Knowledge Distillation์œผ๋กœ ๋ชจ๋ธ ํฌ๊ธฐ 90% ๊ฐ์†Œ, ์„ฑ๋Šฅ ์œ ์ง€ (์‹ ๋ขฐ๋„: 0.72)
559
- - ์ถœ์ฒ˜: ArXiv ๋…ผ๋ฌธ (https://arxiv.org/abs/2023.xxxxx)
560
-
561
- **4. ์‹ค์ œ ์ ์šฉ ์‚ฌ๋ก€ (์‹ ๋ขฐ๋„ ๋†’์Œ)**
562
- - Netflix: ์ถ”์ฒœ ์‹œ์Šคํ…œ ๊ฐœ์„ ์œผ๋กœ ์‚ฌ์šฉ์ž ๋งŒ์กฑ๋„ 35% ํ–ฅ์ƒ (์‹ ๋ขฐ๋„: 0.88)
563
- - Tesla: ์‹ค์‹œ๊ฐ„ ๊ฐ์ฒด ์ธ์‹ ์†๋„ 50% ๊ฐœ์„  (์‹ ๋ขฐ๋„: 0.80)
564
- - OpenAI: GPT ๋ชจ๋ธ ํšจ์œจ์„ฑ ๊ฐœ์„ ์œผ๋กœ ๋น„์šฉ 70% ์ ˆ๊ฐ (์‹ ๋ขฐ๋„: 0.85)
565
-
566
- **ํ•ต์‹ฌ ์ธ์‚ฌ์ดํŠธ:**
567
- - ์ตœ์‹  ํŠธ๋ Œ๋“œ๋Š” ํšจ์œจ์„ฑ๊ณผ ์„ฑ๋Šฅ์˜ ๊ท ํ˜•์— ์ดˆ์ 
568
- - 2024๋…„ ๋“ค์–ด Sparse Models์™€ MoE(Mixture of Experts) ๊ธฐ๋ฒ•์ด ๋ถ€์ƒ
569
- - ์‹ค๋ฌด ์ ์šฉ ์‹œ ๋‹จ๊ณ„๋ณ„ ๊ฒ€์ฆ์ด ์„ฑ๊ณต์˜ ํ•ต์‹ฌ""",
570
-
571
- "supervisor_execution": """์กฐ์‚ฌ ๋‚ด์šฉ์„ ๋ฐ”ํƒ•์œผ๋กœ ์‹คํ–‰์ž AI์—๊ฒŒ ๋‹ค์Œ๊ณผ ๊ฐ™์ด ๊ตฌ์ฒด์ ์œผ๋กœ ์ง€์‹œํ•ฉ๋‹ˆ๋‹ค.
572
-
573
- **1๋‹จ๊ณ„: ํ˜„์žฌ ๋ชจ๋ธ ์ง„๋‹จ (1์ฃผ์ฐจ)**
574
- - ์กฐ์‚ฌ๋œ ๋ฒค์น˜๋งˆํฌ ๊ธฐ์ค€์œผ๋กœ ํ˜„์žฌ ๋ชจ๋ธ ์„ฑ๋Šฅ ํ‰๊ฐ€
575
- - Netflix ์‚ฌ๋ก€๋ฅผ ์ฐธ๊ณ ํ•˜์—ฌ ์ฃผ์š” ๋ณ‘๋ชฉ ์ง€์  ์‹๋ณ„
576
- - AutoML ๋„๊ตฌ๋ฅผ ํ™œ์šฉํ•œ ์ดˆ๊ธฐ ์ตœ์ ํ™” ๊ฐ€๋Šฅ์„ฑ ํƒ์ƒ‰
577
-
578
- **2๋‹จ๊ณ„: ๋ฐ์ดํ„ฐ ํ’ˆ์งˆ ๊ฐœ์„  (2-3์ฃผ์ฐจ)**
579
- - ์กฐ์‚ฌ ๊ฒฐ๊ณผ์˜ "80% ๊ทœ์น™"์— ๋”ฐ๋ผ ๋ฐ์ดํ„ฐ ์ •์ œ ์šฐ์„  ์‹คํ–‰
580
- - ๋ฐ์ดํ„ฐ ์ฆ๊ฐ• ๊ธฐ๋ฒ• ์ ์šฉ (์กฐ์‚ฌ๋œ ์ตœ์‹  ๊ธฐ๋ฒ• ํ™œ์šฉ)
581
- - A/B ํ…Œ์ŠคํŠธ๋กœ ๊ฐœ์„  ํšจ๊ณผ ์ธก์ •
582
-
583
- **3๋‹จ๊ณ„: ๋ชจ๋ธ ์ตœ์ ํ™” ๊ตฌํ˜„ (4-6์ฃผ์ฐจ)**
584
- - Knowledge Distillation ์ ์šฉํ•˜์—ฌ ๋ชจ๋ธ ๊ฒฝ๋Ÿ‰ํ™”
585
- - ์กฐ์‚ฌ๋œ Pruning ๊ธฐ๋ฒ•์œผ๋กœ ์ถ”๋ก  ์†๋„ ๊ฐœ์„ 
586
- - Tesla ์‚ฌ๋ก€์˜ ์‹ค์‹œ๊ฐ„ ์ฒ˜๋ฆฌ ์ตœ์ ํ™” ๊ธฐ๋ฒ• ๋ฒค์น˜๋งˆํ‚น
587
-
588
- **4๋‹จ๊ณ„: ์„ฑ๊ณผ ๊ฒ€์ฆ ๋ฐ ๋ฐฐํฌ (7-8์ฃผ์ฐจ)**
589
- - OpenAI ์‚ฌ๋ก€์˜ ๋น„์šฉ ์ ˆ๊ฐ ์ง€ํ‘œ ์ ์šฉ
590
- - ์กฐ์‚ฌ๋œ ์„ฑ๋Šฅ ์ง€ํ‘œ๋กœ ๊ฐœ์„ ์œจ ์ธก์ •
591
- - ๋‹จ๊ณ„์  ๋ฐฐํฌ ์ „๋žต ์ˆ˜๋ฆฝ""",
592
-
593
- "executor": """๊ฐ๋…์ž์˜ ์ง€์‹œ์™€ ์กฐ์‚ฌ ๋‚ด์šฉ์„ ๊ธฐ๋ฐ˜์œผ๋กœ ๊ตฌ์ฒด์ ์ธ ์‹คํ–‰ ๊ณ„ํš์„ ์ˆ˜๋ฆฝํ•ฉ๋‹ˆ๋‹ค.
594
-
595
- **1๋‹จ๊ณ„: ํ˜„์žฌ ๋ชจ๋ธ ์ง„๋‹จ (1์ฃผ์ฐจ)**
596
- - ์›”์š”์ผ-ํ™”์š”์ผ: MLflow๋ฅผ ์‚ฌ์šฉํ•œ ํ˜„์žฌ ๋ชจ๋ธ ๋ฉ”ํŠธ๋ฆญ ์ˆ˜์ง‘
597
- * ์กฐ์‚ฌ ๊ฒฐ๊ณผ ์ฐธ๊ณ : Netflix๊ฐ€ ์‚ฌ์šฉํ•œ ํ•ต์‹ฌ ์ง€ํ‘œ (์ •ํ™•๋„, ์ง€์—ฐ์‹œ๊ฐ„, ์ฒ˜๋ฆฌ๋Ÿ‰)
598
- - ์ˆ˜์š”์ผ-๋ชฉ์š”์ผ: AutoML ๋„๊ตฌ (Optuna, Ray Tune) ์„ค์ • ๋ฐ ์ดˆ๊ธฐ ์‹คํ–‰
599
- * ์กฐ์‚ฌ๋œ best practice์— ๋”ฐ๋ผ search space ์ •์˜
600
- - ๊ธˆ์š”์ผ: ์ง„๋‹จ ๋ณด๊ณ ์„œ ์ž‘์„ฑ ๋ฐ ๊ฐœ์„  ์šฐ์„ ์ˆœ์œ„ ๊ฒฐ์ •
601
-
602
- **2๋‹จ๊ณ„: ๋ฐ์ดํ„ฐ ํ’ˆ์งˆ ๊ฐœ์„  (2-3์ฃผ์ฐจ)**
603
- - ๋ฐ์ดํ„ฐ ์ •์ œ ํŒŒ์ดํ”„๋ผ์ธ ๊ตฌ์ถ•
604
- * ์กฐ์‚ฌ ๊ฒฐ๊ณผ์˜ "80% ๊ทœ์น™" ์ ์šฉ: ๋ˆ„๋ฝ๊ฐ’, ์ด์ƒ์น˜, ๋ ˆ์ด๋ธ” ์˜ค๋ฅ˜ ์ฒ˜๋ฆฌ
605
- * ์ฝ”๋“œ ์˜ˆ์‹œ: `data_quality_pipeline.py` ๊ตฌํ˜„
606
- - ๋ฐ์ดํ„ฐ ์ฆ๊ฐ• ๊ตฌํ˜„
607
- * ์ตœ์‹  ๊ธฐ๋ฒ• ์ ์šฉ: MixUp, CutMix, AutoAugment
608
- * ๊ฒ€์ฆ ๋ฐ์ดํ„ฐ์…‹์œผ๋กœ ํšจ๊ณผ ์ธก์ • (๋ชฉํ‘œ: 15% ์„ฑ๋Šฅ ํ–ฅ์ƒ)
609
-
610
- **3๋‹จ๊ณ„: ๋ชจ๋ธ ์ตœ์ ํ™” ๊ตฌํ˜„ (4-6์ฃผ์ฐจ)**
611
- - Knowledge Distillation ๊ตฌํ˜„
612
- * Teacher ๋ชจ๋ธ: ํ˜„์žฌ ๋Œ€๊ทœ๋ชจ ๋ชจ๋ธ
613
- * Student ๋ชจ๋ธ: 90% ์ž‘์€ ํฌ๊ธฐ ๋ชฉํ‘œ (์กฐ์‚ฌ ๊ฒฐ๊ณผ ๊ธฐ๋ฐ˜)
614
- * ๊ตฌํ˜„ ํ”„๋ ˆ์ž„์›Œํฌ: PyTorch/TensorFlow""",
615
-
616
- "supervisor_review": """์‹คํ–‰์ž AI์˜ ๊ณ„ํš์„ ๊ฒ€ํ† ํ•œ ๊ฒฐ๊ณผ, ์กฐ์‚ฌ ๋‚ด์šฉ์ด ์ž˜ ๋ฐ˜์˜๋˜์—ˆ์Šต๋‹ˆ๋‹ค. ๋‹ค์Œ๊ณผ ๊ฐ™์€ ๊ฐœ์„ ์‚ฌํ•ญ์„ ์ œ์•ˆํ•ฉ๋‹ˆ๋‹ค.
617
-
618
- **๊ฐ•์ **
619
- - ์กฐ์‚ฌ๋œ ์‚ฌ๋ก€๋“ค(Netflix, Tesla, OpenAI)์ด ๊ฐ ๋‹จ๊ณ„์— ์ ์ ˆํžˆ ํ™œ์šฉ๋จ
620
- - ๊ตฌ์ฒด์ ์ธ ๋„๊ตฌ์™€ ๊ธฐ๋ฒ•์ด ๋ช…์‹œ๋˜์–ด ์‹คํ–‰ ๊ฐ€๋Šฅ์„ฑ์ด ๋†’์Œ
621
- - ์ธก์ • ๊ฐ€๋Šฅํ•œ ๋ชฉํ‘œ๊ฐ€ ์กฐ์‚ฌ ๊ฒฐ๊ณผ๋ฅผ ๊ธฐ๋ฐ˜์œผ๋กœ ์„ค์ •๋จ
622
-
623
- **๊ฐœ์„  ํ•„์š”์‚ฌํ•ญ**
624
- 1. **๋ฆฌ์Šคํฌ ๊ด€๋ฆฌ ๊ฐ•ํ™”**
625
- - ๊ฐ ๋‹จ๊ณ„๋ณ„ ์‹คํŒจ ์‹œ๋‚˜๋ฆฌ์˜ค์™€ ๋Œ€์‘ ๋ฐฉ์•ˆ ์ถ”๊ฐ€ ํ•„์š”
626
- - ๊ธฐ์ˆ ์  ๋ฌธ์ œ ๋ฐœ์ƒ ์‹œ ๋ฐฑ์—… ๊ณ„ํš ์ˆ˜๋ฆฝ
627
-
628
- 2. **๋น„์šฉ ๋ถ„์„ ๊ตฌ์ฒดํ™”**
629
- - OpenAI ์‚ฌ๋ก€์˜ 70% ์ ˆ๊ฐ์„ ์œ„ํ•œ ๊ตฌ์ฒด์ ์ธ ๋น„์šฉ ๊ณ„์‚ฐ
630
- - ROI ๋ถ„์„ ๋ฐ ํˆฌ์ž ๋Œ€๋น„ ํšจ๊ณผ ์ธก์ • ๋ฐฉ๋ฒ•
631
-
632
- **์ถ”๊ฐ€ ๊ถŒ์žฅ์‚ฌํ•ญ**
633
- - ์ตœ์‹  ์—ฐ๊ตฌ ๋™ํ–ฅ ๋ชจ๋‹ˆํ„ฐ๋ง ์ฒด๊ณ„ ๊ตฌ์ถ•
634
- - ๊ฒฝ์Ÿ์‚ฌ ๋ฒค์น˜๋งˆํ‚น์„ ์œ„ํ•œ ์ •๊ธฐ์ ์ธ ์กฐ์‚ฌ ํ”„๋กœ์„ธ์Šค""",
635
-
636
- "executor_final": """๊ฐ๋…์ž AI์˜ ํ”ผ๋“œ๋ฐฑ์„ ์™„์ „ํžˆ ๋ฐ˜์˜ํ•˜์—ฌ ์ตœ์ข… ์‹คํ–‰ ๋ณด๊ณ ์„œ๋ฅผ ์ž‘์„ฑํ•ฉ๋‹ˆ๋‹ค.
637
-
638
- # ๐ŸŽฏ ๊ธฐ๊ณ„ํ•™์Šต ๋ชจ๋ธ ์„ฑ๋Šฅ ํ–ฅ์ƒ ์ตœ์ข… ์‹คํ–‰ ๋ณด๊ณ ์„œ
639
-
640
- ## ๐Ÿ“‹ Executive Summary
641
-
642
- ๋ณธ ๋ณด๊ณ ์„œ๋Š” ์›น ๊ฒ€์ƒ‰์„ ํ†ตํ•ด ์ˆ˜์ง‘๋œ ์ตœ์‹  ์‚ฌ๋ก€์™€ ๊ฐ๋…์ž AI์˜ ์ „๋žต์  ์ง€์นจ์„ ๋ฐ”ํƒ•์œผ๋กœ, 8์ฃผ๊ฐ„์˜ ์ฒด๊ณ„์ ์ธ ๋ชจ๋ธ ์ตœ์ ํ™” ํ”„๋กœ์ ํŠธ๋ฅผ ์ œ์‹œํ•ฉ๋‹ˆ๋‹ค.
643
-
644
- ### ๐ŸŽฏ ๋ชฉํ‘œ ๋‹ฌ์„ฑ ์ง€ํ‘œ
645
-
646
- | ์ง€ํ‘œ | ํ˜„์žฌ | ๋ชฉํ‘œ | ๊ฐœ์„ ์œจ |
647
- |------|------|------|--------|
648
- | ๋ชจ๋ธ ํฌ๊ธฐ | 2.5GB | 250MB | 90% ๊ฐ์†Œ |
649
- | ์ถ”๋ก  ์†๋„ | 45ms | 4.5ms | 10๋ฐฐ ํ–ฅ์ƒ |
650
- | ์šด์˜ ๋น„์šฉ | $2,000/์›” | $600/์›” | 70% ์ ˆ๊ฐ |
651
- | ์ •ํ™•๋„ | 92% | 90.5% | 1.5% ์†์‹ค |
652
-
653
- ## ๐Ÿ“Š 1๋‹จ๊ณ„: ํ˜„์žฌ ๋ชจ๋ธ ์ง„๋‹จ ๋ฐ ๋ฒ ์ด์Šค๋ผ์ธ ์„ค์ • (1์ฃผ์ฐจ)
654
-
655
- ### ์‹คํ–‰ ๊ณ„ํš
656
-
657
- **์›”-ํ™”์š”์ผ: ์„ฑ๋Šฅ ๋ฉ”ํŠธ๋ฆญ ์ˆ˜์ง‘**
658
- - MLflow๋ฅผ ํ†ตํ•œ ํ˜„์žฌ ๋ชจ๋ธ ์ „์ฒด ๋ถ„์„
659
- - Netflix ์‚ฌ๋ก€ ๊ธฐ๋ฐ˜ ํ•ต์‹ฌ ์ง€ํ‘œ:
660
- - ์ •ํ™•๋„: 92%
661
- - ์ง€์—ฐ์‹œ๊ฐ„: 45ms
662
- - ์ฒ˜๋ฆฌ๋Ÿ‰: 1,000 req/s
663
- - GPU ๋ฉ”๋ชจ๋ฆฌ: 8GB
664
-
665
- **์ˆ˜-๋ชฉ์š”์ผ: AutoML ์ดˆ๊ธฐ ํƒ์ƒ‰**
666
- ```python
667
- # Optuna ํ•˜์ดํผํŒŒ๋ผ๋ฏธํ„ฐ ์ตœ์ ํ™” ์„ค์ •
668
- study = optuna.create_study(direction="maximize")
669
- study.optimize(objective, n_trials=200)
670
-
671
- # Ray Tune ๋ถ„์‚ฐ ํ•™์Šต ์„ค์ •
672
- analysis = tune.run(
673
- train_model,
674
- config=search_space,
675
- num_samples=50,
676
- resources_per_trial={"gpu": 1}
677
- )
678
- ```
679
-
680
- ### ์˜ˆ์ƒ ์‚ฐ์ถœ๋ฌผ
681
- - โœ… ์ƒ์„ธ ์„ฑ๋Šฅ ๋ฒ ์ด์Šค๋ผ์ธ ๋ฌธ์„œ
682
- - โœ… ๊ฐœ์„  ๊ธฐํšŒ ์šฐ์„ ์ˆœ์œ„ ๋งคํŠธ๋ฆญ์Šค
683
- - โœ… ๋ฆฌ์Šคํฌ ๋ ˆ์ง€์Šคํ„ฐ
684
-
685
- ## ๐Ÿ“Š 2๋‹จ๊ณ„: ๋ฐ์ดํ„ฐ ํ’ˆ์งˆ ๊ฐœ์„  (2-3์ฃผ์ฐจ)
686
-
687
- ### ์‹คํ–‰ ๊ณ„ํš
688
-
689
- **๋ฐ์ดํ„ฐ ์ •์ œ ํŒŒ์ดํ”„๋ผ์ธ ๊ตฌ์ถ•**
690
-
691
- > ์กฐ์‚ฌ ๊ฒฐ๊ณผ์˜ "80% ๊ทœ์น™" ์ ์šฉ: ๋ฐ์ดํ„ฐ ํ’ˆ์งˆ์ด ์„ฑ๋Šฅ์˜ 80%๋ฅผ ๊ฒฐ์ •
692
-
693
- ```python
694
- class DataQualityPipeline:
695
- def __init__(self):
696
- self.validators = [
697
- MissingValueHandler(threshold=0.05),
698
- OutlierDetector(method='isolation_forest'),
699
- LabelConsistencyChecker(),
700
- DataDriftMonitor()
701
- ]
702
-
703
- def process(self, data):
704
- for validator in self.validators:
705
- data = validator.transform(data)
706
- self.log_metrics(validator.get_stats())
707
- return data
708
- ```
709
-
710
- **๊ณ ๊ธ‰ ๋ฐ์ดํ„ฐ ์ฆ๊ฐ• ๊ธฐ๋ฒ•**
711
- - **MixUp**: 15% ์ •ํ™•๋„ ํ–ฅ์ƒ ์˜ˆ์ƒ
712
- - **CutMix**: ๊ฒฝ๊ณ„ ๊ฒ€์ถœ ์„ฑ๋Šฅ 20% ๊ฐœ์„ 
713
- - **AutoAugment**: ์ž๋™ ์ตœ์  ์ฆ๊ฐ• ์ •์ฑ… ํƒ์ƒ‰
714
-
715
- ### ๋ฆฌ์Šคํฌ ๋Œ€์‘ ์ „๋žต
716
-
717
- | ๋ฆฌ์Šคํฌ | ํ™•๋ฅ  | ์˜ํ–ฅ๋„ | ๋Œ€์‘ ๋ฐฉ์•ˆ |
718
- |--------|------|--------|-----------|
719
- | ๋ฐ์ดํ„ฐ ํ’ˆ์งˆ ์ €ํ•˜ | ์ค‘๊ฐ„ | ๋†’์Œ | ๋กค๋ฐฑ ๋ฉ”์ปค๋‹ˆ์ฆ˜ ๊ตฌํ˜„ |
720
- | ์ฆ๊ฐ• ๊ณผ์ ํ•ฉ | ๋‚ฎ์Œ | ์ค‘๊ฐ„ | ๊ฒ€์ฆ์…‹ ๋ถ„๋ฆฌ ๋ฐ ๊ต์ฐจ ๊ฒ€์ฆ |
721
- | ์ฒ˜๋ฆฌ ์‹œ๊ฐ„ ์ฆ๊ฐ€ | ๋†’์Œ | ๋‚ฎ์Œ | ๋ณ‘๋ ฌ ์ฒ˜๋ฆฌ ํŒŒ์ดํ”„๋ผ์ธ |
722
-
723
- ## ๐Ÿ“Š 3๋‹จ๊ณ„: ๋ชจ๋ธ ์ตœ์ ํ™” ๊ตฌํ˜„ (4-6์ฃผ์ฐจ)
724
-
725
- ### Knowledge Distillation ์ƒ์„ธ ๊ณ„ํš
726
-
727
- **Teacher-Student ์•„ํ‚คํ…์ฒ˜**
728
- - Teacher ๋ชจ๋ธ: ํ˜„์žฌ 2.5GB ๋ชจ๋ธ
729
- - Student ๋ชจ๋ธ ์ŠคํŽ™:
730
- - ํŒŒ๋ผ๋ฏธํ„ฐ: 250M โ†’ 25M (90% ๊ฐ์†Œ)
731
- - ๋ ˆ์ด์–ด: 24 โ†’ 6
732
- - Hidden dimension: 1024 โ†’ 256
733
-
734
- **ํ›ˆ๋ จ ์ „๋žต**
735
- ```python
736
- distillation_config = {
737
- "temperature": 5.0,
738
- "alpha": 0.7, # KD loss weight
739
- "beta": 0.3, # Original loss weight
740
- "epochs": 50,
741
- "learning_rate": 1e-4,
742
- "batch_size": 128
743
- }
744
- ```
745
-
746
- ### Pruning & Quantization
747
-
748
- **๊ตฌ์กฐ์  Pruning ๊ณ„ํš**
749
- 1. Magnitude ๊ธฐ๋ฐ˜ ์ค‘์š”๋„ ํ‰๊ฐ€
750
- 2. 50% ์ฑ„๋„ ์ œ๊ฑฐ
751
- 3. Fine-tuning: 10 ์—ํญ
752
- 4. ์„ฑ๋Šฅ ๊ฒ€์ฆ ๋ฐ ๋ฐ˜๋ณต
753
-
754
- **INT8 Quantization**
755
- - Post-training quantization ์ ์šฉ
756
- - Calibration dataset: 1,000 ์ƒ˜ํ”Œ
757
- - ์˜ˆ์ƒ ์†๋„ ํ–ฅ์ƒ: 4๋ฐฐ
758
-
759
- ## ๐Ÿ“Š 4๋‹จ๊ณ„: ์„ฑ๊ณผ ๊ฒ€์ฆ ๋ฐ ํ”„๋กœ๋•์…˜ ๋ฐฐํฌ (7-8์ฃผ์ฐจ)
760
-
761
- ### ์ข…ํ•ฉ ์„ฑ๋Šฅ ๊ฒ€์ฆ
762
-
763
- **์„ฑ๋Šฅ ์ง€ํ‘œ ๋‹ฌ์„ฑ๋„ ๊ฒ€์ฆ**
764
-
765
- | ํ…Œ์ŠคํŠธ ํ•ญ๋ชฉ | ๋ฐฉ๋ฒ• | ์„ฑ๊ณต ๊ธฐ์ค€ | ๊ฒฐ๊ณผ |
766
- |-------------|------|-----------|------|
767
- | ์ถ”๋ก  ์†๋„ | A/B ํ…Œ์ŠคํŠธ | <5ms | โœ… 4.5ms |
768
- | ์ •ํ™•๋„ | ํ™€๋“œ์•„์›ƒ ๊ฒ€์ฆ | >90% | โœ… 90.5% |
769
- | ๋ฉ”๋ชจ๋ฆฌ ์‚ฌ์šฉ๋Ÿ‰ | ํ”„๋กœํŒŒ์ผ๋ง | <300MB | โœ… 250MB |
770
- | ์ฒ˜๋ฆฌ๋Ÿ‰ | ๋ถ€ํ•˜ ํ…Œ์ŠคํŠธ | >5000 req/s | โœ… 6000 req/s |
771
-
772
- ### ๋‹จ๊ณ„์  ๋ฐฐํฌ ์ „๋žต
773
-
774
- ```mermaid
775
- graph LR
776
- A[1% ํŠธ๋ž˜ํ”ฝ] --> B[10% ํŠธ๋ž˜ํ”ฝ]
777
- B --> C[50% ํŠธ๋ž˜ํ”ฝ]
778
- C --> D[100% ์ „ํ™˜]
779
-
780
- A -->|Day 1-3| B
781
- B -->|Day 4-7| C
782
- C -->|Day 8-14| D
783
- ```
784
-
785
- ### ๋ชจ๋‹ˆํ„ฐ๋ง ๋Œ€์‹œ๋ณด๋“œ
786
-
787
- **ํ•ต์‹ฌ ๋ฉ”ํŠธ๋ฆญ**
788
- - ๐Ÿ”ด P99 ์ง€์—ฐ์‹œ๊ฐ„: < 10ms
789
- - ๐ŸŸก ์˜ค๋ฅ˜์œจ: < 0.1%
790
- - ๐ŸŸข CPU/GPU ์‚ฌ์šฉ๋ฅ : < 80%
791
-
792
- ## ๐Ÿ’ฐ ROI ๋ถ„์„
793
-
794
- ### ๋น„์šฉ-ํšจ์ต ๋ถ„์„
795
-
796
- | ํ•ญ๋ชฉ | ๋น„์šฉ/ํšจ์ต | ์„ธ๋ถ€ ๋‚ด์—ญ |
797
- |------|-----------|-----------|
798
- | **์ดˆ๊ธฐ ํˆฌ์ž** | $50,000 | ์ธ๊ฑด๋น„ + ์ธํ”„๋ผ |
799
- | **์›”๊ฐ„ ์ ˆ๊ฐ์•ก** | $14,000 | ์„œ๋ฒ„ + GPU ๋น„์šฉ |
800
- | **ํˆฌ์ž ํšŒ์ˆ˜ ๊ธฐ๊ฐ„** | 3.6๊ฐœ์›” | - |
801
- | **1๋…„ ์ˆœ์ด์ต** | $118,000 | ์ ˆ๊ฐ์•ก - ํˆฌ์ž๋น„ |
802
-
803
- ### ์žฅ๊ธฐ ํšจ๊ณผ
804
- - ๐Ÿš€ ํ™•์žฅ์„ฑ 10๋ฐฐ ํ–ฅ์ƒ
805
- - ๐Ÿ’ก ์‹ ๊ทœ ์„œ๋น„์Šค ์ถœ์‹œ ๊ฐ€๋Šฅ
806
- - ๐ŸŒ ํƒ„์†Œ ๋ฐฐ์ถœ 70% ๊ฐ์†Œ
807
-
808
- ## ๐Ÿ“ˆ ์ง€์†์  ๊ฐœ์„  ๊ณ„ํš
809
-
810
- ### ์›”๊ฐ„ ๋ชจ๋‹ˆํ„ฐ๋ง
811
- - ์„ฑ๋Šฅ ์ง€ํ‘œ ๋ฆฌ๋ทฐ
812
- - ์‚ฌ์šฉ์ž ํ”ผ๋“œ๋ฐฑ ๋ถ„์„
813
- - ๊ธฐ์ˆ  ๋ถ€์ฑ„ ๊ด€๋ฆฌ
814
-
815
- ### ๋ถ„๊ธฐ๋ณ„ ์—…๋ฐ์ดํŠธ
816
- - ๋ชจ๋ธ ์žฌํ›ˆ๋ จ
817
- - ์ƒˆ๋กœ์šด ์ตœ์ ํ™” ๊ธฐ๋ฒ• ๋„์ž…
818
- - ๋ฒค์น˜๋งˆํฌ ์—…๋ฐ์ดํŠธ
819
-
820
- ### ์ฐจ๊ธฐ ํ”„๋กœ์ ํŠธ ๋กœ๋“œ๋งต
821
-
822
- | ๋ถ„๊ธฐ | ํ”„๋กœ์ ํŠธ | ์˜ˆ์ƒ ํšจ๊ณผ |
823
- |------|----------|-----------|
824
- | Q2 2025 | ์—ฃ์ง€ ๋””๋ฐ”์ด์Šค ๋ฐฐํฌ | ์ง€์—ฐ์‹œ๊ฐ„ 90% ๊ฐ์†Œ |
825
- | Q3 2025 | ์—ฐํ•ฉ ํ•™์Šต ๋„์ž… | ํ”„๋ผ์ด๋ฒ„์‹œ ๊ฐ•ํ™” |
826
- | Q4 2025 | AutoML ํ”Œ๋žซํผ ๊ตฌ์ถ• | ๊ฐœ๋ฐœ ์†๋„ 5๋ฐฐ ํ–ฅ์ƒ |
827
-
828
- ## ๐Ÿ“ ๊ฒฐ๋ก  ๋ฐ ๊ถŒ๊ณ ์‚ฌํ•ญ
829
-
830
- ### ํ•ต์‹ฌ ์„ฑ๊ณผ
831
- - โœ… ๋ชจ๋“  ๋ชฉํ‘œ ์ง€ํ‘œ ๋‹ฌ์„ฑ
832
- - โœ… ์˜ˆ์‚ฐ ๋‚ด ํ”„๋กœ์ ํŠธ ์™„๋ฃŒ
833
- - โœ… ๋ฆฌ์Šคํฌ ์„ฑ๊ณต์  ๊ด€๋ฆฌ
834
-
835
- ### ํ–ฅํ›„ ๊ถŒ๊ณ ์‚ฌํ•ญ
836
- 1. **์ฆ‰์‹œ ์‹คํ–‰**: 1-2๋‹จ๊ณ„ ์ฆ‰์‹œ ์ฐฉ์ˆ˜
837
- 2. **ํŒ€ ๊ตฌ์„ฑ**: ML์—”์ง€๋‹ˆ์–ด 2๋ช…, DevOps 1๋ช… ํ•„์ˆ˜
838
- 3. **์ธํ”„๋ผ ์ค€๋น„**: GPU ์„œ๋ฒ„ ์‚ฌ์ „ ํ™•๋ณด
839
- 4. **๋ณ€๊ฒฝ ๊ด€๋ฆฌ**: ์ดํ•ด๊ด€๊ณ„์ž ์‚ฌ์ „ ๊ต์œก
840
-
841
- > ๋ณธ ํ”„๋กœ์ ํŠธ๋Š” ์ตœ์‹  ์—ฐ๊ตฌ ๊ฒฐ๊ณผ์™€ ์—…๊ณ„ ๋ฒ ์ŠคํŠธ ํ”„๋ž™ํ‹ฐ์Šค๋ฅผ ์ ์šฉํ•˜์—ฌ, 8์ฃผ ๋งŒ์— ๋ชจ๋ธ ์„ฑ๋Šฅ์„ ํš๊ธฐ์ ์œผ๋กœ ๊ฐœ์„ ํ•˜๊ณ  ์šด์˜ ๋น„์šฉ์„ 70% ์ ˆ๊ฐํ•˜๋Š” ์„ฑ๊ณผ๋ฅผ ๋‹ฌ์„ฑํ•  ๊ฒƒ์œผ๋กœ ํ™•์‹ ํ•ฉ๋‹ˆ๋‹ค.
842
-
843
- ---
844
- *์ž‘์„ฑ์ผ: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}*
845
- *์ž‘์„ฑ์ž: ํ˜‘๋ ฅ์  AI ์‹œ์Šคํ…œ (๊ฐ๋…์ž, ์กฐ์‚ฌ์ž, ์‹คํ–‰์ž, ํ‰๊ฐ€์ž AI)*""",
846
-
847
- "evaluator": """## ๐Ÿ“Š ์ „์ฒด ํ˜‘๋ ฅ ๊ณผ์ • ํ‰๊ฐ€ ๋ณด๊ณ ์„œ
848
-
849
- ### 1๏ธโƒฃ ํ’ˆ์งˆ ํ‰๊ฐ€ (10์  ๋งŒ์ )
850
-
851
- | AI ์—ญํ•  | ์ ์ˆ˜ | ํ‰๊ฐ€ ๋‚ด์šฉ |
852
- |---------|------|-----------|
853
- | **๊ฐ๋…์ž AI** | 9.5/10 | ๊ฑฐ์‹œ์  ๊ด€์ ์—์„œ ์ฒด๊ณ„์ ์ธ ๋ถ„์„๊ณผ ๋ฐฉํ–ฅ ์ œ์‹œ |
854
- | **์กฐ์‚ฌ์ž AI** | 9.0/10 | ์›น ๊ฒ€์ƒ‰์„ ํ†ตํ•œ ์ตœ์‹  ์ •๋ณด ์ˆ˜์ง‘ ์šฐ์ˆ˜ |
855
- | **์‹คํ–‰์ž AI** | 8.5/10 | ์กฐ์‚ฌ ๋‚ด์šฉ์„ ์ž˜ ํ™œ์šฉํ•œ ๊ตฌ์ฒด์  ๊ณ„ํš ์ˆ˜๋ฆฝ |
856
-
857
- **์ƒ์„ธ ํ‰๊ฐ€:**
858
- - โœ… ๊ฐ๋…์ž AI: ๋‹จ๊ณ„๋ณ„ ๊ตฌ์ฒด์ ์ธ ์ง€์‹œ์‚ฌํ•ญ ์ œ๊ณต์ด ํƒ์›”ํ•จ
859
- - โœ… ์กฐ์‚ฌ์ž AI: ์‹ ๋ขฐ๋„ ํ‰๊ฐ€์™€ ๋ชจ์ˆœ ๊ฐ์ง€ ๊ธฐ๋Šฅ์ด ํšจ๊ณผ์ 
860
- - โœ… ์‹คํ–‰์ž AI: ์‹คํ–‰ ๊ฐ€๋Šฅํ•œ ๋‹จ๊ณ„๋ณ„ ์ ‘๊ทผ๋ฒ• ์ œ์‹œ ์šฐ์ˆ˜
861
-
862
- ### 2๏ธโƒฃ ํ˜‘๋ ฅ ํšจ๊ณผ์„ฑ ํ‰๊ฐ€
863
-
864
- **๊ฐ•์ :**
865
- - ๐Ÿ”„ AI ๊ฐ„ ์—ญํ•  ๋ถ„๋‹ด์ด ๋ช…ํ™•ํ•˜๊ณ  ์ƒํ˜ธ๋ณด์™„์ 
866
- - ๐Ÿ“Š ์ •๋ณด ํ๋ฆ„์ด ์ฒด๊ณ„์ ์ด๊ณ  ์ผ๊ด€์„ฑ ์žˆ์Œ
867
- - โœจ ํ”ผ๋“œ๋ฐฑ ๋ฐ˜์˜์ด ํšจ๊ณผ์ ์œผ๋กœ ์ด๋ฃจ์–ด์ง
868
-
869
- **๊ฐœ์„ ์ :**
870
- - โšก ์‹ค์‹œ๊ฐ„ ์ƒํ˜ธ์ž‘์šฉ ๋ฉ”์ปค๋‹ˆ์ฆ˜ ์ถ”๊ฐ€ ๊ณ ๋ ค
871
- - ๐Ÿ“‹ ์ค‘๊ฐ„ ์ ๊ฒ€ ๋‹จ๊ณ„ ๋„์ž… ํ•„์š”
872
-
873
- ### 3๏ธโƒฃ ์ •๋ณด ํ™œ์šฉ๋„ ํ‰๊ฐ€
874
-
875
- | ํ‰๊ฐ€ ํ•ญ๋ชฉ | ๋‹ฌ์„ฑ๋„ | ์„ธ๋ถ€ ๋‚ด์šฉ |
876
- |-----------|--------|-----------|
877
- | ๊ฒ€์ƒ‰ ๋ฒ”์œ„ | 95% | 20๊ฐœ ์ด์ƒ์˜ ์›น ์†Œ์Šค์—์„œ ์ •๋ณด ์ˆ˜์ง‘ |
878
- | ์‹ ๋ขฐ๋„ ํ‰๊ฐ€ | 90% | 0.7 ์ด์ƒ์˜ ์‹ ๋ขฐ๋„ ์†Œ์Šค ์šฐ์„  ํ™œ์šฉ |
879
- | ์ •๋ณด ํ†ตํ•ฉ | 85% | ๋‹ค์–‘ํ•œ ๊ด€์ ์˜ ์ •๋ณด๋ฅผ ๊ท ํ˜•์žˆ๊ฒŒ ํ†ตํ•ฉ |
880
-
881
- **์šฐ์ˆ˜ํ•œ ์ :**
882
- - โœ… ์‹ ๋ขฐ๋„ ๊ธฐ๋ฐ˜ ์ •๋ณด ์„ ๋ณ„ ํšจ๊ณผ์ 
883
- - โœ… ์‹ค์ œ ๊ธฐ์—… ์‚ฌ๋ก€ ์ ์ ˆํžˆ ํ™œ์šฉ
884
- - โœ… ์ตœ์‹  ํŠธ๋ Œ๋“œ ๋ฐ˜์˜ ์šฐ์ˆ˜
885
-
886
- **๋ณด์™„ ํ•„์š”:**
887
- - ๐Ÿ“š ํ•™์ˆ  ๋…ผ๋ฌธ ๋“ฑ ๋” ๊นŠ์ด ์žˆ๋Š” ์ž๋ฃŒ ํ™œ์šฉ
888
- - ๐ŸŒ ์ง€์—ญ๋ณ„/์‚ฐ์—…๋ณ„ ํŠน์„ฑ ๊ณ ๋ ค ํ•„์š”
889
-
890
- ### 4๏ธโƒฃ ํ–ฅํ›„ ๊ฐœ์„  ๋ฐฉํ–ฅ
891
-
892
- #### 1. **์‹ค์‹œ๊ฐ„ ํ˜‘์—… ๊ฐ•ํ™”**
893
- - AI ๊ฐ„ ์ค‘๊ฐ„ ์ฒดํฌํฌ์ธํŠธ ์ถ”๊ฐ€
894
- - ๋™์  ์—ญํ•  ์กฐ์ • ๋ฉ”์ปค๋‹ˆ์ฆ˜ ๋„์ž…
895
-
896
- #### 2. **์ •๋ณด ๊ฒ€์ฆ ๊ฐ•ํ™”**
897
- - ๊ต์ฐจ ๊ฒ€์ฆ ํ”„๋กœ์„ธ์Šค ์ถ”๊ฐ€
898
- - ์ „๋ฌธ๊ฐ€ ๊ฒ€ํ†  ๋‹จ๊ณ„ ๊ณ ๋ ค
899
-
900
- #### 3. **๋งž์ถคํ™” ๊ฐ•ํ™”**
901
- - ์‚ฌ์šฉ์ž ์ปจํ…์ŠคํŠธ ๋” ๊นŠ์ด ๋ฐ˜์˜
902
- - ์‚ฐ์—…๋ณ„/๊ทœ๋ชจ๋ณ„ ๋งž์ถค ์ „๋žต ์ œ๊ณต
903
-
904
- ### 5๏ธโƒฃ ์ตœ์ข… ํ‰์ : โญโญโญโญโญ **9.0/10**
905
-
906
- > **์ข…ํ•ฉ ํ‰๊ฐ€:**
907
- >
908
- > ๋ณธ ํ˜‘๋ ฅ ์‹œ์Šคํ…œ์€ ๊ฐ AI์˜ ์ „๋ฌธ์„ฑ์„ ํšจ๊ณผ์ ์œผ๋กœ ํ™œ์šฉํ•˜์—ฌ ์‚ฌ์šฉ์ž ์งˆ๋ฌธ์— ๋Œ€ํ•œ ์ข…ํ•ฉ์ ์ด๊ณ  ์‹คํ–‰ ๊ฐ€๋Šฅํ•œ ๋‹ต๋ณ€์„ ์ œ๊ณตํ–ˆ์Šต๋‹ˆ๋‹ค. ํŠนํžˆ ์›น ๊ฒ€์ƒ‰์„ ํ†ตํ•œ ์ตœ์‹  ์ •๋ณด ํ™œ์šฉ๊ณผ ๋‹จ๊ณ„์  ํ”ผ๋“œ๋ฐฑ ๋ฐ˜์˜์ด ์šฐ์ˆ˜ํ–ˆ์Šต๋‹ˆ๋‹ค.
909
- >
910
- > ํ–ฅํ›„ ์‹ค์‹œ๊ฐ„ ํ˜‘์—…๊ณผ ๋งž์ถคํ™”๋ฅผ ๋”์šฑ ๊ฐ•ํ™”ํ•œ๋‹ค๋ฉด ๋”์šฑ ๋›ฐ์–ด๋‚œ ์„ฑ๊ณผ๋ฅผ ๋‹ฌ์„ฑํ•  ์ˆ˜ ์žˆ์„ ๊ฒƒ์ž…๋‹ˆ๋‹ค.
911
-
912
- ---
913
-
914
- **ํ‰๊ฐ€ ์™„๋ฃŒ ์‹œ๊ฐ**: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}"""
915
- }
916
-
917
- # ํ”„๋กฌํ”„ํŠธ ๋‚ด์šฉ์— ๋”ฐ๋ผ ์ ์ ˆํ•œ ์‘๋‹ต ์„ ํƒ
918
- if role == "supervisor" and "์กฐ์‚ฌ์ž AI๊ฐ€ ์ •๋ฆฌํ•œ" in messages[0]["content"]:
919
- response = test_responses["supervisor_execution"]
920
- elif role == "supervisor" and messages[0]["content"].find("์‹คํ–‰์ž AI์˜ ๋‹ต๋ณ€") > -1:
921
- response = test_responses["supervisor_review"]
922
- elif role == "supervisor":
923
- response = test_responses["supervisor_initial"]
924
- elif role == "researcher":
925
- response = test_responses["researcher"]
926
- elif role == "executor" and "์ตœ์ข… ๋ณด๊ณ ์„œ" in messages[0]["content"]:
927
- response = test_responses["executor_final"]
928
- elif role == "evaluator":
929
- response = test_responses["evaluator"]
930
- else:
931
- response = test_responses["executor"]
932
 
933
-            yield from self.simulate_streaming(response, role)
             return
 
-        # Actual API call
         try:
-            system_prompts = {
-                "supervisor": "You are a supervisor AI that analyzes and guides from a macro perspective.",
-                "researcher": "You are a researcher AI that investigates information and organizes it systematically.",
-                "executor": "You are an executor AI that implements the details.",
-                "evaluator": "You are an evaluator AI that evaluates the entire collaboration process and its results."
-            }
-
-            full_messages = [
-                {"role": "system", "content": system_prompts.get(role, "")},
-                *messages
-            ]
-
             payload = {
                 "model": self.model_id,
-                "messages": full_messages,
                 "max_tokens": 4096,
                 "temperature": 0.7,
-                "top_p": 0.8,
-                "stream": True,
-                "stream_options": {"include_usage": True}
             }
 
-            logger.info(f"Starting streaming API call - Role: {role}")
-
             response = requests.post(
                 self.api_url,
                 headers=self.create_headers(),
@@ -968,8 +459,7 @@ graph LR
             )
 
             if response.status_code != 200:
-                logger.error(f"API error: {response.status_code}")
-                yield f"❌ API error ({response.status_code}): {response.text[:200]}"
                 return
 
             for line in response.iter_lines():
@@ -988,638 +478,366 @@ graph LR
                 except json.JSONDecodeError:
                     continue
 
-        except requests.exceptions.Timeout:
-            yield "⏱️ The API call timed out. Please try again."
-        except requests.exceptions.ConnectionError:
-            yield "🔌 Cannot connect to the API server. Check your internet connection."
         except Exception as e:
             logger.error(f"Error during streaming: {str(e)}")
             yield f"❌ Error: {str(e)}"
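Note: the kept `for line in response.iter_lines():` loop parses an OpenAI-compatible SSE stream, but the chunk-handling lines are unchanged and therefore hidden by this hunk. The sketch below shows one plausible shape of that elided step, assuming the usual `data: {json}` framing with a `[DONE]` terminator; the actual unchanged lines may differ.

```python
# Sketch of the elided SSE parsing inside `for line in response.iter_lines():`
# (assumes OpenAI-compatible "data: ..." framing; hypothetical helper, not from the source file).
import json

def parse_sse_line(line: bytes):
    """Return the text delta carried by one stream line, or None if there is none."""
    decoded = line.decode("utf-8").strip()
    if not decoded.startswith("data: "):
        return None
    data = decoded[len("data: "):]
    if data == "[DONE]":          # end-of-stream marker
        return None
    try:
        chunk = json.loads(data)
    except json.JSONDecodeError:  # mirrors the `continue` kept in the hunk
        return None
    choices = chunk.get("choices") or []
    if not choices:
        return None
    return choices[0].get("delta", {}).get("content")
```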
 
999
  # ์‹œ์Šคํ…œ ์ธ์Šคํ„ด์Šค ์ƒ์„ฑ
1000
- llm_system = LLMCollaborativeSystem()
1001
 
1002
- # ๋‚ด๋ถ€ ํžˆ์Šคํ† ๋ฆฌ ๊ด€๋ฆฌ (UI์—๋Š” ํ‘œ์‹œํ•˜์ง€ ์•Š์Œ)
1003
- internal_history = []
1004
-
1005
- def process_query_streaming(user_query: str, llm_mode: str):
1006
- """์ŠคํŠธ๋ฆฌ๋ฐ์„ ์ง€์›ํ•˜๋Š” ์ฟผ๋ฆฌ ์ฒ˜๋ฆฌ"""
1007
- global internal_history
1008
-
1009
  if not user_query:
1010
- return "", "", "", "", "", "โŒ ์งˆ๋ฌธ์„ ์ž…๋ ฅํ•ด์ฃผ์„ธ์š”."
1011
 
1012
- # LLM ๋ชจ๋“œ ์„ค์ •
1013
- llm_system.set_llm_mode(llm_mode)
1014
 
1015
- conversation_log = []
1016
- all_responses = {"supervisor": [], "researcher": [], "executor": [], "evaluator": []}
 
1017
 
1018
  try:
1019
- # 1๋‹จ๊ณ„: ๊ฐ๋…์ž AI ์ดˆ๊ธฐ ๋ถ„์„ ๋ฐ ํ‚ค์›Œ๋“œ ์ถ”์ถœ
1020
- supervisor_prompt = llm_system.create_supervisor_initial_prompt(user_query)
1021
- supervisor_initial_response = ""
1022
 
1023
- supervisor_text = "[์ดˆ๊ธฐ ๋ถ„์„] ๐Ÿ”„ ์ƒ์„ฑ ์ค‘...\n"
1024
- for chunk in llm_system.call_llm_streaming(
1025
- [{"role": "user", "content": supervisor_prompt}],
1026
- "supervisor"
1027
  ):
1028
- supervisor_initial_response += chunk
1029
- supervisor_text = f"[์ดˆ๊ธฐ ๋ถ„์„] - {datetime.now().strftime('%H:%M:%S')}\n{supervisor_initial_response}"
1030
- yield supervisor_text, "", "", "", "", "๐Ÿ”„ ๊ฐ๋…์ž AI๊ฐ€ ๋ถ„์„ ์ค‘..."
1031
-
1032
- all_responses["supervisor"].append(supervisor_initial_response)
1033
-
1034
- # ํ‚ค์›Œ๋“œ ์ถ”์ถœ
1035
- keywords = llm_system.extract_keywords(supervisor_initial_response)
1036
- logger.info(f"์ถ”์ถœ๋œ ํ‚ค์›Œ๋“œ: {keywords}")
1037
 
1038
- # 2๋‹จ๊ณ„: ๋ธŒ๋ ˆ์ด๋ธŒ ๊ฒ€์ƒ‰ ์ˆ˜ํ–‰
1039
- researcher_text = "[์›น ๊ฒ€์ƒ‰] ๐Ÿ” ๊ฒ€์ƒ‰ ์ค‘...\n"
1040
- yield supervisor_text, researcher_text, "", "", "", "๐Ÿ” ์›น ๊ฒ€์ƒ‰ ์ˆ˜ํ–‰ ์ค‘..."
1041
 
1042
- search_results = {}
1043
- total_search_count = 0
 
 
1044
 
1045
- # ์›๋ž˜ ํ‚ค์›Œ๋“œ๋กœ ๊ฒ€์ƒ‰
1046
  for keyword in keywords:
1047
- results = llm_system.brave_search(keyword)
1048
  if results:
1049
  search_results[keyword] = results
1050
- total_search_count += len(results)
1051
- researcher_text += f"โœ“ '{keyword}' ๊ฒ€์ƒ‰ ์™„๋ฃŒ ({len(results)}๊ฐœ ๊ฒฐ๊ณผ)\n"
1052
- yield supervisor_text, researcher_text, "", "", "", f"๐Ÿ” '{keyword}' ๊ฒ€์ƒ‰ ์ค‘..."
1053
-
1054
- # ๋™์˜์–ด๋กœ ์ถ”๊ฐ€ ๊ฒ€์ƒ‰
1055
- synonyms = llm_system.generate_synonyms(keyword)
1056
- for synonym in synonyms:
1057
- syn_results = llm_system.brave_search(f"{keyword} {synonym}")
1058
- if syn_results:
1059
- search_results[f"{keyword} ({synonym})"] = syn_results
1060
- total_search_count += len(syn_results)
1061
- researcher_text += f"โœ“ ๋™์˜์–ด '{synonym}' ๊ฒ€์ƒ‰ ์™„๋ฃŒ ({len(syn_results)}๊ฐœ ๊ฒฐ๊ณผ)\n"
1062
- yield supervisor_text, researcher_text, "", "", "", f"๐Ÿ” ๋™์˜์–ด '{synonym}' ๊ฒ€์ƒ‰ ์ค‘..."
1063
-
1064
- researcher_text += f"\n๐Ÿ“Š ์ด {total_search_count}๊ฐœ์˜ ๊ฒ€์ƒ‰ ๊ฒฐ๊ณผ ์ˆ˜์ง‘ ์™„๋ฃŒ\n"
1065
-
1066
- # URL ์ฝ˜ํ…์ธ  ๊ฐ€์ ธ์˜ค๊ธฐ (์ƒ์œ„ 3๊ฐœ)
1067
- researcher_text += "\n[์ฝ˜ํ…์ธ  ๋ถ„์„] ๐Ÿ“– ์ฃผ์š” ์›นํŽ˜์ด์ง€ ๋‚ด์šฉ ๋ถ„์„ ์ค‘...\n"
1068
- yield supervisor_text, researcher_text, "", "", "", "๐Ÿ“– ์›นํŽ˜์ด์ง€ ๋‚ด์šฉ ๋ถ„์„ ์ค‘..."
1069
-
1070
- content_analyzed = 0
1071
- for keyword, results in search_results.items():
1072
- for result in results[:2]: # ๊ฐ ํ‚ค์›Œ๋“œ๋‹น ์ƒ์œ„ 2๊ฐœ๋งŒ
1073
- if content_analyzed >= 5: # ์ด 5๊ฐœ๊นŒ์ง€๋งŒ
1074
- break
1075
-
1076
- url = result.get('url', '')
1077
- if url and result.get('credibility_score', 0) >= 0.7:
1078
- content = llm_system.fetch_url_content(url)
1079
- if content:
1080
- result['content_preview'] = content[:500] # ๋ฏธ๋ฆฌ๋ณด๊ธฐ ์ €์žฅ
1081
- content_analyzed += 1
1082
- researcher_text += f"โœ“ ์ฝ˜ํ…์ธ  ๋ถ„์„ ์™„๋ฃŒ: {url[:50]}...\n"
1083
- yield supervisor_text, researcher_text, "", "", "", f"๐Ÿ“– ๋ถ„์„ ์ค‘: {url[:30]}..."
1084
 
1085
- # 3๋‹จ๊ณ„: ์กฐ์‚ฌ์ž AI๊ฐ€ ๊ฒ€์ƒ‰ ๊ฒฐ๊ณผ ์ •๋ฆฌ
1086
- researcher_prompt = llm_system.create_researcher_prompt(user_query, supervisor_initial_response, search_results)
1087
- researcher_response = ""
1088
 
1089
- researcher_text = "[์กฐ์‚ฌ ๊ฒฐ๊ณผ ์ •๋ฆฌ] ๐Ÿ”„ ์ƒ์„ฑ ์ค‘...\n"
1090
- for chunk in llm_system.call_llm_streaming(
1091
- [{"role": "user", "content": researcher_prompt}],
1092
- "researcher"
1093
  ):
1094
- researcher_response += chunk
1095
- researcher_text = f"[์กฐ์‚ฌ ๊ฒฐ๊ณผ ์ •๋ฆฌ] - {datetime.now().strftime('%H:%M:%S')}\n{researcher_response}"
1096
- yield supervisor_text, researcher_text, "", "", "", "๐Ÿ“ ์กฐ์‚ฌ์ž AI๊ฐ€ ์ •๋ฆฌ ์ค‘..."
1097
 
1098
- all_responses["researcher"].append(researcher_response)
1099
-
1100
- # 4๋‹จ๊ณ„: ํ‰๊ฐ€์ž AI๊ฐ€ ์กฐ์‚ฌ ๊ฒฐ๊ณผ ํ‰๊ฐ€
1101
- evaluator_research_prompt = f"""๋‹น์‹ ์€ ์ „์ฒด ํ˜‘๋ ฅ ๊ณผ์ •๊ณผ ๊ฒฐ๊ณผ๋ฅผ ํ‰๊ฐ€ํ•˜๋Š” ํ‰๊ฐ€์ž AI์ž…๋‹ˆ๋‹ค.
1102
-
1103
- ์‚ฌ์šฉ์ž ์งˆ๋ฌธ: {user_query}
1104
-
1105
- ๊ฐ๋…์ž AI์˜ ์ดˆ๊ธฐ ๋ถ„์„:
1106
- {supervisor_initial_response}
1107
-
1108
- ์กฐ์‚ฌ์ž AI์˜ ์กฐ์‚ฌ ๊ฒฐ๊ณผ:
1109
- {researcher_response}
1110
-
1111
- ์œ„ ์กฐ์‚ฌ ๊ฒฐ๊ณผ๋ฅผ ํ‰๊ฐ€ํ•˜์—ฌ:
1112
- 1. ์กฐ์‚ฌ์˜ ์ถฉ์‹ค๋„์™€ ์‹ ๋ขฐ์„ฑ์„ ํ‰๊ฐ€ํ•˜์„ธ์š”
1113
- 2. ๋ˆ„๋ฝ๋œ ์ค‘์š” ์ •๋ณด๊ฐ€ ์žˆ๋Š”์ง€ ํ™•์ธํ•˜์„ธ์š”
1114
- 3. ์กฐ์‚ฌ ๊ฒฐ๊ณผ์˜ ํ™œ์šฉ ๊ฐ€๋Šฅ์„ฑ์„ ํ‰๊ฐ€ํ•˜์„ธ์š”
1115
- 4. ๊ฐœ์„ ์ด ํ•„์š”ํ•œ ๋ถ€๋ถ„์„ ๊ตฌ์ฒด์ ์œผ๋กœ ์ œ์‹œํ•˜์„ธ์š”"""
1116
 
1117
- evaluator_research_response = ""
1118
- evaluator_text = "[์กฐ์‚ฌ ๊ฒฐ๊ณผ ํ‰๊ฐ€] ๐Ÿ”„ ํ‰๊ฐ€ ์ค‘...\n"
 
1119
 
1120
- for chunk in llm_system.call_llm_streaming(
1121
- [{"role": "user", "content": evaluator_research_prompt}],
1122
- "evaluator"
1123
  ):
1124
- evaluator_research_response += chunk
1125
- evaluator_text = f"[์กฐ์‚ฌ ๊ฒฐ๊ณผ ํ‰๊ฐ€] - {datetime.now().strftime('%H:%M:%S')}\n{evaluator_research_response}"
1126
- yield supervisor_text, researcher_text, "", evaluator_text, "", "๐Ÿ“Š ํ‰๊ฐ€์ž AI๊ฐ€ ์กฐ์‚ฌ ๊ฒฐ๊ณผ ํ‰๊ฐ€ ์ค‘..."
1127
 
1128
- all_responses["evaluator"].append(evaluator_research_response)
1129
-
1130
- # 5๋‹จ๊ณ„: ๊ฐ๋…์ž AI๊ฐ€ ํ‰๊ฐ€๋ฅผ ๋ฐ˜์˜ํ•œ ์‹คํ–‰ ์ง€์‹œ
1131
- supervisor_execution_prompt = f"""๋‹น์‹ ์€ ๊ฑฐ์‹œ์  ๊ด€์ ์—์„œ ๋ถ„์„ํ•˜๊ณ  ์ง€๋„ํ•˜๋Š” ๊ฐ๋…์ž AI์ž…๋‹ˆ๋‹ค.
1132
-
1133
- ์‚ฌ์šฉ์ž ์งˆ๋ฌธ: {user_query}
1134
-
1135
- ์กฐ์‚ฌ์ž AI๊ฐ€ ์ •๋ฆฌํ•œ ์กฐ์‚ฌ ๋‚ด์šฉ:
1136
- {researcher_response}
1137
-
1138
- ํ‰๊ฐ€์ž AI์˜ ์กฐ์‚ฌ ๊ฒฐ๊ณผ ํ‰๊ฐ€:
1139
- {evaluator_research_response}
1140
-
1141
- ์œ„ ์กฐ์‚ฌ ๋‚ด์šฉ๊ณผ ํ‰๊ฐ€๋ฅผ ๊ธฐ๋ฐ˜์œผ๋กœ ์‹คํ–‰์ž AI์—๊ฒŒ ์•„์ฃผ ๊ตฌ์ฒด์ ์ธ ์ง€์‹œ๋ฅผ ๋‚ด๋ ค์ฃผ์„ธ์š”:
1142
- 1. ํ‰๊ฐ€์ž์˜ ํ”ผ๋“œ๋ฐฑ์„ ๋ฐ˜์˜ํ•˜์—ฌ ์ง€์‹œ๋ฅผ ๊ฐœ์„ ํ•˜์„ธ์š”
1143
- 2. ์กฐ์‚ฌ๋œ ์ •๋ณด๋ฅผ ์–ด๋–ป๊ฒŒ ํ™œ์šฉํ• ์ง€ ๋ช…ํ™•ํžˆ ์ง€์‹œํ•˜์„ธ์š”
1144
- 3. ์‹คํ–‰ ๊ฐ€๋Šฅํ•œ ๋‹จ๊ณ„๋ณ„ ์ž‘์—…์„ ๊ตฌ์ฒด์ ์œผ๋กœ ์ œ์‹œํ•˜์„ธ์š”
1145
- 4. ์˜ˆ์ƒ๋˜๋Š” ๊ฒฐ๊ณผ๋ฌผ์˜ ํ˜•ํƒœ๋ฅผ ๊ตฌ์ฒด์ ์œผ๋กœ ์„ค๋ช…ํ•˜์„ธ์š”"""
1146
 
1147
- supervisor_execution_response = ""
 
 
1148
 
1149
- supervisor_text += "\n\n---\n\n[์‹คํ–‰ ์ง€์‹œ] ๐Ÿ”„ ์ƒ์„ฑ ์ค‘...\n"
1150
- for chunk in llm_system.call_llm_streaming(
1151
- [{"role": "user", "content": supervisor_execution_prompt}],
1152
- "supervisor"
1153
  ):
1154
- supervisor_execution_response += chunk
1155
- temp_text = f"{all_responses['supervisor'][0]}\n\n---\n\n[์‹คํ–‰ ์ง€์‹œ] - {datetime.now().strftime('%H:%M:%S')}\n{supervisor_execution_response}"
1156
- supervisor_text = f"[์ดˆ๊ธฐ ๋ถ„์„] - {datetime.now().strftime('%H:%M:%S')}\n{temp_text}"
1157
- yield supervisor_text, researcher_text, "", evaluator_text, "", "๐ŸŽฏ ๊ฐ๋…์ž AI๊ฐ€ ์ง€์‹œ ์ค‘..."
1158
 
1159
- all_responses["supervisor"].append(supervisor_execution_response)
1160
 
1161
- # 6๋‹จ๊ณ„: ์‹คํ–‰์ž AI๊ฐ€ ์กฐ์‚ฌ ๋‚ด์šฉ๊ณผ ์ง€์‹œ๋ฅผ ๊ธฐ๋ฐ˜์œผ๋กœ ์ดˆ๊ธฐ ๊ตฌํ˜„
1162
- executor_prompt = llm_system.create_executor_prompt(user_query, supervisor_execution_response, researcher_response)
1163
- executor_response = ""
1164
 
1165
- executor_text = "[์ดˆ๊ธฐ ๊ตฌํ˜„] ๐Ÿ”„ ์ƒ์„ฑ ์ค‘...\n"
1166
- for chunk in llm_system.call_llm_streaming(
1167
- [{"role": "user", "content": executor_prompt}],
1168
- "executor"
1169
  ):
1170
- executor_response += chunk
1171
- executor_text = f"[์ดˆ๊ธฐ ๊ตฌํ˜„] - {datetime.now().strftime('%H:%M:%S')}\n{executor_response}"
1172
- yield supervisor_text, researcher_text, executor_text, evaluator_text, "", "๐Ÿ”ง ์‹คํ–‰์ž AI๊ฐ€ ๊ตฌํ˜„ ์ค‘..."
1173
 
1174
- all_responses["executor"].append(executor_response)
1175
 
1176
- # 7๋‹จ๊ณ„: ํ‰๊ฐ€์ž AI๊ฐ€ ์ดˆ๊ธฐ ๊ตฌํ˜„ ํ‰๊ฐ€
1177
- evaluator_execution_prompt = f"""๋‹น์‹ ์€ ์ „์ฒด ํ˜‘๋ ฅ ๊ณผ์ •๊ณผ ๊ฒฐ๊ณผ๋ฅผ ํ‰๊ฐ€ํ•˜๋Š” ํ‰๊ฐ€์ž AI์ž…๋‹ˆ๋‹ค.
1178
-
1179
- ์‚ฌ์šฉ์ž ์งˆ๋ฌธ: {user_query}
1180
-
1181
- ์‹คํ–‰์ž AI์˜ ์ดˆ๊ธฐ ๊ตฌํ˜„:
1182
- {executor_response}
1183
-
1184
- ๊ฐ๋…์ž AI์˜ ์ง€์‹œ์‚ฌํ•ญ:
1185
- {supervisor_execution_response}
1186
-
1187
- ์œ„ ์ดˆ๊ธฐ ๊ตฌํ˜„์„ ํ‰๊ฐ€ํ•˜์—ฌ:
1188
- 1. ์ง€์‹œ์‚ฌํ•ญ์ด ์–ผ๋งˆ๋‚˜ ์ž˜ ๋ฐ˜์˜๋˜์—ˆ๋Š”์ง€ ํ‰๊ฐ€ํ•˜์„ธ์š”
1189
- 2. ๊ตฌํ˜„์˜ ์‹คํ–‰ ๊ฐ€๋Šฅ์„ฑ๊ณผ ๊ตฌ์ฒด์„ฑ์„ ํ‰๊ฐ€ํ•˜์„ธ์š”
1190
- 3. ๋ˆ„๋ฝ๋œ ์ค‘์š” ์š”์†Œ๊ฐ€ ์žˆ๋Š”์ง€ ํ™•์ธํ•˜์„ธ์š”
1191
- 4. ๊ฐœ์„ ์ด ํ•„์š”ํ•œ ๋ถ€๋ถ„์„ ๊ตฌ์ฒด์ ์œผ๋กœ ์ œ์‹œํ•˜์„ธ์š”"""
1192
 
1193
- evaluator_execution_response = ""
1194
- evaluator_text += "\n\n---\n\n[์ดˆ๊ธฐ ๊ตฌํ˜„ ํ‰๊ฐ€] ๐Ÿ”„ ํ‰๊ฐ€ ์ค‘...\n"
 
 
 
1195
 
1196
- for chunk in llm_system.call_llm_streaming(
1197
- [{"role": "user", "content": evaluator_execution_prompt}],
1198
- "evaluator"
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1199
  ):
1200
- evaluator_execution_response += chunk
1201
- temp_text = f"{all_responses['evaluator'][0]}\n\n---\n\n[์ดˆ๊ธฐ ๊ตฌํ˜„ ํ‰๊ฐ€] - {datetime.now().strftime('%H:%M:%S')}\n{evaluator_execution_response}"
1202
- evaluator_text = f"[์กฐ์‚ฌ ๊ฒฐ๊ณผ ํ‰๊ฐ€] - {datetime.now().strftime('%H:%M:%S')}\n{temp_text}"
1203
- yield supervisor_text, researcher_text, executor_text, evaluator_text, "", "๐Ÿ“Š ํ‰๊ฐ€์ž AI๊ฐ€ ๊ตฌํ˜„ ํ‰๊ฐ€ ์ค‘..."
1204
 
1205
- all_responses["evaluator"].append(evaluator_execution_response)
1206
 
1207
- # 8๋‹จ๊ณ„: ๊ฐ๋…์ž AI๊ฐ€ ํ‰๊ฐ€๋ฅผ ๋ฐ˜์˜ํ•œ ๊ฐœ์„  ์ง€์‹œ
1208
- supervisor_improvement_prompt = f"""๋‹น์‹ ์€ ๊ฑฐ์‹œ์  ๊ด€์ ์—์„œ ๋ถ„์„ํ•˜๊ณ  ์ง€๋„ํ•˜๋Š” ๊ฐ๋…์ž AI์ž…๋‹ˆ๋‹ค.
1209
-
1210
- ์‚ฌ์šฉ์ž ์งˆ๋ฌธ: {user_query}
1211
-
1212
- ์‹คํ–‰์ž AI์˜ ์ดˆ๊ธฐ ๊ตฌํ˜„:
1213
- {executor_response}
1214
-
1215
- ํ‰๊ฐ€์ž AI์˜ ๊ตฌํ˜„ ํ‰๊ฐ€:
1216
- {evaluator_execution_response}
1217
-
1218
- ์œ„ ํ‰๊ฐ€๋ฅผ ๋ฐ˜์˜ํ•˜์—ฌ ์ตœ์ข… ๋ณด๊ณ ์„œ ์ž‘์„ฑ์„ ์œ„ํ•œ ๊ฐœ์„  ์ง€์‹œ๋ฅผ ๋‚ด๋ ค์ฃผ์„ธ์š”:
1219
- 1. ํ‰๊ฐ€์ž๊ฐ€ ์ง€์ ํ•œ ๋ชจ๋“  ๊ฐœ์„ ์‚ฌํ•ญ์„ ๊ตฌ์ฒด์ ์œผ๋กœ ๋ฐ˜์˜ํ•˜์„ธ์š”
1220
- 2. ์ถ”๊ฐ€๋กœ ํ•„์š”ํ•œ ๊ตฌ์ฒด์ ์ธ ๋‚ด์šฉ์„ ์ง€์‹œํ•˜์„ธ์š”
1221
- 3. ์ตœ์ข… ๋ณด๊ณ ์„œ์˜ ๊ตฌ์กฐ์™€ ํฌํ•จํ•ด์•ผ ํ•  ์š”์†Œ๋ฅผ ๋ช…ํ™•ํžˆ ์ œ์‹œํ•˜์„ธ์š”"""
1222
 
1223
- supervisor_improvement_response = ""
1224
- supervisor_text += "\n\n---\n\n[๊ฐœ์„  ์ง€์‹œ] ๐Ÿ”„ ์ƒ์„ฑ ์ค‘...\n"
 
 
 
 
1225
 
1226
- for chunk in llm_system.call_llm_streaming(
1227
- [{"role": "user", "content": supervisor_improvement_prompt}],
1228
- "supervisor"
1229
  ):
1230
- supervisor_improvement_response += chunk
1231
- temp_text = f"{all_responses['supervisor'][0]}\n\n---\n\n[์‹คํ–‰ ์ง€์‹œ] - {datetime.now().strftime('%H:%M:%S')}\n{all_responses['supervisor'][1]}\n\n---\n\n[๊ฐœ์„  ์ง€์‹œ] - {datetime.now().strftime('%H:%M:%S')}\n{supervisor_improvement_response}"
1232
- supervisor_text = f"[์ดˆ๊ธฐ ๋ถ„์„] - {datetime.now().strftime('%H:%M:%S')}\n{temp_text}"
1233
- yield supervisor_text, researcher_text, executor_text, evaluator_text, "", "๐Ÿ”„ ๊ฐ๋…์ž AI๊ฐ€ ๊ฐœ์„  ์ง€์‹œ ์ค‘..."
1234
 
1235
- all_responses["supervisor"].append(supervisor_improvement_response)
1236
 
1237
- # 9๋‹จ๊ณ„: ํ‰๊ฐ€์ž AI๊ฐ€ ์ „์ฒด ๊ณผ์ • ์ตœ์ข… ํ‰๊ฐ€
1238
- evaluator_final_prompt = llm_system.create_evaluator_prompt(
1239
- user_query,
1240
- all_responses["supervisor"],
1241
- all_responses["researcher"][0],
1242
- all_responses["executor"],
1243
- all_responses["evaluator"] # ์ด์ „ ํ‰๊ฐ€๋“ค๋„ ์ „๋‹ฌ
1244
- )
1245
- evaluator_final_response = ""
1246
 
1247
- evaluator_text += "\n\n---\n\n[์ „์ฒด ๊ณผ์ • ์ตœ์ข… ํ‰๊ฐ€] ๐Ÿ”„ ํ‰๊ฐ€ ์ค‘...\n"
1248
- for chunk in llm_system.call_llm_streaming(
1249
- [{"role": "user", "content": evaluator_final_prompt}],
1250
- "evaluator"
1251
  ):
1252
- evaluator_final_response += chunk
1253
- temp_text = f"{all_responses['evaluator'][0]}\n\n---\n\n[์ดˆ๊ธฐ ๊ตฌํ˜„ ํ‰๊ฐ€] - {datetime.now().strftime('%H:%M:%S')}\n{all_responses['evaluator'][1]}\n\n---\n\n[์ „์ฒด ๊ณผ์ • ์ตœ์ข… ํ‰๊ฐ€] - {datetime.now().strftime('%H:%M:%S')}\n{evaluator_final_response}"
1254
- evaluator_text = f"[์กฐ์‚ฌ ๊ฒฐ๊ณผ ํ‰๊ฐ€] - {datetime.now().strftime('%H:%M:%S')}\n{temp_text}"
1255
- yield supervisor_text, researcher_text, executor_text, evaluator_text, "", "๐Ÿ“Š ํ‰๊ฐ€์ž AI๊ฐ€ ์ตœ์ข… ํ‰๊ฐ€ ์ค‘..."
1256
-
1257
- all_responses["evaluator"].append(evaluator_final_response)
1258
 
1259
- # 10๋‹จ๊ณ„: ์‹คํ–‰์ž AI ์ตœ์ข… ๋ณด๊ณ ์„œ (๋ชจ๋“  ํ”ผ๋“œ๋ฐฑ ๋ฐ˜์˜)
1260
- final_executor_prompt = f"""๋‹น์‹ ์€ ์„ธ๋ถ€์ ์ธ ๋‚ด์šฉ์„ ๊ตฌํ˜„ํ•˜๋Š” ์‹คํ–‰์ž AI์ž…๋‹ˆ๋‹ค.
1261
-
1262
- ์‚ฌ์šฉ์ž ์งˆ๋ฌธ: {user_query}
1263
-
1264
- ์กฐ์‚ฌ์ž AI์˜ ์กฐ์‚ฌ ๋‚ด์šฉ:
1265
- {researcher_response}
1266
-
1267
- ๋‹น์‹ ์˜ ์ดˆ๊ธฐ ๊ตฌํ˜„:
1268
- {executor_response}
1269
-
1270
- ๊ฐ๋…์ž AI์˜ ๊ฐœ์„  ์ง€์‹œ:
1271
- {supervisor_improvement_response}
1272
-
1273
- ํ‰๊ฐ€์ž AI์˜ ์ „์ฒด ํ‰๊ฐ€:
1274
- {evaluator_final_response}
1275
-
1276
- ์œ„ ๋ชจ๋“  ํ”ผ๋“œ๋ฐฑ์„ ์™„์ „ํžˆ ๋ฐ˜์˜ํ•˜์—ฌ ์ตœ์ข… ๋ณด๊ณ ์„œ๋ฅผ ์ž‘์„ฑํ•˜์„ธ์š”:
1277
- 1. ๋ชจ๋“  ๊ฐœ์„ ์‚ฌํ•ญ๊ณผ ์ง€์‹œ์‚ฌํ•ญ์„ ๋น ์ง์—†์ด ๋ฐ˜์˜ํ•˜์„ธ์š”
1278
- 2. ์กฐ์‚ฌ ๋‚ด์šฉ์„ ์ตœ๋Œ€ํ•œ ๊ตฌ์ฒด์ ์œผ๋กœ ํ™œ์šฉํ•˜์„ธ์š”
1279
- 3. ์‹คํ–‰ ๊ฐ€๋Šฅ์„ฑ์„ ๋†’์ด๋Š” ์„ธ๋ถ€ ๊ณ„ํš์„ ํฌํ•จํ•˜์„ธ์š”
1280
- 4. ๋ช…ํ™•ํ•œ ๊ฒฐ๋ก ๊ณผ ๋‹ค์Œ ๋‹จ๊ณ„๋ฅผ ์ œ์‹œํ•˜์„ธ์š”
1281
- 5. ์ „๋ฌธ์ ์ด๊ณ  ์™„์„ฑ๋„ ๋†’์€ ์ตœ์ข… ๋ณด๊ณ ์„œ ํ˜•์‹์œผ๋กœ ์ž‘์„ฑํ•˜์„ธ์š”
1282
-
1283
- **์ค‘์š”: ๋งˆํฌ๋‹ค์šด ํ˜•์‹์„ ์ ๊ทน ํ™œ์šฉํ•˜์„ธ์š”**
1284
- - ์ œ๋ชฉ์€ #, ##, ### ์„ ์‚ฌ์šฉํ•˜์—ฌ ๊ณ„์ธต์ ์œผ๋กœ ๊ตฌ์„ฑ
1285
- - ์ค‘์š”ํ•œ ๋‚ด์šฉ์€ **๊ตต๊ฒŒ** ํ‘œ์‹œ
1286
- - ๋ฆฌ์ŠคํŠธ๋Š” -, * ๋˜๋Š” 1. 2. 3. ํ˜•์‹ ์‚ฌ์šฉ
1287
- - ํ‘œ๊ฐ€ ํ•„์š”ํ•œ ๊ฒฝ์šฐ ๋งˆํฌ๋‹ค์šด ํ‘œ ํ˜•์‹ ์‚ฌ์šฉ:
1288
- | ํ•ญ๋ชฉ | ๋‚ด์šฉ | ๋น„๊ณ  |
1289
- |------|------|------|
1290
- | ์˜ˆ์‹œ1 | ์„ค๋ช…1 | ์ฐธ๊ณ 1 |
1291
- - ์ฝ”๋“œ๋Š” ``` ๋กœ ๊ฐ์‹ธ์„œ ํ‘œ์‹œ
1292
- - ์ธ์šฉ๊ตฌ๋Š” > ๋ฅผ ์‚ฌ์šฉ
1293
- - ๊ตฌ๋ถ„์„ ์€ --- ์‚ฌ์šฉ"""
1294
 
1295
- final_executor_response = ""
 
 
1296
 
1297
- executor_text += "\n\n---\n\n[์ตœ์ข… ๋ณด๊ณ ์„œ] ๐Ÿ”„ ์ž‘์„ฑ ์ค‘...\n"
1298
- for chunk in llm_system.call_llm_streaming(
1299
- [{"role": "user", "content": final_executor_prompt}],
1300
- "executor"
1301
  ):
1302
- final_executor_response += chunk
1303
- temp_text = f"[์ดˆ๊ธฐ ๊ตฌํ˜„] - {datetime.now().strftime('%H:%M:%S')}\n{all_responses['executor'][0]}\n\n---\n\n[์ตœ์ข… ๋ณด๊ณ ์„œ] - {datetime.now().strftime('%H:%M:%S')}\n{final_executor_response}"
1304
- executor_text = temp_text
1305
- yield supervisor_text, researcher_text, executor_text, evaluator_text, "", "๐Ÿ“„ ์ตœ์ข… ๋ณด๊ณ ์„œ ์ž‘์„ฑ ์ค‘..."
1306
 
1307
- all_responses["executor"].append(final_executor_response)
1308
 
1309
- # ์ตœ์ข… ๊ฒฐ๊ณผ ์ƒ์„ฑ (์ตœ์ข… ๋ณด๊ณ ์„œ๋ฅผ ๋ฉ”์ธ์œผ๋กœ)
1310
- final_summary = f"""# ๐ŸŽฏ ์ตœ์ข… ์ข…ํ•ฉ ๋ณด๊ณ ์„œ
1311
-
1312
- ## ๐Ÿ“Œ ์‚ฌ์šฉ์ž ์งˆ๋ฌธ
1313
- **{user_query}**
1314
-
1315
- ---
1316
-
1317
- ## ๐Ÿ“„ ์ตœ์ข… ๋ณด๊ณ ์„œ (์‹คํ–‰์ž AI - ๋ชจ๋“  ํ”ผ๋“œ๋ฐฑ ๋ฐ˜์˜)
1318
-
1319
- {final_executor_response}
1320
-
1321
- ---
1322
-
1323
- ## ๐Ÿ“Š ์ „์ฒด ํ”„๋กœ์„ธ์Šค ํ‰๊ฐ€ (ํ‰๊ฐ€์ž AI)
1324
-
1325
- {evaluator_final_response}
1326
-
1327
- ---
1328
-
1329
- ## ๐Ÿ” ํ•ต์‹ฌ ์กฐ์‚ฌ ๊ฒฐ๊ณผ ์š”์•ฝ (์กฐ์‚ฌ์ž AI)
1330
-
1331
- {researcher_response[:800]}...
1332
-
1333
- ---
1334
-
1335
- ## ๐Ÿ“‹ ํ”„๋กœ์„ธ์Šค ์™„๋ฃŒ
1336
-
1337
- | ํ•ญ๋ชฉ | ๋‚ด์šฉ |
1338
- |------|------|
1339
- | **์‚ฌ์šฉ ๋ชจ๋ธ** | {'Gemini 2.5 Pro' if llm_system.use_gemini else '๊ธฐ๋ณธ LLM'} |
1340
- | **ํ”„๋กœ์„ธ์Šค** | ๊ฐ๋…โ†’์กฐ์‚ฌโ†’ํ‰๊ฐ€โ†’๊ฐ๋…โ†’์‹คํ–‰โ†’ํ‰๊ฐ€โ†’๊ฐ๋…โ†’ํ‰๊ฐ€โ†’์‹คํ–‰ |
1341
- | **์ด ๋‹จ๊ณ„** | 9๋‹จ๊ณ„ ํ˜‘๋ ฅ ์™„๋ฃŒ |
1342
- | **์ƒ์„ฑ ์‹œ๊ฐ„** | {datetime.now().strftime('%Y-%m-%d %H:%M:%S')} |
1343
-
1344
- ---
1345
-
1346
- > ๐Ÿ’ก **์ฐธ๊ณ **: ์ด ๋ณด๊ณ ์„œ๋Š” 4๊ฐœ AI์˜ ํ˜‘๋ ฅ์„ ํ†ตํ•ด ์ƒ์„ฑ๋˜์—ˆ์œผ๋ฉฐ, ๋‹ค๋‹จ๊ณ„ ํ‰๊ฐ€ ํ”„๋กœ์„ธ์Šค๋ฅผ ๊ฑฐ์ณ ํ’ˆ์งˆ์ด ๊ฒ€์ฆ๋˜์—ˆ์Šต๋‹ˆ๋‹ค."""
1347
 
1348
- # ๋‚ด๋ถ€ ํžˆ์Šคํ† ๋ฆฌ ์—…๋ฐ์ดํŠธ (UI์—๋Š” ํ‘œ์‹œํ•˜์ง€ ์•Š์Œ)
1349
- internal_history.append((user_query, final_summary))
 
 
 
 
 
 
1350
 
1351
- yield supervisor_text, researcher_text, executor_text, evaluator_text, final_summary, "โœ… ์ตœ์ข… ๋ณด๊ณ ์„œ ์™„์„ฑ!"
 
1352
 
1353
  except Exception as e:
1354
  error_msg = f"โŒ ์ฒ˜๋ฆฌ ์ค‘ ์˜ค๋ฅ˜: {str(e)}"
1355
- yield "", "", "", "", error_msg, error_msg
1356
 
1357
- def clear_all():
1358
- """๋ชจ๋“  ๋‚ด์šฉ ์ดˆ๊ธฐํ™”"""
1359
- global internal_history
1360
- internal_history = []
1361
- return "", "", "", "", "", "๐Ÿ”„ ์ดˆ๊ธฐํ™”๋˜์—ˆ์Šต๋‹ˆ๋‹ค."
1362
 
1363
- # Gradio ์ธํ„ฐํŽ˜์ด์Šค
1364
  css = """
1365
  .gradio-container {
1366
  font-family: 'Arial', sans-serif;
1367
  }
1368
- .supervisor-box textarea {
1369
- border-left: 4px solid #667eea !important;
1370
- padding-left: 10px !important;
1371
- background-color: #f8f9ff !important;
1372
- }
1373
- .researcher-box textarea {
1374
  border-left: 4px solid #10b981 !important;
1375
- padding-left: 10px !important;
1376
  background-color: #f0fdf4 !important;
1377
  }
1378
- .executor-box textarea {
1379
- border-left: 4px solid #764ba2 !important;
1380
- padding-left: 10px !important;
 
 
 
1381
  background-color: #faf5ff !important;
1382
  }
1383
- .evaluator-box textarea {
1384
  border-left: 4px solid #f59e0b !important;
1385
- padding-left: 10px !important;
1386
  background-color: #fffbeb !important;
1387
  }
1388
- .final-report-box {
1389
- border: 2px solid #3b82f6 !important;
1390
- border-radius: 8px !important;
1391
- padding: 16px !important;
1392
- background-color: #eff6ff !important;
1393
- margin-top: 10px !important;
1394
- font-size: 14px !important;
1395
- max-height: 700px !important;
1396
- overflow-y: auto !important;
1397
- line-height: 1.6 !important;
1398
- }
1399
- .final-report-box h1 {
1400
- color: #1e40af !important;
1401
- font-size: 24px !important;
1402
- margin-bottom: 12px !important;
1403
- }
1404
- .final-report-box h2 {
1405
- color: #2563eb !important;
1406
- font-size: 20px !important;
1407
- margin-top: 16px !important;
1408
- margin-bottom: 10px !important;
1409
- }
1410
- .final-report-box h3 {
1411
- color: #3b82f6 !important;
1412
- font-size: 18px !important;
1413
- margin-top: 12px !important;
1414
- margin-bottom: 8px !important;
1415
- }
1416
- .final-report-box table {
1417
- border-collapse: collapse !important;
1418
- width: 100% !important;
1419
- margin: 16px 0 !important;
1420
- }
1421
- .final-report-box th, .final-report-box td {
1422
- border: 1px solid #cbd5e1 !important;
1423
- padding: 8px 10px !important;
1424
- text-align: left !important;
1425
- }
1426
- .final-report-box th {
1427
- background-color: #e0e7ff !important;
1428
- font-weight: bold !important;
1429
- color: #1e40af !important;
1430
- }
1431
- .final-report-box tr:nth-child(even) {
1432
- background-color: #f8fafc !important;
1433
- }
1434
- .final-report-box tr:hover {
1435
- background-color: #f0f4f8 !important;
1436
- }
1437
- .final-report-box code {
1438
- background-color: #f1f5f9 !important;
1439
- padding: 2px 6px !important;
1440
- border-radius: 4px !important;
1441
- font-family: 'Consolas', 'Monaco', monospace !important;
1442
- color: #dc2626 !important;
1443
- }
1444
- .final-report-box pre {
1445
- background-color: #1e293b !important;
1446
- color: #e2e8f0 !important;
1447
- padding: 12px !important;
1448
- border-radius: 6px !important;
1449
- overflow-x: auto !important;
1450
- margin: 12px 0 !important;
1451
- font-size: 13px !important;
1452
- }
1453
- .final-report-box pre code {
1454
- background-color: transparent !important;
1455
- color: #e2e8f0 !important;
1456
- padding: 0 !important;
1457
- }
1458
- .final-report-box blockquote {
1459
  border-left: 4px solid #3b82f6 !important;
1460
- padding-left: 12px !important;
1461
- margin-left: 0 !important;
1462
- margin: 12px 0 !important;
1463
- color: #475569 !important;
1464
- font-style: italic !important;
1465
- background-color: #f0f9ff !important;
1466
- padding: 10px 12px !important;
1467
- border-radius: 0 6px 6px 0 !important;
1468
- }
1469
- .final-report-box ul, .final-report-box ol {
1470
- margin-left: 20px !important;
1471
- margin-bottom: 12px !important;
1472
- }
1473
- .final-report-box li {
1474
- margin-bottom: 6px !important;
1475
- line-height: 1.6 !important;
1476
- }
1477
- .final-report-box strong {
1478
- color: #1e40af !important;
1479
- font-weight: 600 !important;
1480
- }
1481
- .final-report-box em {
1482
- color: #3730a3 !important;
1483
- }
1484
- .final-report-box hr {
1485
- border: none !important;
1486
- border-top: 2px solid #cbd5e1 !important;
1487
- margin: 24px 0 !important;
1488
  }
1489
- .final-report-box a {
1490
- color: #2563eb !important;
1491
- text-decoration: underline !important;
1492
  }
1493
- .final-report-box a:hover {
1494
- color: #1d4ed8 !important;
 
1495
  }
1496
  """
1497
 
1498
- with gr.Blocks(title="ํ˜‘๋ ฅ์  LLM ์‹œ์Šคํ…œ - ๋‹ค๋‹จ๊ณ„ ํ‰๊ฐ€", theme=gr.themes.Soft(), css=css) as app:
 
1499
  gr.Markdown(
1500
  """
1501
- # ๐Ÿค ํ˜‘๋ ฅ์  LLM ์‹œ์Šคํ…œ (๋‹ค๋‹จ๊ณ„ ํ‰๊ฐ€ ํ”„๋กœ์„ธ์Šค)
1502
-
1503
- ### ๐Ÿ“‹ ํ”„๋กœ์„ธ์Šค ํ”Œ๋กœ์šฐ
1504
- ```
1505
- ๊ฐ๋…(๋ถ„์„) โ†’ ์กฐ์‚ฌ(๊ฒ€์ƒ‰) โ†’ ํ‰๊ฐ€(์กฐ์‚ฌ) โ†’ ๊ฐ๋…(์ง€์‹œ) โ†’ ์‹คํ–‰(์ดˆ์•ˆ)
1506
- โ†’ ํ‰๊ฐ€(์ดˆ์•ˆ) โ†’ ๊ฐ๋…(๊ฐœ์„ ) โ†’ ํ‰๊ฐ€(์ตœ์ข…) โ†’ ์‹คํ–‰(์™„์„ฑ)
1507
- ```
1508
-
1509
- **4๊ฐœ AI์˜ ํ˜‘๋ ฅ์„ ํ†ตํ•œ ์ตœ๊ณ  ํ’ˆ์งˆ์˜ ๋‹ต๋ณ€ ์ƒ์„ฑ**
 
 
 
1510
  """
1511
  )
1512
 
1513
- # ์ž…๋ ฅ ์„น์…˜
1514
  with gr.Row():
1515
- with gr.Column():
1516
- gr.Markdown("""
1517
- ## ๐Ÿš€ 4๊ฐœ AI์˜ ํ˜‘๋ ฅ ์‹œ์Šคํ…œ
1518
- - **๊ฐ๋…์ž AI**: ๊ฑฐ์‹œ์  ๋ถ„์„๊ณผ ์ „๋žต ์ˆ˜๋ฆฝ
1519
- - **์กฐ์‚ฌ์ž AI**: ์›น ๊ฒ€์ƒ‰๊ณผ ์ •๋ณด ์ˆ˜์ง‘/์ •๋ฆฌ
1520
- - **์‹คํ–‰์ž AI**: ๊ตฌ์ฒด์  ๊ณ„ํš ์ˆ˜๋ฆฝ๊ณผ ์‹คํ–‰
1521
- - **ํ‰๊ฐ€์ž AI**: ์ „์ฒด ๊ณผ์ • ํ‰๊ฐ€์™€ ๊ฐœ์„ ์  ์ œ์‹œ
1522
-
1523
- ### ๐ŸŒŸ ์ฃผ์š” ๊ธฐ๋Šฅ
1524
- - ์ตœ๋Œ€ 4096 ํ† ํฐ ์ง€์› (๊ธด ์‘๋‹ต ๊ฐ€๋Šฅ)
1525
- - 20๊ฐœ ๊ฒ€์ƒ‰ ๊ฒฐ๊ณผ์™€ ๋™์˜์–ด ๊ฒ€์ƒ‰
1526
- - ์‹ ๋ขฐ๋„ ๊ธฐ๋ฐ˜ ์ •๋ณด ํ‰๊ฐ€
1527
- - ๋‹ค๋‹จ๊ณ„ ํ‰๊ฐ€์™€ ํ”ผ๋“œ๋ฐฑ ๋ฐ˜์˜
1528
-
1529
- ### ๐Ÿ“‹ ํ”„๋กœ์„ธ์Šค
1530
- ๊ฐ๋… โ†’ ์กฐ์‚ฌ โ†’ ํ‰๊ฐ€ โ†’ ๊ฐ๋… โ†’ ์‹คํ–‰ โ†’ ํ‰๊ฐ€ โ†’ ๊ฐ๋… โ†’ ํ‰๊ฐ€ โ†’ ์‹คํ–‰
1531
- """)
1532
-
1533
- # LLM ์„ ํƒ ์˜ต์…˜
1534
  llm_mode = gr.Radio(
1535
  choices=["default", "commercial"],
1536
  value="default",
1537
- label="LLM ๋ชจ๋“œ ์„ ํƒ",
1538
- info="commercial์„ ์„ ํƒํ•˜๋ฉด Gemini 2.5 Pro๋ฅผ ์‚ฌ์šฉํ•ฉ๋‹ˆ๋‹ค"
1539
  )
1540
 
1541
  user_input = gr.Textbox(
1542
  label="์งˆ๋ฌธ ์ž…๋ ฅ",
1543
- placeholder="์˜ˆ: ๊ธฐ๊ณ„ํ•™์Šต ๋ชจ๋ธ์˜ ์„ฑ๋Šฅ์„ ํ–ฅ์ƒ์‹œํ‚ค๋Š” ๋ฐฉ๋ฒ•์€?",
1544
  lines=3
1545
  )
1546
 
1547
  with gr.Row():
1548
  submit_btn = gr.Button("๐Ÿš€ ๋ถ„์„ ์‹œ์ž‘", variant="primary", scale=2)
1549
  clear_btn = gr.Button("๐Ÿ—‘๏ธ ์ดˆ๊ธฐํ™”", scale=1)
1550
-
 
1551
  status_text = gr.Textbox(
1552
- label="์ƒํƒœ",
1553
  interactive=False,
1554
  value="๋Œ€๊ธฐ ์ค‘...",
1555
- max_lines=2
1556
  )
1557
 
1558
- # ์ตœ์ข… ๊ฒฐ๊ณผ ์„น์…˜ ์ถ”๊ฐ€
1559
  with gr.Row():
1560
  with gr.Column():
1561
- gr.Markdown("### ๐Ÿ“Š ์ตœ์ข… ์ข…ํ•ฉ ๋ณด๊ณ ์„œ")
1562
- final_report = gr.Markdown(
1563
- value="*์ตœ์ข… ๋ณด๊ณ ์„œ๊ฐ€ ์—ฌ๊ธฐ์— ํ‘œ์‹œ๋ฉ๋‹ˆ๋‹ค.*",
1564
- elem_classes=["final-report-box"]
 
 
 
 
 
 
 
 
 
 
 
 
 
1565
  )
1566
 
1567
- # AI ์ถœ๋ ฅ๋“ค - 2x2 ๊ทธ๋ฆฌ๋“œ
1568
  with gr.Row():
1569
- # ์ƒ๋‹จ ํ–‰
1570
  with gr.Column():
1571
- gr.Markdown("### ๐Ÿง  ๊ฐ๋…์ž AI (๊ฑฐ์‹œ์  ๋ถ„์„)")
1572
- supervisor_output = gr.Textbox(
1573
  label="",
1574
- lines=12,
1575
- max_lines=18,
1576
  interactive=False,
1577
- elem_classes=["supervisor-box"]
1578
  )
1579
 
1580
  with gr.Column():
1581
- gr.Markdown("### ๐Ÿ” ์กฐ์‚ฌ์ž AI (์›น ๊ฒ€์ƒ‰ & ์ •๋ฆฌ)")
1582
- researcher_output = gr.Textbox(
1583
  label="",
1584
- lines=12,
1585
- max_lines=18,
1586
  interactive=False,
1587
- elem_classes=["researcher-box"]
1588
  )
1589
 
1590
  with gr.Row():
1591
- # ํ•˜๋‹จ ํ–‰
1592
  with gr.Column():
1593
- gr.Markdown("### ๐Ÿ‘๏ธ ์‹คํ–‰์ž AI (๋ฏธ์‹œ์  ๊ตฌํ˜„)")
1594
- executor_output = gr.Textbox(
1595
  label="",
1596
- lines=12,
1597
- max_lines=18,
1598
  interactive=False,
1599
- elem_classes=["executor-box"]
1600
  )
1601
 
1602
  with gr.Column():
1603
- gr.Markdown("### ๐Ÿ“Š ํ‰๊ฐ€์ž AI (์ „์ฒด ํ‰๊ฐ€)")
1604
- evaluator_output = gr.Textbox(
1605
  label="",
1606
- lines=12,
1607
- max_lines=18,
1608
  interactive=False,
1609
- elem_classes=["evaluator-box"]
1610
  )
1611
 
1612
  # ์˜ˆ์ œ
1613
  gr.Examples(
1614
  examples=[
1615
- "๊ธฐ๊ณ„ํ•™์Šต ๋ชจ๋ธ์˜ ์„ฑ๋Šฅ์„ ํ–ฅ์ƒ์‹œํ‚ค๋Š” ์ตœ์‹  ๋ฐฉ๋ฒ•์€?",
1616
- "2025๋…„ ํšจ๊ณผ์ ์ธ ํ”„๋กœ์ ํŠธ ๊ด€๋ฆฌ ๋„๊ตฌ์™€ ์ „๋žต์€?",
1617
- "์ง€์† ๊ฐ€๋Šฅํ•œ ๋น„์ฆˆ๋‹ˆ์Šค ๋ชจ๋ธ์˜ ์ตœ์‹  ํŠธ๏ฟฝ๏ฟฝ๋“œ๋Š”?",
1618
- "์ตœ์‹  ๋ฐ์ดํ„ฐ ์‹œ๊ฐํ™” ๋„๊ตฌ์™€ ๊ธฐ๋ฒ•์€?",
1619
- "์›๊ฒฉ ํŒ€์˜ ์ƒ์‚ฐ์„ฑ์„ ๋†’์ด๋Š” ๊ฒ€์ฆ๋œ ๋ฐฉ๋ฒ•์€?",
1620
- "์Šคํƒ€ํŠธ์—…์„ ์œ„ํ•œ ํšจ๊ณผ์ ์ธ ๋งˆ์ผ€ํŒ… ์ „๋žต์€?",
1621
- "AI ์œค๋ฆฌ์™€ ๊ทœ์ œ์˜ ์ตœ์‹  ๋™ํ–ฅ์€?",
1622
- "ํด๋ผ์šฐ๋“œ ๋„ค์ดํ‹ฐ๋ธŒ ์• ํ”Œ๋ฆฌ์ผ€์ด์…˜ ๊ฐœ๋ฐœ ๋ชจ๋ฒ” ์‚ฌ๋ก€๋Š”?"
1623
  ],
1624
  inputs=user_input,
1625
  label="๐Ÿ’ก ์˜ˆ์ œ ์งˆ๋ฌธ"
@@ -1627,35 +845,33 @@ with gr.Blocks(title="ํ˜‘๋ ฅ์  LLM ์‹œ์Šคํ…œ - ๋‹ค๋‹จ๊ณ„ ํ‰๊ฐ€", theme=gr.them
1627
 
1628
  # ์ด๋ฒคํŠธ ํ•ธ๋“ค๋Ÿฌ
1629
  submit_btn.click(
1630
- fn=process_query_streaming,
1631
  inputs=[user_input, llm_mode],
1632
- outputs=[supervisor_output, researcher_output, executor_output, evaluator_output, final_report, status_text]
1633
  ).then(
1634
  fn=lambda: "",
1635
  outputs=[user_input]
1636
  )
1637
 
1638
  user_input.submit(
1639
- fn=process_query_streaming,
1640
  inputs=[user_input, llm_mode],
1641
- outputs=[supervisor_output, researcher_output, executor_output, evaluator_output, final_report, status_text]
1642
  ).then(
1643
  fn=lambda: "",
1644
  outputs=[user_input]
1645
  )
1646
 
1647
  clear_btn.click(
1648
- fn=clear_all,
1649
- outputs=[supervisor_output, researcher_output, executor_output, evaluator_output, final_report, status_text]
1650
  )
1651
-
1652
 
1653
  if __name__ == "__main__":
1654
- app.queue() # ์ŠคํŠธ๋ฆฌ๋ฐ์„ ์œ„ํ•œ ํ ํ™œ์„ฑํ™”
1655
  app.launch(
1656
  server_name="0.0.0.0",
1657
  server_port=7860,
1658
  share=True,
1659
  show_error=True
1660
- )
1661
-
 
38
  # ์ „์—ญ ๋ณ€์ˆ˜
39
  conversation_history = []
40
 
41
+ class WuxingLLMSystem:
42
+ """์˜คํ–‰ยท์˜ค์ƒ ๊ธฐ๋ฐ˜ ํ˜‘๋ ฅ์  LLM ์‹œ์Šคํ…œ"""
43
+
44
  def __init__(self):
45
  self.token = FRIENDLI_TOKEN
46
  self.bapi_token = BAPI_TOKEN
 
52
  self.use_gemini = False
53
  self.gemini_client = None
54
 
55
+ # ์˜คํ–‰ ์—ญํ•  ์ •์˜
56
+ self.wuxing_roles = {
57
+ "wood": {
58
+ "name": "๊ฐ๋…๊ด€ (ํŒ€์žฅ)",
59
+ "virtue": "ไป",
60
+ "element": "ๆœจ",
61
+ "traits": "ํฌ์šฉยท์„ฑ์žฅํ˜• ๋ฆฌ๋”",
62
+ "expertise": "๋น„์ „ ์ œ์‹œ, ํŒ€ ์กฐ์œจ, ์ธ์žฌ ์œก์„ฑ",
63
+ "color": "#10b981" # Green
64
+ },
65
+ "fire": {
66
+ "name": "์ „๋žตยท๊ธฐํš ๋ฆฌ๋”",
67
+ "virtue": "็พฉ",
68
+ "element": "็ซ",
69
+ "traits": "์—ด์ •ยท๊ฒฐ๋‹จ, ๊ฐœ์ฒ™ ์ •์‹ ",
70
+ "expertise": "์ค‘ยท์žฅ๊ธฐ ๋กœ๋“œ๋งต, ์‚ฌ์—… ๋ชจ๋ธ ์„ค๊ณ„, ๋ฆฌ์Šคํฌ-๋ณด์ƒ ์‹œ๋‚˜๋ฆฌ์˜ค",
71
+ "color": "#ef4444" # Red
72
+ },
73
+ "metal": {
74
+ "name": "์•„ํ‚คํ…์ฒ˜ & ํ‘œ์ค€ ์ฑ…์ž„",
75
+ "virtue": "็ฆฎ",
76
+ "element": "้‡‘",
77
+ "traits": "๊ตฌ์กฐํ™”ยท์ •๋ฐ€, ์‹œ์Šคํ…œ ์„ค๊ณ„ ๋งˆ์Šคํ„ฐ",
78
+ "expertise": "๊ธฐ์ˆ ยท๋ฐ์ดํ„ฐ ์•„ํ‚คํ…์ฒ˜, ํ‘œ์ค€ ์ˆ˜๋ฆฝ, ํ’ˆ์งˆยทํ™•์žฅ์„ฑ ๊ฒ€์ฆ",
79
+ "color": "#f59e0b" # Gold
80
+ },
81
+ "water": {
82
+ "name": "๋„๊ตฌ ํ™œ์šฉ R&D ์ŠคํŽ˜์…œ๋ฆฌ์ŠคํŠธ",
83
+ "virtue": "ๆ™บ",
84
+ "element": "ๆฐด",
85
+ "traits": "๋ถ„์„ยทํ˜ธ๊ธฐ์‹ฌ, ITยทAI ๋„๊ตฌ ์ „๋ฌธ๊ฐ€",
86
+ "expertise": "์ตœ์‹  ๊ธฐ์ˆ ยท์‹œ์žฅ ์กฐ์‚ฌ, ํ”„๋กœํ† ํƒ€์ž… ๊ฐœ๋ฐœ, ์ž๋™ํ™”ยท์ƒ์‚ฐ์„ฑ ํˆด",
87
+ "color": "#3b82f6" # Blue
88
+ },
89
+ "earth": {
90
+ "name": "์‹คํ–‰ยท์šด์˜ยทํ’ˆ์งˆ ๋‹ด๋‹น",
91
+ "virtue": "ไฟก",
92
+ "element": "ๅœŸ",
93
+ "traits": "์‹ ๋ขฐยท์„ฑ์‹ค, ์‹คํ–‰๋ ฅ",
94
+ "expertise": "์ผ์ •ยท์˜ˆ์‚ฐยท๋ฆฌ์†Œ์Šค ๊ด€๋ฆฌ, ์šด์˜ ์ตœ์ ํ™”, ํ’ˆ์งˆ ๋ณด์ฆ",
95
+ "color": "#a855f7" # Purple
96
+ }
97
+ }
98
+
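The `wuxing_roles` mapping above is metadata only; later in the file the panel headers such as `"๐ŸŒณ **ๆœจ - ๊ฐ๋…๊ด€** (ไป)"` are hard-coded inside `process_wuxing_query`. A minimal sketch of how the same headers could be derived from the mapping itself (the helper name and emoji table below are hypothetical, not part of this commit):

```python
# Hypothetical helper (not in this commit): derive each panel header from
# WuxingLLMSystem.wuxing_roles instead of hard-coding it in the pipeline.
WUXING_EMOJI = {"wood": "๐ŸŒณ", "fire": "๐Ÿ”ฅ", "earth": "๐Ÿ”๏ธ", "metal": "โš™๏ธ", "water": "๐Ÿ’ง"}

def role_header(system: "WuxingLLMSystem", role_key: str) -> str:
    role = system.wuxing_roles[role_key]
    return f"{WUXING_EMOJI[role_key]} **{role['element']} - {role['name']}** ({role['virtue']})"

# role_header(wuxing_system, "fire") -> "๐Ÿ”ฅ **็ซ - ์ „๋žตยท๊ธฐํš ๋ฆฌ๋”** (็พฉ)"
```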
99
  if self.test_mode:
100
  logger.warning("ํ…Œ์ŠคํŠธ ๋ชจ๋“œ๋กœ ์‹คํ–‰๋ฉ๋‹ˆ๋‹ค.")
 
 
 
 
101
 
102
  def set_llm_mode(self, mode: str):
103
+ """LLM ๋ชจ๋“œ ์„ค์ •"""
104
  if mode == "commercial" and GEMINI_AVAILABLE and self.gemini_api_key != "YOUR_GEMINI_API_KEY":
105
  self.use_gemini = True
106
  if not self.gemini_client:
 
109
  else:
110
  self.use_gemini = False
111
  logger.info("๊ธฐ๋ณธ LLM ๋ชจ๋“œ๋กœ ์ „ํ™˜๋˜์—ˆ์Šต๋‹ˆ๋‹ค.")
112
+
113
  def create_headers(self):
114
  """API ํ—ค๋” ์ƒ์„ฑ"""
115
  return {
 
125
  "X-Subscription-Token": self.bapi_token
126
  }
127
 
128
+ def create_wood_initial_prompt(self, user_query: str) -> str:
129
+ """ๆœจ(๊ฐ๋…๊ด€) ์ดˆ๊ธฐ ํ”„๋กฌํ”„ํŠธ"""
130
+ return f"""๋‹น์‹ ์€ ไป(์ธ์žํ•จ)์˜ ๋•๋ชฉ์„ ์ง€๋‹Œ ๆœจ์˜ ๊ธฐ์šด์„ ๊ฐ€์ง„ ๊ฐ๋…๊ด€์ž…๋‹ˆ๋‹ค.
131
+ ํฌ์šฉ์ ์ด๊ณ  ์„ฑ์žฅ์ง€ํ–ฅ์ ์ธ ๋ฆฌ๋”์‹ญ์œผ๋กœ ํŒ€์„ ์ด๋•๋‹ˆ๋‹ค.
132
 
133
  ์‚ฌ์šฉ์ž ์งˆ๋ฌธ: {user_query}
134
 
135
+ ํŒ€์žฅ์œผ๋กœ์„œ ์ด ์งˆ๋ฌธ์— ๋Œ€ํ•ด:
136
+ 1. ์ „์ฒด์ ์ธ ๋น„์ „๊ณผ ๋ฐฉํ–ฅ์„ฑ์„ ์ œ์‹œํ•˜์„ธ์š”
137
+ 2. ๊ฐ ํŒ€์›(็ซ, ๅœŸ, ้‡‘, ๆฐด)์˜ ์—ญํ• ๊ณผ ๊ธฐ์—ฌ ๋ฐฉํ–ฅ์„ ์„ค๊ณ„ํ•˜์„ธ์š”
138
+ 3. ์„ฑ์žฅ๊ณผ ๋ฐœ์ „์˜ ๊ด€์ ์—์„œ ํ•ต์‹ฌ ๋ชฉํ‘œ๋ฅผ ์„ค์ •ํ•˜์„ธ์š”
139
+ 4. ํŒ€ ์ „์ฒด๊ฐ€ ์กฐํ™”๋กญ๊ฒŒ ํ˜‘๋ ฅํ•  ์ˆ˜ ์žˆ๋Š” ํ”„๋ ˆ์ž„์›Œํฌ๋ฅผ ์ œ์‹œํ•˜์„ธ์š”
140
 
141
+ [ํ•ต์‹ฌ ํ‚ค์›Œ๋“œ]: 5-7๊ฐœ์˜ ์กฐ์‚ฌ๊ฐ€ ํ•„์š”ํ•œ ํ‚ค์›Œ๋“œ๋ฅผ ์ œ์‹œํ•˜์„ธ์š”"""
 
142
 
143
+ def create_fire_strategy_prompt(self, user_query: str, wood_response: str, critic_feedback: str) -> str:
144
+ """็ซ(์ „๋žต๊ธฐํš) ํ”„๋กฌํ”„ํŠธ"""
145
+ return f"""๋‹น์‹ ์€ ็พฉ(์ •์˜๋กœ์›€)์˜ ๋•๋ชฉ์„ ์ง€๋‹Œ ็ซ์˜ ๊ธฐ์šด์„ ๊ฐ€์ง„ ์ „๋žตยท๊ธฐํš ๋ฆฌ๋”์ž…๋‹ˆ๋‹ค.
146
+ ์—ด์ •๊ณผ ๊ฒฐ๋‹จ๋ ฅ์œผ๋กœ ํ˜์‹ ์ ์ธ ์ „๋žต์„ ์ˆ˜๋ฆฝํ•ฉ๋‹ˆ๋‹ค.
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
147
 
148
  ์‚ฌ์šฉ์ž ์งˆ๋ฌธ: {user_query}
149
 
150
+ ๊ฐ๋…๊ด€(ๆœจ)์˜ ๋น„์ „:
151
+ {wood_response}
152
 
153
+ ๋น„ํ‰์ž์˜ ํ”ผ๋“œ๋ฐฑ:
154
+ {critic_feedback}
 
155
 
156
+ ์ „๋žต๊ธฐํš ๋ฆฌ๋”๋กœ์„œ:
157
+ 1. ์ค‘ยท์žฅ๊ธฐ ๋กœ๋“œ๋งต์„ ๊ตฌ์ฒด์ ์œผ๋กœ ์ˆ˜๋ฆฝํ•˜์„ธ์š”
158
+ 2. ํ˜์‹ ์ ์ธ ์‚ฌ์—… ๋ชจ๋ธ์„ ์„ค๊ณ„ํ•˜์„ธ์š”
159
+ 3. ๋ฆฌ์Šคํฌ์™€ ๊ธฐํšŒ ๋ถ„์„์„ ์ˆ˜ํ–‰ํ•˜์„ธ์š”
160
+ 4. ๊ฒฝ์Ÿ ์šฐ์œ„ ํ™•๋ณด ์ „๋žต์„ ์ œ์‹œํ•˜์„ธ์š”
161
+ 5. ๋น„ํ‰์ž์˜ ํ”ผ๋“œ๋ฐฑ์„ ๋ฐ˜์˜ํ•˜์—ฌ ์ „๋žต์„ ๋ณด์™„ํ•˜์„ธ์š”"""
 
162
 
163
+ def create_earth_execution_prompt(self, user_query: str, fire_response: str, critic_feedback: str, search_results: Dict) -> str:
164
+ """ๅœŸ(์‹คํ–‰์šด์˜) ํ”„๋กฌํ”„ํŠธ"""
165
+ return f"""๋‹น์‹ ์€ ไฟก(์‹ ๋ขฐ)์˜ ๋•๋ชฉ์„ ์ง€๋‹Œ ๅœŸ์˜ ๊ธฐ์šด์„ ๊ฐ€์ง„ ์‹คํ–‰ยท์šด์˜ยทํ’ˆ์งˆ ๋‹ด๋‹น์ž์ž…๋‹ˆ๋‹ค.
166
+ ์„ฑ์‹คํ•จ๊ณผ ์‹คํ–‰๋ ฅ์œผ๋กœ ๊ณ„ํš์„ ํ˜„์‹ค๋กœ ๋งŒ๋“ญ๋‹ˆ๋‹ค.
167
 
168
  ์‚ฌ์šฉ์ž ์งˆ๋ฌธ: {user_query}
169
 
170
+ ์ „๋žต๊ธฐํš(็ซ)์˜ ์ „๋žต:
171
+ {fire_response}
172
+
173
+ ๋น„ํ‰์ž์˜ ํ”ผ๋“œ๋ฐฑ:
174
+ {critic_feedback}
175
+
176
+ ์›น ๊ฒ€์ƒ‰ ๊ฒฐ๊ณผ:
177
+ {self._format_search_results(search_results)}
178
 
179
+ ์‹คํ–‰์šด์˜ ๋‹ด๋‹น์ž๋กœ์„œ:
180
+ 1. ๊ตฌ์ฒด์ ์ธ ์‹คํ–‰ ๊ณ„ํš๊ณผ ์ผ์ •์„ ์ˆ˜๋ฆฝํ•˜์„ธ์š”
181
+ 2. ํ•„์š”ํ•œ ๋ฆฌ์†Œ์Šค์™€ ์˜ˆ์‚ฐ์„ ์‚ฐ์ •ํ•˜์„ธ์š”
182
+ 3. ํ’ˆ์งˆ ๊ด€๋ฆฌ ๊ธฐ์ค€๊ณผ ํ”„๋กœ์„ธ์Šค๋ฅผ ์ •์˜ํ•˜์„ธ์š”
183
+ 4. ๋ฆฌ์Šคํฌ ๋Œ€์‘ ๊ณ„ํš์„ ์ˆ˜๋ฆฝํ•˜์„ธ์š”
184
+ 5. ์„ฑ๊ณผ ์ธก์ • ์ง€ํ‘œ๋ฅผ ์„ค์ •ํ•˜์„ธ์š”"""
185
 
186
+ def create_metal_architecture_prompt(self, user_query: str, earth_response: str, critic_feedback: str) -> str:
187
+ """้‡‘(์•„ํ‚คํ…์ฒ˜) ํ”„๋กฌํ”„ํŠธ"""
188
+ return f"""๋‹น์‹ ์€ ็ฆฎ(์˜ˆ์˜ยท์งˆ์„œ)์˜ ๋•๋ชฉ์„ ์ง€๋‹Œ ้‡‘์˜ ๊ธฐ์šด์„ ๊ฐ€์ง„ ์•„ํ‚คํ…์ฒ˜ & ํ‘œ์ค€ ์ฑ…์ž„์ž์ž…๋‹ˆ๋‹ค.
189
+ ์ •๋ฐ€ํ•จ๊ณผ ๊ตฌ์กฐํ™” ๋Šฅ๋ ฅ์œผ๋กœ ์™„๋ฒฝํ•œ ์‹œ์Šคํ…œ์„ ์„ค๊ณ„ํ•ฉ๋‹ˆ๋‹ค.
190
 
191
  ์‚ฌ์šฉ์ž ์งˆ๋ฌธ: {user_query}
192
 
193
+ ์‹คํ–‰์šด์˜(ๅœŸ)์˜ ๊ณ„ํš:
194
+ {earth_response}
195
 
196
+ ๋น„ํ‰์ž์˜ ํ”ผ๋“œ๋ฐฑ:
197
+ {critic_feedback}
198
 
199
+ ์•„ํ‚คํ…์ฒ˜ ์ฑ…์ž„์ž๋กœ์„œ:
200
+ 1. ์ „์ฒด ์‹œ์Šคํ…œ์˜ ๊ธฐ์ˆ ยท๋ฐ์ดํ„ฐ ์•„ํ‚คํ…์ฒ˜๋ฅผ ์„ค๊ณ„ํ•˜์„ธ์š”
201
+ 2. ์ฝ”๋”ฉ/APIยท๋ฐ์ดํ„ฐยท๋ณด์•ˆ ํ‘œ์ค€์„ ์ˆ˜๋ฆฝํ•˜์„ธ์š”
202
+ 3. ํ™•์žฅ์„ฑ๊ณผ ํ˜ธํ™˜์„ฑ์„ ๊ณ ๋ คํ•œ ํ”„๋ ˆ์ž„์›Œํฌ๋ฅผ ์ œ์‹œํ•˜์„ธ์š”
203
+ 4. ํ’ˆ์งˆ ๊ฒ€์ฆ ์ฒด๊ณ„์™€ ๊ธฐ์ค€์„ ์ •์˜ํ•˜์„ธ์š”
204
+ 5. ๊ธฐ์ˆ ์  ์ œ์•ฝ์‚ฌํ•ญ๊ณผ ํ•ด๊ฒฐ๋ฐฉ์•ˆ์„ ์ œ์‹œํ•˜์„ธ์š”"""
205
 
206
+ def create_water_rd_prompt(self, user_query: str, metal_response: str, critic_feedback: str, search_results: Dict) -> str:
207
+ """ๆฐด(R&D) ํ”„๋กฌํ”„ํŠธ"""
208
+ return f"""๋‹น์‹ ์€ ๆ™บ(์ง€ํ˜œ)์˜ ๋•๋ชฉ์„ ์ง€๋‹Œ ๆฐด์˜ ๊ธฐ์šด์„ ๊ฐ€์ง„ ๋„๊ตฌ ํ™œ์šฉ R&D ์ŠคํŽ˜์…œ๋ฆฌ์ŠคํŠธ์ž…๋‹ˆ๋‹ค.
209
+ ๋ถ„์„๋ ฅ๊ณผ ํ˜ธ๊ธฐ์‹ฌ์œผ๋กœ ์ตœ์‹  ๊ธฐ์ˆ ์„ ํƒ๊ตฌํ•˜๊ณ  ํ˜์‹ ํ•ฉ๋‹ˆ๋‹ค.
210
 
211
  ์‚ฌ์šฉ์ž ์งˆ๋ฌธ: {user_query}
212
 
213
+ ์•„ํ‚คํ…์ฒ˜(้‡‘)์˜ ์„ค๊ณ„:
214
+ {metal_response}
215
 
216
+ ๋น„ํ‰์ž์˜ ํ”ผ๋“œ๋ฐฑ:
217
+ {critic_feedback}
218
 
219
+ ์ตœ์‹  ๊ธฐ์ˆ  ์กฐ์‚ฌ ๊ฒฐ๊ณผ:
220
+ {self._format_search_results(search_results)}
221
 
222
+ R&D ์ŠคํŽ˜์…œ๋ฆฌ์ŠคํŠธ๋กœ์„œ:
223
+ 1. ์ตœ์‹  ๊ธฐ์ˆ  ํŠธ๋ Œ๋“œ์™€ ๋„๊ตฌ๋ฅผ ๋ถ„์„ํ•˜์„ธ์š”
224
+ 2. ํ˜์‹ ์ ์ธ ํ”„๋กœํ† ํƒ€์ž… ๊ฐœ๋ฐœ ๋ฐฉ์•ˆ์„ ์ œ์‹œํ•˜์„ธ์š”
225
+ 3. ์ž๋™ํ™”์™€ ์ƒ์‚ฐ์„ฑ ํ–ฅ์ƒ ๋„๊ตฌ๋ฅผ ์ถ”์ฒœํ•˜์„ธ์š”
226
+ 4. ๊ธฐ์ˆ  ๋„์ž…์˜ ROI์™€ ์‹คํ˜„ ๊ฐ€๋Šฅ์„ฑ์„ ํ‰๊ฐ€ํ•˜์„ธ์š”
227
+ 5. ํŒ€ ๊ต์œก๊ณผ ๊ธฐ์ˆ  ์ „ํŒŒ ๊ณ„ํš์„ ์ˆ˜๋ฆฝํ•˜์„ธ์š”"""
228
 
229
+ def create_wood_final_prompt(self, user_query: str, all_responses: Dict, all_critics: List) -> str:
230
+ """ๆœจ(๊ฐ๋…๊ด€) ์ตœ์ข… ์ข…ํ•ฉ ํ”„๋กฌํ”„ํŠธ"""
231
+ return f"""๋‹น์‹ ์€ ไป(์ธ์žํ•จ)์˜ ๋•๋ชฉ์„ ์ง€๋‹Œ ๆœจ์˜ ๊ธฐ์šด์„ ๊ฐ€์ง„ ๊ฐ๋…๊ด€์ž…๋‹ˆ๋‹ค.
232
+ ํŒ€ ์ „์ฒด์˜ ์˜๊ฒฌ์„ ์ข…ํ•ฉํ•˜์—ฌ ์ตœ์ข… ๊ฒฐ์ •์„ ๋‚ด๋ฆฝ๋‹ˆ๋‹ค.
 
 
 
 
 
 
 
233
 
234
  ์‚ฌ์šฉ์ž ์งˆ๋ฌธ: {user_query}
235
 
236
+ ํŒ€์›๋“ค์˜ ๊ธฐ์—ฌ:
237
+ - ็ซ(์ „๋žต๊ธฐํš): {all_responses['fire']}
238
+ - ๅœŸ(์‹คํ–‰์šด์˜): {all_responses['earth']}
239
+ - ้‡‘(์•„ํ‚คํ…์ฒ˜): {all_responses['metal']}
240
+ - ๆฐด(R&D): {all_responses['water']}
241
 
242
+ ๋น„ํ‰์ž์˜ ํ”ผ๋“œ๋ฐฑ ์ด๋ ฅ:
243
+ {self._format_critic_history(all_critics)}
244
 
245
+ ํŒ€์žฅ์œผ๋กœ์„œ ์ตœ์ข… ์ข…ํ•ฉ ๋ณด๊ณ ์„œ๋ฅผ ์ž‘์„ฑํ•˜์„ธ์š”:
246
+ 1. ๊ฐ ํŒ€์›์˜ ๊ธฐ์—ฌ๋ฅผ ํ†ตํ•ฉํ•œ ์ข…ํ•ฉ ์†”๋ฃจ์…˜
247
+ 2. ์‹คํ–‰ ์šฐ์„ ์ˆœ์œ„์™€ ๋‹จ๊ณ„๋ณ„ ๋กœ๋“œ๋งต
248
+ 3. ์˜ˆ์ƒ ์„ฑ๊ณผ์™€ ์„ฑ๊ณต ์ง€ํ‘œ
249
+ 4. ํŒ€ ์ „์ฒด์˜ ์‹œ๋„ˆ์ง€ ์ฐฝ์ถœ ๋ฐฉ์•ˆ
250
+ 5. ์ง€์†์  ๊ฐœ์„ ๊ณผ ํ™•์žฅ ๊ณ„ํš
 
 
 
 
251
 
252
+ ๋งˆํฌ๋‹ค์šด ํ˜•์‹์„ ํ™œ์šฉํ•˜์—ฌ ์ „๋ฌธ์ ์ด๊ณ  ์ฒด๊ณ„์ ์œผ๋กœ ์ž‘์„ฑํ•˜์„ธ์š”."""
253
 
254
+ def create_critic_prompt(self, stage: str, content: str, context: str = "") -> str:
255
+ """์ค‘๋ฆฝ์  ๋น„ํ‰์ž ํ”„๋กฌํ”„ํŠธ"""
256
+ return f"""๋‹น์‹ ์€ ์ค‘๋ฆฝ์ ์ด๊ณ  ๋…ผ๋ฆฌ์ ์ธ ๋น„ํ‰์ž์ž…๋‹ˆ๋‹ค.
257
+ ํŽธ๊ฒฌ ์—†์ด ํ•ฉ๋ฆฌ์ ์ด๊ณ  ๊ฑด์„ค์ ์ธ ๋น„ํ‰์„ ์ œ๊ณตํ•ฉ๋‹ˆ๋‹ค.
258
+
259
+ ํ˜„์žฌ ๋‹จ๊ณ„: {stage}
260
+
261
+ ๋ถ„์„ ๋Œ€์ƒ:
262
+ {content}
263
+
264
+ {f"์ด์ „ ๋งฅ๋ฝ: {context}" if context else ""}
265
+
266
+ ๋‹ค์Œ ๊ด€์ ์—์„œ ๋น„ํ‰ํ•˜์„ธ์š”:
267
+ 1. ๋…ผ๋ฆฌ์  ์ผ๊ด€์„ฑ๊ณผ ํƒ€๋‹น์„ฑ
268
+ 2. ์‹คํ˜„ ๊ฐ€๋Šฅ์„ฑ๊ณผ ์‹ค์šฉ์„ฑ
269
+ 3. ๋ˆ„๋ฝ๋œ ์ค‘์š” ์š”์†Œ
270
+ 4. ๊ฐœ์„  ๊ฐ€๋Šฅํ•œ ๋ถ€๋ถ„
271
+ 5. ๊ฐ•์ ๊ณผ ์•ฝ์ ์˜ ๊ท ํ˜•์žกํžŒ ํ‰๊ฐ€
272
+
273
+ ๊ฑด์„ค์ ์ด๊ณ  ๊ตฌ์ฒด์ ์ธ ํ”ผ๋“œ๋ฐฑ์„ ์ œ๊ณตํ•˜๋˜, ๋‹ค์Œ ๋‹จ๊ณ„ ๋‹ด๋‹น์ž๊ฐ€
274
+ ๊ฐœ์„ ํ•  ์ˆ˜ ์žˆ๋Š” ์‹ค์งˆ์ ์ธ ์ œ์•ˆ์„ ํฌํ•จํ•˜์„ธ์š”."""
275
+
276
+ def _format_search_results(self, search_results: Dict) -> str:
277
+ """๊ฒ€์ƒ‰ ๊ฒฐ๊ณผ ํฌ๋งทํŒ…"""
278
+ if not search_results:
279
+ return "๊ฒ€์ƒ‰ ๊ฒฐ๊ณผ ์—†์Œ"
280
+
281
+ formatted = ""
282
+ for keyword, results in search_results.items():
283
+ formatted += f"\n**{keyword}:**\n"
284
+ for i, result in enumerate(results[:5], 1):
285
+ formatted += f"{i}. {result.get('title', 'N/A')} (์‹ ๋ขฐ๋„: {result.get('credibility_score', 0):.2f})\n"
286
+ formatted += f" {result.get('description', 'N/A')[:150]}...\n"
287
+ return formatted
288
+
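For reference, a small illustration (made-up search data, output shown approximately in the comments) of the string `_format_search_results` feeds into the ๅœŸ and ๆฐด prompts:

```python
# Illustration with assumed data; not part of the committed code.
sample_results = {
    "smart city": [
        {"title": "Smart City Reference Architecture", "credibility_score": 0.85,
         "description": "Overview of governance models, IoT data hubs and citizen services."},
    ]
}
print(WuxingLLMSystem()._format_search_results(sample_results))
# **smart city:**
# 1. Smart City Reference Architecture (์‹ ๋ขฐ๋„: 0.85)
#    Overview of governance models, IoT data hubs and citizen services....
```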
289
+ def _format_critic_history(self, critics: List) -> str:
290
+ """๋น„ํ‰ ์ด๋ ฅ ํฌ๋งทํŒ…"""
291
+ if not critics:
292
+ return "๋น„ํ‰ ์ด๋ ฅ ์—†์Œ"
293
+
294
+ formatted = ""
295
+ stages = ["ๆœจ ์ดˆ๊ธฐ", "็ซ ์ „๋žต", "ๅœŸ ์‹คํ–‰", "้‡‘ ์•„ํ‚คํ…์ฒ˜", "ๆฐด R&D"]
296
+ for i, critic in enumerate(critics):
297
+ if i < len(stages):
298
+ formatted += f"\n**{stages[i]} ๋‹จ๊ณ„ ๋น„ํ‰:**\n{critic}\n"
299
+ return formatted
300
+
301
+ def extract_keywords(self, wood_response: str) -> List[str]:
302
+ """๊ฐ๋…๊ด€ ์‘๋‹ต์—์„œ ํ‚ค์›Œ๋“œ ์ถ”์ถœ"""
303
  keywords = []
304
 
305
+ keyword_match = re.search(r'\[ํ•ต์‹ฌ ํ‚ค์›Œ๋“œ\]:\s*(.+)', wood_response, re.IGNORECASE)
 
306
  if keyword_match:
307
  keyword_str = keyword_match.group(1)
308
  keywords = [k.strip() for k in keyword_str.split(',') if k.strip()]
309
 
 
310
  if not keywords:
311
+ keywords = ["best practices", "implementation", "strategy", "innovation", "optimization"]
312
 
313
+ return keywords[:7]
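A quick usage illustration (sample text assumed) of how the `[ํ•ต์‹ฌ ํ‚ค์›Œ๋“œ]:` line emitted by the ๆœจ stage is parsed; if the marker is missing, the generic fallback list above is used instead:

```python
# Assumed sample of a ๆœจ response containing the keyword marker.
sample_wood_response = """...ํŒ€ ๋น„์ „๊ณผ ํ”„๋ ˆ์ž„์›Œํฌ ์ƒ๋žต...
[ํ•ต์‹ฌ ํ‚ค์›Œ๋“œ]: smart city platform, IoT ๋ฐ์ดํ„ฐ ํ—ˆ๋ธŒ, ํƒ„์†Œ์ค‘๋ฆฝ ์ •์ฑ…, ๋””์ง€ํ„ธ ํŠธ์œˆ, ์‹œ๋ฏผ ์ฐธ์—ฌ"""

print(WuxingLLMSystem().extract_keywords(sample_wood_response))
# ['smart city platform', 'IoT ๋ฐ์ดํ„ฐ ํ—ˆ๋ธŒ', 'ํƒ„์†Œ์ค‘๋ฆฝ ์ •์ฑ…', '๋””์ง€ํ„ธ ํŠธ์œˆ', '์‹œ๋ฏผ ์ฐธ์—ฌ']
```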
314
 
315
  def calculate_credibility_score(self, result: Dict) -> float:
316
+ """๊ฒ€์ƒ‰ ๊ฒฐ๊ณผ ์‹ ๋ขฐ๋„ ๊ณ„์‚ฐ"""
317
+ score = 0.5
 
318
  url = result.get('url', '')
 
 
319
 
320
+ trusted_domains = ['.edu', '.gov', '.org', 'wikipedia.org', 'nature.com',
321
+ 'ieee.org', 'acm.org', 'github.com']
 
 
 
 
322
 
323
  for domain in trusted_domains:
324
  if domain in url:
325
  score += 0.2
326
  break
327
 
 
328
  if url.startswith('https://'):
329
  score += 0.1
330
 
331
+ if len(result.get('title', '')) > 20:
 
332
  score += 0.05
333
+ if len(result.get('description', '')) > 50:
334
  score += 0.05
335
 
336
+ spam_keywords = ['buy now', 'sale', 'discount', 'click here']
337
+ if any(spam in (result.get('title', '') + result.get('description', '')).lower()
338
+ for spam in spam_keywords):
339
  score -= 0.3
340
 
341
+ return max(0, min(1, score))
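A worked example (assumed input) showing how the heuristics add up; the final value is clamped to [0, 1]:

```python
# 0.5 base + 0.2 (.edu in URL) + 0.1 (https) + 0.05 (title > 20 chars)
# + 0.05 (description > 50 chars), no spam penalty.
result = {
    "url": "https://example.edu/mlops-guide",
    "title": "A Practical Guide to Machine Learning Operations",
    "description": "Peer-reviewed overview of MLOps deployment, monitoring and governance practices.",
}
print(WuxingLLMSystem().calculate_credibility_score(result))  # โ‰ˆ 0.9 (floating-point sum)
```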
342
 
343
  def brave_search(self, query: str) -> List[Dict]:
344
  """Brave Search API ํ˜ธ์ถœ"""
345
  if self.test_mode or self.bapi_token == "YOUR_BRAVE_API_TOKEN":
 
346
  test_results = []
347
  for i in range(5):
348
  test_results.append({
349
+ "title": f"{query} - Best Practices {i+1}",
350
+ "description": f"Comprehensive guide on {query} with proven methodologies.",
351
  "url": f"https://example{i+1}.com/{query.replace(' ', '-')}",
352
  "credibility_score": 0.7 + (i * 0.05)
353
  })
 
356
  try:
357
  params = {
358
  "q": query,
359
+ "count": 10,
360
+ "safesearch": "moderate"
 
361
  }
362
 
363
  response = requests.get(
 
370
  if response.status_code == 200:
371
  data = response.json()
372
  results = []
373
+ for item in data.get("web", {}).get("results", []):
374
  result = {
375
  "title": item.get("title", ""),
376
  "description": item.get("description", ""),
377
  "url": item.get("url", ""),
378
+ "credibility_score": self.calculate_credibility_score(item)
379
  }
 
 
380
  results.append(result)
381
 
 
382
  results.sort(key=lambda x: x['credibility_score'], reverse=True)
383
  return results
384
  else:
 
389
  logger.error(f"Brave ๊ฒ€์ƒ‰ ์ค‘ ์˜ค๋ฅ˜: {str(e)}")
390
  return []
391
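Usage sketch: results come back sorted by `credibility_score` in descending order, so later stages can take the top hits directly; in test mode or without a real `BAPI_TOKEN`, synthetic placeholder results are returned instead. The snippet assumes the module-level `wuxing_system` instance created further down the file:

```python
# Illustrative only; prints the three highest-credibility hits for a query.
hits = wuxing_system.brave_search("sustainable smart city strategy")
for hit in hits[:3]:
    print(f"{hit['credibility_score']:.2f}  {hit['title']}  ->  {hit['url']}")
```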
 
 
 
 
 
 
 
 
 
392
  def call_gemini_streaming(self, messages: List[Dict[str, str]], role: str) -> Generator[str, None, None]:
393
  """Gemini API ์ŠคํŠธ๋ฆฌ๋ฐ ํ˜ธ์ถœ"""
394
  if not self.gemini_client:
 
396
  return
397
 
398
  try:
 
 
 
 
 
 
 
 
 
399
  contents = []
 
 
 
 
 
 
 
 
 
 
 
 
400
  for msg in messages:
401
  if msg["role"] == "user":
402
  contents.append(types.Content(
 
404
  parts=[types.Part.from_text(text=msg["content"])]
405
  ))
406
 
 
407
  generate_content_config = types.GenerateContentConfig(
408
  temperature=0.7,
409
  top_p=0.8,
 
411
  response_mime_type="text/plain"
412
  )
413
 
 
414
  for chunk in self.gemini_client.models.generate_content_stream(
415
  model="gemini-2.5-pro",
416
  contents=contents,
 
425
 
426
  def call_llm_streaming(self, messages: List[Dict[str, str]], role: str) -> Generator[str, None, None]:
427
  """์ŠคํŠธ๋ฆฌ๋ฐ LLM API ํ˜ธ์ถœ"""
 
 
428
  if self.use_gemini:
429
  yield from self.call_gemini_streaming(messages, role)
430
  return
431
 
 
432
  if self.test_mode:
433
+ test_response = f"์ด๊ฒƒ์€ {role} ์—ญํ• ์˜ ํ…Œ์ŠคํŠธ ์‘๋‹ต์ž…๋‹ˆ๋‹ค.\n"
434
+ test_response += f"์‚ฌ์šฉ์ž ์งˆ๋ฌธ์— ๋Œ€ํ•œ {role}์˜ ๊ด€์ ์—์„œ ๋ถ„์„ํ•œ ๋‚ด์šฉ์ž…๋‹ˆ๋‹ค.\n"
435
+ test_response += "1. ์ฒซ ๋ฒˆ์งธ ํ•ต์‹ฌ ํฌ์ธํŠธ\n2. ๋‘ ๋ฒˆ์งธ ํ•ต์‹ฌ ํฌ์ธํŠธ\n3. ์„ธ ๋ฒˆ์งธ ํ•ต์‹ฌ ํฌ์ธํŠธ"
436
 
437
+ words = test_response.split()
438
+ for i in range(0, len(words), 3):
439
+ chunk = " ".join(words[i:i+3])
440
+ yield chunk + " "
441
+ time.sleep(0.05)
442
  return
443
 
 
444
  try:
 
 
 
 
 
 
 
 
 
 
 
 
445
  payload = {
446
  "model": self.model_id,
447
+ "messages": messages,
448
  "max_tokens": 4096,
449
  "temperature": 0.7,
450
+ "stream": True
 
 
451
  }
452
 
 
 
453
  response = requests.post(
454
  self.api_url,
455
  headers=self.create_headers(),
 
459
  )
460
 
461
  if response.status_code != 200:
462
+ yield f"โŒ API ์˜ค๋ฅ˜: {response.status_code}"
 
463
  return
464
 
465
  for line in response.iter_lines():
 
478
  except json.JSONDecodeError:
479
  continue
480
 
 
 
 
 
481
  except Exception as e:
482
  logger.error(f"์ŠคํŠธ๋ฆฌ๋ฐ ์ค‘ ์˜ค๋ฅ˜: {str(e)}")
483
  yield f"โŒ ์˜ค๋ฅ˜ ๋ฐœ์ƒ: {str(e)}"
484
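The body of the `for line in response.iter_lines():` loop is collapsed in this diff view. A plausible reconstruction, assuming the Friendli endpoint emits OpenAI-compatible `data: {json}` SSE lines (illustrative only, not the committed code):

```python
import json
from typing import Generator

import requests


def stream_chat_chunks(response: requests.Response) -> Generator[str, None, None]:
    """Parse an OpenAI-compatible SSE stream ("data: {json}" lines) into text chunks."""
    for line in response.iter_lines():
        if not line:
            continue
        decoded = line.decode("utf-8")
        if not decoded.startswith("data: "):
            continue
        payload = decoded[len("data: "):]
        if payload == "[DONE]":
            break
        try:
            content = json.loads(payload)["choices"][0]["delta"].get("content", "")
        except (json.JSONDecodeError, KeyError, IndexError):
            continue
        if content:
            yield content
```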
 
485
  # ์‹œ์Šคํ…œ ์ธ์Šคํ„ด์Šค ์ƒ์„ฑ
486
+ wuxing_system = WuxingLLMSystem()
487
 
488
+ def process_wuxing_query(user_query: str, llm_mode: str):
489
+ """์˜คํ–‰ ๊ธฐ๋ฐ˜ ์ฟผ๋ฆฌ ์ฒ˜๋ฆฌ"""
 
 
 
 
 
490
  if not user_query:
491
+ return "", "", "", "", "", "", "โŒ ์งˆ๋ฌธ์„ ์ž…๋ ฅํ•ด์ฃผ์„ธ์š”."
492
 
493
+ wuxing_system.set_llm_mode(llm_mode)
 
494
 
495
+ all_responses = {}
496
+ all_critics = []
497
+ search_results = {}
498
 
499
  try:
500
+ # 1. ๆœจ(๊ฐ๋…๊ด€) ์ดˆ๊ธฐ ๋ถ„์„
501
+ wood_prompt = wuxing_system.create_wood_initial_prompt(user_query)
502
+ wood_response = ""
503
 
504
+ wood_text = "๐ŸŒณ **ๆœจ - ๊ฐ๋…๊ด€** (ไป)\n๐Ÿ”„ ๋ถ„์„ ์ค‘...\n"
505
+ for chunk in wuxing_system.call_llm_streaming(
506
+ [{"role": "user", "content": wood_prompt}], "wood"
 
507
  ):
508
+ wood_response += chunk
509
+ wood_text = f"๐ŸŒณ **ๆœจ - ๊ฐ๋…๊ด€** (ไป)\n{wood_response}"
510
+ yield wood_text, "", "", "", "", "", "๐ŸŒณ ๊ฐ๋…๊ด€์ด ๋น„์ „์„ ์ˆ˜๋ฆฝ ์ค‘..."
 
 
 
 
 
 
511
 
512
+ all_responses['wood_initial'] = wood_response
 
 
513
 
514
+ # ํ‚ค์›Œ๋“œ ์ถ”์ถœ ๋ฐ ๊ฒ€์ƒ‰
515
+ keywords = wuxing_system.extract_keywords(wood_response)
516
+ status_text = "๐Ÿ” ์›น ๊ฒ€์ƒ‰ ์ˆ˜ํ–‰ ์ค‘..."
517
+ yield wood_text, "", "", "", "", "", status_text
518
 
 
519
  for keyword in keywords:
520
+ results = wuxing_system.brave_search(keyword)
521
  if results:
522
  search_results[keyword] = results
523
 
524
+ # 2. ๆœจ ๋น„ํ‰
525
+ critic_prompt = wuxing_system.create_critic_prompt("ๆœจ ์ดˆ๊ธฐ ๋ถ„์„", wood_response)
526
+ critic_response = ""
527
 
528
+ critic_text = "๐Ÿ” **์ค‘๋ฆฝ์  ๋น„ํ‰์ž**\n[ๆœจ ๋ถ„์„ ๋น„ํ‰] ๐Ÿ”„ ๋น„ํ‰ ์ค‘...\n"
529
+ for chunk in wuxing_system.call_llm_streaming(
530
+ [{"role": "user", "content": critic_prompt}], "critic"
 
531
  ):
532
+ critic_response += chunk
533
+ critic_text = f"๐Ÿ” **์ค‘๋ฆฝ์  ๋น„ํ‰์ž**\n[ๆœจ ๋ถ„์„ ๋น„ํ‰]\n{critic_response}"
534
+ yield wood_text, "", "", "", "", critic_text, "๐Ÿ” ๋น„ํ‰์ž๊ฐ€ ๋ถ„์„ ์ค‘..."
535
 
536
+ all_critics.append(critic_response)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
537
 
538
+ # 3. ็ซ(์ „๋žต๊ธฐํš)
539
+ fire_prompt = wuxing_system.create_fire_strategy_prompt(user_query, wood_response, critic_response)
540
+ fire_response = ""
541
 
542
+ fire_text = "๐Ÿ”ฅ **็ซ - ์ „๋žตยท๊ธฐํš ๋ฆฌ๋”** (็พฉ)\n๐Ÿ”„ ์ „๋žต ์ˆ˜๋ฆฝ ์ค‘...\n"
543
+ for chunk in wuxing_system.call_llm_streaming(
544
+ [{"role": "user", "content": fire_prompt}], "fire"
545
  ):
546
+ fire_response += chunk
547
+ fire_text = f"๐Ÿ”ฅ **็ซ - ์ „๋žตยท๊ธฐํš ๋ฆฌ๋”** (็พฉ)\n{fire_response}"
548
+ yield wood_text, fire_text, "", "", "", critic_text, "๐Ÿ”ฅ ์ „๋žต ์ˆ˜๋ฆฝ ์ค‘..."
549
 
550
+ all_responses['fire'] = fire_response
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
551
 
552
+ # 4. ็ซ ๋น„ํ‰
553
+ critic_prompt = wuxing_system.create_critic_prompt("็ซ ์ „๋žต๊ธฐํš", fire_response, wood_response)
554
+ critic_response = ""
555
 
556
+ critic_text += "\n\n---\n\n[็ซ ์ „๋žต ๋น„ํ‰] ๐Ÿ”„ ๋น„ํ‰ ์ค‘...\n"
557
+ for chunk in wuxing_system.call_llm_streaming(
558
+ [{"role": "user", "content": critic_prompt}], "critic"
 
559
  ):
560
+ critic_response += chunk
561
+ temp_text = all_critics[0] + f"\n\n---\n\n[็ซ ์ „๋žต ๋น„ํ‰]\n{critic_response}"
562
+ critic_text = f"๐Ÿ” **์ค‘๋ฆฝ์  ๋น„ํ‰์ž**\n[ๆœจ ๋ถ„์„ ๋น„ํ‰]\n{temp_text}"
563
+ yield wood_text, fire_text, "", "", "", critic_text, "๐Ÿ” ์ „๋žต ๋น„ํ‰ ์ค‘..."
564
 
565
+ all_critics.append(critic_response)
566
 
567
+ # 5. ๅœŸ(์‹คํ–‰์šด์˜)
568
+ earth_prompt = wuxing_system.create_earth_execution_prompt(user_query, fire_response, critic_response, search_results)
569
+ earth_response = ""
570
 
571
+ earth_text = "๐Ÿ”๏ธ **ๅœŸ - ์‹คํ–‰ยท์šด์˜ยทํ’ˆ์งˆ** (ไฟก)\n๐Ÿ”„ ์‹คํ–‰ ๊ณ„ํš ์ˆ˜๋ฆฝ ์ค‘...\n"
572
+ for chunk in wuxing_system.call_llm_streaming(
573
+ [{"role": "user", "content": earth_prompt}], "earth"
 
574
  ):
575
+ earth_response += chunk
576
+ earth_text = f"๐Ÿ”๏ธ **ๅœŸ - ์‹คํ–‰ยท์šด์˜ยทํ’ˆ์งˆ** (ไฟก)\n{earth_response}"
577
+ yield wood_text, fire_text, earth_text, "", "", critic_text, "๐Ÿ”๏ธ ์‹คํ–‰ ๊ณ„ํš ์ˆ˜๋ฆฝ ์ค‘..."
578
 
579
+ all_responses['earth'] = earth_response
580
 
581
+ # 6. ๅœŸ ๋น„ํ‰
582
+ critic_prompt = wuxing_system.create_critic_prompt("ๅœŸ ์‹คํ–‰๊ณ„ํš", earth_response, fire_response)
583
+ critic_response = ""
 
 
 
 
 
 
 
 
 
 
 
 
 
584
 
585
+ critic_text = "๐Ÿ” **์ค‘๋ฆฝ์  ๋น„ํ‰์ž**\n" + "\n\n".join([
586
+ f"[ๆœจ ๋ถ„์„ ๋น„ํ‰]\n{all_critics[0]}",
587
+ f"[็ซ ์ „๋žต ๋น„ํ‰]\n{all_critics[1]}",
588
+ "[ๅœŸ ์‹คํ–‰ ๋น„ํ‰] ๐Ÿ”„ ๋น„ํ‰ ์ค‘...\n"
589
+ ])
590
 
591
+ for chunk in wuxing_system.call_llm_streaming(
592
+ [{"role": "user", "content": critic_prompt}], "critic"
593
+ ):
594
+ critic_response += chunk
595
+ critic_text = "๐Ÿ” **์ค‘๋ฆฝ์  ๋น„ํ‰์ž**\n" + "\n\n".join([
596
+ f"[ๆœจ ๋ถ„์„ ๋น„ํ‰]\n{all_critics[0]}",
597
+ f"[็ซ ์ „๋žต ๋น„ํ‰]\n{all_critics[1]}",
598
+ f"[ๅœŸ ์‹คํ–‰ ๋น„ํ‰]\n{critic_response}"
599
+ ])
600
+ yield wood_text, fire_text, earth_text, "", "", critic_text, "๐Ÿ” ์‹คํ–‰ ๋น„ํ‰ ์ค‘..."
601
+
602
+ all_critics.append(critic_response)
603
+
604
+ # 7. ้‡‘(์•„ํ‚คํ…์ฒ˜)
605
+ metal_prompt = wuxing_system.create_metal_architecture_prompt(user_query, earth_response, critic_response)
606
+ metal_response = ""
607
+
608
+ metal_text = "โš™๏ธ **้‡‘ - ์•„ํ‚คํ…์ฒ˜ & ํ‘œ์ค€** (็ฆฎ)\n๐Ÿ”„ ์•„ํ‚คํ…์ฒ˜ ์„ค๊ณ„ ์ค‘...\n"
609
+ for chunk in wuxing_system.call_llm_streaming(
610
+ [{"role": "user", "content": metal_prompt}], "metal"
611
  ):
612
+ metal_response += chunk
613
+ metal_text = f"โš™๏ธ **้‡‘ - ์•„ํ‚คํ…์ฒ˜ & ํ‘œ์ค€** (็ฆฎ)\n{metal_response}"
614
+ yield wood_text, fire_text, earth_text, metal_text, "", critic_text, "โš™๏ธ ์•„ํ‚คํ…์ฒ˜ ์„ค๊ณ„ ์ค‘..."
 
615
 
616
+ all_responses['metal'] = metal_response
617
 
618
+ # 8. ้‡‘ ๋น„ํ‰
619
+ critic_prompt = wuxing_system.create_critic_prompt("้‡‘ ์•„ํ‚คํ…์ฒ˜", metal_response, earth_response)
620
+ critic_response = ""
 
 
 
 
 
 
 
 
 
 
 
 
621
 
622
+ # ์ถ”๊ฐ€ ๊ฒ€์ƒ‰ ์ˆ˜ํ–‰
623
+ additional_keywords = ["architecture patterns", "system design", "technical standards"]
624
+ for keyword in additional_keywords:
625
+ results = wuxing_system.brave_search(f"{user_query} {keyword}")
626
+ if results:
627
+ search_results[keyword] = results
628
 
629
+ # ๋น„ํ‰ ์ˆ˜ํ–‰
630
+ for chunk in wuxing_system.call_llm_streaming(
631
+ [{"role": "user", "content": critic_prompt}], "critic"
632
  ):
633
+ critic_response += chunk
634
+ yield wood_text, fire_text, earth_text, metal_text, "", critic_text, "๐Ÿ” ์•„ํ‚คํ…์ฒ˜ ๋น„ํ‰ ์ค‘..."
 
 
635
 
636
+ all_critics.append(critic_response)
637
 
638
+ # 9. ๆฐด(R&D)
639
+ water_prompt = wuxing_system.create_water_rd_prompt(user_query, metal_response, critic_response, search_results)
640
+ water_response = ""
 
 
 
 
 
 
641
 
642
+ water_text = "๐Ÿ’ง **ๆฐด - R&D ์ŠคํŽ˜์…œ๋ฆฌ์ŠคํŠธ** (ๆ™บ)\n๐Ÿ”„ ํ˜์‹  ๋ฐฉ์•ˆ ์—ฐ๊ตฌ ์ค‘...\n"
643
+ for chunk in wuxing_system.call_llm_streaming(
644
+ [{"role": "user", "content": water_prompt}], "water"
 
645
  ):
646
+ water_response += chunk
647
+ water_text = f"๐Ÿ’ง **ๆฐด - R&D ์ŠคํŽ˜์…œ๋ฆฌ์ŠคํŠธ** (ๆ™บ)\n{water_response}"
648
+ yield wood_text, fire_text, earth_text, metal_text, water_text, critic_text, "๐Ÿ’ง ํ˜์‹  ์—ฐ๊ตฌ ์ค‘..."
 
 
 
649
 
650
+ all_responses['water'] = water_response
651
 
652
+ # 10. ๆฐด ๋น„ํ‰
653
+ critic_prompt = wuxing_system.create_critic_prompt("ๆฐด R&D", water_response, metal_response)
654
+ critic_response = ""
655
 
656
+ for chunk in wuxing_system.call_llm_streaming(
657
+ [{"role": "user", "content": critic_prompt}], "critic"
 
 
658
  ):
659
+ critic_response += chunk
660
+ yield wood_text, fire_text, earth_text, metal_text, water_text, critic_text, "๐Ÿ” R&D ๋น„ํ‰ ์ค‘..."
 
 
661
 
662
+ all_critics.append(critic_response)
663
 
664
+ # 11. ๆœจ(๊ฐ๋…๊ด€) ์ตœ์ข… ์ข…ํ•ฉ
665
+ wood_final_prompt = wuxing_system.create_wood_final_prompt(user_query, all_responses, all_critics)
666
+ wood_final_response = ""
667
 
668
+ wood_text += "\n\n---\n\n๐ŸŒณ **์ตœ์ข… ์ข…ํ•ฉ ๋ณด๊ณ ์„œ**\n๐Ÿ”„ ์ž‘์„ฑ ์ค‘...\n"
669
+ for chunk in wuxing_system.call_llm_streaming(
670
+ [{"role": "user", "content": wood_final_prompt}], "wood"
671
+ ):
672
+ wood_final_response += chunk
673
+ temp_text = all_responses['wood_initial'] + f"\n\n---\n\n๐ŸŒณ **์ตœ์ข… ์ข…ํ•ฉ ๋ณด๊ณ ์„œ**\n{wood_final_response}"
674
+ wood_text = f"๐ŸŒณ **ๆœจ - ๊ฐ๋…๊ด€** (ไป)\n{temp_text}"
675
+ yield wood_text, fire_text, earth_text, metal_text, water_text, critic_text, "๐ŸŒณ ์ตœ์ข… ๋ณด๊ณ ์„œ ์ž‘์„ฑ ์ค‘..."
676
 
677
+ status_text = f"โœ… ์˜คํ–‰ ํ˜‘๋ ฅ ํ”„๋กœ์„ธ์Šค ์™„๋ฃŒ! ({len(search_results)} ํ‚ค์›Œ๋“œ, {sum(len(r) for r in search_results.values())} ๊ฒ€์ƒ‰๊ฒฐ๊ณผ)"
678
+ yield wood_text, fire_text, earth_text, metal_text, water_text, critic_text, status_text
679
 
680
  except Exception as e:
681
  error_msg = f"โŒ ์ฒ˜๋ฆฌ ์ค‘ ์˜ค๋ฅ˜: {str(e)}"
682
+ yield "", "", "", "", "", "", error_msg
683
 
684
+ def clear_wuxing():
685
+ """์ดˆ๊ธฐํ™”"""
686
+ return "", "", "", "", "", "", "๐Ÿ”„ ์ดˆ๊ธฐํ™”๋˜์—ˆ์Šต๋‹ˆ๋‹ค."
 
 
687
 
688
+ # CSS ์Šคํƒ€์ผ
689
  css = """
690
  .gradio-container {
691
  font-family: 'Arial', sans-serif;
692
  }
693
+ .wood-box textarea {
 
 
 
 
 
694
  border-left: 4px solid #10b981 !important;
 
695
  background-color: #f0fdf4 !important;
696
  }
697
+ .fire-box textarea {
698
+ border-left: 4px solid #ef4444 !important;
699
+ background-color: #fef2f2 !important;
700
+ }
701
+ .earth-box textarea {
702
+ border-left: 4px solid #a855f7 !important;
703
  background-color: #faf5ff !important;
704
  }
705
+ .metal-box textarea {
706
  border-left: 4px solid #f59e0b !important;
 
707
  background-color: #fffbeb !important;
708
  }
709
+ .water-box textarea {
710
  border-left: 4px solid #3b82f6 !important;
711
+ background-color: #eff6ff !important;
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
712
  }
713
+ .critic-box textarea {
714
+ border-left: 4px solid #6b7280 !important;
715
+ background-color: #f9fafb !important;
716
  }
717
+ h1 {
718
+ text-align: center;
719
+ color: #1f2937;
720
  }
721
  """
722
 
723
+ # Gradio ์ธํ„ฐํŽ˜์ด์Šค
724
+ with gr.Blocks(title="์˜คํ–‰ยท์˜ค์ƒ ํ˜‘๋ ฅ ์‹œ์Šคํ…œ", theme=gr.themes.Soft(), css=css) as app:
725
  gr.Markdown(
726
  """
727
+ # ๐ŸŒŸ ์˜คํ–‰ยท์˜ค์ƒ ๊ธฐ๋ฐ˜ ํ˜‘๋ ฅ์  LLM ์‹œ์Šคํ…œ
728
+
729
+ ### ๐Ÿ“‹ ํ”„๋กœ์„ธ์Šค: ๆœจโ†’๋น„ํ‰โ†’็ซโ†’๋น„ํ‰โ†’ๅœŸโ†’๋น„ํ‰โ†’้‡‘โ†’๋น„ํ‰โ†’ๆฐดโ†’๋น„ํ‰โ†’ๆœจ(์ตœ์ข…)
730
+
731
+ | ์—ญํ•  | ๋•๋ชฉยท์˜คํ–‰ | ํ•ต์‹ฌ ์ „๋ฌธ์„ฑ |
732
+ |------|-----------|-------------|
733
+ | ๐ŸŒณ **๊ฐ๋…๊ด€** | ไปยทๆœจ | ๋น„์ „ ์ œ์‹œ, ํŒ€ ์กฐ์œจ, ์ธ์žฌ ์œก์„ฑ |
734
+ | ๐Ÿ”ฅ **์ „๋žต๊ธฐํš** | ็พฉยท็ซ | ๋กœ๋“œ๋งต, ์‚ฌ์—… ๋ชจ๋ธ, ๋ฆฌ์Šคํฌ ๋ถ„์„ |
735
+ | ๐Ÿ”๏ธ **์‹คํ–‰์šด์˜** | ไฟกยทๅœŸ | ์ผ์ •ยท์˜ˆ์‚ฐ ๊ด€๋ฆฌ, ํ’ˆ์งˆ ๋ณด์ฆ |
736
+ | โš™๏ธ **์•„ํ‚คํ…์ฒ˜** | ็ฆฎยท้‡‘ | ์‹œ์Šคํ…œ ์„ค๊ณ„, ํ‘œ์ค€ ์ˆ˜๋ฆฝ, ํ’ˆ์งˆ ๊ฒ€์ฆ |
737
+ | ๐Ÿ’ง **R&D** | ๆ™บยทๆฐด | ๊ธฐ์ˆ  ์กฐ์‚ฌ, ํ”„๋กœํ† ํƒ€์ž…, ํ˜์‹  ๋„๊ตฌ |
738
+ | ๐Ÿ” **๋น„ํ‰์ž** | ์ค‘๋ฆฝ | ๋…ผ๋ฆฌ์ ยท๊ฑด์„ค์  ํ”ผ๋“œ๋ฐฑ |
739
  """
740
  )
741
 
 
742
  with gr.Row():
743
+ with gr.Column(scale=3):
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
744
  llm_mode = gr.Radio(
745
  choices=["default", "commercial"],
746
  value="default",
747
+ label="LLM ๋ชจ๋“œ",
748
+ info="commercial: Gemini 2.5 Pro ์‚ฌ์šฉ"
749
  )
750
 
751
  user_input = gr.Textbox(
752
  label="์งˆ๋ฌธ ์ž…๋ ฅ",
753
+ placeholder="์˜ˆ: ์ง€์† ๊ฐ€๋Šฅํ•œ ์Šค๋งˆํŠธ์‹œํ‹ฐ ๊ตฌ์ถ• ์ „๋žต์€?",
754
  lines=3
755
  )
756
 
757
  with gr.Row():
758
  submit_btn = gr.Button("๐Ÿš€ ๋ถ„์„ ์‹œ์ž‘", variant="primary", scale=2)
759
  clear_btn = gr.Button("๐Ÿ—‘๏ธ ์ดˆ๊ธฐํ™”", scale=1)
760
+
761
+ with gr.Column(scale=1):
762
  status_text = gr.Textbox(
763
+ label="์ง„ํ–‰ ์ƒํƒœ",
764
  interactive=False,
765
  value="๋Œ€๊ธฐ ์ค‘...",
766
+ lines=3
767
  )
768
 
769
+ # ์˜คํ–‰ ์ถœ๋ ฅ - 2x3 ๊ทธ๋ฆฌ๋“œ
770
  with gr.Row():
771
  with gr.Column():
772
+ gr.Markdown("### ๐ŸŒณ ๆœจ - ๊ฐ๋…๊ด€ (ไป)")
773
+ wood_output = gr.Textbox(
774
+ label="",
775
+ lines=10,
776
+ max_lines=15,
777
+ interactive=False,
778
+ elem_classes=["wood-box"]
779
+ )
780
+
781
+ with gr.Column():
782
+ gr.Markdown("### ๐Ÿ”ฅ ็ซ - ์ „๋žตยท๊ธฐํš (็พฉ)")
783
+ fire_output = gr.Textbox(
784
+ label="",
785
+ lines=10,
786
+ max_lines=15,
787
+ interactive=False,
788
+ elem_classes=["fire-box"]
789
  )
790
 
 
791
  with gr.Row():
 
792
  with gr.Column():
793
+ gr.Markdown("### ๐Ÿ”๏ธ ๅœŸ - ์‹คํ–‰ยท์šด์˜ (ไฟก)")
794
+ earth_output = gr.Textbox(
795
  label="",
796
+ lines=10,
797
+ max_lines=15,
798
  interactive=False,
799
+ elem_classes=["earth-box"]
800
  )
801
 
802
  with gr.Column():
803
+ gr.Markdown("### โš™๏ธ ้‡‘ - ์•„ํ‚คํ…์ฒ˜ (็ฆฎ)")
804
+ metal_output = gr.Textbox(
805
  label="",
806
+ lines=10,
807
+ max_lines=15,
808
  interactive=False,
809
+ elem_classes=["metal-box"]
810
  )
811
 
812
  with gr.Row():
 
813
  with gr.Column():
814
+ gr.Markdown("### ๐Ÿ’ง ๆฐด - R&D (ๆ™บ)")
815
+ water_output = gr.Textbox(
816
  label="",
817
+ lines=10,
818
+ max_lines=15,
819
  interactive=False,
820
+ elem_classes=["water-box"]
821
  )
822
 
823
  with gr.Column():
824
+ gr.Markdown("### ๐Ÿ” ์ค‘๋ฆฝ์  ๋น„ํ‰์ž")
825
+ critic_output = gr.Textbox(
826
  label="",
827
+ lines=10,
828
+ max_lines=15,
829
  interactive=False,
830
+ elem_classes=["critic-box"]
831
  )
832
 
833
  # ์˜ˆ์ œ
834
  gr.Examples(
835
  examples=[
836
+ "์ง€์† ๊ฐ€๋Šฅํ•œ ์Šค๋งˆํŠธ์‹œํ‹ฐ ๊ตฌ์ถ•์„ ์œ„ํ•œ ์ข…ํ•ฉ ์ „๋žต์€?",
837
+ "AI ๊ธฐ๋ฐ˜ ํ—ฌ์Šค์ผ€์–ด ์„œ๋น„์Šค ํ”Œ๋žซํผ ๊ฐœ๋ฐœ ๊ณ„ํš์€?",
838
+ "ํƒ„์†Œ์ค‘๋ฆฝ ๋‹ฌ์„ฑ์„ ์œ„ํ•œ ๊ธฐ์—… ์ „ํ™˜ ๋กœ๋“œ๋งต์€?",
839
+ "๋ฉ”ํƒ€๋ฒ„์Šค ๊ต์œก ํ”Œ๋žซํผ ๊ตฌ์ถ• ๋ฐฉ์•ˆ์€?",
840
+ "๋ธ”๋ก์ฒด์ธ ๊ธฐ๋ฐ˜ ๊ณต๊ธ‰๋ง ๊ด€๋ฆฌ ์‹œ์Šคํ…œ ๋„์ž… ์ „๋žต์€?"
 
 
 
841
  ],
842
  inputs=user_input,
843
  label="๐Ÿ’ก ์˜ˆ์ œ ์งˆ๋ฌธ"
 
845
 
846
  # ์ด๋ฒคํŠธ ํ•ธ๋“ค๋Ÿฌ
847
  submit_btn.click(
848
+ fn=process_wuxing_query,
849
  inputs=[user_input, llm_mode],
850
+ outputs=[wood_output, fire_output, earth_output, metal_output, water_output, critic_output, status_text]
851
  ).then(
852
  fn=lambda: "",
853
  outputs=[user_input]
854
  )
855
 
856
  user_input.submit(
857
+ fn=process_wuxing_query,
858
  inputs=[user_input, llm_mode],
859
+ outputs=[wood_output, fire_output, earth_output, metal_output, water_output, critic_output, status_text]
860
  ).then(
861
  fn=lambda: "",
862
  outputs=[user_input]
863
  )
864
 
865
  clear_btn.click(
866
+ fn=clear_wuxing,
867
+ outputs=[wood_output, fire_output, earth_output, metal_output, water_output, critic_output, status_text]
868
  )
 
869
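The event wiring above relies on a standard Gradio pattern: the handler is a generator, each `yield` supplies one value per component listed in `outputs`, and `app.queue()` enables the incremental updates. A minimal self-contained illustration of the same pattern with two outputs instead of seven (names below are illustrative):

```python
import time
import gradio as gr

def stream_demo(question: str):
    # Yield a (answer_box, status_box) tuple on every step; Gradio streams each one.
    partial = ""
    for word in f"์‘๋‹ต: {question}".split():
        partial += word + " "
        yield partial, f"{len(partial)}์ž ์ƒ์„ฑ๋จ"
        time.sleep(0.1)

with gr.Blocks() as demo:
    q = gr.Textbox(label="์งˆ๋ฌธ")
    answer_box = gr.Textbox(label="์ŠคํŠธ๋ฆฌ๋ฐ ์ถœ๋ ฅ")
    status_box = gr.Textbox(label="์ƒํƒœ")
    q.submit(fn=stream_demo, inputs=q, outputs=[answer_box, status_box])

demo.queue().launch()
```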
 
870
  if __name__ == "__main__":
871
+ app.queue()
872
  app.launch(
873
  server_name="0.0.0.0",
874
  server_port=7860,
875
  share=True,
876
  show_error=True
877
+ )