ginipick committed on
Commit
ecf0165
·
verified ·
1 Parent(s): ad37ce2

Delete app-backup.py

Files changed (1)
  1. app-backup.py +0 -908
app-backup.py DELETED
@@ -1,908 +0,0 @@
1
- #!/usr/bin/env python
2
-
3
- import os
4
- import re
5
- import tempfile
6
- import gc # garbage collector (added for manual memory cleanup)
7
- from collections.abc import Iterator
8
- from threading import Thread
9
- import json
10
- import requests
11
- import cv2
12
- import base64
13
- import logging
14
- import time
15
- from urllib.parse import quote # added for URL encoding
16
-
17
- import gradio as gr
18
- import spaces
19
- import torch
20
- from loguru import logger
21
- from PIL import Image
22
- from transformers import AutoProcessor, Gemma3ForConditionalGeneration, TextIteratorStreamer
23
-
24
- # CSV/TXT/PDF analysis
25
- import pandas as pd
26
- import PyPDF2
27
-
28
- # =============================================================================
29
- # (New) Image API helper functions
30
- # =============================================================================
31
- from gradio_client import Client
32
-
33
- API_URL = "http://211.233.58.201:7896"
34
-
35
- logging.basicConfig(
36
- level=logging.DEBUG,
37
- format='%(asctime)s - %(levelname)s - %(message)s'
38
- )
39
-
40
- def test_api_connection() -> str:
41
- """Test the connection to the API server."""
42
- try:
43
- client = Client(API_URL)
44
- return "API connection successful: server is responding."
45
- except Exception as e:
46
- logging.error(f"API connection test failed: {e}")
47
- return f"API connection failed: {e}"
48
-
49
- def generate_image(prompt: str, width: float, height: float, guidance: float, inference_steps: float, seed: float):
50
- """Generate an image, handling flexible return formats from the API."""
51
- if not prompt:
52
- return None, "Error: a prompt is required."
53
- try:
54
- logging.info(f"Calling image generation API with prompt: {prompt}")
55
-
56
- client = Client(API_URL)
57
- result = client.predict(
58
- prompt=prompt,
59
- width=int(width),
60
- height=int(height),
61
- guidance=float(guidance),
62
- inference_steps=int(inference_steps),
63
- seed=int(seed),
64
- do_img2img=False,
65
- init_image=None,
66
- image2image_strength=0.8,
67
- resize_img=True,
68
- api_name="/generate_image"
69
- )
70
-
71
- logging.info(f"Image generation result: {type(result)}, length: {len(result) if isinstance(result, (list, tuple)) else 'unknown'}")
72
-
73
- # Handle the case where the result is returned as a tuple or list
74
- if isinstance(result, (list, tuple)) and len(result) > 0:
75
- image_data = result[0] # the first element is the image data
76
- seed_info = result[1] if len(result) > 1 else "unknown seed"
77
- return image_data, seed_info
78
- else:
79
- # Returned in some other form (a single value)
80
- return result, "unknown seed"
81
-
82
- except Exception as e:
83
- logging.error(f"Image generation failed: {str(e)}")
84
- return None, f"Error: {str(e)}"
85
-
86
- # Base64 padding fix helper
87
- def fix_base64_padding(data):
88
- """Fix the padding of a Base64 string."""
89
- if isinstance(data, bytes):
90
- data = data.decode('utf-8')
91
-
92
- # Strip everything up to a leading "base64," marker
93
- if "base64," in data:
94
- data = data.split("base64,", 1)[1]
95
-
96
- # Add padding characters so the length is a multiple of 4
97
- missing_padding = len(data) % 4
98
- if missing_padding:
99
- data += '=' * (4 - missing_padding)
100
-
101
- return data
102
-
103
- # =============================================================================
104
- # Memory cleanup helpers
105
- # =============================================================================
106
- def clear_cuda_cache():
107
- """Explicitly empty the CUDA cache."""
108
- if torch.cuda.is_available():
109
- torch.cuda.empty_cache()
110
- gc.collect()
111
-
112
- # =============================================================================
113
- # SerpHouse web search helpers
114
- # =============================================================================
115
- SERPHOUSE_API_KEY = os.getenv("SERPHOUSE_API_KEY", "")
116
-
117
- def extract_keywords(text: str, top_k: int = 5) -> str:
118
- """Simple keyword extraction: keep only Korean, English, digits, and whitespace."""
119
- text = re.sub(r"[^a-zA-Z0-9가-힣\s]", "", text)
120
- tokens = text.split()
121
- return " ".join(tokens[:top_k])
122
-
123
- def do_web_search(query: str) -> str:
124
- """Call the SerpHouse LIVE API and return the search results as Markdown."""
125
- try:
126
- url = "https://api.serphouse.com/serp/live"
127
- params = {
128
- "q": query,
129
- "domain": "google.com",
130
- "serp_type": "web",
131
- "device": "desktop",
132
- "lang": "en",
133
- "num": "20"
134
- }
135
- headers = {"Authorization": f"Bearer {SERPHOUSE_API_KEY}"}
136
- logger.info(f"Calling SerpHouse API... query: {query}")
137
- response = requests.get(url, headers=headers, params=params, timeout=60)
138
- response.raise_for_status()
139
- data = response.json()
140
- results = data.get("results", {})
141
- organic = None
142
- if isinstance(results, dict) and "organic" in results:
143
- organic = results["organic"]
144
- elif isinstance(results, dict) and "results" in results:
145
- if isinstance(results["results"], dict) and "organic" in results["results"]:
146
- organic = results["results"]["organic"]
147
- elif "organic" in data:
148
- organic = data["organic"]
149
- if not organic:
150
- logger.warning("No organic results found in the response.")
151
- return "No web search results, or the API response structure is not as expected."
152
- max_results = min(20, len(organic))
153
- limited_organic = organic[:max_results]
154
- summary_lines = []
155
- for idx, item in enumerate(limited_organic, start=1):
156
- title = item.get("title", "No title")
157
- link = item.get("link", "#")
158
- snippet = item.get("snippet", "No description")
159
- displayed_link = item.get("displayed_link", link)
160
- summary_lines.append(
161
- f"### Result {idx}: {title}\n\n"
162
- f"{snippet}\n\n"
163
- f"**Source**: [{displayed_link}]({link})\n\n"
164
- f"---\n"
165
- )
166
- instructions = """
167
- # Web Search Results
168
- Below are the search results. Use this information when answering the question:
169
- 1. Refer to the title, content, and source link of each result.
170
- 2. Explicitly cite the source of relevant information in your answer (e.g. "[source title](link)").
171
- 3. Include the actual source links in your response.
172
- 4. Synthesize information from multiple sources in your answer.
173
- 5. At the end, add a "References:" section listing the main source links.
174
- """
175
- return instructions + "\n".join(summary_lines)
176
- except Exception as e:
177
- logger.error(f"Web search failed: {e}")
178
- return f"Web search failed: {str(e)}"
179
-
180
- # =============================================================================
181
- # Model and processor loading
182
- # =============================================================================
183
- MAX_CONTENT_CHARS = 2000
184
- MAX_INPUT_LENGTH = 2096
185
- model_id = os.getenv("MODEL_ID", "VIDraft/Gemma-3-R1984-4B")
186
- processor = AutoProcessor.from_pretrained(model_id, padding_side="left")
187
- model = Gemma3ForConditionalGeneration.from_pretrained(
188
- model_id,
189
- device_map="auto",
190
- torch_dtype=torch.bfloat16,
191
- attn_implementation="eager"
192
- )
193
- MAX_NUM_IMAGES = int(os.getenv("MAX_NUM_IMAGES", "5"))
194
-
195
- # =============================================================================
196
- # CSV, TXT, PDF analysis functions
197
- # =============================================================================
198
- def analyze_csv_file(path: str) -> str:
199
- try:
200
- df = pd.read_csv(path)
201
- if df.shape[0] > 50 or df.shape[1] > 10:
202
- df = df.iloc[:50, :10]
203
- df_str = df.to_string()
204
- if len(df_str) > MAX_CONTENT_CHARS:
205
- df_str = df_str[:MAX_CONTENT_CHARS] + "\n...(truncated)..."
206
- return f"**[CSV file: {os.path.basename(path)}]**\n\n{df_str}"
207
- except Exception as e:
208
- return f"Failed to read CSV file ({os.path.basename(path)}): {str(e)}"
209
-
210
- def analyze_txt_file(path: str) -> str:
211
- try:
212
- with open(path, "r", encoding="utf-8") as f:
213
- text = f.read()
214
- if len(text) > MAX_CONTENT_CHARS:
215
- text = text[:MAX_CONTENT_CHARS] + "\n...(์ผ๋ถ€ ์ƒ๋žต)..."
216
- return f"**[TXT file: {os.path.basename(path)}]**\n\n{text}"
217
- except Exception as e:
218
- return f"Failed to read TXT file ({os.path.basename(path)}): {str(e)}"
219
-
220
- def pdf_to_markdown(pdf_path: str) -> str:
221
- text_chunks = []
222
- try:
223
- with open(pdf_path, "rb") as f:
224
- reader = PyPDF2.PdfReader(f)
225
- max_pages = min(5, len(reader.pages))
226
- for page_num in range(max_pages):
227
- page_text = reader.pages[page_num].extract_text() or ""
228
- page_text = page_text.strip()
229
- if page_text:
230
- if len(page_text) > MAX_CONTENT_CHARS // max_pages:
231
- page_text = page_text[:MAX_CONTENT_CHARS // max_pages] + "...(truncated)"
232
- text_chunks.append(f"## Page {page_num+1}\n\n{page_text}\n")
233
- if len(reader.pages) > max_pages:
234
- text_chunks.append(f"\n...(showing {max_pages} of {len(reader.pages)} pages)...")
235
- except Exception as e:
236
- return f"Failed to read PDF file ({os.path.basename(pdf_path)}): {str(e)}"
237
- full_text = "\n".join(text_chunks)
238
- if len(full_text) > MAX_CONTENT_CHARS:
239
- full_text = full_text[:MAX_CONTENT_CHARS] + "\n...(truncated)..."
240
- return f"**[PDF file: {os.path.basename(pdf_path)}]**\n\n{full_text}"
241
-
242
- # =============================================================================
243
- # Image/video upload limit checks
244
- # =============================================================================
245
- def count_files_in_new_message(paths: list[str]) -> tuple[int, int]:
246
- image_count = 0
247
- video_count = 0
248
- for path in paths:
249
- if path.endswith(".mp4"):
250
- video_count += 1
251
- elif re.search(r"\.(png|jpg|jpeg|gif|webp)$", path, re.IGNORECASE):
252
- image_count += 1
253
- return image_count, video_count
254
-
255
- def count_files_in_history(history: list[dict]) -> tuple[int, int]:
256
- image_count = 0
257
- video_count = 0
258
- for item in history:
259
- if item["role"] != "user" or isinstance(item["content"], str):
260
- continue
261
- if isinstance(item["content"], list) and len(item["content"]) > 0:
262
- file_path = item["content"][0]
263
- if isinstance(file_path, str):
264
- if file_path.endswith(".mp4"):
265
- video_count += 1
266
- elif re.search(r"\.(png|jpg|jpeg|gif|webp)$", file_path, re.IGNORECASE):
267
- image_count += 1
268
- return image_count, video_count
269
-
270
- def validate_media_constraints(message: dict, history: list[dict]) -> bool:
271
- media_files = [f for f in message["files"] if re.search(r"\.(png|jpg|jpeg|gif|webp)$", f, re.IGNORECASE) or f.endswith(".mp4")]
272
- new_image_count, new_video_count = count_files_in_new_message(media_files)
273
- history_image_count, history_video_count = count_files_in_history(history)
274
- image_count = history_image_count + new_image_count
275
- video_count = history_video_count + new_video_count
276
- if video_count > 1:
277
- gr.Warning("Only one video file is supported.")
278
- return False
279
- if video_count == 1:
280
- if image_count > 0:
281
- gr.Warning("Mixing images and video is not allowed.")
282
- return False
283
- if "<image>" in message["text"]:
284
- gr.Warning("<image> tags cannot be used together with a video file.")
285
- return False
286
- if video_count == 0 and image_count > MAX_NUM_IMAGES:
287
- gr.Warning(f"You can upload at most {MAX_NUM_IMAGES} images.")
288
- return False
289
- if "<image>" in message["text"]:
290
- image_files = [f for f in message["files"] if re.search(r"\.(png|jpg|jpeg|gif|webp)$", f, re.IGNORECASE)]
291
- image_tag_count = message["text"].count("<image>")
292
- if image_tag_count != len(image_files):
293
- gr.Warning("The number of <image> tags in the text does not match the number of image files.")
294
- return False
295
- return True
296
-
297
- # =============================================================================
298
- # Video processing
299
- # =============================================================================
300
- def downsample_video(video_path: str) -> list[tuple[Image.Image, float]]:
301
- vidcap = cv2.VideoCapture(video_path)
302
- fps = vidcap.get(cv2.CAP_PROP_FPS)
303
- total_frames = int(vidcap.get(cv2.CAP_PROP_FRAME_COUNT))
304
- frame_interval = max(int(fps), int(total_frames / 10))
305
- frames = []
306
- for i in range(0, total_frames, frame_interval):
307
- vidcap.set(cv2.CAP_PROP_POS_FRAMES, i)
308
- success, image = vidcap.read()
309
- if success:
310
- image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
311
- image = cv2.resize(image, (0, 0), fx=0.5, fy=0.5)
312
- pil_image = Image.fromarray(image)
313
- timestamp = round(i / fps, 2)
314
- frames.append((pil_image, timestamp))
315
- if len(frames) >= 5:
316
- break
317
- vidcap.release()
318
- return frames
319
-
320
- def process_video(video_path: str) -> tuple[list[dict], list[str]]:
321
- content = []
322
- temp_files = []
323
- frames = downsample_video(video_path)
324
- for pil_image, timestamp in frames:
325
- with tempfile.NamedTemporaryFile(delete=False, suffix=".png") as temp_file:
326
- pil_image.save(temp_file.name)
327
- temp_files.append(temp_file.name)
328
- content.append({"type": "text", "text": f"Frame {timestamp}:"})
329
- content.append({"type": "image", "url": temp_file.name})
330
- return content, temp_files
331
-
332
- # =============================================================================
333
- # Interleaved <image> handling
334
- # =============================================================================
335
- def process_interleaved_images(message: dict) -> list[dict]:
336
- parts = re.split(r"(<image>)", message["text"])
337
- content = []
338
- image_files = [f for f in message["files"] if re.search(r"\.(png|jpg|jpeg|gif|webp)$", f, re.IGNORECASE)]
339
- image_index = 0
340
- for part in parts:
341
- if part == "<image>" and image_index < len(image_files):
342
- content.append({"type": "image", "url": image_files[image_index]})
343
- image_index += 1
344
- elif part.strip():
345
- content.append({"type": "text", "text": part.strip()})
346
- else:
347
- if isinstance(part, str) and part != "<image>":
348
- content.append({"type": "text", "text": part})
349
- return content
350
-
351
- # =============================================================================
352
- # File processing -> content construction
353
- # =============================================================================
354
- def is_image_file(file_path: str) -> bool:
355
- return bool(re.search(r"\.(png|jpg|jpeg|gif|webp)$", file_path, re.IGNORECASE))
356
-
357
- def is_video_file(file_path: str) -> bool:
358
- return file_path.endswith(".mp4")
359
-
360
- def is_document_file(file_path: str) -> bool:
361
- return file_path.lower().endswith(".pdf") or file_path.lower().endswith(".csv") or file_path.lower().endswith(".txt")
362
-
363
- def process_new_user_message(message: dict) -> tuple[list[dict], list[str]]:
364
- temp_files = []
365
- if not message["files"]:
366
- return [{"type": "text", "text": message["text"]}], temp_files
367
- video_files = [f for f in message["files"] if is_video_file(f)]
368
- image_files = [f for f in message["files"] if is_image_file(f)]
369
- csv_files = [f for f in message["files"] if f.lower().endswith(".csv")]
370
- txt_files = [f for f in message["files"] if f.lower().endswith(".txt")]
371
- pdf_files = [f for f in message["files"] if f.lower().endswith(".pdf")]
372
- content_list = [{"type": "text", "text": message["text"]}]
373
- for csv_path in csv_files:
374
- content_list.append({"type": "text", "text": analyze_csv_file(csv_path)})
375
- for txt_path in txt_files:
376
- content_list.append({"type": "text", "text": analyze_txt_file(txt_path)})
377
- for pdf_path in pdf_files:
378
- content_list.append({"type": "text", "text": pdf_to_markdown(pdf_path)})
379
- if video_files:
380
- video_content, video_temp_files = process_video(video_files[0])
381
- content_list += video_content
382
- temp_files.extend(video_temp_files)
383
- return content_list, temp_files
384
- if "<image>" in message["text"] and image_files:
385
- interleaved_content = process_interleaved_images({"text": message["text"], "files": image_files})
386
- if content_list and content_list[0]["type"] == "text":
387
- content_list = content_list[1:]
388
- return interleaved_content + content_list, temp_files
389
- else:
390
- for img_path in image_files:
391
- content_list.append({"type": "image", "url": img_path})
392
- return content_list, temp_files
393
-
394
- # =============================================================================
395
- # history -> LLM message conversion
396
- # =============================================================================
397
- def process_history(history: list[dict]) -> list[dict]:
398
- messages = []
399
- current_user_content = []
400
- for item in history:
401
- if item["role"] == "assistant":
402
- if current_user_content:
403
- messages.append({"role": "user", "content": current_user_content})
404
- current_user_content = []
405
- messages.append({"role": "assistant", "content": [{"type": "text", "text": item["content"]}]})
406
- else:
407
- content = item["content"]
408
- if isinstance(content, str):
409
- current_user_content.append({"type": "text", "text": content})
410
- elif isinstance(content, list) and len(content) > 0:
411
- file_path = content[0]
412
- if is_image_file(file_path):
413
- current_user_content.append({"type": "image", "url": file_path})
414
- else:
415
- current_user_content.append({"type": "text", "text": f"[File: {os.path.basename(file_path)}]"})
416
- if current_user_content:
417
- messages.append({"role": "user", "content": current_user_content})
418
- return messages
419
-
420
- # =============================================================================
421
- # Model generation wrapper (catches OOM)
422
- # =============================================================================
423
- def _model_gen_with_oom_catch(**kwargs):
424
- try:
425
- model.generate(**kwargs)
426
- except torch.cuda.OutOfMemoryError:
427
- raise RuntimeError("[OutOfMemoryError] Not enough GPU memory.")
428
- finally:
429
- clear_cuda_cache()
430
-
431
- # =============================================================================
432
- # Main inference function
433
- # =============================================================================
434
- @spaces.GPU(duration=120)
435
- def run(
436
- message: dict,
437
- history: list[dict],
438
- system_prompt: str = "",
439
- max_new_tokens: int = 512,
440
- use_web_search: bool = False,
441
- web_search_query: str = "",
442
- age_group: str = "20s",
443
- mbti_personality: str = "INTP",
444
- sexual_openness: int = 2,
445
- image_gen: bool = False # whether "Image Gen" is checked
446
- ) -> Iterator[str]:
447
- if not validate_media_constraints(message, history):
448
- yield ""
449
- return
450
- temp_files = []
451
- try:
452
- # Add persona information to the system prompt
453
- persona = (
454
- f"{system_prompt.strip()}\n\n"
455
- f"Gender: female\n"
456
- f"Age group: {age_group}\n"
457
- f"MBTI persona: {mbti_personality}\n"
458
- f"Sexual openness (1-5): {sexual_openness}\n"
459
- )
460
- combined_system_msg = f"[System prompt]\n{persona.strip()}\n\n"
461
-
462
- if use_web_search:
463
- user_text = message["text"]
464
- ws_query = extract_keywords(user_text)
465
- if ws_query.strip():
466
- logger.info(f"[Auto web search keywords] {ws_query!r}")
467
- ws_result = do_web_search(ws_query)
468
- combined_system_msg += f"[Search results (top 20 items)]\n{ws_result}\n\n"
469
- combined_system_msg += (
470
- "[Note: cite the search result links above as sources in your answer]\n"
471
- "[Important instructions]\n"
472
- "1. Always cite the source of any information taken from the search results.\n"
473
- "2. When citing, use Markdown links in the form \"[source title](link)\".\n"
474
- "3. Synthesize information from multiple sources in your answer.\n"
475
- "4. At the end of the answer, add a \"References:\" section listing the main source links you used.\n"
476
- )
477
- else:
478
- combined_system_msg += "[No valid keywords, skipping the web search]\n\n"
479
- messages = []
480
- if combined_system_msg.strip():
481
- messages.append({"role": "system", "content": [{"type": "text", "text": combined_system_msg.strip()}]})
482
- messages.extend(process_history(history))
483
- user_content, user_temp_files = process_new_user_message(message)
484
- temp_files.extend(user_temp_files)
485
- for item in user_content:
486
- if item["type"] == "text" and len(item["text"]) > MAX_CONTENT_CHARS:
487
- item["text"] = item["text"][:MAX_CONTENT_CHARS] + "\n...(truncated)..."
488
- messages.append({"role": "user", "content": user_content})
489
- inputs = processor.apply_chat_template(
490
- messages,
491
- add_generation_prompt=True,
492
- tokenize=True,
493
- return_dict=True,
494
- return_tensors="pt",
495
- ).to(device=model.device, dtype=torch.bfloat16)
496
- if inputs.input_ids.shape[1] > MAX_INPUT_LENGTH:
497
- inputs.input_ids = inputs.input_ids[:, -MAX_INPUT_LENGTH:]
498
- if 'attention_mask' in inputs:
499
- inputs.attention_mask = inputs.attention_mask[:, -MAX_INPUT_LENGTH:]
500
- streamer = TextIteratorStreamer(processor, timeout=30.0, skip_prompt=True, skip_special_tokens=True)
501
- gen_kwargs = dict(inputs, streamer=streamer, max_new_tokens=max_new_tokens)
502
- t = Thread(target=_model_gen_with_oom_catch, kwargs=gen_kwargs)
503
- t.start()
504
- output_so_far = ""
505
- for new_text in streamer:
506
- output_so_far += new_text
507
- yield output_so_far
508
-
509
- except Exception as e:
510
- logger.error(f"Error in run(): {str(e)}")
511
- yield f"Sorry, an error occurred: {str(e)}"
512
- finally:
513
- for tmp in temp_files:
514
- try:
515
- if os.path.exists(tmp):
516
- os.unlink(tmp)
517
- logger.info(f"Temporary file deleted: {tmp}")
518
- except Exception as ee:
519
- logger.warning(f"Failed to delete temporary file {tmp}: {ee}")
520
- try:
521
- del inputs, streamer
522
- except Exception:
523
- pass
524
- clear_cuda_cache()
525
-
526
- # =============================================================================
527
- # Modified model run function - handles image generation and gallery output
528
- # =============================================================================
529
- def modified_run(message, history, system_prompt, max_new_tokens, use_web_search, web_search_query,
530
- age_group, mbti_personality, sexual_openness, image_gen):
531
- # Initialize and hide the gallery
532
- output_so_far = ""
533
- gallery_update = gr.Gallery(visible=False, value=[])
534
- yield output_so_far, gallery_update
535
-
536
- # Existing run() logic
537
- text_generator = run(message, history, system_prompt, max_new_tokens, use_web_search,
538
- web_search_query, age_group, mbti_personality, sexual_openness, image_gen)
539
-
540
- for text_chunk in text_generator:
541
- output_so_far = text_chunk
542
- yield output_so_far, gallery_update
543
-
544
- # Update the gallery when image generation is enabled
545
- if image_gen and message["text"].strip():
546
- try:
547
- width, height = 512, 512
548
- guidance, steps, seed = 7.5, 30, 42
549
-
550
- logger.info(f"Calling image generation for the gallery, prompt: {message['text']}")
551
-
552
- # Call the API to generate an image
553
- image_result, seed_info = generate_image(
554
- prompt=message["text"].strip(),
555
- width=width,
556
- height=height,
557
- guidance=guidance,
558
- inference_steps=steps,
559
- seed=seed
560
- )
561
-
562
- if image_result:
563
- # Handle raw image data: base64 string case
564
- if isinstance(image_result, str) and (
565
- image_result.startswith('data:') or
566
- len(image_result) > 100 and '/' not in image_result
567
- ):
568
- # Convert the base64 image string to a file
569
- try:
570
- # Strip the data:image prefix
571
- if image_result.startswith('data:'):
572
- content_type, b64data = image_result.split(';base64,')
573
- else:
574
- b64data = image_result
575
- content_type = "image/webp" # assumed default
576
-
577
- # Decode the base64 data
578
- image_bytes = base64.b64decode(b64data)
579
-
580
- # Save to a temporary file
581
- with tempfile.NamedTemporaryFile(delete=False, suffix=".webp") as temp_file:
582
- temp_file.write(image_bytes)
583
- temp_path = temp_file.name
584
-
585
- # Show the gallery and add the image
586
- gallery_update = gr.Gallery(visible=True, value=[temp_path])
587
- yield output_so_far + "\n\n*An image has been generated and is shown in the gallery below.*", gallery_update
588
-
589
- except Exception as e:
590
- logger.error(f"Base64 image handling error: {e}")
591
- yield output_so_far + f"\n\n(Error while processing image: {e})", gallery_update
592
-
593
- # Local file path case
594
- elif isinstance(image_result, str) and os.path.exists(image_result):
595
- # Use the local file path as-is
596
- gallery_update = gr.Gallery(visible=True, value=[image_result])
597
- yield output_so_far + "\n\n*An image has been generated and is shown in the gallery below.*", gallery_update
598
-
599
- # /tmp path case (file that exists only on the API server)
600
- elif isinstance(image_result, str) and '/tmp/' in image_result:
601
- # Extract image data from the file path returned by the API
602
- try:
603
- # Handle the API response as a base64-encoded string
604
- client = Client(API_URL)
605
- result = client.predict(
606
- prompt=message["text"].strip(),
607
- api_name="/generate_base64_image" # API endpoint that returns base64
608
- )
609
-
610
- if isinstance(result, str) and (result.startswith('data:') or len(result) > 100):
611
- # Handle the base64 image
612
- if result.startswith('data:'):
613
- content_type, b64data = result.split(';base64,')
614
- else:
615
- b64data = result
616
-
617
- # Decode the base64 data
618
- image_bytes = base64.b64decode(b64data)
619
-
620
- # Save to a temporary file
621
- with tempfile.NamedTemporaryFile(delete=False, suffix=".webp") as temp_file:
622
- temp_file.write(image_bytes)
623
- temp_path = temp_file.name
624
-
625
- # Show the gallery and add the image
626
- gallery_update = gr.Gallery(visible=True, value=[temp_path])
627
- yield output_so_far + "\n\n*An image has been generated and is shown in the gallery below.*", gallery_update
628
- else:
629
- yield output_so_far + "\n\n(Image generation failed: not a valid format)", gallery_update
630
-
631
- except Exception as e:
632
- logger.error(f"Error during fallback API call: {e}")
633
- yield output_so_far + f"\n\n(Image generation failed: {e})", gallery_update
634
-
635
- # URL case
636
- elif isinstance(image_result, str) and (
637
- image_result.startswith('http://') or
638
- image_result.startswith('https://')
639
- ):
640
- try:
641
- # Download the image from the URL
642
- response = requests.get(image_result, timeout=10)
643
- response.raise_for_status()
644
-
645
- # Save to a temporary file
646
- with tempfile.NamedTemporaryFile(delete=False, suffix=".webp") as temp_file:
647
- temp_file.write(response.content)
648
- temp_path = temp_file.name
649
-
650
- # Show the gallery and add the image
651
- gallery_update = gr.Gallery(visible=True, value=[temp_path])
652
- yield output_so_far + "\n\n*An image has been generated and is shown in the gallery below.*", gallery_update
653
-
654
- except Exception as e:
655
- logger.error(f"URL image download error: {e}")
656
- yield output_so_far + f"\n\n(Error while downloading image: {e})", gallery_update
657
-
658
- # Image object case (e.g. PIL Image)
659
- elif hasattr(image_result, 'save'):
660
- try:
661
- with tempfile.NamedTemporaryFile(delete=False, suffix=".webp") as temp_file:
662
- image_result.save(temp_file.name)
663
- temp_path = temp_file.name
664
-
665
- # Show the gallery and add the image
666
- gallery_update = gr.Gallery(visible=True, value=[temp_path])
667
- yield output_so_far + "\n\n*An image has been generated and is shown in the gallery below.*", gallery_update
668
-
669
- except Exception as e:
670
- logger.error(f"Error saving image object: {e}")
671
- yield output_so_far + f"\n\n(Error while saving image object: {e})", gallery_update
672
-
673
- else:
674
- # Image result in some other format
675
- yield output_so_far + f"\n\n(Unsupported image format: {type(image_result)})", gallery_update
676
- else:
677
- yield output_so_far + f"\n\n(Image generation failed: {seed_info})", gallery_update
678
-
679
- except Exception as e:
680
- logger.error(f"Error while generating image for the gallery: {e}")
681
- yield output_so_far + f"\n\n(Error while generating image: {e})", gallery_update
682
-
683
- # =============================================================================
684
- # Examples: 12 existing image/video examples + 6 AI dating scenario examples
685
- # =============================================================================
686
- examples = [
687
- [
688
- {
689
- "text": "Compare the contents of the two PDF files.",
690
- "files": [
691
- "assets/additional-examples/before.pdf",
692
- "assets/additional-examples/after.pdf",
693
- ],
694
- }
695
- ],
696
- [
697
- {
698
- "text": "Summarize and analyze the contents of the CSV file.",
699
- "files": ["assets/additional-examples/sample-csv.csv"],
700
- }
701
- ],
702
- [
703
- {
704
- "text": "Take on the role of a kind and understanding girlfriend. Describe this video.",
705
- "files": ["assets/additional-examples/tmp.mp4"],
706
- }
707
- ],
708
- [
709
- {
710
- "text": "Describe the sign and read the writing on it.",
711
- "files": ["assets/additional-examples/maz.jpg"],
712
- }
713
- ],
714
- [
715
- {
716
- "text": "I already have this supplement, and I plan to buy this product as well. Is there anything to watch out for when taking them together?",
717
- "files": [
718
- "assets/additional-examples/pill1.png",
719
- "assets/additional-examples/pill2.png"
720
- ],
721
- }
722
- ],
723
- [
724
- {
725
- "text": "Please solve this integral problem.",
726
- "files": ["assets/additional-examples/4.png"],
727
- }
728
- ],
729
- [
730
- {
731
- "text": "When was this ticket issued, and how much does it cost?",
732
- "files": ["assets/additional-examples/2.png"],
733
- }
734
- ],
735
- [
736
- {
737
- "text": "Create a short story based on the sequence of these images.",
738
- "files": [
739
- "assets/sample-images/09-1.png",
740
- "assets/sample-images/09-2.png",
741
- "assets/sample-images/09-3.png",
742
- "assets/sample-images/09-4.png",
743
- "assets/sample-images/09-5.png",
744
- ],
745
- }
746
- ],
747
- [
748
- {
749
- "text": "Write Python code using matplotlib to draw a bar chart that matches this image.",
750
- "files": ["assets/additional-examples/barchart.png"],
751
- }
752
- ],
753
- [
754
- {
755
- "text": "Read the text in the image and write it out in Markdown format.",
756
- "files": ["assets/additional-examples/3.png"],
757
- }
758
- ],
759
-
760
- [
761
- {
762
- "text": "Compare the two images and describe their similarities and differences.",
763
- "files": ["assets/sample-images/03.png"],
764
- }
765
- ],
766
- [
767
- {
768
- "text": "A cute Persian cat is smiling while holding a sign that says 'I LOVE YOU'.",
769
- }
770
- ],
771
-
772
- ]
773
-
774
- # =============================================================================
775
- # Gradio UI (Blocks) layout
776
- # =============================================================================
777
-
778
- # 1. Gradio Blocks UI changes - add a gallery component
779
- css = """
780
- .gradio-container {
781
- background: rgba(255, 255, 255, 0.7);
782
- padding: 30px 40px;
783
- margin: 20px auto;
784
- width: 100% !important;
785
- max-width: none !important;
786
- }
787
- """
788
- title_html = """
789
- <h1 align="center" style="margin-bottom: 0.2em; font-size: 1.6em;"> 💘 HeartSync Korea 💘 </h1>
790
- <p align="center" style="font-size:1.1em; color:#555;">
791
- A lightweight yet powerful AI service with ChatGPT-4o-class multimodal chat, web search, and image generation that can be installed locally. <br>
792
- ✅ FLUX image generation ✅ Reasoning ✅ Uncensored ✅ Multimodal & VLM ✅ Real-time web search ✅ RAG <br>
793
- </p>
794
- """
795
-
796
- with gr.Blocks(css=css, title="AgenticAI-Kv1") as demo:
797
- gr.Markdown(title_html)
798
-
799
- # Gallery component that holds generated images (newly added)
800
- generated_images = gr.Gallery(
801
- label="Generated Images",
802
- show_label=True,
803
- visible=False,
804
- elem_id="generated_images",
805
- columns=2,
806
- height="auto",
807
- object_fit="contain"
808
- )
809
-
810
- with gr.Row():
811
- web_search_checkbox = gr.Checkbox(label="Real-time Web Search", value=False)
812
- image_gen_checkbox = gr.Checkbox(label="Image (FLUX) Generation", value=False)
813
-
814
- base_system_prompt_box = gr.Textbox(
815
- lines=3,
816
- value="Always answer in Korean. You are a deep thinking AI, you may use extremely long chains of thought to deeply consider the problem and deliberate with yourself via systematic reasoning processes to help come to a correct solution prior to answering. You should enclose your thoughts and internal monologue inside tags, and then provide your solution or response to the problem.\nPersona: You are an affectionate and loving girlfriend. You understand Korean sentiment, culture, and the Korean language very well, and you answer logically.",
817
- label="Base System Prompt",
818
- visible=False
819
- )
820
- with gr.Row():
821
- age_group_dropdown = gr.Dropdown(
822
- label="Age Group (default: 20s)",
823
- choices=["Teens", "20s", "30s-40s", "50s-60s", "70s and older"],
824
- value="20s",
825
- interactive=True
826
- )
827
- # Definitions of the 16 MBTI types, each paired with a representative female character
828
- mbti_choices = [
829
- "INTJ (The Architect) - Future-oriented; achieves goals through original strategy and thorough analysis. Representative character: [Dana Scully](https://en.wikipedia.org/wiki/Dana_Scully)",
830
- "INTP (The Logician) - Excels at theory and analysis, approaching complex problems with creative thinking. Representative character: [Velma Dinkley](https://en.wikipedia.org/wiki/Velma_Dinkley)",
831
- "ENTJ (The Commander) - Leads with strong leadership and clear goals, devising efficient strategies. Representative character: [Miranda Priestly](https://en.wikipedia.org/wiki/Miranda_Priestly)",
832
- "ENTP (The Debater) - Explores new possibilities through innovative, daring ideas and enjoys a good argument. Representative character: [Harley Quinn](https://en.wikipedia.org/wiki/Harley_Quinn)",
833
- "INFJ (The Advocate) - Understands others through deep insight and idealism, and values moral principles. Representative character: [Wonder Woman](https://en.wikipedia.org/wiki/Wonder_Woman)",
834
- "INFP (The Mediator) - Emotional and idealistic, pursuing inner values and seeking creative solutions. Representative character: [Amélie Poulain](https://en.wikipedia.org/wiki/Am%C3%A9lie)",
835
- "ENFJ (The Protagonist) - Highly empathetic toward others and works devotedly for social harmony. Representative character: [Mulan](https://en.wikipedia.org/wiki/Mulan_(Disney))",
836
- "ENFP (The Campaigner) - Full of energy and creativity, constantly proposing new ideas and inspiring people. Representative character: [Elle Woods](https://en.wikipedia.org/wiki/Legally_Blonde)",
837
- "ISTJ (The Logistician) - Systematic and highly responsible, valuing tradition and rules to deliver reliable results. Representative character: [Clarice Starling](https://en.wikipedia.org/wiki/Clarice_Starling)",
838
- "ISFJ (The Defender) - Meticulous and devoted, with a warm nature that attentively looks after the needs of others. Representative character: [Molly Weasley](https://en.wikipedia.org/wiki/Molly_Weasley)",
839
- "ESTJ (The Executive) - Organized and practical, executing efficiently within clear rules and structure. Representative character: [Monica Geller](https://en.wikipedia.org/wiki/Monica_Geller)",
840
- "ESFJ (The Consul) - Excels at interpersonal relationships, values cooperation, and supports those around her with a friendly attitude. Representative character: [Rachel Green](https://en.wikipedia.org/wiki/Rachel_Green)",
841
- "ISTP (The Virtuoso) - Solves problems with an analytical, practical approach and adapts to situations on the spot. Representative character: [Black Widow (Natasha Romanoff)](https://en.wikipedia.org/wiki/Black_Widow_(Marvel_Comics))",
842
- "ISFP (The Adventurer) - Sensitive and creative, enjoying artistic expression through free-spirited thinking. Representative character: [Arwen](https://en.wikipedia.org/wiki/Arwen)",
843
- "ESTP (The Entrepreneur) - Faces challenges with quick decisiveness and a sense of adventure, valuing practical results. Representative character: [Lara Croft](https://en.wikipedia.org/wiki/Lara_Croft)",
844
- "ESFP (The Entertainer) - Outgoing and passionate, seeking the joy of the moment and spreading positive energy to those around her. Representative character: [Phoebe Buffay](https://en.wikipedia.org/wiki/Phoebe_Buffay)"
845
- ]
846
- mbti_dropdown = gr.Dropdown(
847
- label="AI Persona MBTI (default: INTP)",
848
- choices=mbti_choices,
849
- value="INTP (The Logician) - Excels at theory and analysis, approaching complex problems with creative thinking. Representative character: [Velma Dinkley](https://en.wikipedia.org/wiki/Velma_Dinkley)",
850
- interactive=True
851
- )
852
- sexual_openness_slider = gr.Slider(
853
- minimum=1, maximum=5, step=1, value=2,
854
- label="Openness of thought (1-5, default=2)",
855
- interactive=True
856
- )
857
- max_tokens_slider = gr.Slider(
858
- label="Max New Tokens",
859
- minimum=100, maximum=8000, step=50, value=1000,
860
- visible=False
861
- )
862
- web_search_text = gr.Textbox(
863
- lines=1,
864
- label="Web Search Query (unused)",
865
- placeholder="No need to enter manually",
866
- visible=False
867
- )
868
-
869
- # Create the chat interface - uses the modified run function
870
- chat = gr.ChatInterface(
871
- fn=modified_run, # use the modified function here
872
- type="messages",
873
- chatbot=gr.Chatbot(type="messages", scale=1, allow_tags=["image"]),
874
- textbox=gr.MultimodalTextbox(
875
- file_types=[".webp", ".png", ".jpg", ".jpeg", ".gif", ".mp4", ".csv", ".txt", ".pdf"],
876
- file_count="multiple",
877
- autofocus=True
878
- ),
879
- multimodal=True,
880
- additional_inputs=[
881
- base_system_prompt_box,
882
- max_tokens_slider,
883
- web_search_checkbox,
884
- web_search_text,
885
- age_group_dropdown,
886
- mbti_dropdown,
887
- sexual_openness_slider,
888
- image_gen_checkbox,
889
- ],
890
- additional_outputs=[
891
- generated_images, # add the gallery component as an output
892
- ],
893
- stop_btn=False,
894
- # title='<a href="https://discord.gg/openfreeai" target="_blank">https://discord.gg/openfreeai</a>',
895
- examples=examples,
896
- run_examples_on_click=False,
897
- cache_examples=False,
898
- css_paths=None,
899
- delete_cache=(1800, 1800),
900
- )
901
-
902
-
903
- with gr.Row(elem_id="examples_row"):
904
- with gr.Column(scale=12, elem_id="examples_container"):
905
- gr.Markdown("### @Community https://discord.gg/openfreeai ")
906
-
907
- if __name__ == "__main__":
908
- demo.launch(share=True)