openfree committed
Commit 61279f3 · verified · 1 Parent(s): 6ed5f50

Update app.py

Files changed (1)
  1. app.py +264 -172
app.py CHANGED
@@ -1,9 +1,10 @@
1
  import gradio as gr
2
  import os
3
- from typing import List, Dict, Any, Optional
4
  import hashlib
5
  from datetime import datetime
6
  import numpy as np
 
7
 
8
  # PDF processing libraries
9
  try:
@@ -20,64 +21,89 @@ except ImportError:
20
  ST_AVAILABLE = False
21
  print("โš ๏ธ Sentence Transformers not installed. Install with: pip install sentence-transformers")
22
 
23
- # Custom CSS for gradient background and styling
24
  custom_css = """
25
  .gradio-container {
26
- background: linear-gradient(135deg, #667eea 0%, #764ba2 25%, #f093fb 50%, #4facfe 75%, #00f2fe 100%);
27
- background-size: 400% 400%;
28
- animation: gradient-animation 15s ease infinite;
29
  min-height: 100vh;
 
30
  }
31
- @keyframes gradient-animation {
32
- 0% { background-position: 0% 50%; }
33
- 50% { background-position: 100% 50%; }
34
- 100% { background-position: 0% 50%; }
35
- }
36
- .dark .gradio-container {
37
- background: linear-gradient(135deg, #1a1a2e 0%, #16213e 25%, #0f3460 50%, #533483 75%, #e94560 100%);
38
- background-size: 400% 400%;
39
- animation: gradient-animation 15s ease infinite;
40
- }
41
  .main-container {
42
- background-color: rgba(255, 255, 255, 0.95);
43
- backdrop-filter: blur(10px);
44
- border-radius: 20px;
45
- padding: 20px;
46
- box-shadow: 0 8px 32px 0 rgba(31, 38, 135, 0.37);
47
- border: 1px solid rgba(255, 255, 255, 0.18);
48
- margin: 10px;
49
  }
50
- .dark .main-container {
51
- background-color: rgba(30, 30, 30, 0.95);
52
- border: 1px solid rgba(255, 255, 255, 0.1);
 
53
  }
 
 
54
  .pdf-status {
55
- padding: 10px;
56
- border-radius: 10px;
57
- margin: 10px 0;
58
- font-size: 0.9em;
 
 
59
  }
 
60
  .pdf-success {
61
- background-color: rgba(52, 211, 153, 0.2);
62
- border: 1px solid rgba(52, 211, 153, 0.5);
63
- color: #10b981;
64
  }
 
65
  .pdf-error {
66
- background-color: rgba(248, 113, 113, 0.2);
67
- border: 1px solid rgba(248, 113, 113, 0.5);
68
- color: #ef4444;
69
  }
 
70
  .pdf-info {
71
- background-color: rgba(59, 130, 246, 0.2);
72
- border: 1px solid rgba(59, 130, 246, 0.5);
73
- color: #3b82f6;
 
 
 
 
 
 
74
  }
 
 
75
  .rag-context {
76
- background-color: rgba(251, 191, 36, 0.1);
77
  border-left: 4px solid #f59e0b;
78
- padding: 10px;
79
- margin: 10px 0;
80
- border-radius: 5px;
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
81
  }
82
  """
83
 
@@ -230,12 +256,12 @@ class PDFRAGSystem:
230
  for doc_id in doc_ids:
231
  if doc_id in self.document_chunks:
232
  chunks = self.document_chunks[doc_id]
233
- for chunk in chunks[:top_k]: # use only the first few chunks
234
  chunk_lower = chunk.lower()
235
  score = sum(1 for keyword in query_keywords if keyword in chunk_lower)
236
  if score > 0:
237
  all_relevant_chunks.append({
238
- "content": chunk[:500], # ๊ธธ์ด ์ œํ•œ
239
  "doc_name": self.documents[doc_id]["metadata"]["file_name"],
240
  "similarity": score / len(query_keywords) if query_keywords else 0
241
  })
@@ -253,7 +279,7 @@ class PDFRAGSystem:
253
 
254
  # Build the prompt
255
  prompt_parts = []
256
- prompt_parts.append("๋‹ค์Œ ๋ฌธ์„œ ๋‚ด์šฉ์„ ์ฐธ๊ณ ํ•˜์—ฌ ์งˆ๋ฌธ์— ๋‹ต๋ณ€ํ•ด์ฃผ์„ธ์š”:\n")
257
  prompt_parts.append("=" * 50)
258
 
259
  for i, chunk in enumerate(relevant_chunks, 1):
@@ -264,21 +290,22 @@ class PDFRAGSystem:
264
 
265
  prompt_parts.append("=" * 50)
266
  prompt_parts.append(f"\nQuestion: {query}")
267
- prompt_parts.append("\n์œ„ ์ฐธ๊ณ ๋ฌธ์„œ๋ฅผ ๋ฐ”ํƒ•์œผ๋กœ ์ž์„ธํ•˜๊ณ  ์ •ํ™•ํ•˜๊ฒŒ ๋‹ต๋ณ€ํ•ด์ฃผ์„ธ์š”:")
268
 
269
  return "\n".join(prompt_parts)
270
 
271
  # Create the RAG system instance
272
  rag_system = PDFRAGSystem()
273
 
274
- # State variable to track current model
275
  current_model = gr.State("openai/gpt-oss-120b")
 
276
 
277
  def upload_pdf(file):
278
  """PDF ํŒŒ์ผ ์—…๋กœ๋“œ ์ฒ˜๋ฆฌ"""
279
  if file is None:
280
  return (
281
- gr.update(value="<div class='pdf-status pdf-error'>Please select a file</div>"),
282
  gr.update(choices=[]),
283
  gr.update(value=False)
284
  )
@@ -296,7 +323,7 @@ def upload_pdf(file):
296
  if result["success"]:
297
  status_html = f"""
298
  <div class="pdf-status pdf-success">
299
- โœ… PDF ์—…๋กœ๋“œ ์„ฑ๊ณต!<br>
300
  ๐Ÿ“„ ํŒŒ์ผ: {result['title']}<br>
301
  ๐Ÿ“‘ ํŽ˜์ด์ง€: {result['pages']}ํŽ˜์ด์ง€<br>
302
  ๐Ÿ” ์ฒญํฌ: {result['chunks']}๊ฐœ ์ƒ์„ฑ
@@ -334,80 +361,99 @@ def clear_documents():
334
  rag_system.embeddings_store = {}
335
 
336
  return (
337
- gr.update(value="<div class='pdf-status pdf-success'>โœ… ๋ชจ๋“  ๋ฌธ์„œ๊ฐ€ ์‚ญ์ œ๋˜์—ˆ์Šต๋‹ˆ๋‹ค</div>"),
338
  gr.update(choices=[], value=[]),
339
  gr.update(value=False)
340
  )
341
 
342
  def switch_model(model_choice):
343
  """Function to switch between models"""
344
- return gr.update(visible=False), gr.update(visible=True), model_choice
 
 
 
345
 
346
- def create_rag_context_display(query, selected_docs, top_k):
347
- """RAG ์ปจํ…์ŠคํŠธ ํ‘œ์‹œ์šฉ HTML ์ƒ์„ฑ"""
348
- if not selected_docs:
349
- return ""
350
 
351
- doc_ids = [doc.split(":")[0] for doc in selected_docs]
352
- chunks = rag_system.search_relevant_chunks(query, doc_ids, top_k)
353
 
354
- if not chunks:
355
- return ""
 
 
 
 
 
 
 
356
 
357
- html = "<div class='rag-context'><strong>๐Ÿ“š ์ฐธ๊ณ  ๋ฌธ์„œ:</strong><br>"
358
- for i, chunk in enumerate(chunks, 1):
359
- html += f"<br>{i}. {chunk['doc_name']} (์œ ์‚ฌ๋„: {chunk['similarity']:.2f})<br>"
360
- html += f"<small>{chunk['content'][:200]}...</small><br>"
361
- html += "</div>"
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
362
 
363
- return html
364
 
365
- # Main interface
366
- with gr.Blocks(fill_height=True, theme="Nymbo/Nymbo_Theme", css=custom_css) as demo:
367
- # JavaScript to handle message passing
368
- gr.HTML("""
369
- <script>
370
- function sendToModel(processedMsg) {
371
- // This function would send the processed message to the model
372
- console.log("Sending to model:", processedMsg);
373
- }
374
- </script>
375
- """)
376
 
377
  with gr.Row():
378
  # Sidebar
379
  with gr.Column(scale=1):
380
  with gr.Group(elem_classes="main-container"):
381
- gr.Markdown("# ๐Ÿš€ Inference Provider + RAG")
382
  gr.Markdown(
383
- "OpenAI GPT-OSS models with PDF RAG support. "
384
- "Sign in with your Hugging Face account to use this API."
385
  )
386
 
 
 
 
387
  # Model selection
388
  model_dropdown = gr.Dropdown(
389
  choices=["openai/gpt-oss-120b", "openai/gpt-oss-20b"],
390
  value="openai/gpt-oss-120b",
391
- label="๐Ÿ“Š Select Model",
392
- info="Choose between different model sizes"
393
  )
394
 
395
- # Login button
396
- login_button = gr.LoginButton("Sign in with Hugging Face", size="lg")
397
-
398
  # Reload button to apply model change
399
- reload_btn = gr.Button("๐Ÿ”„ Apply Model Change", variant="primary", size="lg")
400
 
401
  # RAG Settings
402
- with gr.Accordion("๐Ÿ“š PDF RAG Settings", open=True):
403
  pdf_upload = gr.File(
404
- label="Upload PDF",
405
  file_types=[".pdf"],
406
  type="filepath"
407
  )
408
 
409
  upload_status = gr.HTML(
410
- value="<div class='pdf-status pdf-info'>๐Ÿ“ค PDF๋ฅผ ์—…๋กœ๋“œํ•˜์—ฌ ๋ฌธ์„œ ๊ธฐ๋ฐ˜ ๋‹ต๋ณ€์„ ๋ฐ›์œผ์„ธ์š”</div>"
411
  )
412
 
413
  document_list = gr.CheckboxGroup(
@@ -416,10 +462,11 @@ with gr.Blocks(fill_height=True, theme="Nymbo/Nymbo_Theme", css=custom_css) as d
416
  info="์ฐธ๊ณ ํ•  ๋ฌธ์„œ๋ฅผ ์„ ํƒํ•˜์„ธ์š”"
417
  )
418
 
419
- clear_btn = gr.Button("๐Ÿ—‘๏ธ ๋ชจ๋“  ๋ฌธ์„œ ์‚ญ์ œ", size="sm")
 
420
 
421
  enable_rag = gr.Checkbox(
422
- label="RAG ํ™œ์„ฑํ™”",
423
  value=False,
424
  info="์„ ํƒํ•œ ๋ฌธ์„œ๋ฅผ ์ฐธ๊ณ ํ•˜์—ฌ ๋‹ต๋ณ€ ์ƒ์„ฑ"
425
  )
@@ -430,25 +477,26 @@ with gr.Blocks(fill_height=True, theme="Nymbo/Nymbo_Theme", css=custom_css) as d
430
  value=3,
431
  step=1,
432
  label="์ฐธ์กฐ ์ฒญํฌ ์ˆ˜",
433
- info="๋‹ต๋ณ€ ์ƒ์„ฑ์‹œ ์ฐธ๊ณ ํ•  ๋ฌธ์„œ ์กฐ๊ฐ ๊ฐœ์ˆ˜"
434
  )
435
 
436
  # Additional options
437
- with gr.Accordion("โš™๏ธ Advanced Options", open=False):
438
- gr.Markdown("*These options will be available after model implementation*")
439
  temperature = gr.Slider(
440
  minimum=0,
441
  maximum=2,
442
  value=0.7,
443
  step=0.1,
444
- label="Temperature"
 
445
  )
446
  max_tokens = gr.Slider(
447
  minimum=1,
448
  maximum=4096,
449
  value=512,
450
  step=1,
451
- label="Max Tokens"
 
452
  )
453
 
454
  # Main chat area
@@ -461,53 +509,67 @@ with gr.Blocks(fill_height=True, theme="Nymbo/Nymbo_Theme", css=custom_css) as d
461
  value="<div class='pdf-status pdf-info'>๐Ÿ” RAG: <strong>๋น„ํ™œ์„ฑํ™”</strong></div>"
462
  )
463
 
464
- # RAG ์ปจํ…์ŠคํŠธ ํ‘œ์‹œ ์˜์—ญ
465
- rag_context_display = gr.HTML(value="", visible=False)
466
-
467
- # Container for model interfaces
468
  with gr.Column(visible=True) as model_120b_container:
469
- gr.Markdown("### Model: openai/gpt-oss-120b")
 
 
 
 
 
 
470
 
471
- # RAG ์ฒ˜๋ฆฌ๋ฅผ ์œ„ํ•œ ์ปค์Šคํ…€ ์ธํ„ฐํŽ˜์ด์Šค
472
- with gr.Group():
473
- # ์‚ฌ์šฉ์ž ์ž…๋ ฅ ํ…์ŠคํŠธ๋ฐ•์Šค
474
- user_input = gr.Textbox(
475
- label="๋ฉ”์‹œ์ง€ ์ž…๋ ฅ",
476
- placeholder="๋ฌธ์„œ์— ๋Œ€ํ•ด ์งˆ๋ฌธํ•˜๊ฑฐ๋‚˜ ์ผ๋ฐ˜ ๋Œ€ํ™”๋ฅผ ์‹œ์ž‘ํ•˜์„ธ์š”...",
477
- lines=2
478
  )
 
 
 
 
479
 
480
- with gr.Row():
481
- send_btn = gr.Button("๐Ÿ“ค ์ „์†ก", variant="primary")
482
- clear_chat_btn = gr.Button("๐Ÿ—‘๏ธ ๋Œ€ํ™” ์ดˆ๊ธฐํ™”")
483
-
484
- # ์›๋ณธ ๋ชจ๋ธ ๋กœ๋“œ
485
- original_model = gr.load(
486
- "models/openai/gpt-oss-120b",
487
- accept_token=login_button,
488
- provider="fireworks-ai"
489
  )
490
 
491
  with gr.Column(visible=False) as model_20b_container:
492
- gr.Markdown("### Model: openai/gpt-oss-20b")
 
 
 
 
 
 
493
 
494
- with gr.Group():
495
- # ์‚ฌ์šฉ์ž ์ž…๋ ฅ ํ…์ŠคํŠธ๋ฐ•์Šค (20b์šฉ)
496
- user_input_20b = gr.Textbox(
497
- label="๋ฉ”์‹œ์ง€ ์ž…๋ ฅ",
498
- placeholder="๋ฌธ์„œ์— ๋Œ€ํ•ด ์งˆ๋ฌธํ•˜๊ฑฐ๋‚˜ ์ผ๋ฐ˜ ๋Œ€ํ™”๋ฅผ ์‹œ์ž‘ํ•˜์„ธ์š”...",
499
- lines=2
500
  )
 
 
 
 
501
 
502
- with gr.Row():
503
- send_btn_20b = gr.Button("๐Ÿ“ค ์ „์†ก", variant="primary")
504
- clear_chat_btn_20b = gr.Button("๐Ÿ—‘๏ธ ๋Œ€ํ™” ์ดˆ๊ธฐํ™”")
505
-
506
- # ์›๋ณธ ๋ชจ๋ธ ๋กœ๋“œ
507
- original_model_20b = gr.load(
508
- "models/openai/gpt-oss-20b",
509
- accept_token=login_button,
510
- provider="fireworks-ai"
511
  )
512
 
513
  # Event Handlers
@@ -528,7 +590,7 @@ with gr.Blocks(fill_height=True, theme="Nymbo/Nymbo_Theme", css=custom_css) as d
528
  # RAG ์ƒํƒœ ์—…๋ฐ์ดํŠธ
529
  enable_rag.change(
530
  fn=lambda x: gr.update(
531
- value=f"<div class='pdf-status pdf-info'>๐Ÿ” RAG: <strong>{'ํ™œ์„ฑํ™”' if x else '๋น„ํ™œ์„ฑํ™”'}</strong></div>"
532
  ),
533
  inputs=[enable_rag],
534
  outputs=[rag_status]
@@ -540,59 +602,89 @@ with gr.Blocks(fill_height=True, theme="Nymbo/Nymbo_Theme", css=custom_css) as d
540
  inputs=[model_dropdown],
541
  outputs=[model_120b_container, model_20b_container, current_model]
542
  ).then(
543
- fn=lambda: gr.Info("Model switched successfully!"),
544
  inputs=[],
545
  outputs=[]
546
  )
547
 
548
- # Update visibility based on dropdown selection
549
- def update_visibility(model_choice):
550
- if model_choice == "openai/gpt-oss-120b":
551
- return gr.update(visible=True), gr.update(visible=False)
552
- else:
553
- return gr.update(visible=False), gr.update(visible=True)
554
-
555
- model_dropdown.change(
556
- fn=update_visibility,
557
- inputs=[model_dropdown],
558
- outputs=[model_120b_container, model_20b_container]
 
559
  )
560
 
561
- # ๋ฉ”์‹œ์ง€ ์ „์†ก ์ฒ˜๋ฆฌ (RAG ํฌํ•จ)
562
- def process_message(message, enable_rag, selected_docs, top_k):
563
- """๋ฉ”์‹œ์ง€๋ฅผ RAG๋กœ ์ฒ˜๋ฆฌํ•˜์—ฌ ๋ชจ๋ธ์— ์ „์†ก"""
564
- if enable_rag and selected_docs:
565
- doc_ids = [doc.split(":")[0] for doc in selected_docs]
566
- enhanced_message = rag_system.create_rag_prompt(message, doc_ids, top_k)
567
- context_html = create_rag_context_display(message, selected_docs, top_k)
568
- return enhanced_message, gr.update(value=context_html, visible=True)
569
- else:
570
- return message, gr.update(value="", visible=False)
 
 
571
 
572
- # 120b ๋ชจ๋ธ์šฉ ์ด๋ฒคํŠธ
573
- send_btn.click(
574
- fn=process_message,
575
- inputs=[user_input, enable_rag, document_list, top_k_chunks],
576
- outputs=[user_input, rag_context_display]
577
  )
578
 
579
- user_input.submit(
580
- fn=process_message,
581
- inputs=[user_input, enable_rag, document_list, top_k_chunks],
582
- outputs=[user_input, rag_context_display]
 
 
 
 
 
 
 
 
583
  )
584
 
585
- # 20b ๋ชจ๋ธ์šฉ ์ด๋ฒคํŠธ
586
  send_btn_20b.click(
587
- fn=process_message,
588
- inputs=[user_input_20b, enable_rag, document_list, top_k_chunks],
589
- outputs=[user_input_20b, rag_context_display]
 
 
 
 
 
 
 
590
  )
591
 
592
- user_input_20b.submit(
593
- fn=process_message,
594
- inputs=[user_input_20b, enable_rag, document_list, top_k_chunks],
595
- outputs=[user_input_20b, rag_context_display]
596
  )
597
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
598
  demo.launch()
 
1
  import gradio as gr
2
  import os
3
+ from typing import List, Dict, Any, Optional, Tuple
4
  import hashlib
5
  from datetime import datetime
6
  import numpy as np
7
+ from gradio_client import Client
8
 
9
  # PDF processing libraries
10
  try:
 
21
  ST_AVAILABLE = False
22
  print("โš ๏ธ Sentence Transformers not installed. Install with: pip install sentence-transformers")
23
 
24
+ # Soft and bright custom CSS
25
  custom_css = """
26
  .gradio-container {
27
+ background: linear-gradient(135deg, #f5f7fa 0%, #c3cfe2 100%);
 
 
28
  min-height: 100vh;
29
+ font-family: 'Inter', -apple-system, BlinkMacSystemFont, sans-serif;
30
  }
31
+
 
 
 
 
 
 
 
 
 
32
  .main-container {
33
+ background: rgba(255, 255, 255, 0.98);
34
+ border-radius: 16px;
35
+ padding: 24px;
36
+ box-shadow: 0 4px 6px -1px rgba(0, 0, 0, 0.1), 0 2px 4px -1px rgba(0, 0, 0, 0.06);
37
+ border: 1px solid rgba(0, 0, 0, 0.05);
38
+ margin: 12px;
 
39
  }
40
+
41
+ .main-container:hover {
42
+ box-shadow: 0 10px 15px -3px rgba(0, 0, 0, 0.1), 0 4px 6px -2px rgba(0, 0, 0, 0.05);
43
+ transition: all 0.3s ease;
44
  }
45
+
46
+ /* Status messages styling */
47
  .pdf-status {
48
+ padding: 12px 16px;
49
+ border-radius: 12px;
50
+ margin: 12px 0;
51
+ font-size: 0.95rem;
52
+ font-weight: 500;
53
+ backdrop-filter: blur(10px);
54
  }
55
+
56
  .pdf-success {
57
+ background: linear-gradient(135deg, #d4edda 0%, #c3e6cb 100%);
58
+ border: 1px solid #b1dfbb;
59
+ color: #155724;
60
  }
61
+
62
  .pdf-error {
63
+ background: linear-gradient(135deg, #f8d7da 0%, #f5c6cb 100%);
64
+ border: 1px solid #f1aeb5;
65
+ color: #721c24;
66
  }
67
+
68
  .pdf-info {
69
+ background: linear-gradient(135deg, #d1ecf1 0%, #bee5eb 100%);
70
+ border: 1px solid #9ec5d8;
71
+ color: #0c5460;
72
+ }
73
+
74
+ .pdf-warning {
75
+ background: linear-gradient(135deg, #fff3cd 0%, #ffeeba 100%);
76
+ border: 1px solid #ffeaa7;
77
+ color: #856404;
78
  }
79
+
80
+ /* RAG context display */
81
  .rag-context {
82
+ background: linear-gradient(135deg, #fef3c7 0%, #fde68a 100%);
83
  border-left: 4px solid #f59e0b;
84
+ padding: 16px;
85
+ margin: 16px 0;
86
+ border-radius: 8px;
87
+ font-size: 0.9rem;
88
+ }
89
+
90
+ /* Chat message styling */
91
+ .message {
92
+ padding: 12px 16px;
93
+ margin: 8px 4px;
94
+ border-radius: 12px;
95
+ max-width: 80%;
96
+ }
97
+
98
+ .user-message {
99
+ background: linear-gradient(135deg, #667eea 0%, #764ba2 100%);
100
+ color: white;
101
+ margin-left: auto;
102
+ }
103
+
104
+ .bot-message {
105
+ background: #f3f4f6;
106
+ color: #1f2937;
107
  }
108
  """
109
 
 
256
  for doc_id in doc_ids:
257
  if doc_id in self.document_chunks:
258
  chunks = self.document_chunks[doc_id]
259
+ for chunk in chunks[:top_k]:
260
  chunk_lower = chunk.lower()
261
  score = sum(1 for keyword in query_keywords if keyword in chunk_lower)
262
  if score > 0:
263
  all_relevant_chunks.append({
264
+ "content": chunk[:500],
265
  "doc_name": self.documents[doc_id]["metadata"]["file_name"],
266
  "similarity": score / len(query_keywords) if query_keywords else 0
267
  })
 
279
 
280
  # ํ”„๋กฌํ”„ํŠธ ๊ตฌ์„ฑ
281
  prompt_parts = []
282
+ prompt_parts.append("์•„๋ž˜ ์ฐธ๊ณ  ๋ฌธ์„œ๋ฅผ ๋ฐ”ํƒ•์œผ๋กœ ์งˆ๋ฌธ์— ๋‹ต๋ณ€ํ•ด์ฃผ์„ธ์š”.\n")
283
  prompt_parts.append("=" * 50)
284
 
285
  for i, chunk in enumerate(relevant_chunks, 1):
 
290
 
291
  prompt_parts.append("=" * 50)
292
  prompt_parts.append(f"\nQuestion: {query}")
293
+ prompt_parts.append("\n์œ„ ์ฐธ๊ณ ๋ฌธ์„œ์˜ ๋‚ด์šฉ์„ ๋ฐ”ํƒ•์œผ๋กœ ์ •ํ™•ํ•˜๊ณ  ์ƒ์„ธํ•˜๊ฒŒ ๋‹ต๋ณ€ํ•ด์ฃผ์„ธ์š”:")
294
 
295
  return "\n".join(prompt_parts)
296
 
297
  # Create the RAG system instance
298
  rag_system = PDFRAGSystem()
299
 
300
+ # State variables
301
  current_model = gr.State("openai/gpt-oss-120b")
302
+ conversation_history = gr.State([])
303
 
304
  def upload_pdf(file):
305
  """PDF ํŒŒ์ผ ์—…๋กœ๋“œ ์ฒ˜๋ฆฌ"""
306
  if file is None:
307
  return (
308
+ gr.update(value="<div class='pdf-status pdf-warning'>📁 Please select a file</div>"),
309
  gr.update(choices=[]),
310
  gr.update(value=False)
311
  )
 
323
  if result["success"]:
324
  status_html = f"""
325
  <div class="pdf-status pdf-success">
326
+ โœ… PDF ์—…๋กœ๋“œ ์™„๋ฃŒ<br>
327
  ๐Ÿ“„ ํŒŒ์ผ: {result['title']}<br>
328
  ๐Ÿ“‘ ํŽ˜์ด์ง€: {result['pages']}ํŽ˜์ด์ง€<br>
329
  ๐Ÿ” ์ฒญํฌ: {result['chunks']}๊ฐœ ์ƒ์„ฑ
 
361
  rag_system.embeddings_store = {}
362
 
363
  return (
364
+ gr.update(value="<div class='pdf-status pdf-info'>๐Ÿ—‘๏ธ ๋ชจ๋“  ๋ฌธ์„œ๊ฐ€ ์‚ญ์ œ๋˜์—ˆ์Šต๋‹ˆ๋‹ค</div>"),
365
  gr.update(choices=[], value=[]),
366
  gr.update(value=False)
367
  )
368
 
369
  def switch_model(model_choice):
370
  """Function to switch between models"""
371
+ if model_choice == "openai/gpt-oss-120b":
372
+ return gr.update(visible=True), gr.update(visible=False), model_choice
373
+ else:
374
+ return gr.update(visible=False), gr.update(visible=True), model_choice
375
 
376
+ def chat_with_rag(message, history, model_name, enable_rag, selected_docs, top_k, temperature, max_tokens):
377
+ """RAG๋ฅผ ์ ์šฉํ•œ ์ฑ„ํŒ… ํ•จ์ˆ˜"""
 
 
378
 
379
+ if not message:
380
+ return history
381
 
382
+ # Apply RAG
383
+ if enable_rag and selected_docs:
384
+ doc_ids = [doc.split(":")[0] for doc in selected_docs]
385
+ enhanced_message = rag_system.create_rag_prompt(message, doc_ids, top_k)
386
+
387
+ # Debug: confirm RAG was applied
388
+ print(f"RAG applied - original: {len(message)} chars, enhanced: {len(enhanced_message)} chars")
389
+ else:
390
+ enhanced_message = message
391
 
392
+ try:
393
+ # The real model API should be called here
394
+ # For now, generate a mock response
395
+ if enable_rag and selected_docs:
396
+ response = f"""📚 [RAG-based answer]
397
+
398
+ Here is an answer based on the documents:
399
+
400
+ {enhanced_message[:500]}...
401
+
402
+ [Note: a real model API connection is required]
403
+ """
404
+ else:
405
+ response = f"""💬 [General answer]
406
+
407
+ Question: {message}
408
+
409
+ [Note: a real model API connection is required]
410
+ """
411
+
412
+ # Add to the conversation history
413
+ history.append([message, response])
414
+
415
+ except Exception as e:
416
+ response = f"❌ Error occurred: {str(e)}"
417
+ history.append([message, response])
418
 
419
+ return history
420
 
421
+ # Main interface with soft theme
422
+ with gr.Blocks(fill_height=True, theme=gr.themes.Soft(), css=custom_css) as demo:
 
 
 
 
 
 
 
 
 
423
 
424
  with gr.Row():
425
  # Sidebar
426
  with gr.Column(scale=1):
427
  with gr.Group(elem_classes="main-container"):
428
+ gr.Markdown("# 🤖 AI Chat + RAG")
429
  gr.Markdown(
430
+ "OpenAI GPT-OSS ๋ชจ๋ธ๊ณผ PDF ๋ฌธ์„œ ๊ธฐ๋ฐ˜ ๋‹ต๋ณ€ ์‹œ์Šคํ…œ์ž…๋‹ˆ๋‹ค."
 
431
  )
432
 
433
+ # Login button
434
+ login_button = gr.LoginButton("🔐 Sign in with Hugging Face", size="lg")
435
+
436
  # Model selection
437
  model_dropdown = gr.Dropdown(
438
  choices=["openai/gpt-oss-120b", "openai/gpt-oss-20b"],
439
  value="openai/gpt-oss-120b",
440
+ label="๐Ÿ“Š ๋ชจ๋ธ ์„ ํƒ",
441
+ info="์›ํ•˜๋Š” ๋ชจ๋ธ ํฌ๊ธฐ๋ฅผ ์„ ํƒํ•˜์„ธ์š”"
442
  )
443
 
 
 
 
444
  # Reload button to apply model change
445
+ reload_btn = gr.Button("🔄 Apply Model Change", variant="primary", size="lg")
446
 
447
  # RAG Settings
448
+ with gr.Accordion("📚 PDF RAG Settings", open=True):
449
  pdf_upload = gr.File(
450
+ label="๐Ÿ“ค PDF ์—…๋กœ๋“œ",
451
  file_types=[".pdf"],
452
  type="filepath"
453
  )
454
 
455
  upload_status = gr.HTML(
456
+ value="<div class='pdf-status pdf-info'>๐Ÿ“ PDF๋ฅผ ์—…๋กœ๋“œํ•˜์—ฌ ๋ฌธ์„œ ๊ธฐ๋ฐ˜ ๋‹ต๋ณ€์„ ๋ฐ›์œผ์„ธ์š”</div>"
457
  )
458
 
459
  document_list = gr.CheckboxGroup(
 
462
  info="์ฐธ๊ณ ํ•  ๋ฌธ์„œ๋ฅผ ์„ ํƒํ•˜์„ธ์š”"
463
  )
464
 
465
+ with gr.Row():
466
+ clear_btn = gr.Button("🗑️ Delete All Documents", size="sm", variant="secondary")
467
 
468
  enable_rag = gr.Checkbox(
469
+ label="โœจ RAG ํ™œ์„ฑํ™”",
470
  value=False,
471
  info="์„ ํƒํ•œ ๋ฌธ์„œ๋ฅผ ์ฐธ๊ณ ํ•˜์—ฌ ๋‹ต๋ณ€ ์ƒ์„ฑ"
472
  )
 
477
  value=3,
478
  step=1,
479
  label="์ฐธ์กฐ ์ฒญํฌ ์ˆ˜",
480
+ info="๋‹ต๋ณ€์‹œ ์ฐธ๊ณ ํ•  ๋ฌธ์„œ ์กฐ๊ฐ ๊ฐœ์ˆ˜"
481
  )
482
 
483
  # Additional options
484
+ with gr.Accordion("🎛️ Model Options", open=False):
 
485
  temperature = gr.Slider(
486
  minimum=0,
487
  maximum=2,
488
  value=0.7,
489
  step=0.1,
490
+ label="Temperature",
491
+ info="๋‚ฎ์„์ˆ˜๋ก ์ผ๊ด€์„ฑ ์žˆ๊ณ , ๋†’์„์ˆ˜๋ก ์ฐฝ์˜์ ์ž…๋‹ˆ๋‹ค"
492
  )
493
  max_tokens = gr.Slider(
494
  minimum=1,
495
  maximum=4096,
496
  value=512,
497
  step=1,
498
+ label="Max Tokens",
499
+ info="์ƒ์„ฑํ•  ์ตœ๋Œ€ ํ† ํฐ ์ˆ˜"
500
  )
501
 
502
  # Main chat area
 
509
  value="<div class='pdf-status pdf-info'>๐Ÿ” RAG: <strong>๋น„ํ™œ์„ฑํ™”</strong></div>"
510
  )
511
 
512
+ # Unified chat interface (one per model)
 
 
 
513
  with gr.Column(visible=True) as model_120b_container:
514
+ gr.Markdown("### 🚀 Model: openai/gpt-oss-120b")
515
+
516
+ chatbot_120b = gr.Chatbot(
517
+ height=400,
518
+ show_label=False,
519
+ elem_classes="chatbot"
520
+ )
521
 
522
+ with gr.Row():
523
+ msg_120b = gr.Textbox(
524
+ placeholder="๋ฉ”์‹œ์ง€๋ฅผ ์ž…๋ ฅํ•˜์„ธ์š”... (Enter๋กœ ์ „์†ก)",
525
+ show_label=False,
526
+ scale=4,
527
+ container=False
 
528
  )
529
+ send_btn_120b = gr.Button("📤 Send", variant="primary", scale=1)
530
+
531
+ with gr.Row():
532
+ clear_btn_120b = gr.Button("🗑️ Clear Conversation", variant="secondary", size="sm")
533
 
534
+ # Example questions
535
+ gr.Examples(
536
+ examples=[
537
+ "๋ฌธ์„œ์˜ ์ฃผ์š” ๋‚ด์šฉ์„ ์š”์•ฝํ•ด์ฃผ์„ธ์š”",
538
+ "์ด ๋ฌธ์„œ์—์„œ ๊ฐ€์žฅ ์ค‘์š”ํ•œ ํฌ์ธํŠธ๋Š” ๋ฌด์—‡์ธ๊ฐ€์š”?",
539
+ "๋ฌธ์„œ์— ์–ธ๊ธ‰๋œ ๋‚ ์งœ์™€ ์ผ์ •์„ ์•Œ๋ ค์ฃผ์„ธ์š”"
540
+ ],
541
+ inputs=msg_120b
 
542
  )
543
 
544
  with gr.Column(visible=False) as model_20b_container:
545
+ gr.Markdown("### 🚀 Model: openai/gpt-oss-20b")
546
+
547
+ chatbot_20b = gr.Chatbot(
548
+ height=400,
549
+ show_label=False,
550
+ elem_classes="chatbot"
551
+ )
552
 
553
+ with gr.Row():
554
+ msg_20b = gr.Textbox(
555
+ placeholder="๋ฉ”์‹œ์ง€๋ฅผ ์ž…๋ ฅํ•˜์„ธ์š”... (Enter๋กœ ์ „์†ก)",
556
+ show_label=False,
557
+ scale=4,
558
+ container=False
559
  )
560
+ send_btn_20b = gr.Button("📤 Send", variant="primary", scale=1)
561
+
562
+ with gr.Row():
563
+ clear_btn_20b = gr.Button("🗑️ Clear Conversation", variant="secondary", size="sm")
564
 
565
+ # Example questions
566
+ gr.Examples(
567
+ examples=[
568
+ "๋ฌธ์„œ์˜ ์ฃผ์š” ๋‚ด์šฉ์„ ์š”์•ฝํ•ด์ฃผ์„ธ์š”",
569
+ "์ด ๋ฌธ์„œ์—์„œ ๊ฐ€์žฅ ์ค‘์š”ํ•œ ํฌ์ธํŠธ๋Š” ๋ฌด์—‡์ธ๊ฐ€์š”?",
570
+ "๋ฌธ์„œ์— ์–ธ๊ธ‰๋œ ๋‚ ์งœ์™€ ์ผ์ •์„ ์•Œ๋ ค์ฃผ์„ธ์š”"
571
+ ],
572
+ inputs=msg_20b
 
573
  )
574
 
575
  # Event Handlers
 
590
  # RAG ์ƒํƒœ ์—…๋ฐ์ดํŠธ
591
  enable_rag.change(
592
  fn=lambda x: gr.update(
593
+ value=f"<div class='pdf-status pdf-info'>๐Ÿ” RAG: <strong>{'โœ… ํ™œ์„ฑํ™”' if x else 'โญ• ๋น„ํ™œ์„ฑํ™”'}</strong></div>"
594
  ),
595
  inputs=[enable_rag],
596
  outputs=[rag_status]
 
602
  inputs=[model_dropdown],
603
  outputs=[model_120b_container, model_20b_container, current_model]
604
  ).then(
605
+ fn=lambda: gr.Info("✅ Model switched successfully!"),
606
  inputs=[],
607
  outputs=[]
608
  )
609
 
610
+ # Chat events for the 120b model
611
+ msg_120b.submit(
612
+ fn=lambda msg, hist: chat_with_rag(
613
+ msg, hist, "openai/gpt-oss-120b",
614
+ enable_rag.value, document_list.value, top_k_chunks.value,
615
+ temperature.value, max_tokens.value
616
+ ),
617
+ inputs=[msg_120b, chatbot_120b],
618
+ outputs=[chatbot_120b]
619
+ ).then(
620
+ fn=lambda: "",
621
+ outputs=[msg_120b]
622
  )
623
 
624
+ send_btn_120b.click(
625
+ fn=lambda msg, hist: chat_with_rag(
626
+ msg, hist, "openai/gpt-oss-120b",
627
+ enable_rag.value, document_list.value, top_k_chunks.value,
628
+ temperature.value, max_tokens.value
629
+ ),
630
+ inputs=[msg_120b, chatbot_120b],
631
+ outputs=[chatbot_120b]
632
+ ).then(
633
+ fn=lambda: "",
634
+ outputs=[msg_120b]
635
+ )
636
 
637
+ clear_btn_120b.click(
638
+ fn=lambda: [],
639
+ outputs=[chatbot_120b]
 
 
640
  )
641
 
642
+ # Chat events for the 20b model
643
+ msg_20b.submit(
644
+ fn=lambda msg, hist: chat_with_rag(
645
+ msg, hist, "openai/gpt-oss-20b",
646
+ enable_rag.value, document_list.value, top_k_chunks.value,
647
+ temperature.value, max_tokens.value
648
+ ),
649
+ inputs=[msg_20b, chatbot_20b],
650
+ outputs=[chatbot_20b]
651
+ ).then(
652
+ fn=lambda: "",
653
+ outputs=[msg_20b]
654
  )
655
 
 
656
  send_btn_20b.click(
657
+ fn=lambda msg, hist: chat_with_rag(
658
+ msg, hist, "openai/gpt-oss-20b",
659
+ enable_rag.value, document_list.value, top_k_chunks.value,
660
+ temperature.value, max_tokens.value
661
+ ),
662
+ inputs=[msg_20b, chatbot_20b],
663
+ outputs=[chatbot_20b]
664
+ ).then(
665
+ fn=lambda: "",
666
+ outputs=[msg_20b]
667
  )
668
 
669
+ clear_btn_20b.click(
670
+ fn=lambda: [],
671
+ outputs=[chatbot_20b]
 
672
  )
673
 
674
+ # Function for connecting to a real model API (implementation needed)
675
+ def connect_to_model_api(model_name, message, temperature, max_tokens):
676
+ """
677
+ Connects to the actual model API.
678
+ TODO: implement the real API call here
679
+
680
+ Examples:
681
+ - OpenAI API
682
+ - Hugging Face Inference API
683
+ - Custom model endpoint
684
+ """
685
+ # client = Client(f"models/{model_name}")
686
+ # response = client.predict(message, temperature=temperature, max_tokens=max_tokens)
687
+ # return response
688
+ pass
689
+
690
  demo.launch()
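The diff leaves connect_to_model_api as a stub and chat_with_rag returning mock responses. A minimal sketch of one way to fill in the stub, assuming the huggingface_hub InferenceClient and an HF_TOKEN environment variable (both assumptions; the commit itself only lists the Hugging Face Inference API as one option in the TODO):

import os
from huggingface_hub import InferenceClient

def connect_to_model_api(model_name, message, temperature, max_tokens):
    # Hypothetical sketch, not part of the commit: route the message through
    # the Hugging Face Inference API, keeping the stub's signature above.
    client = InferenceClient(model=model_name, token=os.environ.get("HF_TOKEN"))
    result = client.chat_completion(
        messages=[{"role": "user", "content": message}],
        temperature=temperature,
        max_tokens=max_tokens,
    )
    return result.choices[0].message.content

The previous version routed these models through provider="fireworks-ai" via gr.load; recent huggingface_hub releases accept a similar provider argument on InferenceClient if that routing is still wanted.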