openfree committed
Commit 6f1521b · 1 parent: a714b7a

Update app-backup.py

Files changed (1): app-backup.py (+76, -88)
app-backup.py CHANGED
@@ -83,7 +83,7 @@ def do_web_search(query: str) -> str:
 # Model/processor loading
 ##############################################################################
 MAX_CONTENT_CHARS = 4000
-model_id = os.getenv("MODEL_ID", "google/gemma-3-27b-it")
+model_id = os.getenv("MODEL_ID", "VIDraft/Gemma3-R1945-27B")
 
 processor = AutoProcessor.from_pretrained(model_id, padding_side="left")
 model = Gemma3ForConditionalGeneration.from_pretrained(
@@ -142,6 +142,7 @@ def pdf_to_markdown(pdf_path: str) -> str:
         page_text = page.extract_text() or ""
         page_text = page_text.strip()
         if page_text:
+            # Truncate each page to its share of the maximum before use
             if len(page_text) > MAX_CONTENT_CHARS // max_pages:
                 page_text = page_text[:MAX_CONTENT_CHARS // max_pages] + "...(truncated)"
             text_chunks.append(f"## Page {page_num+1}\n\n{page_text}\n")
@@ -277,6 +278,7 @@ def process_interleaved_images(message: dict) -> list[dict]:
         elif part.strip():
             content.append({"type": "text", "text": part.strip()})
         else:
+            # Empty strings can also end up in content; adjust here if needed
             if isinstance(part, str) and part != "<image>":
                 content.append({"type": "text", "text": part})
     return content
@@ -329,6 +331,7 @@ def process_new_user_message(message: dict) -> list[dict]:
 
     if "<image>" in message["text"] and image_files:
         interleaved_content = process_interleaved_images({"text": message["text"], "files": image_files})
+        # The text body already contains <image>; adjust here if needed to avoid duplicate handling
         if content_list and content_list[0]["type"] == "text":
             content_list = content_list[1:]
         return interleaved_content + content_list
@@ -388,6 +391,7 @@ def run(
     try:
         combined_system_msg = ""
 
+        # Used internally only (not shown in the UI)
        if system_prompt.strip():
            combined_system_msg += f"[System Prompt]\n{system_prompt.strip()}\n\n"
 
@@ -398,6 +402,8 @@ def run(
                logger.info(f"[Auto WebSearch Keyword] {ws_query!r}")
                ws_result = do_web_search(ws_query)
                combined_system_msg += f"[Search top-20 Full Items Based on user prompt]\n{ws_result}\n\n"
+               # >>> Added guidance (cite sources such as the links in the search results)
+               combined_system_msg += "[Note: please cite the content and links of the search results above as sources in your answer.]\n\n"
            else:
                combined_system_msg += "[No valid keywords found, skipping WebSearch]\n\n"
 
@@ -467,7 +473,6 @@ examples = [
     [
         {
            "text": "Compare the contents of the two PDF files.",
-           "files": ["assets/additional-examples/pdf.pdf"],
            "files": [
                "assets/additional-examples/before.pdf",
                "assets/additional-examples/after.pdf",
@@ -562,7 +567,7 @@ examples = [
 
 
 ##############################################################################
-# Gradio UI (Blocks) layout
+# Gradio UI (Blocks) layout (full-screen chat, no left sidebar menu)
 ##############################################################################
 css = """
 body {
@@ -584,8 +589,8 @@ body {
     color: #333;
     text-shadow: 1px 1px 2px rgba(0, 0, 0, 0.2);
 }
-.fillable {
-    width: 95% !important;
+.fillable {
+    width: 95% !important;
     max-width: unset !important;
 }
 #examples_container {
@@ -595,12 +600,6 @@ body {
 #examples_row {
     justify-content: center;
 }
-.sidebar {
-    background: rgba(255, 255, 255, 0.98);
-    border-radius: 10px;
-    padding: 20px;
-    box-shadow: 0 4px 15px rgba(0, 0, 0, 0.2);
-}
 button, .btn {
     background: linear-gradient(90deg, #ff8a00, #e52e71);
     border: none;
@@ -619,92 +618,81 @@ button:hover, .btn:hover {
 """
 
 title_html = """
-<h1 align="center" style="margin-bottom: 0.2em;"> 🤗 Vidraft-G3-27B : Multimodal + VLM + Deep Research </h1>
+<h1 align="center" style="margin-bottom: 0.2em; font-size: 1.6em;"> 🤗 Gemma3-R1945-27B </h1>
 <p align="center" style="font-size:1.1em; color:#555;">
-  Multimodal Chat Interface + Optional Web Search
+  ✅Agentic AI Platform ✅Reasoning & Uncensored ✅Multimodal & VLM ✅Deep-Research & RAG <br>
+  Operates on an ✅'NVIDIA A100 GPU' as an independent local server, enhancing security and preventing information leakage.<br>
+  @Based by 'MS Gemma-3-27b' / @Powered by 'MOUSE-II'(VIDRAFT)
 </p>
 """
 
-with gr.Blocks(css=css, title="Vidraft-G3-27B") as demo:
+with gr.Blocks(css=css, title="Gemma3-R1945-27B") as demo:
     gr.Markdown(title_html)
 
-    with gr.Row():
-        # Left Sidebar
-        with gr.Column(scale=3, variant="panel"):
-            gr.Markdown("### Menu / Options")
-            with gr.Row():
-                web_search_checkbox = gr.Checkbox(
-                    label="Web Search",
-                    value=False,
-                    info="Check to enable a Deep Research(auto keywords) before the chat reply"
-                )
-                web_search_text = gr.Textbox(
-                    lines=1,
-                    label="(Unused) Web Search Query",
-                    placeholder="No direct input needed"
-                )
-
-            gr.Markdown("---")
-            gr.Markdown("#### System Prompt")
-            system_prompt_box = gr.Textbox(
-                lines=3,
-                value=(
-                    "You are a deeply thoughtful AI. Consider problems thoroughly and derive "
-                    "correct solutions through systematic reasoning. Please answer in korean."
-                ),
-            )
-
-            max_tokens_slider = gr.Slider(
-                label="Max New Tokens",
-                minimum=100,
-                maximum=8000,
-                step=50,
-                value=2000,  # default reduced slightly to save GPU memory
-            )
-
-            gr.Markdown("<br><br>")
-
-        # Main ChatInterface
-        with gr.Column(scale=7):
-            chat = gr.ChatInterface(
-                fn=run,
-                type="messages",
-                chatbot=gr.Chatbot(type="messages", scale=1, allow_tags=["image"]),
-                textbox=gr.MultimodalTextbox(
-                    file_types=[
-                        ".webp", ".png", ".jpg", ".jpeg", ".gif",
-                        ".mp4", ".csv", ".txt", ".pdf"
-                    ],
-                    file_count="multiple",
-                    autofocus=True
-                ),
-                multimodal=True,
-                additional_inputs=[
-                    system_prompt_box,
-                    max_tokens_slider,
-                    web_search_checkbox,
-                    web_search_text,
-                ],
-                stop_btn=False,
-                title="Vidraft-G3-27B",
-                examples=examples,
-                run_examples_on_click=False,
-                cache_examples=False,
-                css_paths=None,
-                delete_cache=(1800, 1800),
-            )
+    # Show the web-search option on screen (but hide the system prompt, token slider, etc.)
+    web_search_checkbox = gr.Checkbox(
+        label="Use Web Search (automatic keyword extraction)",
+        value=False
+    )
 
+    # Used internally but not exposed in the UI
+    system_prompt_box = gr.Textbox(
+        lines=3,
+        value="You are a deep thinking AI, you may use extremely long chains of thought to deeply consider the problem and deliberate with yourself via systematic reasoning processes to help come to a correct solution prior to answering. You should enclose your thoughts and internal monologue inside tags, and then provide your solution or response to the problem. Please answer in Korean. You have the ability to read English sources, but you **must always speak in Korean**. Even if the search results are in English, answer in Korean.",
+        visible=False  # hidden from the UI
+    )
+
+    max_tokens_slider = gr.Slider(
+        label="Max New Tokens",
+        minimum=100,
+        maximum=8000,
+        step=50,
+        value=1000,
+        visible=False  # hidden from the UI
+    )
+
+    web_search_text = gr.Textbox(
+        lines=1,
+        label="(Unused) Web Search Query",
+        placeholder="No direct input needed",
+        visible=False  # hidden from the UI
+    )
+
+    # Chat interface setup
+    chat = gr.ChatInterface(
+        fn=run,
+        type="messages",
+        chatbot=gr.Chatbot(type="messages", scale=1, allow_tags=["image"]),
+        textbox=gr.MultimodalTextbox(
+            file_types=[
+                ".webp", ".png", ".jpg", ".jpeg", ".gif",
+                ".mp4", ".csv", ".txt", ".pdf"
+            ],
+            file_count="multiple",
+            autofocus=True
+        ),
+        multimodal=True,
+        additional_inputs=[
+            system_prompt_box,
+            max_tokens_slider,
+            web_search_checkbox,
+            web_search_text,
+        ],
+        stop_btn=False,
+        title='<a href="https://discord.gg/openfreeai" target="_blank">https://discord.gg/openfreeai</a>',
+        examples=examples,
+        run_examples_on_click=False,
+        cache_examples=False,
+        css_paths=None,
+        delete_cache=(1800, 1800),
+    )
+
+    # Examples section - examples are already set on the ChatInterface, so only a description is shown here
     with gr.Row(elem_id="examples_row"):
        with gr.Column(scale=12, elem_id="examples_container"):
            gr.Markdown("### Example Inputs (click to load)")
-           gr.Examples(
-               examples=examples,
-               inputs=[],
-               cache_examples=False
-           )
 
-if __name__ == "__main__":
-    # share=True triggers a warning on HF Spaces - works locally only
-    # demo.launch(share=True)
-    demo.launch()
 
+if __name__ == "__main__":
+    # When running locally
+    demo.launch()
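For reference, a minimal, self-contained sketch (not part of this commit; it assumes a recent Gradio 4.x/5.x install and a placeholder `run` instead of the app's real generation code) of the pattern the new layout relies on: components created inside `gr.Blocks` with `visible=False` are still delivered to the `gr.ChatInterface` callback through `additional_inputs`, in the order they are listed.

```python
import gradio as gr

def run(message, history, system_prompt, max_new_tokens, use_web_search, web_search_query):
    # Placeholder callback: with multimodal=True, `message` is a dict {"text": ..., "files": [...]}.
    # The real app builds a combined system message, optionally runs a web search, and streams a reply.
    return f"(placeholder) text={message['text']!r}, web_search={use_web_search}, max_new_tokens={max_new_tokens}"

with gr.Blocks() as demo:
    # Visible control
    web_search_checkbox = gr.Checkbox(label="Use Web Search", value=False)

    # Hidden controls: not rendered, but their values are still passed to the callback
    system_prompt_box = gr.Textbox(value="You are a helpful AI. Answer in Korean.", visible=False)
    max_tokens_slider = gr.Slider(minimum=100, maximum=8000, step=50, value=1000, visible=False)
    web_search_text = gr.Textbox(value="", visible=False)

    gr.ChatInterface(
        fn=run,
        type="messages",
        multimodal=True,
        additional_inputs=[system_prompt_box, max_tokens_slider, web_search_checkbox, web_search_text],
    )

if __name__ == "__main__":
    demo.launch()
```

In the diff above, the real `run` receives the same four values after `message` and `history`, following the order of the `additional_inputs` list (system prompt, max tokens, web-search flag, unused query box).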