hsuwill000 committed
Commit d7d5739 · verified · 1 Parent(s): 3e39aa5

Update app.py

Files changed (1): app.py (+14, -1)
app.py CHANGED
@@ -13,6 +13,14 @@ hf_hub.snapshot_download(model_id, local_dir=model_path)
 pipe = ov_genai.LLMPipeline(model_path, "CPU")
 #pipe.start_chat()
 
+def start_chat():
+    pipe.start_chat()
+    return "✅ Chat started!"
+
+def finish_chat():
+    pipe.finish_chat()
+    return "🛑 Chat ended!"
+
 # Build the inference function: use a streamer and return results via a generator
 def generate_stream(prompt):
     q = queue.Queue()
@@ -47,7 +55,12 @@ with gr.Blocks(css="""
 }
 """) as demo:
     gr.Markdown("## 🧠 OpenVINO Streaming Demo with Gradio Textbox")
-
+
+    start_btn = gr.Button("Start chat")
+    end_btn = gr.Button("End chat")
+    start_btn.click(fn=start_chat, outputs=status_box)
+    end_btn.click(fn=finish_chat, outputs=status_box)
+
     textbox_input = gr.Textbox(label="Prompt", lines=1, placeholder="Enter prompt here...")
     textbox_output = gr.Textbox(label="Output", elem_id="scrollbox", lines=10)
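
For readers following the change: start_chat() and finish_chat() are existing openvino_genai.LLMPipeline methods that open and close a stateful chat session, and this commit wraps them so two Gradio buttons can toggle chat mode. The click handlers write their status string into a status_box component that is not visible in these hunks; if it is not created earlier in the file, the wiring would fail with a NameError when the Blocks context is built. Below is a minimal sketch of how the full app.py could fit together. It is not the committed file: the model_id, the max_new_tokens value, the prompt-submit wiring, and the status_box definition are assumptions, and the custom CSS for #scrollbox is omitted.

app.py (sketch, under the assumptions above):

import queue
import threading

import gradio as gr
import huggingface_hub as hf_hub
import openvino_genai as ov_genai

# Hypothetical model id; the real one is defined earlier in app.py and not shown in this diff.
model_id = "OpenVINO/TinyLlama-1.1B-Chat-v1.0-int4-ov"
model_path = "TinyLlama-1.1B-Chat-v1.0-int4-ov"
hf_hub.snapshot_download(model_id, local_dir=model_path)

pipe = ov_genai.LLMPipeline(model_path, "CPU")

def start_chat():
    pipe.start_chat()           # open a stateful chat session (history is kept between prompts)
    return "✅ Chat started!"

def finish_chat():
    pipe.finish_chat()          # close the session and drop the accumulated history
    return "🛑 Chat ended!"

# Inference function: run generation in a worker thread and stream tokens through a queue
def generate_stream(prompt):
    q = queue.Queue()

    def streamer(subword):
        q.put(subword)          # called by the pipeline for each decoded piece of text
        return False            # False means "keep generating"

    def worker():
        pipe.generate(prompt, max_new_tokens=256, streamer=streamer)  # max_new_tokens is an assumption
        q.put(None)             # sentinel marking the end of generation

    threading.Thread(target=worker, daemon=True).start()

    text = ""
    while True:
        piece = q.get()
        if piece is None:
            break
        text += piece
        yield text              # Gradio streams each partial result to the output box

with gr.Blocks() as demo:       # the committed file also passes custom css for #scrollbox
    gr.Markdown("## 🧠 OpenVINO Streaming Demo with Gradio Textbox")

    status_box = gr.Textbox(label="Status")   # must exist before the click handlers reference it
    start_btn = gr.Button("Start chat")
    end_btn = gr.Button("End chat")
    start_btn.click(fn=start_chat, outputs=status_box)
    end_btn.click(fn=finish_chat, outputs=status_box)

    textbox_input = gr.Textbox(label="Prompt", lines=1, placeholder="Enter prompt here...")
    textbox_output = gr.Textbox(label="Output", elem_id="scrollbox", lines=10)
    textbox_input.submit(fn=generate_stream, inputs=textbox_input, outputs=textbox_output)  # assumed wiring

demo.launch()

Because generate_stream is a generator, Gradio updates textbox_output incrementally as tokens arrive, which is the streaming behavior the demo title advertises.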