hsuwill000 committed on
Commit
6587c04
·
verified ·
1 Parent(s): a211782

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +14 -21
app.py CHANGED
@@ -19,33 +19,26 @@ tokenizer.set_chat_template(tokenizer.chat_template)
19
 
20
 
21
def generate_response(prompt):
    """Generate a streamed response for *prompt* and report throughput.

    Returns a ``(tokens_per_sec, response_text)`` pair so the result can be
    wired directly to the two Gradio output textboxes.
    """
    full_response = ""

    # Accumulate subwords as the pipeline streams them out.  The previous
    # version also called response_box.update(...) on a global component
    # here, but Gradio components are not updated by calling .update()
    # from inside a worker callback — the UI only sees returned values.
    def streamer(subword):
        nonlocal full_response
        full_response += subword
        return ov_genai.StreamingStatus.RUNNING

    try:
        generated = pipe.generate(prompt, streamer=streamer, max_new_tokens=100)
        # Mean throughput reported by the pipeline's performance metrics.
        tokenpersec = f'{generated.perf_metrics.get_throughput().mean:.2f}'
        return tokenpersec, full_response
    except Exception as e:
        # Exactly two outputs are wired up in the UI, so the error path
        # must also return exactly two values (the original returned three,
        # which itself triggers a Gradio error instead of showing the message).
        return "發生錯誤", f"生成回應時發生錯誤:{e}"
40
-
41
 
42
# 建立 Gradio 介面 (build the Gradio interface)
with gr.Blocks() as demo:
    input_box = gr.Textbox(lines=5, label="輸入提示 (Prompt)")
    # NOTE(review): the original passed streaming=True to gr.Textbox;
    # Textbox does not accept that parameter (it belongs to media
    # components such as Audio) — confirm against the installed Gradio.
    response_box = gr.Textbox(label="回應")
    speed_box = gr.Textbox(label="tokens/sec")

    # Use .submit (fires once, on Enter) instead of .change, which
    # triggered a full model generation on every keystroke.
    input_box.submit(
        fn=generate_response,
        inputs=input_box,
        outputs=[speed_box, response_box],
    )

if __name__ == "__main__":
    demo.launch()
 
19
 
20
 
21
def generate_response(prompt):
    """Run the OpenVINO GenAI pipeline on *prompt*.

    Returns a ``(tokens_per_sec, response_text)`` pair matching the two
    Gradio output textboxes.
    """
    try:
        generated = pipe.generate([prompt], max_length=1024)
        # Mean throughput from the pipeline's performance metrics.
        tokenpersec = f'{generated.perf_metrics.get_throughput().mean:.2f}'
        # Coerce the results object to str explicitly — the output
        # component is a Textbox and expects string content.
        return tokenpersec, str(generated)
    except Exception as e:
        # The gr.Interface declares exactly two outputs; the original
        # returned three values here, which makes Gradio raise its own
        # error instead of displaying this message.
        return "發生錯誤", f"生成回應時發生錯誤:{e}"
29
+
30
 
31
# 建立 Gradio 介面 (build the Gradio interface)
# Two output boxes: throughput first, then the generated reply — this
# order must match the (tokens_per_sec, response) pair the handler returns.
output_components = [
    gr.Textbox(label="tokens/sec"),
    gr.Textbox(label="回應"),
]
prompt_input = gr.Textbox(lines=1, label="輸入提示 (Prompt)")

demo = gr.Interface(
    fn=generate_response,
    inputs=prompt_input,
    outputs=output_components,
    title="Qwen3-0.6B-int4-ov ",
    description="基於 Qwen3-0.6B-int4-ov 推理應用,支援思考過程分離與 GUI。",
)

if __name__ == "__main__":
    demo.launch()