import socket
import subprocess
import gradio as gr
from openai import OpenAI
def get_local_ip():
    # Open a UDP socket toward an external host (no packets are actually sent)
    s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    try:
        # Google's public DNS IP is used only to pick a valid outbound route
        s.connect(("8.8.8.8", 80))
        ip = s.getsockname()[0]
    except Exception:
        ip = "127.0.0.1"
    finally:
        s.close()
    return ip

print("Local IP:", get_local_ip())
# ✅ Point the client at the local llama.cpp OpenAI-compatible API
client = OpenAI(
    base_url="http://127.0.0.1:8000/v1",  # the client needs a concrete address; "0.0.0.0" is only a listen address
    api_key="sk-local",  # llama.cpp does not validate the key; it just has to be non-empty
    timeout=600,
)
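# A minimal sketch of how the llama.cpp server this client assumes could be
# started. The binary name, model path, and flags are assumptions; adjust them
# to your own llama.cpp build and GGUF file:
#
#   llama-server -m ./qwen3.gguf --host 0.0.0.0 --port 8000 --alias qwen3
#
# --alias sets the model name exposed by the OpenAI-compatible API, which is
# what model="qwen3" in respond() below refers to.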
# ✅ Response function (switched to streaming mode)
def respond(
    message,
    history: list[tuple[str, str]],
    system_message,
    max_tokens,
    temperature,
    top_p,
):
    messages = [{"role": "system", "content": system_message}]
    for user, assistant in history:
        if user:
            messages.append({"role": "user", "content": user})
        if assistant:
            messages.append({"role": "assistant", "content": assistant})
    messages.append({"role": "user", "content": message})
    try:
        # 🔹 Change 1: enable streaming mode
        stream = client.chat.completions.create(
            model="qwen3",  # ⚠️ replace with the model name served by your llama.cpp instance
            messages=messages,
            max_tokens=max_tokens,
            temperature=temperature,
            top_p=top_p,
            stream=True,
        )
        output = ""
        # 🔹 Change 2: accumulate the streamed chunks as they arrive
        for chunk in stream:
            delta = chunk.choices[0].delta.content or ""
            output += delta
            yield output  # ✅ push the partial reply to Gradio in real time
    except Exception as e:
        print(f"[Error] {e}")
        yield "⚠️ The llama.cpp server is not responding; please try again later."
# ✅ Gradio interface (Change 3: respond is a generator, so the UI streams its output)
demo = gr.ChatInterface(
    respond,
    additional_inputs=[
        gr.Textbox(value="You are a friendly assistant.", label="System message"),
        gr.Slider(minimum=1, maximum=4096, value=1024, step=1, label="Max new tokens"),
        gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
        gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-p (nucleus sampling)"),
    ],
)
if __name__ == "__main__":
    demo.launch()
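# Note: get_local_ip() above is only printed for reference. To reach the UI from
# other devices at that IP, Gradio has to listen on all interfaces; the port is
# an assumption (7860 is Gradio's default), e.g.:
#
#   demo.launch(server_name="0.0.0.0", server_port=7860)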