# app.py — Hugging Face Space by CMLL (commit e75e07f, 2.59 kB):
# ZhongJing-2 TCM chat demo served via llama.cpp and Gradio.
# (The original paste included web-page chrome — "raw", "history blame" —
# which is not Python; preserved here as a comment header.)
import gradio as gr
from huggingface_hub import hf_hub_download
import os
import subprocess
# URL of the quantized GGUF model weights on the Hugging Face Hub.
model_url = "https://huggingface.co/CMLL/ZhongJing-2-1_8b-GGUF/resolve/main/ZhongJing1_5-1_8b-q4_0.gguf"
# Download the GGUF model weights from the Hugging Face Hub.
def download_model(url, model_dir="models"):
    """Download the GGUF model referenced by *url* into *model_dir*.

    *url* is expected to look like
    ``https://huggingface.co/<repo_id>/resolve/<revision>/<filename>``.
    The repo id and filename are parsed from it, fixing the original bug
    where the *url* parameter was accepted but ignored in favor of
    hard-coded values. For the module-level ``model_url`` this parses to
    exactly the values the original hard-coded, so behavior is unchanged.

    Returns the local filesystem path of the downloaded file.
    """
    os.makedirs(model_dir, exist_ok=True)
    # Split ".../huggingface.co/{repo_id}/resolve/{revision}/{filename}".
    repo_and_rest = url.split("huggingface.co/", 1)[1]
    repo_id, rest = repo_and_rest.split("/resolve/", 1)
    filename = rest.split("/", 1)[1]
    return hf_hub_download(repo_id=repo_id, filename=filename, local_dir=model_dir)
# Environment setup: build llama.cpp, fetch the model, write the system prompt.
def setup_environment():
    """Prepare the Space on first use.

    Clones and builds llama.cpp (only when not already present), downloads
    the GGUF model, and writes the TCM system-prompt file that the chat
    handler appends to.

    Returns a short human-readable status string for the UI textbox.

    Raises:
        subprocess.CalledProcessError: if cloning or building llama.cpp fails.
    """
    if not os.path.exists("llama.cpp"):
        # Argument lists with shell=False avoid shell-quoting pitfalls;
        # check=True surfaces clone/build failures instead of silently
        # continuing with a half-built environment (os.system ignored them).
        subprocess.run(
            ["git", "clone", "https://github.com/ggerganov/llama.cpp.git"],
            check=True,
        )
        os.makedirs("llama.cpp/build", exist_ok=True)
        subprocess.run(["cmake", ".."], cwd="llama.cpp/build", check=True)
        subprocess.run(["make"], cwd="llama.cpp/build", check=True)
    # Return value intentionally unused; the model lands in "models/".
    download_model(model_url)
    prompts_dir = "llama.cpp/prompts"
    os.makedirs(prompts_dir, exist_ok=True)
    with open(os.path.join(prompts_dir, "TcmChat.txt"), "w") as f:
        f.write("You are a helpful TCM medical assistant named 仲景中医大语言模型.\n")
    return "Environment setup complete. Model downloaded and prompts file created."
# Chat handler: append the user turn to the prompt file, run llama.cpp, parse reply.
def chat_with_model(user_input, history):
    """Run one chat turn through the local llama.cpp ``main`` binary.

    Appends the user's message to the TcmChat prompt file, invokes the
    compiled binary on that file, and extracts the reply from stdout.

    Returns ``(history, history)`` so Gradio can update both the chatbot
    widget and its state component.
    """
    prompt = f"User: {user_input}\nAssistant:"
    prompt_file = os.path.join("llama.cpp/prompts", "TcmChat.txt")
    with open(prompt_file, "a") as f:
        f.write(prompt + "\n")
    # Run inference and capture its output. BUG FIX: the original pointed
    # -f at llama.cpp/prompts/chat-with-bob.txt while writing the user's
    # message to TcmChat.txt, so the model never saw the input; the command
    # now reads the same file the handler writes. An argument list with
    # shell=False avoids shell interpretation of the command.
    command = [
        "./llama.cpp/build/bin/main",
        "-m", "models/ZhongJing1_5-1_8b-q4_0.gguf",
        "-n", "256",
        "--repeat_penalty", "1.0",
        "--color",
        "-i",
        "-r", "User:",
        "-f", prompt_file,
    ]
    result = subprocess.run(command, capture_output=True, text=True)
    response = _extract_response(result.stdout)
    history.append((user_input, response))
    return history, history


# Pull the assistant's reply out of raw llama.cpp stdout.
def _extract_response(stdout):
    """Return everything from the first line containing "User:" onward,
    stripped of surrounding whitespace (mirrors the original parsing)."""
    captured = []
    capture = False
    for line in stdout.split("\n"):
        if "User:" in line:
            capture = True
        if capture:
            captured.append(line)
    return "\n".join(captured).strip()
# Build the Gradio interface.
with gr.Blocks() as demo:
    # Conversation display plus hidden state carrying the (user, bot) pairs.
    chat_window = gr.Chatbot()
    conversation = gr.State([])

    with gr.Row():
        with gr.Column():
            # One-time environment preparation controls.
            setup_button = gr.Button("Setup Environment")
            setup_status = gr.Textbox(label="Setup Output")
        with gr.Column():
            # Message entry and submission controls.
            message_box = gr.Textbox(show_label=False, placeholder="Enter your message...")
            send_button = gr.Button("Submit")

    # Wire the buttons to their handlers.
    setup_button.click(setup_environment, outputs=setup_status)
    send_button.click(chat_with_model, [message_box, conversation], [chat_window, conversation])

if __name__ == "__main__":
    demo.launch()