|
import gradio as gr |
|
|
|
import os |
|
import json |
|
import pandas as pd |
|
|
|
import vercel_ai |
|
# Module-level API client shared by the handlers below (used by bot()).
client = vercel_ai.Client()
|
|
|
# Build {filename: {act: prompt}} from every file under prompt_set/.
# CSV files must have 'act' and 'prompt' columns; anything else is parsed
# as a JSON array of {"act": ..., "prompt": ...} objects.
prompt_set_list = {}
for prompt_file in os.listdir("prompt_set"):
    path = os.path.join("prompt_set", prompt_file)  # portable join instead of string concat
    # endswith, not substring: a name like "a.csv.json" must not be read as CSV
    if prompt_file.endswith('.csv'):
        df = pd.read_csv(path)
        prompt_dict = dict(zip(df['act'], df['prompt']))
    else:
        with open(path, encoding='utf-8') as f:
            ds = json.load(f)
        prompt_dict = {item["act"]: item["prompt"] for item in ds}
    prompt_set_list[prompt_file] = prompt_dict
|
|
|
# UI layout: chat transcript on top, input box below, then control rows.
with gr.Blocks() as demo:
    chatbot = gr.Chatbot([], label='AI')
    msg = gr.Textbox(value="", label='请输入:')
    with gr.Row():
        clear = gr.Button("清空对话", scale=2)
        # Chat-mode toggle; wired into bot() below.
        chat_mode = gr.Checkbox(value=True, label='聊天模式', interactive=True, scale=1)

    with gr.Row():
        # Which prompt collection file to use; must exist in prompt_set_list.
        default_prompt_set = "1 中文提示词.json"
        prompt_set_name = gr.Dropdown(prompt_set_list.keys(), value=default_prompt_set, label='提示词集合')
        # Individual prompts ("act" names) within the selected collection.
        prompt_name = gr.Dropdown(prompt_set_list[default_prompt_set].keys(), label='提示词', min_width=20)
    with gr.Row():
        # Model picker populated from the client's known model ids.
        model_name = gr.Dropdown(client.model_ids, value="openai:gpt-3.5-turbo", label='模型')
|
|
|
def change_prompt_set(prompt_set_name): |
|
return gr.Dropdown.update(choices=list(prompt_set_list[prompt_set_name].keys())) |
|
|
|
def change_prompt(prompt_set_name, prompt_name): |
|
return gr.update(value=prompt_set_list[prompt_set_name][prompt_name]) |
|
|
|
def user(user_message, history = []): |
|
return gr.update(value="", interactive=False), history + [[user_message, None]] |
|
|
|
def bot(history, model_name, system_msg, chat_mode): |
|
history[-1][1] = '' |
|
|
|
messages = [] |
|
for user_message, assistant_message in history[:-1]: |
|
messages.append({"role": "user", "content": user_message}) |
|
messages.append({"role": "assistant", "content": assistant_message}) |
|
messages.append({"role": "user", "content": history[-1][0]}) |
|
|
|
|
|
bot_msg = client.generate(model_name, history[-1][0]) |
|
|
|
for c in bot_msg: |
|
history[-1][1] += c |
|
|
|
yield history |
|
|
|
def empty_chat(): |
|
client = vercel_ai.Client() |
|
return None |
|
response = msg.submit(user, [msg, chatbot], [msg, chatbot], queue=False).then( |
|
bot, [chatbot, model_name, chat_mode], chatbot |
|
) |
|
prompt_set_name.select(change_prompt_set, prompt_set_name, prompt_name) |
|
prompt_name.select(change_prompt, [prompt_set_name, prompt_name]) |
|
|
|
response.then(lambda: gr.update(interactive=True), None, [msg], queue=False) |
|
clear.click(empty_chat, None, [chatbot], queue=False) |
|
|
|
# App entry: enable the request queue (required for streaming generators)
# and start the Gradio server.
demo.title = "AI Chat"
demo.queue()
demo.launch()