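"""Gradio chat UI for the Vercel AI Playground, via the vercel_ai (vercel-llm-api) client.

Prompt collections are loaded from the local prompt_set/ directory (CSV or JSON files of
'act'/'prompt' pairs); the user picks a model and an optional canned prompt, and the model's
reply is streamed chunk by chunk into a Chatbot component.
"""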
import os
import json

import pandas as pd
import gradio as gr
import vercel_ai

# Client for the Vercel AI Playground (vercel_ai / vercel-llm-api package).
client = vercel_ai.Client()

# Load every prompt collection under prompt_set/ into {filename: {act: prompt}}.
# CSV files are expected to have 'act' and 'prompt' columns; JSON files a list of
# {"act": ..., "prompt": ...} objects.
prompt_set_list = {}
for prompt_file in os.listdir("prompt_set"):
    key = prompt_file
    if key.endswith('.csv'):
        df = pd.read_csv(os.path.join("prompt_set", prompt_file))
        prompt_dict = dict(zip(df['act'], df['prompt']))
    else:
        with open(os.path.join("prompt_set", prompt_file), encoding='utf-8') as f:
            ds = json.load(f)
        prompt_dict = {item["act"]: item["prompt"] for item in ds}
    prompt_set_list[key] = prompt_dict

with gr.Blocks() as demo:
    chatbot = gr.Chatbot([], label='AI')
    msg = gr.Textbox(value="", label='Your message:')
    with gr.Row():
        clear = gr.Button("Clear conversation", scale=2)
        chat_mode = gr.Checkbox(value=True, label='Chat mode', interactive=True, scale=1)

    with gr.Row():
        # Default to the bundled Chinese prompt collection ("1 中文提示词.json").
        default_prompt_set = "1 中文提示词.json"
        prompt_set_name = gr.Dropdown(prompt_set_list.keys(), value=default_prompt_set, label='Prompt set')
        prompt_name = gr.Dropdown(prompt_set_list[default_prompt_set].keys(), label='Prompt', min_width=20)
    with gr.Row():
        model_name = gr.Dropdown(client.model_ids, value="openai:gpt-3.5-turbo", label='Model')

    def change_prompt_set(prompt_set_name):
        # Repopulate the prompt dropdown when a different prompt collection is chosen.
        return gr.update(choices=list(prompt_set_list[prompt_set_name].keys()))

    def change_prompt(prompt_set_name, prompt_name):
        # Copy the selected prompt text into the message box.
        return gr.update(value=prompt_set_list[prompt_set_name][prompt_name])

    def user(user_message, history):
        # Append the user's turn to the history and lock the textbox while the bot replies.
        return gr.update(value="", interactive=False), history + [[user_message, None]]

    def bot(history, model_name, chat_mode):
        # Signature matches the .then(bot, [chatbot, model_name, chat_mode], ...) wiring below.
        history[-1][1] = ''

        # Rebuild the conversation as role/content messages for multi-turn chat.
        messages = []
        for user_message, assistant_message in history[:-1]:
            messages.append({"role": "user", "content": user_message})
            messages.append({"role": "assistant", "content": assistant_message})
        messages.append({"role": "user", "content": history[-1][0]})

        if chat_mode:
            # Chat mode: send the whole history (assumes the vercel-llm-api client.chat interface).
            bot_msg = client.chat(model_name, messages)
        else:
            # Completion mode: send only the latest user message.
            bot_msg = client.generate(model_name, history[-1][0])

        # The client yields text chunks; stream them into the last chatbot turn.
        for c in bot_msg:
            history[-1][1] += c
            yield history

    def empty_chat():
        # Re-create the client and clear the chat history.
        global client
        client = vercel_ai.Client()
        return None
    # Submit flow: append the user turn, then stream the model's reply into the chatbot.
    response = msg.submit(user, [msg, chatbot], [msg, chatbot], queue=False).then(
        bot, [chatbot, model_name, chat_mode], chatbot
    )
    prompt_set_name.select(change_prompt_set, prompt_set_name, prompt_name)
    # Selecting a prompt pastes its text into the message box.
    prompt_name.select(change_prompt, [prompt_set_name, prompt_name], msg)

    # Re-enable the textbox once streaming has finished.
    response.then(lambda: gr.update(interactive=True), None, [msg], queue=False)
    clear.click(empty_chat, None, [chatbot], queue=False)

demo.title = "AI Chat"
demo.queue()
demo.launch()