import gradio as gr
import matplotlib.pyplot as plt
import io
import numpy as np
import base64
from PIL import Image
import requests
import json

# Convert the current matplotlib figure to a PIL image so Gradio can display it
def get_image_data(plt):
    buf = io.BytesIO()
    plt.savefig(buf, format='PNG')
    buf.seek(0)
    img = Image.open(buf)
    return img

# Execute the generated Python code and return the resulting figure as an image
def execute_code(code):
    exec(code)
    return get_image_data(plt)
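# Note: chat models often wrap their reply in ``` fences even when told not to,
# so a small guard before exec may be needed (a sketch, not part of the original flow):
#   code = code.strip().removeprefix("```python").removeprefix("```").removesuffix("```").strip()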

def gpt_inference(base_url, model, openai_key, prompt):
    
    newprompt = f'Write Python code that does the following: \n\n{prompt}\n\nNote, the code is going to be executed in a Jupyter Python kernel.\n\nLast instruction, and this is the most important, just return code. No other outputs, as your full response will directly be executed in the kernel.'
    
    headers = {
        'Content-Type': 'application/json',
        'Authorization': f'Bearer {openai_key}'
    }

    data = {
        "model": model,
        "messages": [
            {
                "role": "system",
                "content": "You are a helpful assistant."
            },
            {
                "role": "user",
                "content": newprompt
            }
        ]
    }

    response = requests.post(f"{base_url}/v1/chat/completions", headers=headers, data=json.dumps(data))
    response_json = response.json()
    code = response_json['choices'][0]['message']['content'].strip()
    img = execute_code(code)
    return img

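# Illustrative call with placeholder values (assuming the public OpenAI endpoint
# and a valid API key), e.g.:
#   gpt_inference("https://api.openai.com", "gpt-3.5-turbo", "sk-...",
#                 "plot a sine wave from 0 to 2*pi")
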
iface = gr.Interface(
    fn=gpt_inference,
    inputs=[
        gr.Textbox(label="Base URL"),
        gr.Dropdown(choices=["gpt-3.5-turbo", "gpt-4"], label="Model"),
        gr.Textbox(label="OpenAI Key"),
        gr.Textbox(label="Prompt"),
    ],
    outputs=gr.Image(type="pil"),
)
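# launch() starts a local Gradio server (by default at http://127.0.0.1:7860);
# passing share=True would also create a temporary public link.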
iface.launch()