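"""A small Flask proxy that forwards OpenAI-style chat completion requests.

The upstream host is taken from the request path (POST /<host>/v1/chat/completions),
the caller's Bearer token is reused as the upstream API key, and both streaming
and non-streaming responses are supported.
"""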
from flask import Flask, request, Response, stream_with_context, jsonify
from openai import OpenAI
import requests
import json

app = Flask(__name__)

@app.route('/')
def index():
    return "Hello, this is the root page of your Flask application!"

@app.route('/<path:subpath>', methods=['POST'])
def forward_to_target(subpath):
    try:
        # Build the target URL from the requested sub-path
        target_url = f'https://{subpath}'
        print(f"Target URL: {target_url}")  # debug output

        # Get the JSON request body
        data = request.json
        print(f"Request data: {data}")  # debug output

        # Chat completion requests are forwarded through the OpenAI client
        if 'v1/chat/completions' in subpath:
            auth_header = request.headers.get('Authorization')
            if not auth_header or not auth_header.startswith('Bearer '):
                return jsonify({"error": "Unauthorized"}), 401

            api_key = auth_header.split(" ")[1]
            # Rebuild the target base URL as "https://<host>/v1" for the OpenAI client
            target_url = f"https://{subpath.split('/')[0]}/v1"
            '''
            # Kept for reference: the payload used to be rebuilt field by field;
            # the full request body is now passed straight to the client instead.
            model = data['model']
            messages = data['messages']
            temperature = data.get('temperature', 0.7)  # default 0.7
            top_p = data.get('top_p', 1.0)              # default 1.0
            n = data.get('n', 1)                        # default 1
            stream = data.get('stream', False)          # default False
            functions = data.get('functions', None)     # functions for function calling
            function_call = data.get('function_call', None)  # specific function call request

            payload = {
                'model': model,
                'messages': messages,
                'temperature': temperature,
                'top_p': top_p,
                'n': n,
                'stream': stream,
                'functions': functions,
                'function_call': function_call
            }
            print(f"Payload: {payload}")  # debug output
            '''
            # Create a per-request OpenAI client pointed at the target host
            client = OpenAI(
                api_key=api_key,
                base_url=target_url
            )
            stream = data.get('stream', False)  # defaults to non-streaming
            if stream:
                # Relay the upstream stream as server-sent events
                def generate():
                    response = client.chat.completions.create(**data)

                    for chunk in response:
                        # Each chunk is a ChatCompletionChunk object; convert it
                        # to a dict so it can be JSON-encoded on a single line
                        yield f"data: {json.dumps(chunk.to_dict())}\n\n"
                    # Terminate the stream the same way the OpenAI API does
                    yield "data: [DONE]\n\n"

                return Response(stream_with_context(generate()), content_type='text/event-stream')
            else:
                response = client.chat.completions.create(**data)
                response_dict = response.to_dict()  # convert the response object to a plain dict
                #print(f"Response: {response_dict}")  # debug output
                return jsonify(response_dict)

        # Any other path is not handled by this proxy
        return jsonify({"error": f"Unsupported path: {subpath}"}), 404

    except requests.exceptions.RequestException as e:
        print(f"RequestException: {e}")  # debug output
        return jsonify({"error": str(e)}), 500
    except Exception as e:
        print(f"Exception: {e}")  # debug output
        return jsonify({"error": str(e)}), 500


if __name__ == "__main__":
    app.run(host='0.0.0.0', port=4500, threaded=True)
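
# Usage sketch (assumptions: the proxy runs locally on port 4500 and the
# upstream host is api.openai.com; substitute your own host, key and model):
#
#     from openai import OpenAI
#
#     client = OpenAI(
#         api_key="sk-...",  # placeholder key; forwarded as the Bearer token
#         base_url="http://localhost:4500/api.openai.com/v1",
#     )
#     reply = client.chat.completions.create(
#         model="gpt-4o-mini",  # illustrative model name
#         messages=[{"role": "user", "content": "Hello"}],
#     )
#     print(reply.choices[0].message.content)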