File size: 2,113 Bytes
d0dd276
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
from typing import List, Dict, Optional, Union, Literal, Any
from pydantic import BaseModel, Field

# OpenAI-style chat completion request
class ChatCompletionRequest(BaseModel):
    """OpenAI-compatible ``/v1/chat/completions`` request body.

    Carries several non-standard extension fields (``top_k``,
    ``response_logprobs``, ``thinking_budget``, ``reasoning_effort``) —
    presumably forwarded to a Gemini-style backend; confirm against the
    request handler.
    """
    model: str
    messages: List[Dict[str, Any]]  # chat turns, e.g. {"role": ..., "content": ...}
    temperature: float = 0.7
    top_p: Optional[float] = None
    # NOTE(review): top_k is conventionally an integer; it is typed float here.
    # Kept as-is so existing callers are unaffected — confirm whether int is intended.
    top_k: Optional[float] = None
    n: int = 1  # number of completions requested
    stream: bool = False  # True => server-sent-events streaming response
    stop: Optional[Union[str, List[str]]] = None
    max_tokens: Optional[int] = None
    presence_penalty: Optional[float] = 0.0
    frequency_penalty: Optional[float] = 0.0
    seed: Optional[int] = None
    logprobs: Optional[int] = None
    response_logprobs: Optional[bool] = None
    thinking_budget: Optional[int] = None
    reasoning_effort: Optional[str] = None  # fixed: removed space before colon (PEP 8)
    # Function calling
    tools: Optional[List[Dict[str, Any]]] = None
    tool_choice: Optional[Union[Literal["none", "auto"], Dict[str, Any]]] = "auto"

# Gemini-style request
class ChatRequestGemini(BaseModel):
    """Request body in Google Gemini ``generateContent`` format.

    Both ``system_instruction`` (snake_case) and ``systemInstruction``
    (camelCase) are declared so clients may send either spelling;
    downstream code must read whichever one is set.
    """
    contents: List[Dict[str, Any]]
    system_instruction: Optional[Dict[str, Any]] = None  # fixed: space around '=' (PEP 8)
    systemInstruction: Optional[Dict[str, Any]] = None  # fixed: space around '=' (PEP 8)
    safetySettings: Optional[List[Dict[str, Any]]] = None
    generationConfig: Optional[Dict[str, Any]] = None
    tools: Optional[List[Dict[str, Any]]] = None

# Wrapper around an AI-model request
class AIRequest(BaseModel):
    """Internal wrapper bundling a provider request with routing metadata."""
    payload: Optional[ChatRequestGemini] = None  # Gemini-format request body, if any
    model: Optional[str] = None  # target model name
    stream: bool = False  # whether the caller wants a streaming response
    format_type: Optional[str] = "gemini"  # wire format of `payload`; defaults to "gemini"

class Usage(BaseModel):
    """Token-usage accounting in the OpenAI response format."""
    # All counts default to 0 so a response can omit usage data safely.
    prompt_tokens: int = 0
    completion_tokens: int = 0
    total_tokens: int = 0

class ChatCompletionResponse(BaseModel):
    """OpenAI-compatible chat completion response envelope."""
    id: str
    object: Literal["chat.completion"]  # fixed discriminator value per the OpenAI schema
    created: int  # presumably a Unix timestamp in seconds — confirm against producer
    model: str
    choices: List[Any]  # untyped choice objects; schema not enforced here
    usage: Usage = Field(default_factory=Usage)  # zeroed Usage when the backend reports none

class ErrorResponse(BaseModel):
    """Error payload in the OpenAI error-object shape."""
    message: str  # human-readable description
    type: str  # error category string
    param: Optional[str] = None  # offending request parameter, when known
    code: Optional[str] = None  # machine-readable error code, when known

class ModelList(BaseModel):
    """OpenAI-compatible ``/v1/models`` listing response."""
    object: str = "list"  # constant discriminator per the OpenAI schema
    data: List[Dict[str, Any]]  # one dict per available model

class ChatResponseGemini(BaseModel):
    """Response body in Google Gemini ``generateContent`` format; all fields optional."""
    candidates: Optional[List[Any]] = None  # generated candidate completions
    promptFeedback: Optional[Any] = None  # safety/blocking feedback on the prompt
    usageMetadata: Optional[Dict[str, int]] = None  # token counts keyed by metadata name