"""Pydantic request/response models for the OpenAI chat-completions API and
the Google Gemini generateContent API."""
from pydantic import BaseModel, Field
from typing import List, Optional, Union, Dict, Any
# OpenAI Models
class OpenAIChatMessage(BaseModel):
    """A single chat message in OpenAI format."""

    # Sender role, e.g. "system", "user", or "assistant" — not validated here.
    role: str
    # Plain text, or a list of content-part dicts (multimodal-style payloads).
    content: Union[str, List[Dict[str, Any]]]
    # Optional chain-of-thought text emitted by reasoning-capable backends.
    reasoning_content: Optional[str] = None
class OpenAIChatCompletionRequest(BaseModel):
    """Request body for POST /v1/chat/completions (OpenAI-compatible).

    All sampling parameters are optional; ``None`` means "use the backend's
    default" and is typically omitted from the forwarded request.
    """

    model: str
    messages: List[OpenAIChatMessage]
    # When True the server responds with SSE chunks instead of a single body.
    stream: bool = False
    temperature: Optional[float] = None
    top_p: Optional[float] = None
    max_tokens: Optional[int] = None
    # A single stop string or a list of them.
    stop: Optional[Union[str, List[str]]] = None
    frequency_penalty: Optional[float] = None
    presence_penalty: Optional[float] = None
    # Number of completions to generate.
    n: Optional[int] = None
    seed: Optional[int] = None
    # e.g. {"type": "json_object"} — passed through opaquely.
    response_format: Optional[Dict[str, Any]] = None

    class Config:
        # Allow additional fields not explicitly defined, so unknown
        # OpenAI parameters pass through instead of failing validation.
        extra = "allow"
class OpenAIChatCompletionChoice(BaseModel):
    """One completed choice in a non-streaming response."""

    index: int
    message: OpenAIChatMessage
    # e.g. "stop" or "length"; None if the backend did not report one.
    finish_reason: Optional[str] = None
class OpenAIChatCompletionResponse(BaseModel):
    """Full (non-streaming) chat-completion response envelope.

    NOTE(review): the OpenAI schema also carries a ``usage`` object; it is
    not modeled here — confirm callers do not need token accounting.
    """

    id: str
    # Constant discriminator, normally "chat.completion".
    object: str
    # Unix timestamp (seconds).
    created: int
    model: str
    choices: List[OpenAIChatCompletionChoice]
class OpenAIDelta(BaseModel):
    """Incremental message fragment carried by one streaming chunk."""

    content: Optional[str] = None
    # Incremental reasoning text, mirroring OpenAIChatMessage.reasoning_content.
    reasoning_content: Optional[str] = None
class OpenAIChatCompletionStreamChoice(BaseModel):
    """One choice within a streaming chunk; carries a delta, not a full message."""

    index: int
    delta: OpenAIDelta
    # Set only on the final chunk of a choice.
    finish_reason: Optional[str] = None
class OpenAIChatCompletionStreamResponse(BaseModel):
    """Envelope for one SSE chunk of a streaming chat completion."""

    id: str
    # Constant discriminator, normally "chat.completion.chunk".
    object: str
    # Unix timestamp (seconds).
    created: int
    model: str
    choices: List[OpenAIChatCompletionStreamChoice]
# Gemini Models
class GeminiPart(BaseModel):
    """A single content part; only text parts are modeled here."""

    text: str
class GeminiContent(BaseModel):
    """A turn of content in Gemini format."""

    # Gemini uses "user" / "model" roles — not validated here.
    role: str
    parts: List[GeminiPart]
class GeminiRequest(BaseModel):
    """Request body for the Gemini generateContent endpoint (contents only)."""

    contents: List[GeminiContent]
class GeminiCandidate(BaseModel):
    """One generated candidate in a Gemini response."""

    content: GeminiContent
    # NOTE(review): the Gemini wire format uses camelCase ("finishReason");
    # this snake_case field will not populate from a raw API payload without
    # an alias — confirm how this model is fed before adding one.
    finish_reason: Optional[str] = None
    index: int
class GeminiResponse(BaseModel):
    """Top-level Gemini generateContent response (candidates only).

    Fix: removed a stray trailing ``|`` artifact after the ``candidates``
    annotation, which was extraction residue and not valid Python.
    """

    candidates: List[GeminiCandidate]