# genai_service/schemas.py
from datetime import datetime
from typing import Annotated, Literal
from uuid import uuid4

from pydantic import (
    AfterValidator,
    BaseModel,
    Field,
    HttpUrl,
    IPvAnyAddress,
    PositiveInt,
    validate_call,
)

VoicePresets = Literal["v2/en_speaker_1", "v2/en_speaker_9"]


class ModelRequest(BaseModel):
    prompt: Annotated[str, Field(min_length=1, max_length=10000)]


class ModelResponse(BaseModel):
    request_id: Annotated[str, Field(default_factory=lambda: uuid4().hex)]
    ip: Annotated[str, IPvAnyAddress] | None
    content: Annotated[str | None, Field(min_length=0, max_length=10000)]
    # default_factory gives each response its own timestamp; a plain
    # `= datetime.now()` default would be evaluated once at import time
    created_at: datetime = Field(default_factory=datetime.now)


class TextModelRequest(ModelRequest):
    model: Literal["gpt-3.5-turbo", "gpt-4o"]
    temperature: Annotated[float, Field(ge=0.0, le=1.0, default=0.0)]


class TextModelResponse(ModelResponse):
    tokens: Annotated[int | None, Field(ge=0)]


ImageSize = Annotated[tuple[PositiveInt, PositiveInt], "Width and height of an image in pixels"]
SupportedModels = Annotated[
    Literal["tinysd", "sd1.5"], "Supported Image Generation Models"
]


@validate_call
def is_square_image(value: ImageSize) -> ImageSize:
    if value[0] / value[1] != 1:
        raise ValueError("Only square images are supported")
    if value[0] not in [512, 1024]:
        raise ValueError(f"Invalid output size: {value} - expected 512 or 1024")
    return value


@validate_call
def is_valid_inference_step(
    num_inference_steps: int, model: SupportedModels
) -> int:
    if model == "tinysd" and num_inference_steps > 2000:
        raise ValueError(
            "TinySD model cannot have more than 2000 inference steps"
        )
    return num_inference_steps
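

# Illustrative only: `AfterValidator` is imported above but never applied in
# this module. The alias below is a sketch (the name `SquareImageSize` is an
# assumption, not part of the original schema) showing how `is_square_image`
# could be attached so Pydantic runs it after parsing the size tuple, e.g. as
# the annotation of `ImageModelRequest.output_size`.
SquareImageSize = Annotated[ImageSize, AfterValidator(is_square_image)]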


class ImageModelRequest(ModelRequest):
    model: SupportedModels
    output_size: ImageSize
    num_inference_steps: Annotated[int, Field(ge=0, le=2000)] = 200


class ImageModelResponse(ModelResponse):
    size: ImageSize
    url: Annotated[str, HttpUrl] | None = None
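

# Minimal usage sketch (illustrative, not part of the original module); the
# example prompts and values below are assumptions.
if __name__ == "__main__":
    text_request = TextModelRequest(
        prompt="Summarise this web page", model="gpt-4o", temperature=0.2
    )
    print(text_request.model_dump())

    image_request = ImageModelRequest(
        prompt="A red square on a white background",
        model="tinysd",
        output_size=(512, 512),
    )
    # The standalone validators raise on invalid input and return the value otherwise.
    is_square_image(image_request.output_size)
    is_valid_inference_step(image_request.num_inference_steps, image_request.model)
    print(image_request.model_dump())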