diff --git a/ckpts/universal/global_step40/zero/18.mlp.dense_h_to_4h_swiglu.weight/exp_avg_sq.pt b/ckpts/universal/global_step40/zero/18.mlp.dense_h_to_4h_swiglu.weight/exp_avg_sq.pt new file mode 100644 index 0000000000000000000000000000000000000000..7c3f2b8802a1a4458f2076ae1fb2b259c7227a7f --- /dev/null +++ b/ckpts/universal/global_step40/zero/18.mlp.dense_h_to_4h_swiglu.weight/exp_avg_sq.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7daeb05f2108583059f06c773407e45d3127b2cc8fb034af671fcaa23d5af3a7 +size 33555627 diff --git a/ckpts/universal/global_step40/zero/18.mlp.dense_h_to_4h_swiglu.weight/fp32.pt b/ckpts/universal/global_step40/zero/18.mlp.dense_h_to_4h_swiglu.weight/fp32.pt new file mode 100644 index 0000000000000000000000000000000000000000..bbf04ad80d2d2d6759dd404674beac997c4fbfcc --- /dev/null +++ b/ckpts/universal/global_step40/zero/18.mlp.dense_h_to_4h_swiglu.weight/fp32.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2ad139000d9de29cc3a4a3a97941d0e8696bda1f464c744a140168c3c0f9acd2 +size 33555533 diff --git a/ckpts/universal/global_step40/zero/4.attention.query_key_value.weight/exp_avg_sq.pt b/ckpts/universal/global_step40/zero/4.attention.query_key_value.weight/exp_avg_sq.pt new file mode 100644 index 0000000000000000000000000000000000000000..6dc29d489475779ef6e5590b3e2b37f01bb430d4 --- /dev/null +++ b/ckpts/universal/global_step40/zero/4.attention.query_key_value.weight/exp_avg_sq.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:fcb850c6c2a9a94952078771e93da0384ef661a5745b6fdbc9e597189563de75 +size 50332843 diff --git a/venv/lib/python3.10/site-packages/__pycache__/__editable___lm_eval_0_4_2_finder.cpython-310.pyc b/venv/lib/python3.10/site-packages/__pycache__/__editable___lm_eval_0_4_2_finder.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..05577488734336ca9e0e947a8f79587f0c879cd2 Binary files /dev/null and b/venv/lib/python3.10/site-packages/__pycache__/__editable___lm_eval_0_4_2_finder.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/__pycache__/isympy.cpython-310.pyc b/venv/lib/python3.10/site-packages/__pycache__/isympy.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0b8bfb1780d2ae2e1551ca6123746d1052b6d1e7 Binary files /dev/null and b/venv/lib/python3.10/site-packages/__pycache__/isympy.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/__pycache__/six.cpython-310.pyc b/venv/lib/python3.10/site-packages/__pycache__/six.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9bfa90de33509b7b2e2dc7578e6d02d1e68f230d Binary files /dev/null and b/venv/lib/python3.10/site-packages/__pycache__/six.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/__pycache__/sqlitedict.cpython-310.pyc b/venv/lib/python3.10/site-packages/__pycache__/sqlitedict.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..25efbbff2ee7f855ed21992a7e28a06fab0471a7 Binary files /dev/null and b/venv/lib/python3.10/site-packages/__pycache__/sqlitedict.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/__pycache__/threadpoolctl.cpython-310.pyc b/venv/lib/python3.10/site-packages/__pycache__/threadpoolctl.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5210d21c4b3369ea27c3f857b606cefe80d26a1a Binary files /dev/null and 
b/venv/lib/python3.10/site-packages/__pycache__/threadpoolctl.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/__pycache__/typing_extensions.cpython-310.pyc b/venv/lib/python3.10/site-packages/__pycache__/typing_extensions.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..31e14389aefe78b8ddf1b7b4ef7fffa0d1bee224 Binary files /dev/null and b/venv/lib/python3.10/site-packages/__pycache__/typing_extensions.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/numpy.libs/libquadmath-96973f99.so.0.0.0 b/venv/lib/python3.10/site-packages/numpy.libs/libquadmath-96973f99.so.0.0.0 new file mode 100644 index 0000000000000000000000000000000000000000..05e193bdd18b0edbec3774904c97407a4ff0afbe Binary files /dev/null and b/venv/lib/python3.10/site-packages/numpy.libs/libquadmath-96973f99.so.0.0.0 differ diff --git a/venv/lib/python3.10/site-packages/pydantic/__init__.py b/venv/lib/python3.10/site-packages/pydantic/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..8880b4f11cf370ca56cae9d0bce30c6393e92b1d --- /dev/null +++ b/venv/lib/python3.10/site-packages/pydantic/__init__.py @@ -0,0 +1,400 @@ +import typing + +from ._migration import getattr_migration +from .version import VERSION + +if typing.TYPE_CHECKING: + # import of virtually everything is supported via `__getattr__` below, + # but we need them here for type checking and IDE support + import pydantic_core + from pydantic_core.core_schema import ( + FieldSerializationInfo, + SerializationInfo, + SerializerFunctionWrapHandler, + ValidationInfo, + ValidatorFunctionWrapHandler, + ) + + from . import dataclasses + from ._internal._generate_schema import GenerateSchema as GenerateSchema + from .aliases import AliasChoices, AliasGenerator, AliasPath + from .annotated_handlers import GetCoreSchemaHandler, GetJsonSchemaHandler + from .config import ConfigDict, with_config + from .errors import * + from .fields import Field, PrivateAttr, computed_field + from .functional_serializers import ( + PlainSerializer, + SerializeAsAny, + WrapSerializer, + field_serializer, + model_serializer, + ) + from .functional_validators import ( + AfterValidator, + BeforeValidator, + InstanceOf, + PlainValidator, + SkipValidation, + WrapValidator, + field_validator, + model_validator, + ) + from .json_schema import WithJsonSchema + from .main import * + from .networks import * + from .type_adapter import TypeAdapter + from .types import * + from .validate_call_decorator import validate_call + from .warnings import PydanticDeprecatedSince20, PydanticDeprecatedSince26, PydanticDeprecationWarning + + # this encourages pycharm to import `ValidationError` from here, not pydantic_core + ValidationError = pydantic_core.ValidationError + from .deprecated.class_validators import root_validator, validator + from .deprecated.config import BaseConfig, Extra + from .deprecated.tools import * + from .root_model import RootModel + +__version__ = VERSION +__all__ = ( + # dataclasses + 'dataclasses', + # functional validators + 'field_validator', + 'model_validator', + 'AfterValidator', + 'BeforeValidator', + 'PlainValidator', + 'WrapValidator', + 'SkipValidation', + 'InstanceOf', + # JSON Schema + 'WithJsonSchema', + # deprecated V1 functional validators, these are imported via `__getattr__` below + 'root_validator', + 'validator', + # functional serializers + 'field_serializer', + 'model_serializer', + 'PlainSerializer', + 'SerializeAsAny', + 'WrapSerializer', + # config + 'ConfigDict', + 
'with_config', + # deprecated V1 config, these are imported via `__getattr__` below + 'BaseConfig', + 'Extra', + # validate_call + 'validate_call', + # errors + 'PydanticErrorCodes', + 'PydanticUserError', + 'PydanticSchemaGenerationError', + 'PydanticImportError', + 'PydanticUndefinedAnnotation', + 'PydanticInvalidForJsonSchema', + # fields + 'Field', + 'computed_field', + 'PrivateAttr', + # alias + 'AliasChoices', + 'AliasGenerator', + 'AliasPath', + # main + 'BaseModel', + 'create_model', + # network + 'AnyUrl', + 'AnyHttpUrl', + 'FileUrl', + 'HttpUrl', + 'FtpUrl', + 'WebsocketUrl', + 'AnyWebsocketUrl', + 'UrlConstraints', + 'EmailStr', + 'NameEmail', + 'IPvAnyAddress', + 'IPvAnyInterface', + 'IPvAnyNetwork', + 'PostgresDsn', + 'CockroachDsn', + 'AmqpDsn', + 'RedisDsn', + 'MongoDsn', + 'KafkaDsn', + 'NatsDsn', + 'MySQLDsn', + 'MariaDBDsn', + 'ClickHouseDsn', + 'validate_email', + # root_model + 'RootModel', + # deprecated tools, these are imported via `__getattr__` below + 'parse_obj_as', + 'schema_of', + 'schema_json_of', + # types + 'Strict', + 'StrictStr', + 'conbytes', + 'conlist', + 'conset', + 'confrozenset', + 'constr', + 'StringConstraints', + 'ImportString', + 'conint', + 'PositiveInt', + 'NegativeInt', + 'NonNegativeInt', + 'NonPositiveInt', + 'confloat', + 'PositiveFloat', + 'NegativeFloat', + 'NonNegativeFloat', + 'NonPositiveFloat', + 'FiniteFloat', + 'condecimal', + 'condate', + 'UUID1', + 'UUID3', + 'UUID4', + 'UUID5', + 'FilePath', + 'DirectoryPath', + 'NewPath', + 'Json', + 'Secret', + 'SecretStr', + 'SecretBytes', + 'StrictBool', + 'StrictBytes', + 'StrictInt', + 'StrictFloat', + 'PaymentCardNumber', + 'ByteSize', + 'PastDate', + 'FutureDate', + 'PastDatetime', + 'FutureDatetime', + 'AwareDatetime', + 'NaiveDatetime', + 'AllowInfNan', + 'EncoderProtocol', + 'EncodedBytes', + 'EncodedStr', + 'Base64Encoder', + 'Base64Bytes', + 'Base64Str', + 'Base64UrlBytes', + 'Base64UrlStr', + 'GetPydanticSchema', + 'Tag', + 'Discriminator', + 'JsonValue', + # type_adapter + 'TypeAdapter', + # version + '__version__', + 'VERSION', + # warnings + 'PydanticDeprecatedSince20', + 'PydanticDeprecatedSince26', + 'PydanticDeprecationWarning', + # annotated handlers + 'GetCoreSchemaHandler', + 'GetJsonSchemaHandler', + # generate schema from ._internal + 'GenerateSchema', + # pydantic_core + 'ValidationError', + 'ValidationInfo', + 'SerializationInfo', + 'ValidatorFunctionWrapHandler', + 'FieldSerializationInfo', + 'SerializerFunctionWrapHandler', + 'OnErrorOmit', +) + +# A mapping of {: (package, )} defining dynamic imports +_dynamic_imports: 'dict[str, tuple[str, str]]' = { + 'dataclasses': (__package__, '__module__'), + # functional validators + 'field_validator': (__package__, '.functional_validators'), + 'model_validator': (__package__, '.functional_validators'), + 'AfterValidator': (__package__, '.functional_validators'), + 'BeforeValidator': (__package__, '.functional_validators'), + 'PlainValidator': (__package__, '.functional_validators'), + 'WrapValidator': (__package__, '.functional_validators'), + 'SkipValidation': (__package__, '.functional_validators'), + 'InstanceOf': (__package__, '.functional_validators'), + # JSON Schema + 'WithJsonSchema': (__package__, '.json_schema'), + # functional serializers + 'field_serializer': (__package__, '.functional_serializers'), + 'model_serializer': (__package__, '.functional_serializers'), + 'PlainSerializer': (__package__, '.functional_serializers'), + 'SerializeAsAny': (__package__, '.functional_serializers'), + 'WrapSerializer': 
(__package__, '.functional_serializers'), + # config + 'ConfigDict': (__package__, '.config'), + 'with_config': (__package__, '.config'), + # validate call + 'validate_call': (__package__, '.validate_call_decorator'), + # errors + 'PydanticErrorCodes': (__package__, '.errors'), + 'PydanticUserError': (__package__, '.errors'), + 'PydanticSchemaGenerationError': (__package__, '.errors'), + 'PydanticImportError': (__package__, '.errors'), + 'PydanticUndefinedAnnotation': (__package__, '.errors'), + 'PydanticInvalidForJsonSchema': (__package__, '.errors'), + # fields + 'Field': (__package__, '.fields'), + 'computed_field': (__package__, '.fields'), + 'PrivateAttr': (__package__, '.fields'), + # alias + 'AliasChoices': (__package__, '.aliases'), + 'AliasGenerator': (__package__, '.aliases'), + 'AliasPath': (__package__, '.aliases'), + # main + 'BaseModel': (__package__, '.main'), + 'create_model': (__package__, '.main'), + # network + 'AnyUrl': (__package__, '.networks'), + 'AnyHttpUrl': (__package__, '.networks'), + 'FileUrl': (__package__, '.networks'), + 'HttpUrl': (__package__, '.networks'), + 'FtpUrl': (__package__, '.networks'), + 'WebsocketUrl': (__package__, '.networks'), + 'AnyWebsocketUrl': (__package__, '.networks'), + 'UrlConstraints': (__package__, '.networks'), + 'EmailStr': (__package__, '.networks'), + 'NameEmail': (__package__, '.networks'), + 'IPvAnyAddress': (__package__, '.networks'), + 'IPvAnyInterface': (__package__, '.networks'), + 'IPvAnyNetwork': (__package__, '.networks'), + 'PostgresDsn': (__package__, '.networks'), + 'CockroachDsn': (__package__, '.networks'), + 'AmqpDsn': (__package__, '.networks'), + 'RedisDsn': (__package__, '.networks'), + 'MongoDsn': (__package__, '.networks'), + 'KafkaDsn': (__package__, '.networks'), + 'NatsDsn': (__package__, '.networks'), + 'MySQLDsn': (__package__, '.networks'), + 'MariaDBDsn': (__package__, '.networks'), + 'ClickHouseDsn': (__package__, '.networks'), + 'validate_email': (__package__, '.networks'), + # root_model + 'RootModel': (__package__, '.root_model'), + # types + 'Strict': (__package__, '.types'), + 'StrictStr': (__package__, '.types'), + 'conbytes': (__package__, '.types'), + 'conlist': (__package__, '.types'), + 'conset': (__package__, '.types'), + 'confrozenset': (__package__, '.types'), + 'constr': (__package__, '.types'), + 'StringConstraints': (__package__, '.types'), + 'ImportString': (__package__, '.types'), + 'conint': (__package__, '.types'), + 'PositiveInt': (__package__, '.types'), + 'NegativeInt': (__package__, '.types'), + 'NonNegativeInt': (__package__, '.types'), + 'NonPositiveInt': (__package__, '.types'), + 'confloat': (__package__, '.types'), + 'PositiveFloat': (__package__, '.types'), + 'NegativeFloat': (__package__, '.types'), + 'NonNegativeFloat': (__package__, '.types'), + 'NonPositiveFloat': (__package__, '.types'), + 'FiniteFloat': (__package__, '.types'), + 'condecimal': (__package__, '.types'), + 'condate': (__package__, '.types'), + 'UUID1': (__package__, '.types'), + 'UUID3': (__package__, '.types'), + 'UUID4': (__package__, '.types'), + 'UUID5': (__package__, '.types'), + 'FilePath': (__package__, '.types'), + 'DirectoryPath': (__package__, '.types'), + 'NewPath': (__package__, '.types'), + 'Json': (__package__, '.types'), + 'Secret': (__package__, '.types'), + 'SecretStr': (__package__, '.types'), + 'SecretBytes': (__package__, '.types'), + 'StrictBool': (__package__, '.types'), + 'StrictBytes': (__package__, '.types'), + 'StrictInt': (__package__, '.types'), + 'StrictFloat': 
(__package__, '.types'), + 'PaymentCardNumber': (__package__, '.types'), + 'ByteSize': (__package__, '.types'), + 'PastDate': (__package__, '.types'), + 'FutureDate': (__package__, '.types'), + 'PastDatetime': (__package__, '.types'), + 'FutureDatetime': (__package__, '.types'), + 'AwareDatetime': (__package__, '.types'), + 'NaiveDatetime': (__package__, '.types'), + 'AllowInfNan': (__package__, '.types'), + 'EncoderProtocol': (__package__, '.types'), + 'EncodedBytes': (__package__, '.types'), + 'EncodedStr': (__package__, '.types'), + 'Base64Encoder': (__package__, '.types'), + 'Base64Bytes': (__package__, '.types'), + 'Base64Str': (__package__, '.types'), + 'Base64UrlBytes': (__package__, '.types'), + 'Base64UrlStr': (__package__, '.types'), + 'GetPydanticSchema': (__package__, '.types'), + 'Tag': (__package__, '.types'), + 'Discriminator': (__package__, '.types'), + 'JsonValue': (__package__, '.types'), + 'OnErrorOmit': (__package__, '.types'), + # type_adapter + 'TypeAdapter': (__package__, '.type_adapter'), + # warnings + 'PydanticDeprecatedSince20': (__package__, '.warnings'), + 'PydanticDeprecatedSince26': (__package__, '.warnings'), + 'PydanticDeprecationWarning': (__package__, '.warnings'), + # annotated handlers + 'GetCoreSchemaHandler': (__package__, '.annotated_handlers'), + 'GetJsonSchemaHandler': (__package__, '.annotated_handlers'), + # generate schema from ._internal + 'GenerateSchema': (__package__, '._internal._generate_schema'), + # pydantic_core stuff + 'ValidationError': ('pydantic_core', '.'), + 'ValidationInfo': ('pydantic_core', '.core_schema'), + 'SerializationInfo': ('pydantic_core', '.core_schema'), + 'ValidatorFunctionWrapHandler': ('pydantic_core', '.core_schema'), + 'FieldSerializationInfo': ('pydantic_core', '.core_schema'), + 'SerializerFunctionWrapHandler': ('pydantic_core', '.core_schema'), + # deprecated, mostly not included in __all__ + 'root_validator': (__package__, '.deprecated.class_validators'), + 'validator': (__package__, '.deprecated.class_validators'), + 'BaseConfig': (__package__, '.deprecated.config'), + 'Extra': (__package__, '.deprecated.config'), + 'parse_obj_as': (__package__, '.deprecated.tools'), + 'schema_of': (__package__, '.deprecated.tools'), + 'schema_json_of': (__package__, '.deprecated.tools'), + 'FieldValidationInfo': ('pydantic_core', '.core_schema'), +} + +_getattr_migration = getattr_migration(__name__) + + +def __getattr__(attr_name: str) -> object: + dynamic_attr = _dynamic_imports.get(attr_name) + if dynamic_attr is None: + return _getattr_migration(attr_name) + + package, module_name = dynamic_attr + + from importlib import import_module + + if module_name == '__module__': + return import_module(f'.{attr_name}', package=package) + else: + module = import_module(module_name, package=package) + return getattr(module, attr_name) + + +def __dir__() -> 'list[str]': + return list(__all__) diff --git a/venv/lib/python3.10/site-packages/pydantic/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/pydantic/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..66c894a9b2c939f19b1d4c4e323f11385ea7097b Binary files /dev/null and b/venv/lib/python3.10/site-packages/pydantic/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pydantic/__pycache__/_migration.cpython-310.pyc b/venv/lib/python3.10/site-packages/pydantic/__pycache__/_migration.cpython-310.pyc new file mode 100644 index 
0000000000000000000000000000000000000000..bf24d655929c331ec5280dbfdc378e93fbdb4011 Binary files /dev/null and b/venv/lib/python3.10/site-packages/pydantic/__pycache__/_migration.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pydantic/__pycache__/alias_generators.cpython-310.pyc b/venv/lib/python3.10/site-packages/pydantic/__pycache__/alias_generators.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3c82b2f028c4dffa3155d5479ee9fedd2597e48f Binary files /dev/null and b/venv/lib/python3.10/site-packages/pydantic/__pycache__/alias_generators.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pydantic/__pycache__/aliases.cpython-310.pyc b/venv/lib/python3.10/site-packages/pydantic/__pycache__/aliases.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8f5bb482e341ee0abb4b63d2a628818ee1c72cc7 Binary files /dev/null and b/venv/lib/python3.10/site-packages/pydantic/__pycache__/aliases.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pydantic/__pycache__/annotated_handlers.cpython-310.pyc b/venv/lib/python3.10/site-packages/pydantic/__pycache__/annotated_handlers.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2aaf20626a270f02935b86e1a88e76ffb314d797 Binary files /dev/null and b/venv/lib/python3.10/site-packages/pydantic/__pycache__/annotated_handlers.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pydantic/__pycache__/class_validators.cpython-310.pyc b/venv/lib/python3.10/site-packages/pydantic/__pycache__/class_validators.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8670b65d6ed0457c28492e2e18e69df684ab75ac Binary files /dev/null and b/venv/lib/python3.10/site-packages/pydantic/__pycache__/class_validators.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pydantic/__pycache__/color.cpython-310.pyc b/venv/lib/python3.10/site-packages/pydantic/__pycache__/color.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4dcf01bf81c577131471e1804b9fbb8bad389edb Binary files /dev/null and b/venv/lib/python3.10/site-packages/pydantic/__pycache__/color.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pydantic/__pycache__/config.cpython-310.pyc b/venv/lib/python3.10/site-packages/pydantic/__pycache__/config.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c102dbe0759702157d738e5eb5899ef00c8b269b Binary files /dev/null and b/venv/lib/python3.10/site-packages/pydantic/__pycache__/config.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pydantic/__pycache__/dataclasses.cpython-310.pyc b/venv/lib/python3.10/site-packages/pydantic/__pycache__/dataclasses.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..53fdb6c139d94b7c1cabff3d541f8565970993bd Binary files /dev/null and b/venv/lib/python3.10/site-packages/pydantic/__pycache__/dataclasses.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pydantic/__pycache__/datetime_parse.cpython-310.pyc b/venv/lib/python3.10/site-packages/pydantic/__pycache__/datetime_parse.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..feb7b84b4a2affad154a4349178b5026d622341f Binary files /dev/null and b/venv/lib/python3.10/site-packages/pydantic/__pycache__/datetime_parse.cpython-310.pyc differ diff --git 
a/venv/lib/python3.10/site-packages/pydantic/__pycache__/env_settings.cpython-310.pyc b/venv/lib/python3.10/site-packages/pydantic/__pycache__/env_settings.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..78c25eb10f70b6e6f415e5aab394ebfe961a61cf Binary files /dev/null and b/venv/lib/python3.10/site-packages/pydantic/__pycache__/env_settings.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pydantic/__pycache__/error_wrappers.cpython-310.pyc b/venv/lib/python3.10/site-packages/pydantic/__pycache__/error_wrappers.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d580be070a0706ddbca623273186f0371f5571e9 Binary files /dev/null and b/venv/lib/python3.10/site-packages/pydantic/__pycache__/error_wrappers.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pydantic/__pycache__/errors.cpython-310.pyc b/venv/lib/python3.10/site-packages/pydantic/__pycache__/errors.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ead369da6a8c1a341fc37d5b7807661ec7387669 Binary files /dev/null and b/venv/lib/python3.10/site-packages/pydantic/__pycache__/errors.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pydantic/__pycache__/fields.cpython-310.pyc b/venv/lib/python3.10/site-packages/pydantic/__pycache__/fields.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..fa88243cad97f1fa87f1468e2680f061e0e1e801 Binary files /dev/null and b/venv/lib/python3.10/site-packages/pydantic/__pycache__/fields.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pydantic/__pycache__/functional_serializers.cpython-310.pyc b/venv/lib/python3.10/site-packages/pydantic/__pycache__/functional_serializers.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..fb069071bec62c5abf9311eb19c731aa98827008 Binary files /dev/null and b/venv/lib/python3.10/site-packages/pydantic/__pycache__/functional_serializers.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pydantic/__pycache__/functional_validators.cpython-310.pyc b/venv/lib/python3.10/site-packages/pydantic/__pycache__/functional_validators.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9836b5261170f6f0dcfad24f378291dd6d6435f2 Binary files /dev/null and b/venv/lib/python3.10/site-packages/pydantic/__pycache__/functional_validators.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pydantic/__pycache__/generics.cpython-310.pyc b/venv/lib/python3.10/site-packages/pydantic/__pycache__/generics.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3dd54c778a6d9e99bdaa794bb61c66a019fbd1fe Binary files /dev/null and b/venv/lib/python3.10/site-packages/pydantic/__pycache__/generics.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pydantic/__pycache__/json.cpython-310.pyc b/venv/lib/python3.10/site-packages/pydantic/__pycache__/json.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..924cfe2e618551f7212752d8a2254cc591d65e5a Binary files /dev/null and b/venv/lib/python3.10/site-packages/pydantic/__pycache__/json.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pydantic/__pycache__/json_schema.cpython-310.pyc b/venv/lib/python3.10/site-packages/pydantic/__pycache__/json_schema.cpython-310.pyc new file mode 100644 index 
0000000000000000000000000000000000000000..f307562172ac752dc3dd7a7c12b41fde2bc08016 Binary files /dev/null and b/venv/lib/python3.10/site-packages/pydantic/__pycache__/json_schema.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pydantic/__pycache__/mypy.cpython-310.pyc b/venv/lib/python3.10/site-packages/pydantic/__pycache__/mypy.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4c90c9912d61b02ea01ad30b9508600fef75e1e0 Binary files /dev/null and b/venv/lib/python3.10/site-packages/pydantic/__pycache__/mypy.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pydantic/__pycache__/networks.cpython-310.pyc b/venv/lib/python3.10/site-packages/pydantic/__pycache__/networks.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..cc16a0946bb7eb3c5b2bfa0647dd058bd62c3531 Binary files /dev/null and b/venv/lib/python3.10/site-packages/pydantic/__pycache__/networks.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pydantic/__pycache__/parse.cpython-310.pyc b/venv/lib/python3.10/site-packages/pydantic/__pycache__/parse.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1ceed54a48f915b54abc05e3ea3bcbab8b7d5f37 Binary files /dev/null and b/venv/lib/python3.10/site-packages/pydantic/__pycache__/parse.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pydantic/__pycache__/root_model.cpython-310.pyc b/venv/lib/python3.10/site-packages/pydantic/__pycache__/root_model.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0cad7e914d19a10125ac7770d3a20ce0248fcdbb Binary files /dev/null and b/venv/lib/python3.10/site-packages/pydantic/__pycache__/root_model.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pydantic/__pycache__/schema.cpython-310.pyc b/venv/lib/python3.10/site-packages/pydantic/__pycache__/schema.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..bc7e9965d98daf6a9fa77bcab1a54814b9ac60da Binary files /dev/null and b/venv/lib/python3.10/site-packages/pydantic/__pycache__/schema.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pydantic/__pycache__/tools.cpython-310.pyc b/venv/lib/python3.10/site-packages/pydantic/__pycache__/tools.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8c57f5ada4772e2d76668058811994afb5510b3c Binary files /dev/null and b/venv/lib/python3.10/site-packages/pydantic/__pycache__/tools.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pydantic/__pycache__/types.cpython-310.pyc b/venv/lib/python3.10/site-packages/pydantic/__pycache__/types.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9c56df5abb04727ebe40d255b98789b6aa756317 Binary files /dev/null and b/venv/lib/python3.10/site-packages/pydantic/__pycache__/types.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pydantic/__pycache__/typing.cpython-310.pyc b/venv/lib/python3.10/site-packages/pydantic/__pycache__/typing.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..17ca4b19f4d16df4ba92d3827675ca5c11dad15d Binary files /dev/null and b/venv/lib/python3.10/site-packages/pydantic/__pycache__/typing.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pydantic/__pycache__/utils.cpython-310.pyc b/venv/lib/python3.10/site-packages/pydantic/__pycache__/utils.cpython-310.pyc new file mode 100644 
index 0000000000000000000000000000000000000000..7def5314fe2bfd9312daeeb457dbf6259f06222f Binary files /dev/null and b/venv/lib/python3.10/site-packages/pydantic/__pycache__/utils.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pydantic/__pycache__/validate_call_decorator.cpython-310.pyc b/venv/lib/python3.10/site-packages/pydantic/__pycache__/validate_call_decorator.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..60c9613a5be74eb477295c980492e166ab848166 Binary files /dev/null and b/venv/lib/python3.10/site-packages/pydantic/__pycache__/validate_call_decorator.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pydantic/__pycache__/validators.cpython-310.pyc b/venv/lib/python3.10/site-packages/pydantic/__pycache__/validators.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7bb6d0bbf8b10a1d89e9fa8ab3fe0e3d321e3b7f Binary files /dev/null and b/venv/lib/python3.10/site-packages/pydantic/__pycache__/validators.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pydantic/__pycache__/version.cpython-310.pyc b/venv/lib/python3.10/site-packages/pydantic/__pycache__/version.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..629d167ae4675315a9a3852579cc9589cb1f78db Binary files /dev/null and b/venv/lib/python3.10/site-packages/pydantic/__pycache__/version.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pydantic/__pycache__/warnings.cpython-310.pyc b/venv/lib/python3.10/site-packages/pydantic/__pycache__/warnings.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..be0775713de4217734b0d51b6609a9a9358062a6 Binary files /dev/null and b/venv/lib/python3.10/site-packages/pydantic/__pycache__/warnings.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pydantic/_migration.py b/venv/lib/python3.10/site-packages/pydantic/_migration.py new file mode 100644 index 0000000000000000000000000000000000000000..c8478a624efe9a054588be610233d026a8f8fcd0 --- /dev/null +++ b/venv/lib/python3.10/site-packages/pydantic/_migration.py @@ -0,0 +1,308 @@ +import sys +from typing import Any, Callable, Dict + +from .version import version_short + +MOVED_IN_V2 = { + 'pydantic.utils:version_info': 'pydantic.version:version_info', + 'pydantic.error_wrappers:ValidationError': 'pydantic:ValidationError', + 'pydantic.utils:to_camel': 'pydantic.alias_generators:to_pascal', + 'pydantic.utils:to_lower_camel': 'pydantic.alias_generators:to_camel', + 'pydantic:PyObject': 'pydantic.types:ImportString', + 'pydantic.types:PyObject': 'pydantic.types:ImportString', + 'pydantic.generics:GenericModel': 'pydantic.BaseModel', +} + +DEPRECATED_MOVED_IN_V2 = { + 'pydantic.tools:schema_of': 'pydantic.deprecated.tools:schema_of', + 'pydantic.tools:parse_obj_as': 'pydantic.deprecated.tools:parse_obj_as', + 'pydantic.tools:schema_json_of': 'pydantic.deprecated.tools:schema_json_of', + 'pydantic.json:pydantic_encoder': 'pydantic.deprecated.json:pydantic_encoder', + 'pydantic:validate_arguments': 'pydantic.deprecated.decorator:validate_arguments', + 'pydantic.json:custom_pydantic_encoder': 'pydantic.deprecated.json:custom_pydantic_encoder', + 'pydantic.json:timedelta_isoformat': 'pydantic.deprecated.json:timedelta_isoformat', + 'pydantic.decorator:validate_arguments': 'pydantic.deprecated.decorator:validate_arguments', + 'pydantic.class_validators:validator': 'pydantic.deprecated.class_validators:validator', + 
'pydantic.class_validators:root_validator': 'pydantic.deprecated.class_validators:root_validator', + 'pydantic.config:BaseConfig': 'pydantic.deprecated.config:BaseConfig', + 'pydantic.config:Extra': 'pydantic.deprecated.config:Extra', +} + +REDIRECT_TO_V1 = { + f'pydantic.utils:{obj}': f'pydantic.v1.utils:{obj}' + for obj in ( + 'deep_update', + 'GetterDict', + 'lenient_issubclass', + 'lenient_isinstance', + 'is_valid_field', + 'update_not_none', + 'import_string', + 'Representation', + 'ROOT_KEY', + 'smart_deepcopy', + 'sequence_like', + ) +} + + +REMOVED_IN_V2 = { + 'pydantic:ConstrainedBytes', + 'pydantic:ConstrainedDate', + 'pydantic:ConstrainedDecimal', + 'pydantic:ConstrainedFloat', + 'pydantic:ConstrainedFrozenSet', + 'pydantic:ConstrainedInt', + 'pydantic:ConstrainedList', + 'pydantic:ConstrainedSet', + 'pydantic:ConstrainedStr', + 'pydantic:JsonWrapper', + 'pydantic:NoneBytes', + 'pydantic:NoneStr', + 'pydantic:NoneStrBytes', + 'pydantic:Protocol', + 'pydantic:Required', + 'pydantic:StrBytes', + 'pydantic:compiled', + 'pydantic.config:get_config', + 'pydantic.config:inherit_config', + 'pydantic.config:prepare_config', + 'pydantic:create_model_from_namedtuple', + 'pydantic:create_model_from_typeddict', + 'pydantic.dataclasses:create_pydantic_model_from_dataclass', + 'pydantic.dataclasses:make_dataclass_validator', + 'pydantic.dataclasses:set_validation', + 'pydantic.datetime_parse:parse_date', + 'pydantic.datetime_parse:parse_time', + 'pydantic.datetime_parse:parse_datetime', + 'pydantic.datetime_parse:parse_duration', + 'pydantic.error_wrappers:ErrorWrapper', + 'pydantic.errors:AnyStrMaxLengthError', + 'pydantic.errors:AnyStrMinLengthError', + 'pydantic.errors:ArbitraryTypeError', + 'pydantic.errors:BoolError', + 'pydantic.errors:BytesError', + 'pydantic.errors:CallableError', + 'pydantic.errors:ClassError', + 'pydantic.errors:ColorError', + 'pydantic.errors:ConfigError', + 'pydantic.errors:DataclassTypeError', + 'pydantic.errors:DateError', + 'pydantic.errors:DateNotInTheFutureError', + 'pydantic.errors:DateNotInThePastError', + 'pydantic.errors:DateTimeError', + 'pydantic.errors:DecimalError', + 'pydantic.errors:DecimalIsNotFiniteError', + 'pydantic.errors:DecimalMaxDigitsError', + 'pydantic.errors:DecimalMaxPlacesError', + 'pydantic.errors:DecimalWholeDigitsError', + 'pydantic.errors:DictError', + 'pydantic.errors:DurationError', + 'pydantic.errors:EmailError', + 'pydantic.errors:EnumError', + 'pydantic.errors:EnumMemberError', + 'pydantic.errors:ExtraError', + 'pydantic.errors:FloatError', + 'pydantic.errors:FrozenSetError', + 'pydantic.errors:FrozenSetMaxLengthError', + 'pydantic.errors:FrozenSetMinLengthError', + 'pydantic.errors:HashableError', + 'pydantic.errors:IPv4AddressError', + 'pydantic.errors:IPv4InterfaceError', + 'pydantic.errors:IPv4NetworkError', + 'pydantic.errors:IPv6AddressError', + 'pydantic.errors:IPv6InterfaceError', + 'pydantic.errors:IPv6NetworkError', + 'pydantic.errors:IPvAnyAddressError', + 'pydantic.errors:IPvAnyInterfaceError', + 'pydantic.errors:IPvAnyNetworkError', + 'pydantic.errors:IntEnumError', + 'pydantic.errors:IntegerError', + 'pydantic.errors:InvalidByteSize', + 'pydantic.errors:InvalidByteSizeUnit', + 'pydantic.errors:InvalidDiscriminator', + 'pydantic.errors:InvalidLengthForBrand', + 'pydantic.errors:JsonError', + 'pydantic.errors:JsonTypeError', + 'pydantic.errors:ListError', + 'pydantic.errors:ListMaxLengthError', + 'pydantic.errors:ListMinLengthError', + 'pydantic.errors:ListUniqueItemsError', + 'pydantic.errors:LuhnValidationError', 
+ 'pydantic.errors:MissingDiscriminator', + 'pydantic.errors:MissingError', + 'pydantic.errors:NoneIsAllowedError', + 'pydantic.errors:NoneIsNotAllowedError', + 'pydantic.errors:NotDigitError', + 'pydantic.errors:NotNoneError', + 'pydantic.errors:NumberNotGeError', + 'pydantic.errors:NumberNotGtError', + 'pydantic.errors:NumberNotLeError', + 'pydantic.errors:NumberNotLtError', + 'pydantic.errors:NumberNotMultipleError', + 'pydantic.errors:PathError', + 'pydantic.errors:PathNotADirectoryError', + 'pydantic.errors:PathNotAFileError', + 'pydantic.errors:PathNotExistsError', + 'pydantic.errors:PatternError', + 'pydantic.errors:PyObjectError', + 'pydantic.errors:PydanticTypeError', + 'pydantic.errors:PydanticValueError', + 'pydantic.errors:SequenceError', + 'pydantic.errors:SetError', + 'pydantic.errors:SetMaxLengthError', + 'pydantic.errors:SetMinLengthError', + 'pydantic.errors:StrError', + 'pydantic.errors:StrRegexError', + 'pydantic.errors:StrictBoolError', + 'pydantic.errors:SubclassError', + 'pydantic.errors:TimeError', + 'pydantic.errors:TupleError', + 'pydantic.errors:TupleLengthError', + 'pydantic.errors:UUIDError', + 'pydantic.errors:UUIDVersionError', + 'pydantic.errors:UrlError', + 'pydantic.errors:UrlExtraError', + 'pydantic.errors:UrlHostError', + 'pydantic.errors:UrlHostTldError', + 'pydantic.errors:UrlPortError', + 'pydantic.errors:UrlSchemeError', + 'pydantic.errors:UrlSchemePermittedError', + 'pydantic.errors:UrlUserInfoError', + 'pydantic.errors:WrongConstantError', + 'pydantic.main:validate_model', + 'pydantic.networks:stricturl', + 'pydantic:parse_file_as', + 'pydantic:parse_raw_as', + 'pydantic:stricturl', + 'pydantic.tools:parse_file_as', + 'pydantic.tools:parse_raw_as', + 'pydantic.types:ConstrainedBytes', + 'pydantic.types:ConstrainedDate', + 'pydantic.types:ConstrainedDecimal', + 'pydantic.types:ConstrainedFloat', + 'pydantic.types:ConstrainedFrozenSet', + 'pydantic.types:ConstrainedInt', + 'pydantic.types:ConstrainedList', + 'pydantic.types:ConstrainedSet', + 'pydantic.types:ConstrainedStr', + 'pydantic.types:JsonWrapper', + 'pydantic.types:NoneBytes', + 'pydantic.types:NoneStr', + 'pydantic.types:NoneStrBytes', + 'pydantic.types:StrBytes', + 'pydantic.typing:evaluate_forwardref', + 'pydantic.typing:AbstractSetIntStr', + 'pydantic.typing:AnyCallable', + 'pydantic.typing:AnyClassMethod', + 'pydantic.typing:CallableGenerator', + 'pydantic.typing:DictAny', + 'pydantic.typing:DictIntStrAny', + 'pydantic.typing:DictStrAny', + 'pydantic.typing:IntStr', + 'pydantic.typing:ListStr', + 'pydantic.typing:MappingIntStrAny', + 'pydantic.typing:NoArgAnyCallable', + 'pydantic.typing:NoneType', + 'pydantic.typing:ReprArgs', + 'pydantic.typing:SetStr', + 'pydantic.typing:StrPath', + 'pydantic.typing:TupleGenerator', + 'pydantic.typing:WithArgsTypes', + 'pydantic.typing:all_literal_values', + 'pydantic.typing:display_as_type', + 'pydantic.typing:get_all_type_hints', + 'pydantic.typing:get_args', + 'pydantic.typing:get_origin', + 'pydantic.typing:get_sub_types', + 'pydantic.typing:is_callable_type', + 'pydantic.typing:is_classvar', + 'pydantic.typing:is_finalvar', + 'pydantic.typing:is_literal_type', + 'pydantic.typing:is_namedtuple', + 'pydantic.typing:is_new_type', + 'pydantic.typing:is_none_type', + 'pydantic.typing:is_typeddict', + 'pydantic.typing:is_typeddict_special', + 'pydantic.typing:is_union', + 'pydantic.typing:new_type_supertype', + 'pydantic.typing:resolve_annotations', + 'pydantic.typing:typing_base', + 'pydantic.typing:update_field_forward_refs', + 
'pydantic.typing:update_model_forward_refs', + 'pydantic.utils:ClassAttribute', + 'pydantic.utils:DUNDER_ATTRIBUTES', + 'pydantic.utils:PyObjectStr', + 'pydantic.utils:ValueItems', + 'pydantic.utils:almost_equal_floats', + 'pydantic.utils:get_discriminator_alias_and_values', + 'pydantic.utils:get_model', + 'pydantic.utils:get_unique_discriminator_alias', + 'pydantic.utils:in_ipython', + 'pydantic.utils:is_valid_identifier', + 'pydantic.utils:path_type', + 'pydantic.utils:validate_field_name', + 'pydantic:validate_model', +} + + +def getattr_migration(module: str) -> Callable[[str], Any]: + """Implement PEP 562 for objects that were either moved or removed on the migration + to V2. + + Args: + module: The module name. + + Returns: + A callable that will raise an error if the object is not found. + """ + # This avoids circular import with errors.py. + from .errors import PydanticImportError + + def wrapper(name: str) -> object: + """Raise an error if the object is not found, or warn if it was moved. + + In case it was moved, it still returns the object. + + Args: + name: The object name. + + Returns: + The object. + """ + if name == '__path__': + raise AttributeError(f'module {module!r} has no attribute {name!r}') + + import warnings + + from ._internal._validators import import_string + + import_path = f'{module}:{name}' + if import_path in MOVED_IN_V2.keys(): + new_location = MOVED_IN_V2[import_path] + warnings.warn(f'`{import_path}` has been moved to `{new_location}`.') + return import_string(MOVED_IN_V2[import_path]) + if import_path in DEPRECATED_MOVED_IN_V2: + # skip the warning here because a deprecation warning will be raised elsewhere + return import_string(DEPRECATED_MOVED_IN_V2[import_path]) + if import_path in REDIRECT_TO_V1: + new_location = REDIRECT_TO_V1[import_path] + warnings.warn( + f'`{import_path}` has been removed. We are importing from `{new_location}` instead.' + 'See the migration guide for more details: https://docs.pydantic.dev/latest/migration/' + ) + return import_string(REDIRECT_TO_V1[import_path]) + if import_path == 'pydantic:BaseSettings': + raise PydanticImportError( + '`BaseSettings` has been moved to the `pydantic-settings` package. ' + f'See https://docs.pydantic.dev/{version_short()}/migration/#basesettings-has-moved-to-pydantic-settings ' + 'for more details.' + ) + if import_path in REMOVED_IN_V2: + raise PydanticImportError(f'`{import_path}` has been removed in V2.') + globals: Dict[str, Any] = sys.modules[module].__dict__ + if name in globals: + return globals[name] + raise AttributeError(f'module {module!r} has no attribute {name!r}') + + return wrapper diff --git a/venv/lib/python3.10/site-packages/pydantic/alias_generators.py b/venv/lib/python3.10/site-packages/pydantic/alias_generators.py new file mode 100644 index 0000000000000000000000000000000000000000..155e66e0c4d7469e32ed28870ea7b4a80a4beb45 --- /dev/null +++ b/venv/lib/python3.10/site-packages/pydantic/alias_generators.py @@ -0,0 +1,50 @@ +"""Alias generators for converting between different capitalization conventions.""" +import re + +__all__ = ('to_pascal', 'to_camel', 'to_snake') + + +def to_pascal(snake: str) -> str: + """Convert a snake_case string to PascalCase. + + Args: + snake: The string to convert. + + Returns: + The PascalCase string. + """ + camel = snake.title() + return re.sub('([0-9A-Za-z])_(?=[0-9A-Z])', lambda m: m.group(1), camel) + + +def to_camel(snake: str) -> str: + """Convert a snake_case string to camelCase. + + Args: + snake: The string to convert. 
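A minimal sketch of how the `getattr_migration` hook above plays out at runtime, assuming a pydantic v2 install where `pydantic.utils` is one of the V1 backport modules wired to it; the expected outcomes follow from the MOVED_IN_V2 / REMOVED_IN_V2 tables rather than from captured output:

import warnings

from pydantic.errors import PydanticImportError

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter('always')
    from pydantic.utils import to_camel  # listed in MOVED_IN_V2 -> resolves to alias_generators.to_pascal

assert to_camel('foo_bar') == 'FooBar'
assert any('has been moved' in str(w.message) for w in caught)

try:
    from pydantic import parse_file_as  # listed in REMOVED_IN_V2
except PydanticImportError as exc:
    assert 'has been removed in V2' in str(exc)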
+ + Returns: + The converted camelCase string. + """ + camel = to_pascal(snake) + return re.sub('(^_*[A-Z])', lambda m: m.group(1).lower(), camel) + + +def to_snake(camel: str) -> str: + """Convert a PascalCase or camelCase string to snake_case. + + Args: + camel: The string to convert. + + Returns: + The converted string in snake_case. + """ + # Handle the sequence of uppercase letters followed by a lowercase letter + snake = re.sub(r'([A-Z]+)([A-Z][a-z])', lambda m: f'{m.group(1)}_{m.group(2)}', camel) + # Insert an underscore between a lowercase letter and an uppercase letter + snake = re.sub(r'([a-z])([A-Z])', lambda m: f'{m.group(1)}_{m.group(2)}', snake) + # Insert an underscore between a digit and an uppercase letter + snake = re.sub(r'([0-9])([A-Z])', lambda m: f'{m.group(1)}_{m.group(2)}', snake) + # Insert an underscore between a lowercase letter and a digit + snake = re.sub(r'([a-z])([0-9])', lambda m: f'{m.group(1)}_{m.group(2)}', snake) + return snake.lower() diff --git a/venv/lib/python3.10/site-packages/pydantic/aliases.py b/venv/lib/python3.10/site-packages/pydantic/aliases.py new file mode 100644 index 0000000000000000000000000000000000000000..a4659be4d813110e2cdebdab3c28cc5c872034da --- /dev/null +++ b/venv/lib/python3.10/site-packages/pydantic/aliases.py @@ -0,0 +1,131 @@ +"""Support for alias configurations.""" +from __future__ import annotations + +import dataclasses +from typing import Any, Callable, Literal + +from pydantic_core import PydanticUndefined + +from ._internal import _internal_dataclass + +__all__ = ('AliasGenerator', 'AliasPath', 'AliasChoices') + + +@dataclasses.dataclass(**_internal_dataclass.slots_true) +class AliasPath: + """Usage docs: https://docs.pydantic.dev/2.7/concepts/alias#aliaspath-and-aliaschoices + + A data class used by `validation_alias` as a convenience to create aliases. + + Attributes: + path: A list of string or integer aliases. + """ + + path: list[int | str] + + def __init__(self, first_arg: str, *args: str | int) -> None: + self.path = [first_arg] + list(args) + + def convert_to_aliases(self) -> list[str | int]: + """Converts arguments to a list of string or integer aliases. + + Returns: + The list of aliases. + """ + return self.path + + def search_dict_for_path(self, d: dict) -> Any: + """Searches a dictionary for the path specified by the alias. + + Returns: + The value at the specified path, or `PydanticUndefined` if the path is not found. + """ + v = d + for k in self.path: + if isinstance(v, str): + # disallow indexing into a str, like for AliasPath('x', 0) and x='abc' + return PydanticUndefined + try: + v = v[k] + except (KeyError, IndexError, TypeError): + return PydanticUndefined + return v + + +@dataclasses.dataclass(**_internal_dataclass.slots_true) +class AliasChoices: + """Usage docs: https://docs.pydantic.dev/2.7/concepts/alias#aliaspath-and-aliaschoices + + A data class used by `validation_alias` as a convenience to create aliases. + + Attributes: + choices: A list containing a string or `AliasPath`. + """ + + choices: list[str | AliasPath] + + def __init__(self, first_choice: str | AliasPath, *choices: str | AliasPath) -> None: + self.choices = [first_choice] + list(choices) + + def convert_to_aliases(self) -> list[list[str | int]]: + """Converts arguments to a list of lists containing string or integer aliases. + + Returns: + The list of aliases. 
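A quick sketch of what the three converters in `alias_generators.py` above should produce; the expected strings are worked out from the regexes, not taken from test output:

from pydantic.alias_generators import to_camel, to_pascal, to_snake

assert to_pascal('http_response_code') == 'HttpResponseCode'
assert to_camel('http_response_code') == 'httpResponseCode'
assert to_snake('HTTPResponseCode') == 'http_response_code'  # the ([A-Z]+)([A-Z][a-z]) rule splits the acronym
assert to_snake('camelCase123') == 'camel_case_123'           # lower/upper and letter/digit boundaries both get underscores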
+ """ + aliases: list[list[str | int]] = [] + for c in self.choices: + if isinstance(c, AliasPath): + aliases.append(c.convert_to_aliases()) + else: + aliases.append([c]) + return aliases + + +@dataclasses.dataclass(**_internal_dataclass.slots_true) +class AliasGenerator: + """Usage docs: https://docs.pydantic.dev/2.7/concepts/alias#using-an-aliasgenerator + + A data class used by `alias_generator` as a convenience to create various aliases. + + Attributes: + alias: A callable that takes a field name and returns an alias for it. + validation_alias: A callable that takes a field name and returns a validation alias for it. + serialization_alias: A callable that takes a field name and returns a serialization alias for it. + """ + + alias: Callable[[str], str] | None = None + validation_alias: Callable[[str], str | AliasPath | AliasChoices] | None = None + serialization_alias: Callable[[str], str] | None = None + + def _generate_alias( + self, + alias_kind: Literal['alias', 'validation_alias', 'serialization_alias'], + allowed_types: tuple[type[str] | type[AliasPath] | type[AliasChoices], ...], + field_name: str, + ) -> str | AliasPath | AliasChoices | None: + """Generate an alias of the specified kind. Returns None if the alias generator is None. + + Raises: + TypeError: If the alias generator produces an invalid type. + """ + alias = None + if alias_generator := getattr(self, alias_kind): + alias = alias_generator(field_name) + if alias and not isinstance(alias, allowed_types): + raise TypeError( + f'Invalid `{alias_kind}` type. `{alias_kind}` generator must produce one of `{allowed_types}`' + ) + return alias + + def generate_aliases(self, field_name: str) -> tuple[str | None, str | AliasPath | AliasChoices | None, str | None]: + """Generate `alias`, `validation_alias`, and `serialization_alias` for a field. + + Returns: + A tuple of three aliases - validation, alias, and serialization. + """ + alias = self._generate_alias('alias', (str,), field_name) + validation_alias = self._generate_alias('validation_alias', (str, AliasChoices, AliasPath), field_name) + serialization_alias = self._generate_alias('serialization_alias', (str,), field_name) + + return alias, validation_alias, serialization_alias # type: ignore diff --git a/venv/lib/python3.10/site-packages/pydantic/annotated_handlers.py b/venv/lib/python3.10/site-packages/pydantic/annotated_handlers.py new file mode 100644 index 0000000000000000000000000000000000000000..b1a4487286ae0041960a5443ce85bb3f8cb329be --- /dev/null +++ b/venv/lib/python3.10/site-packages/pydantic/annotated_handlers.py @@ -0,0 +1,120 @@ +"""Type annotations to use with `__get_pydantic_core_schema__` and `__get_pydantic_json_schema__`.""" +from __future__ import annotations as _annotations + +from typing import TYPE_CHECKING, Any, Union + +from pydantic_core import core_schema + +if TYPE_CHECKING: + from .json_schema import JsonSchemaMode, JsonSchemaValue + + CoreSchemaOrField = Union[ + core_schema.CoreSchema, + core_schema.ModelField, + core_schema.DataclassField, + core_schema.TypedDictField, + core_schema.ComputedField, + ] + +__all__ = 'GetJsonSchemaHandler', 'GetCoreSchemaHandler' + + +class GetJsonSchemaHandler: + """Handler to call into the next JSON schema generation function. + + Attributes: + mode: Json schema mode, can be `validation` or `serialization`. + """ + + mode: JsonSchemaMode + + def __call__(self, core_schema: CoreSchemaOrField, /) -> JsonSchemaValue: + """Call the inner handler and get the JsonSchemaValue it returns. 
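And a sketch of how the three helpers in `aliases.py` above are typically attached to a model. The `User` model and its fields are hypothetical, and it is assumed (per pydantic's documented defaults) that a field-level `validation_alias` takes precedence over the one produced by the `AliasGenerator`:

from pydantic import AliasChoices, AliasGenerator, AliasPath, BaseModel, ConfigDict, Field
from pydantic.alias_generators import to_camel


class User(BaseModel):
    # generate camelCase validation aliases for every field
    model_config = ConfigDict(alias_generator=AliasGenerator(validation_alias=to_camel))

    first_name: str
    # also accept the first element of a `names` array
    last_name: str = Field(validation_alias=AliasChoices('lastName', AliasPath('names', 0)))


user = User.model_validate({'firstName': 'Ada', 'names': ['Lovelace']})
assert (user.first_name, user.last_name) == ('Ada', 'Lovelace')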
+ This will call the next JSON schema modifying function up until it calls + into `pydantic.json_schema.GenerateJsonSchema`, which will raise a + `pydantic.errors.PydanticInvalidForJsonSchema` error if it cannot generate + a JSON schema. + + Args: + core_schema: A `pydantic_core.core_schema.CoreSchema`. + + Returns: + JsonSchemaValue: The JSON schema generated by the inner JSON schema modify + functions. + """ + raise NotImplementedError + + def resolve_ref_schema(self, maybe_ref_json_schema: JsonSchemaValue, /) -> JsonSchemaValue: + """Get the real schema for a `{"$ref": ...}` schema. + If the schema given is not a `$ref` schema, it will be returned as is. + This means you don't have to check before calling this function. + + Args: + maybe_ref_json_schema: A JsonSchemaValue which may be a `$ref` schema. + + Raises: + LookupError: If the ref is not found. + + Returns: + JsonSchemaValue: A JsonSchemaValue that has no `$ref`. + """ + raise NotImplementedError + + +class GetCoreSchemaHandler: + """Handler to call into the next CoreSchema schema generation function.""" + + def __call__(self, source_type: Any, /) -> core_schema.CoreSchema: + """Call the inner handler and get the CoreSchema it returns. + This will call the next CoreSchema modifying function up until it calls + into Pydantic's internal schema generation machinery, which will raise a + `pydantic.errors.PydanticSchemaGenerationError` error if it cannot generate + a CoreSchema for the given source type. + + Args: + source_type: The input type. + + Returns: + CoreSchema: The `pydantic-core` CoreSchema generated. + """ + raise NotImplementedError + + def generate_schema(self, source_type: Any, /) -> core_schema.CoreSchema: + """Generate a schema unrelated to the current context. + Use this function if e.g. you are handling schema generation for a sequence + and want to generate a schema for its items. + Otherwise, you may end up doing something like applying a `min_length` constraint + that was intended for the sequence itself to its items! + + Args: + source_type: The input type. + + Returns: + CoreSchema: The `pydantic-core` CoreSchema generated. + """ + raise NotImplementedError + + def resolve_ref_schema(self, maybe_ref_schema: core_schema.CoreSchema, /) -> core_schema.CoreSchema: + """Get the real schema for a `definition-ref` schema. + If the schema given is not a `definition-ref` schema, it will be returned as is. + This means you don't have to check before calling this function. + + Args: + maybe_ref_schema: A `CoreSchema`, `ref`-based or not. + + Raises: + LookupError: If the `ref` is not found. + + Returns: + A concrete `CoreSchema`. 
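For context on where `GetCoreSchemaHandler` shows up in user code, a small custom-type sketch; `LowercaseStr` and `Doc` are made-up names, and the pattern is simply to ask the handler for the base schema and post-process the validated value:

from typing import Any

from pydantic import BaseModel, GetCoreSchemaHandler
from pydantic_core import core_schema


class LowercaseStr(str):
    @classmethod
    def __get_pydantic_core_schema__(cls, source: Any, handler: GetCoreSchemaHandler) -> core_schema.CoreSchema:
        # ask the next handler for the plain `str` schema, then lower-case the validated value
        return core_schema.no_info_after_validator_function(lambda v: v.lower(), handler(str))


class Doc(BaseModel):
    slug: LowercaseStr


assert Doc(slug='Hello-World').slug == 'hello-world'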
+ """ + raise NotImplementedError + + @property + def field_name(self) -> str | None: + """Get the name of the closest field to this validator.""" + raise NotImplementedError + + def _get_types_namespace(self) -> dict[str, Any] | None: + """Internal method used during type resolution for serializer annotations.""" + raise NotImplementedError diff --git a/venv/lib/python3.10/site-packages/pydantic/class_validators.py b/venv/lib/python3.10/site-packages/pydantic/class_validators.py new file mode 100644 index 0000000000000000000000000000000000000000..2ff72ae53b6485ca5def8d0893debfd985f6f92e --- /dev/null +++ b/venv/lib/python3.10/site-packages/pydantic/class_validators.py @@ -0,0 +1,4 @@ +"""`class_validators` module is a backport module from V1.""" +from ._migration import getattr_migration + +__getattr__ = getattr_migration(__name__) diff --git a/venv/lib/python3.10/site-packages/pydantic/color.py b/venv/lib/python3.10/site-packages/pydantic/color.py new file mode 100644 index 0000000000000000000000000000000000000000..108bb8faec713eb1a7040e479d8b5cd647a587c7 --- /dev/null +++ b/venv/lib/python3.10/site-packages/pydantic/color.py @@ -0,0 +1,603 @@ +"""Color definitions are used as per the CSS3 +[CSS Color Module Level 3](http://www.w3.org/TR/css3-color/#svg-color) specification. + +A few colors have multiple names referring to the sames colors, eg. `grey` and `gray` or `aqua` and `cyan`. + +In these cases the _last_ color when sorted alphabetically takes preferences, +eg. `Color((0, 255, 255)).as_named() == 'cyan'` because "cyan" comes after "aqua". + +Warning: Deprecated + The `Color` class is deprecated, use `pydantic_extra_types` instead. + See [`pydantic-extra-types.Color`](../usage/types/extra_types/color_types.md) + for more information. +""" +import math +import re +from colorsys import hls_to_rgb, rgb_to_hls +from typing import Any, Callable, Optional, Tuple, Type, Union, cast + +from pydantic_core import CoreSchema, PydanticCustomError, core_schema +from typing_extensions import deprecated + +from ._internal import _repr +from ._internal._schema_generation_shared import GetJsonSchemaHandler as _GetJsonSchemaHandler +from .json_schema import JsonSchemaValue +from .warnings import PydanticDeprecatedSince20 + +ColorTuple = Union[Tuple[int, int, int], Tuple[int, int, int, float]] +ColorType = Union[ColorTuple, str] +HslColorTuple = Union[Tuple[float, float, float], Tuple[float, float, float, float]] + + +class RGBA: + """Internal use only as a representation of a color.""" + + __slots__ = 'r', 'g', 'b', 'alpha', '_tuple' + + def __init__(self, r: float, g: float, b: float, alpha: Optional[float]): + self.r = r + self.g = g + self.b = b + self.alpha = alpha + + self._tuple: Tuple[float, float, float, Optional[float]] = (r, g, b, alpha) + + def __getitem__(self, item: Any) -> Any: + return self._tuple[item] + + +# these are not compiled here to avoid import slowdown, they'll be compiled the first time they're used, then cached +_r_255 = r'(\d{1,3}(?:\.\d+)?)' +_r_comma = r'\s*,\s*' +_r_alpha = r'(\d(?:\.\d+)?|\.\d+|\d{1,2}%)' +_r_h = r'(-?\d+(?:\.\d+)?|-?\.\d+)(deg|rad|turn)?' 
+_r_sl = r'(\d{1,3}(?:\.\d+)?)%' +r_hex_short = r'\s*(?:#|0x)?([0-9a-f])([0-9a-f])([0-9a-f])([0-9a-f])?\s*' +r_hex_long = r'\s*(?:#|0x)?([0-9a-f]{2})([0-9a-f]{2})([0-9a-f]{2})([0-9a-f]{2})?\s*' +# CSS3 RGB examples: rgb(0, 0, 0), rgba(0, 0, 0, 0.5), rgba(0, 0, 0, 50%) +r_rgb = rf'\s*rgba?\(\s*{_r_255}{_r_comma}{_r_255}{_r_comma}{_r_255}(?:{_r_comma}{_r_alpha})?\s*\)\s*' +# CSS3 HSL examples: hsl(270, 60%, 50%), hsla(270, 60%, 50%, 0.5), hsla(270, 60%, 50%, 50%) +r_hsl = rf'\s*hsla?\(\s*{_r_h}{_r_comma}{_r_sl}{_r_comma}{_r_sl}(?:{_r_comma}{_r_alpha})?\s*\)\s*' +# CSS4 RGB examples: rgb(0 0 0), rgb(0 0 0 / 0.5), rgb(0 0 0 / 50%), rgba(0 0 0 / 50%) +r_rgb_v4_style = rf'\s*rgba?\(\s*{_r_255}\s+{_r_255}\s+{_r_255}(?:\s*/\s*{_r_alpha})?\s*\)\s*' +# CSS4 HSL examples: hsl(270 60% 50%), hsl(270 60% 50% / 0.5), hsl(270 60% 50% / 50%), hsla(270 60% 50% / 50%) +r_hsl_v4_style = rf'\s*hsla?\(\s*{_r_h}\s+{_r_sl}\s+{_r_sl}(?:\s*/\s*{_r_alpha})?\s*\)\s*' + +# colors where the two hex characters are the same, if all colors match this the short version of hex colors can be used +repeat_colors = {int(c * 2, 16) for c in '0123456789abcdef'} +rads = 2 * math.pi + + +@deprecated( + 'The `Color` class is deprecated, use `pydantic_extra_types` instead. ' + 'See https://docs.pydantic.dev/latest/api/pydantic_extra_types_color/.', + category=PydanticDeprecatedSince20, +) +class Color(_repr.Representation): + """Represents a color.""" + + __slots__ = '_original', '_rgba' + + def __init__(self, value: ColorType) -> None: + self._rgba: RGBA + self._original: ColorType + if isinstance(value, (tuple, list)): + self._rgba = parse_tuple(value) + elif isinstance(value, str): + self._rgba = parse_str(value) + elif isinstance(value, Color): + self._rgba = value._rgba + value = value._original + else: + raise PydanticCustomError( + 'color_error', 'value is not a valid color: value must be a tuple, list or string' + ) + + # if we've got here value must be a valid color + self._original = value + + @classmethod + def __get_pydantic_json_schema__( + cls, core_schema: core_schema.CoreSchema, handler: _GetJsonSchemaHandler + ) -> JsonSchemaValue: + field_schema = {} + field_schema.update(type='string', format='color') + return field_schema + + def original(self) -> ColorType: + """Original value passed to `Color`.""" + return self._original + + def as_named(self, *, fallback: bool = False) -> str: + """Returns the name of the color if it can be found in `COLORS_BY_VALUE` dictionary, + otherwise returns the hexadecimal representation of the color or raises `ValueError`. + + Args: + fallback: If True, falls back to returning the hexadecimal representation of + the color instead of raising a ValueError when no named color is found. + + Returns: + The name of the color, or the hexadecimal representation of the color. + + Raises: + ValueError: When no named color is found and fallback is `False`. + """ + if self._rgba.alpha is None: + rgb = cast(Tuple[int, int, int], self.as_rgb_tuple()) + try: + return COLORS_BY_VALUE[rgb] + except KeyError as e: + if fallback: + return self.as_hex() + else: + raise ValueError('no named color found, use fallback=True, as_hex() or as_rgb()') from e + else: + return self.as_hex() + + def as_hex(self) -> str: + """Returns the hexadecimal representation of the color. + + Hex string representing the color can be 3, 4, 6, or 8 characters depending on whether the string + a "short" representation of the color is possible and whether there's an alpha channel. 
+ + Returns: + The hexadecimal representation of the color. + """ + values = [float_to_255(c) for c in self._rgba[:3]] + if self._rgba.alpha is not None: + values.append(float_to_255(self._rgba.alpha)) + + as_hex = ''.join(f'{v:02x}' for v in values) + if all(c in repeat_colors for c in values): + as_hex = ''.join(as_hex[c] for c in range(0, len(as_hex), 2)) + return '#' + as_hex + + def as_rgb(self) -> str: + """Color as an `rgb(<r>, <g>, <b>)` or `rgba(<r>, <g>, <b>, <a>)` string.""" + if self._rgba.alpha is None: + return f'rgb({float_to_255(self._rgba.r)}, {float_to_255(self._rgba.g)}, {float_to_255(self._rgba.b)})' + else: + return ( + f'rgba({float_to_255(self._rgba.r)}, {float_to_255(self._rgba.g)}, {float_to_255(self._rgba.b)}, ' + f'{round(self._alpha_float(), 2)})' + ) + + def as_rgb_tuple(self, *, alpha: Optional[bool] = None) -> ColorTuple: + """Returns the color as an RGB or RGBA tuple. + + Args: + alpha: Whether to include the alpha channel. There are three options for this input: + + - `None` (default): Include alpha only if it's set. (e.g. not `None`) + - `True`: Always include alpha. + - `False`: Always omit alpha. + + Returns: + A tuple that contains the values of the red, green, and blue channels in the range 0 to 255. + If alpha is included, it is in the range 0 to 1. + """ + r, g, b = (float_to_255(c) for c in self._rgba[:3]) + if alpha is None: + if self._rgba.alpha is None: + return r, g, b + else: + return r, g, b, self._alpha_float() + elif alpha: + return r, g, b, self._alpha_float() + else: + # alpha is False + return r, g, b + + def as_hsl(self) -> str: + """Color as an `hsl(<h>, <s>, <l>)` or `hsl(<h>, <s>, <l>, <a>)` string.""" + if self._rgba.alpha is None: + h, s, li = self.as_hsl_tuple(alpha=False) # type: ignore + return f'hsl({h * 360:0.0f}, {s:0.0%}, {li:0.0%})' + else: + h, s, li, a = self.as_hsl_tuple(alpha=True) # type: ignore + return f'hsl({h * 360:0.0f}, {s:0.0%}, {li:0.0%}, {round(a, 2)})' + + def as_hsl_tuple(self, *, alpha: Optional[bool] = None) -> HslColorTuple: + """Returns the color as an HSL or HSLA tuple. + + Args: + alpha: Whether to include the alpha channel. + + - `None` (default): Include the alpha channel only if it's set (e.g. not `None`). + - `True`: Always include alpha. + - `False`: Always omit alpha. + + Returns: + The color as a tuple of hue, saturation, lightness, and alpha (if included). + All elements are in the range 0 to 1. + + Note: + This is HSL as used in HTML and most other places, not HLS as used in Python's `colorsys`.
+ """ + h, l, s = rgb_to_hls(self._rgba.r, self._rgba.g, self._rgba.b) # noqa: E741 + if alpha is None: + if self._rgba.alpha is None: + return h, s, l + else: + return h, s, l, self._alpha_float() + if alpha: + return h, s, l, self._alpha_float() + else: + # alpha is False + return h, s, l + + def _alpha_float(self) -> float: + return 1 if self._rgba.alpha is None else self._rgba.alpha + + @classmethod + def __get_pydantic_core_schema__( + cls, source: Type[Any], handler: Callable[[Any], CoreSchema] + ) -> core_schema.CoreSchema: + return core_schema.with_info_plain_validator_function( + cls._validate, serialization=core_schema.to_string_ser_schema() + ) + + @classmethod + def _validate(cls, __input_value: Any, _: Any) -> 'Color': + return cls(__input_value) + + def __str__(self) -> str: + return self.as_named(fallback=True) + + def __repr_args__(self) -> '_repr.ReprArgs': + return [(None, self.as_named(fallback=True))] + [('rgb', self.as_rgb_tuple())] + + def __eq__(self, other: Any) -> bool: + return isinstance(other, Color) and self.as_rgb_tuple() == other.as_rgb_tuple() + + def __hash__(self) -> int: + return hash(self.as_rgb_tuple()) + + +def parse_tuple(value: Tuple[Any, ...]) -> RGBA: + """Parse a tuple or list to get RGBA values. + + Args: + value: A tuple or list. + + Returns: + An `RGBA` tuple parsed from the input tuple. + + Raises: + PydanticCustomError: If tuple is not valid. + """ + if len(value) == 3: + r, g, b = (parse_color_value(v) for v in value) + return RGBA(r, g, b, None) + elif len(value) == 4: + r, g, b = (parse_color_value(v) for v in value[:3]) + return RGBA(r, g, b, parse_float_alpha(value[3])) + else: + raise PydanticCustomError('color_error', 'value is not a valid color: tuples must have length 3 or 4') + + +def parse_str(value: str) -> RGBA: + """Parse a string representing a color to an RGBA tuple. + + Possible formats for the input string include: + + * named color, see `COLORS_BY_NAME` + * hex short eg. `fff` (prefix can be `#`, `0x` or nothing) + * hex long eg. `ffffff` (prefix can be `#`, `0x` or nothing) + * `rgb(, , )` + * `rgba(, , , )` + + Args: + value: A string representing a color. + + Returns: + An `RGBA` tuple parsed from the input string. + + Raises: + ValueError: If the input string cannot be parsed to an RGBA tuple. + """ + value_lower = value.lower() + try: + r, g, b = COLORS_BY_NAME[value_lower] + except KeyError: + pass + else: + return ints_to_rgba(r, g, b, None) + + m = re.fullmatch(r_hex_short, value_lower) + if m: + *rgb, a = m.groups() + r, g, b = (int(v * 2, 16) for v in rgb) + if a: + alpha: Optional[float] = int(a * 2, 16) / 255 + else: + alpha = None + return ints_to_rgba(r, g, b, alpha) + + m = re.fullmatch(r_hex_long, value_lower) + if m: + *rgb, a = m.groups() + r, g, b = (int(v, 16) for v in rgb) + if a: + alpha = int(a, 16) / 255 + else: + alpha = None + return ints_to_rgba(r, g, b, alpha) + + m = re.fullmatch(r_rgb, value_lower) or re.fullmatch(r_rgb_v4_style, value_lower) + if m: + return ints_to_rgba(*m.groups()) # type: ignore + + m = re.fullmatch(r_hsl, value_lower) or re.fullmatch(r_hsl_v4_style, value_lower) + if m: + return parse_hsl(*m.groups()) # type: ignore + + raise PydanticCustomError('color_error', 'value is not a valid color: string not recognised as a valid color') + + +def ints_to_rgba(r: Union[int, str], g: Union[int, str], b: Union[int, str], alpha: Optional[float] = None) -> RGBA: + """Converts integer or string values for RGB color and an optional alpha value to an `RGBA` object. 
+ + Args: + r: An integer or string representing the red color value. + g: An integer or string representing the green color value. + b: An integer or string representing the blue color value. + alpha: A float representing the alpha value. Defaults to None. + + Returns: + An instance of the `RGBA` class with the corresponding color and alpha values. + """ + return RGBA(parse_color_value(r), parse_color_value(g), parse_color_value(b), parse_float_alpha(alpha)) + + +def parse_color_value(value: Union[int, str], max_val: int = 255) -> float: + """Parse the color value provided and return a number between 0 and 1. + + Args: + value: An integer or string color value. + max_val: Maximum range value. Defaults to 255. + + Raises: + PydanticCustomError: If the value is not a valid color. + + Returns: + A number between 0 and 1. + """ + try: + color = float(value) + except ValueError: + raise PydanticCustomError('color_error', 'value is not a valid color: color values must be a valid number') + if 0 <= color <= max_val: + return color / max_val + else: + raise PydanticCustomError( + 'color_error', + 'value is not a valid color: color values must be in the range 0 to {max_val}', + {'max_val': max_val}, + ) + + +def parse_float_alpha(value: Union[None, str, float, int]) -> Optional[float]: + """Parse an alpha value checking it's a valid float in the range 0 to 1. + + Args: + value: The input value to parse. + + Returns: + The parsed value as a float, or `None` if the value was None or equal 1. + + Raises: + PydanticCustomError: If the input value cannot be successfully parsed as a float in the expected range. + """ + if value is None: + return None + try: + if isinstance(value, str) and value.endswith('%'): + alpha = float(value[:-1]) / 100 + else: + alpha = float(value) + except ValueError: + raise PydanticCustomError('color_error', 'value is not a valid color: alpha values must be a valid float') + + if math.isclose(alpha, 1): + return None + elif 0 <= alpha <= 1: + return alpha + else: + raise PydanticCustomError('color_error', 'value is not a valid color: alpha values must be in the range 0 to 1') + + +def parse_hsl(h: str, h_units: str, sat: str, light: str, alpha: Optional[float] = None) -> RGBA: + """Parse raw hue, saturation, lightness, and alpha values and convert to RGBA. + + Args: + h: The hue value. + h_units: The unit for hue value. + sat: The saturation value. + light: The lightness value. + alpha: Alpha value. + + Returns: + An instance of `RGBA`. + """ + s_value, l_value = parse_color_value(sat, 100), parse_color_value(light, 100) + + h_value = float(h) + if h_units in {None, 'deg'}: + h_value = h_value % 360 / 360 + elif h_units == 'rad': + h_value = h_value % rads / rads + else: + # turns + h_value = h_value % 1 + + r, g, b = hls_to_rgb(h_value, l_value, s_value) + return RGBA(r, g, b, parse_float_alpha(alpha)) + + +def float_to_255(c: float) -> int: + """Converts a float value between 0 and 1 (inclusive) to an integer between 0 and 255 (inclusive). + + Args: + c: The float value to be converted. Must be between 0 and 1 (inclusive). + + Returns: + The integer equivalent of the given float value rounded to the nearest whole number. + + Raises: + ValueError: If the given float value is outside the acceptable range of 0 to 1 (inclusive). 
+ """ + return int(round(c * 255)) + + +COLORS_BY_NAME = { + 'aliceblue': (240, 248, 255), + 'antiquewhite': (250, 235, 215), + 'aqua': (0, 255, 255), + 'aquamarine': (127, 255, 212), + 'azure': (240, 255, 255), + 'beige': (245, 245, 220), + 'bisque': (255, 228, 196), + 'black': (0, 0, 0), + 'blanchedalmond': (255, 235, 205), + 'blue': (0, 0, 255), + 'blueviolet': (138, 43, 226), + 'brown': (165, 42, 42), + 'burlywood': (222, 184, 135), + 'cadetblue': (95, 158, 160), + 'chartreuse': (127, 255, 0), + 'chocolate': (210, 105, 30), + 'coral': (255, 127, 80), + 'cornflowerblue': (100, 149, 237), + 'cornsilk': (255, 248, 220), + 'crimson': (220, 20, 60), + 'cyan': (0, 255, 255), + 'darkblue': (0, 0, 139), + 'darkcyan': (0, 139, 139), + 'darkgoldenrod': (184, 134, 11), + 'darkgray': (169, 169, 169), + 'darkgreen': (0, 100, 0), + 'darkgrey': (169, 169, 169), + 'darkkhaki': (189, 183, 107), + 'darkmagenta': (139, 0, 139), + 'darkolivegreen': (85, 107, 47), + 'darkorange': (255, 140, 0), + 'darkorchid': (153, 50, 204), + 'darkred': (139, 0, 0), + 'darksalmon': (233, 150, 122), + 'darkseagreen': (143, 188, 143), + 'darkslateblue': (72, 61, 139), + 'darkslategray': (47, 79, 79), + 'darkslategrey': (47, 79, 79), + 'darkturquoise': (0, 206, 209), + 'darkviolet': (148, 0, 211), + 'deeppink': (255, 20, 147), + 'deepskyblue': (0, 191, 255), + 'dimgray': (105, 105, 105), + 'dimgrey': (105, 105, 105), + 'dodgerblue': (30, 144, 255), + 'firebrick': (178, 34, 34), + 'floralwhite': (255, 250, 240), + 'forestgreen': (34, 139, 34), + 'fuchsia': (255, 0, 255), + 'gainsboro': (220, 220, 220), + 'ghostwhite': (248, 248, 255), + 'gold': (255, 215, 0), + 'goldenrod': (218, 165, 32), + 'gray': (128, 128, 128), + 'green': (0, 128, 0), + 'greenyellow': (173, 255, 47), + 'grey': (128, 128, 128), + 'honeydew': (240, 255, 240), + 'hotpink': (255, 105, 180), + 'indianred': (205, 92, 92), + 'indigo': (75, 0, 130), + 'ivory': (255, 255, 240), + 'khaki': (240, 230, 140), + 'lavender': (230, 230, 250), + 'lavenderblush': (255, 240, 245), + 'lawngreen': (124, 252, 0), + 'lemonchiffon': (255, 250, 205), + 'lightblue': (173, 216, 230), + 'lightcoral': (240, 128, 128), + 'lightcyan': (224, 255, 255), + 'lightgoldenrodyellow': (250, 250, 210), + 'lightgray': (211, 211, 211), + 'lightgreen': (144, 238, 144), + 'lightgrey': (211, 211, 211), + 'lightpink': (255, 182, 193), + 'lightsalmon': (255, 160, 122), + 'lightseagreen': (32, 178, 170), + 'lightskyblue': (135, 206, 250), + 'lightslategray': (119, 136, 153), + 'lightslategrey': (119, 136, 153), + 'lightsteelblue': (176, 196, 222), + 'lightyellow': (255, 255, 224), + 'lime': (0, 255, 0), + 'limegreen': (50, 205, 50), + 'linen': (250, 240, 230), + 'magenta': (255, 0, 255), + 'maroon': (128, 0, 0), + 'mediumaquamarine': (102, 205, 170), + 'mediumblue': (0, 0, 205), + 'mediumorchid': (186, 85, 211), + 'mediumpurple': (147, 112, 219), + 'mediumseagreen': (60, 179, 113), + 'mediumslateblue': (123, 104, 238), + 'mediumspringgreen': (0, 250, 154), + 'mediumturquoise': (72, 209, 204), + 'mediumvioletred': (199, 21, 133), + 'midnightblue': (25, 25, 112), + 'mintcream': (245, 255, 250), + 'mistyrose': (255, 228, 225), + 'moccasin': (255, 228, 181), + 'navajowhite': (255, 222, 173), + 'navy': (0, 0, 128), + 'oldlace': (253, 245, 230), + 'olive': (128, 128, 0), + 'olivedrab': (107, 142, 35), + 'orange': (255, 165, 0), + 'orangered': (255, 69, 0), + 'orchid': (218, 112, 214), + 'palegoldenrod': (238, 232, 170), + 'palegreen': (152, 251, 152), + 'paleturquoise': (175, 238, 238), + 'palevioletred': 
(219, 112, 147), + 'papayawhip': (255, 239, 213), + 'peachpuff': (255, 218, 185), + 'peru': (205, 133, 63), + 'pink': (255, 192, 203), + 'plum': (221, 160, 221), + 'powderblue': (176, 224, 230), + 'purple': (128, 0, 128), + 'red': (255, 0, 0), + 'rosybrown': (188, 143, 143), + 'royalblue': (65, 105, 225), + 'saddlebrown': (139, 69, 19), + 'salmon': (250, 128, 114), + 'sandybrown': (244, 164, 96), + 'seagreen': (46, 139, 87), + 'seashell': (255, 245, 238), + 'sienna': (160, 82, 45), + 'silver': (192, 192, 192), + 'skyblue': (135, 206, 235), + 'slateblue': (106, 90, 205), + 'slategray': (112, 128, 144), + 'slategrey': (112, 128, 144), + 'snow': (255, 250, 250), + 'springgreen': (0, 255, 127), + 'steelblue': (70, 130, 180), + 'tan': (210, 180, 140), + 'teal': (0, 128, 128), + 'thistle': (216, 191, 216), + 'tomato': (255, 99, 71), + 'turquoise': (64, 224, 208), + 'violet': (238, 130, 238), + 'wheat': (245, 222, 179), + 'white': (255, 255, 255), + 'whitesmoke': (245, 245, 245), + 'yellow': (255, 255, 0), + 'yellowgreen': (154, 205, 50), +} + +COLORS_BY_VALUE = {v: k for k, v in COLORS_BY_NAME.items()} diff --git a/venv/lib/python3.10/site-packages/pydantic/config.py b/venv/lib/python3.10/site-packages/pydantic/config.py new file mode 100644 index 0000000000000000000000000000000000000000..1841a335a3ec86497e1c6d303e03be1b02580027 --- /dev/null +++ b/venv/lib/python3.10/site-packages/pydantic/config.py @@ -0,0 +1,1003 @@ +"""Configuration for Pydantic models.""" +from __future__ import annotations as _annotations + +from typing import TYPE_CHECKING, Any, Callable, Dict, List, Type, TypeVar, Union + +from typing_extensions import Literal, TypeAlias, TypedDict + +from ._migration import getattr_migration +from .aliases import AliasGenerator + +if TYPE_CHECKING: + from ._internal._generate_schema import GenerateSchema as _GenerateSchema + +__all__ = ('ConfigDict', 'with_config') + + +JsonValue: TypeAlias = Union[int, float, str, bool, None, List['JsonValue'], 'JsonDict'] +JsonDict: TypeAlias = Dict[str, JsonValue] + +JsonEncoder = Callable[[Any], Any] + +JsonSchemaExtraCallable: TypeAlias = Union[ + Callable[[JsonDict], None], + Callable[[JsonDict, Type[Any]], None], +] + +ExtraValues = Literal['allow', 'ignore', 'forbid'] + + +class ConfigDict(TypedDict, total=False): + """A TypedDict for configuring Pydantic behaviour.""" + + title: str | None + """The title for the generated JSON schema, defaults to the model's name""" + + str_to_lower: bool + """Whether to convert all characters to lowercase for str types. Defaults to `False`.""" + + str_to_upper: bool + """Whether to convert all characters to uppercase for str types. Defaults to `False`.""" + str_strip_whitespace: bool + """Whether to strip leading and trailing whitespace for str types.""" + + str_min_length: int + """The minimum length for str types. Defaults to `None`.""" + + str_max_length: int | None + """The maximum length for str types. Defaults to `None`.""" + + extra: ExtraValues | None + """ + Whether to ignore, allow, or forbid extra attributes during model initialization. Defaults to `'ignore'`. + + You can configure how pydantic handles the attributes that are not defined in the model: + + * `allow` - Allow any extra attributes. + * `forbid` - Forbid any extra attributes. + * `ignore` - Ignore any extra attributes. + + ```py + from pydantic import BaseModel, ConfigDict + + + class User(BaseModel): + model_config = ConfigDict(extra='ignore') # (1)! + + name: str + + + user = User(name='John Doe', age=20) # (2)! 
+ print(user) + #> name='John Doe' + ``` + + 1. This is the default behaviour. + 2. The `age` argument is ignored. + + Instead, with `extra='allow'`, the `age` argument is included: + + ```py + from pydantic import BaseModel, ConfigDict + + + class User(BaseModel): + model_config = ConfigDict(extra='allow') + + name: str + + + user = User(name='John Doe', age=20) # (1)! + print(user) + #> name='John Doe' age=20 + ``` + + 1. The `age` argument is included. + + With `extra='forbid'`, an error is raised: + + ```py + from pydantic import BaseModel, ConfigDict, ValidationError + + + class User(BaseModel): + model_config = ConfigDict(extra='forbid') + + name: str + + + try: + User(name='John Doe', age=20) + except ValidationError as e: + print(e) + ''' + 1 validation error for User + age + Extra inputs are not permitted [type=extra_forbidden, input_value=20, input_type=int] + ''' + ``` + """ + + frozen: bool + """ + Whether models are faux-immutable, i.e. whether `__setattr__` is allowed, and also generates + a `__hash__()` method for the model. This makes instances of the model potentially hashable if all the + attributes are hashable. Defaults to `False`. + + Note: + On V1, the inverse of this setting was called `allow_mutation`, and was `True` by default. + """ + + populate_by_name: bool + """ + Whether an aliased field may be populated by its name as given by the model + attribute, as well as the alias. Defaults to `False`. + + Note: + The name of this configuration setting was changed in **v2.0** from + `allow_population_by_field_name` to `populate_by_name`. + + ```py + from pydantic import BaseModel, ConfigDict, Field + + + class User(BaseModel): + model_config = ConfigDict(populate_by_name=True) + + name: str = Field(alias='full_name') # (1)! + age: int + + + user = User(full_name='John Doe', age=20) # (2)! + print(user) + #> name='John Doe' age=20 + user = User(name='John Doe', age=20) # (3)! + print(user) + #> name='John Doe' age=20 + ``` + + 1. The field `'name'` has an alias `'full_name'`. + 2. The model is populated by the alias `'full_name'`. + 3. The model is populated by the field name `'name'`. + """ + + use_enum_values: bool + """ + Whether to populate models with the `value` property of enums, rather than the raw enum. + This may be useful if you want to serialize `model.model_dump()` later. Defaults to `False`. + + !!! note + If you have an `Optional[Enum]` value that you set a default for, you need to use `validate_default=True` + for said Field to ensure that the `use_enum_values` flag takes effect on the default, as extracting an + enum's value occurs during validation, not serialization. + + ```py + from enum import Enum + from typing import Optional + + from pydantic import BaseModel, ConfigDict, Field + + + class SomeEnum(Enum): + FOO = 'foo' + BAR = 'bar' + BAZ = 'baz' + + + class SomeModel(BaseModel): + model_config = ConfigDict(use_enum_values=True) + + some_enum: SomeEnum + another_enum: Optional[SomeEnum] = Field(default=SomeEnum.FOO, validate_default=True) + + + model1 = SomeModel(some_enum=SomeEnum.BAR) + print(model1.model_dump()) + # {'some_enum': 'bar', 'another_enum': 'foo'} + + model2 = SomeModel(some_enum=SomeEnum.BAR, another_enum=SomeEnum.BAZ) + print(model2.model_dump()) + #> {'some_enum': 'bar', 'another_enum': 'baz'} + ``` + """ + + validate_assignment: bool + """ + Whether to validate the data when the model is changed. Defaults to `False`. + + The default behavior of Pydantic is to validate the data when the model is created. 
+ + In case the user changes the data after the model is created, the model is _not_ revalidated. + + ```py + from pydantic import BaseModel + + class User(BaseModel): + name: str + + user = User(name='John Doe') # (1)! + print(user) + #> name='John Doe' + user.name = 123 # (1)! + print(user) + #> name=123 + ``` + + 1. The validation happens only when the model is created. + 2. The validation does not happen when the data is changed. + + In case you want to revalidate the model when the data is changed, you can use `validate_assignment=True`: + + ```py + from pydantic import BaseModel, ValidationError + + class User(BaseModel, validate_assignment=True): # (1)! + name: str + + user = User(name='John Doe') # (2)! + print(user) + #> name='John Doe' + try: + user.name = 123 # (3)! + except ValidationError as e: + print(e) + ''' + 1 validation error for User + name + Input should be a valid string [type=string_type, input_value=123, input_type=int] + ''' + ``` + + 1. You can either use class keyword arguments, or `model_config` to set `validate_assignment=True`. + 2. The validation happens when the model is created. + 3. The validation _also_ happens when the data is changed. + """ + + arbitrary_types_allowed: bool + """ + Whether arbitrary types are allowed for field types. Defaults to `False`. + + ```py + from pydantic import BaseModel, ConfigDict, ValidationError + + # This is not a pydantic model, it's an arbitrary class + class Pet: + def __init__(self, name: str): + self.name = name + + class Model(BaseModel): + model_config = ConfigDict(arbitrary_types_allowed=True) + + pet: Pet + owner: str + + pet = Pet(name='Hedwig') + # A simple check of instance type is used to validate the data + model = Model(owner='Harry', pet=pet) + print(model) + #> pet=<__main__.Pet object at 0x0123456789ab> owner='Harry' + print(model.pet) + #> <__main__.Pet object at 0x0123456789ab> + print(model.pet.name) + #> Hedwig + print(type(model.pet)) + #> + try: + # If the value is not an instance of the type, it's invalid + Model(owner='Harry', pet='Hedwig') + except ValidationError as e: + print(e) + ''' + 1 validation error for Model + pet + Input should be an instance of Pet [type=is_instance_of, input_value='Hedwig', input_type=str] + ''' + + # Nothing in the instance of the arbitrary type is checked + # Here name probably should have been a str, but it's not validated + pet2 = Pet(name=42) + model2 = Model(owner='Harry', pet=pet2) + print(model2) + #> pet=<__main__.Pet object at 0x0123456789ab> owner='Harry' + print(model2.pet) + #> <__main__.Pet object at 0x0123456789ab> + print(model2.pet.name) + #> 42 + print(type(model2.pet)) + #> + ``` + """ + + from_attributes: bool + """ + Whether to build models and look up discriminators of tagged unions using python object attributes. + """ + + loc_by_alias: bool + """Whether to use the actual key provided in the data (e.g. alias) for error `loc`s rather than the field's name. Defaults to `True`.""" + + alias_generator: Callable[[str], str] | AliasGenerator | None + """ + A callable that takes a field name and returns an alias for it + or an instance of [`AliasGenerator`][pydantic.aliases.AliasGenerator]. Defaults to `None`. + + When using a callable, the alias generator is used for both validation and serialization. + If you want to use different alias generators for validation and serialization, you can use + [`AliasGenerator`][pydantic.aliases.AliasGenerator] instead. + + If data source field names do not match your code style (e. g. 
CamelCase fields), + you can automatically generate aliases using `alias_generator`. Here's an example with + a basic callable: + + ```py + from pydantic import BaseModel, ConfigDict + from pydantic.alias_generators import to_pascal + + class Voice(BaseModel): + model_config = ConfigDict(alias_generator=to_pascal) + + name: str + language_code: str + + voice = Voice(Name='Filiz', LanguageCode='tr-TR') + print(voice.language_code) + #> tr-TR + print(voice.model_dump(by_alias=True)) + #> {'Name': 'Filiz', 'LanguageCode': 'tr-TR'} + ``` + + If you want to use different alias generators for validation and serialization, you can use + [`AliasGenerator`][pydantic.aliases.AliasGenerator]. + + ```py + from pydantic import AliasGenerator, BaseModel, ConfigDict + from pydantic.alias_generators import to_camel, to_pascal + + class Athlete(BaseModel): + first_name: str + last_name: str + sport: str + + model_config = ConfigDict( + alias_generator=AliasGenerator( + validation_alias=to_camel, + serialization_alias=to_pascal, + ) + ) + + athlete = Athlete(firstName='John', lastName='Doe', sport='track') + print(athlete.model_dump(by_alias=True)) + #> {'FirstName': 'John', 'LastName': 'Doe', 'Sport': 'track'} + ``` + + Note: + Pydantic offers three built-in alias generators: [`to_pascal`][pydantic.alias_generators.to_pascal], + [`to_camel`][pydantic.alias_generators.to_camel], and [`to_snake`][pydantic.alias_generators.to_snake]. + """ + + ignored_types: tuple[type, ...] + """A tuple of types that may occur as values of class attributes without annotations. This is + typically used for custom descriptors (classes that behave like `property`). If an attribute is set on a + class without an annotation and has a type that is not in this tuple (or otherwise recognized by + _pydantic_), an error will be raised. Defaults to `()`. + """ + + allow_inf_nan: bool + """Whether to allow infinity (`+inf` an `-inf`) and NaN values to float fields. Defaults to `True`.""" + + json_schema_extra: JsonDict | JsonSchemaExtraCallable | None + """A dict or callable to provide extra JSON schema properties. Defaults to `None`.""" + + json_encoders: dict[type[object], JsonEncoder] | None + """ + A `dict` of custom JSON encoders for specific types. Defaults to `None`. + + !!! warning "Deprecated" + This config option is a carryover from v1. + We originally planned to remove it in v2 but didn't have a 1:1 replacement so we are keeping it for now. + It is still deprecated and will likely be removed in the future. + """ + + # new in V2 + strict: bool + """ + _(new in V2)_ If `True`, strict validation is applied to all fields on the model. + + By default, Pydantic attempts to coerce values to the correct type, when possible. + + There are situations in which you may want to disable this behavior, and instead raise an error if a value's type + does not match the field's type annotation. + + To configure strict mode for all fields on a model, you can set `strict=True` on the model. + + ```py + from pydantic import BaseModel, ConfigDict + + class Model(BaseModel): + model_config = ConfigDict(strict=True) + + name: str + age: int + ``` + + See [Strict Mode](../concepts/strict_mode.md) for more details. + + See the [Conversion Table](../concepts/conversion_table.md) for more details on how Pydantic converts data in both + strict and lax modes. 
+ """ + # whether instances of models and dataclasses (including subclass instances) should re-validate, default 'never' + revalidate_instances: Literal['always', 'never', 'subclass-instances'] + """ + When and how to revalidate models and dataclasses during validation. Accepts the string + values of `'never'`, `'always'` and `'subclass-instances'`. Defaults to `'never'`. + + - `'never'` will not revalidate models and dataclasses during validation + - `'always'` will revalidate models and dataclasses during validation + - `'subclass-instances'` will revalidate models and dataclasses during validation if the instance is a + subclass of the model or dataclass + + By default, model and dataclass instances are not revalidated during validation. + + ```py + from typing import List + + from pydantic import BaseModel + + class User(BaseModel, revalidate_instances='never'): # (1)! + hobbies: List[str] + + class SubUser(User): + sins: List[str] + + class Transaction(BaseModel): + user: User + + my_user = User(hobbies=['reading']) + t = Transaction(user=my_user) + print(t) + #> user=User(hobbies=['reading']) + + my_user.hobbies = [1] # (2)! + t = Transaction(user=my_user) # (3)! + print(t) + #> user=User(hobbies=[1]) + + my_sub_user = SubUser(hobbies=['scuba diving'], sins=['lying']) + t = Transaction(user=my_sub_user) + print(t) + #> user=SubUser(hobbies=['scuba diving'], sins=['lying']) + ``` + + 1. `revalidate_instances` is set to `'never'` by **default. + 2. The assignment is not validated, unless you set `validate_assignment` to `True` in the model's config. + 3. Since `revalidate_instances` is set to `never`, this is not revalidated. + + If you want to revalidate instances during validation, you can set `revalidate_instances` to `'always'` + in the model's config. + + ```py + from typing import List + + from pydantic import BaseModel, ValidationError + + class User(BaseModel, revalidate_instances='always'): # (1)! + hobbies: List[str] + + class SubUser(User): + sins: List[str] + + class Transaction(BaseModel): + user: User + + my_user = User(hobbies=['reading']) + t = Transaction(user=my_user) + print(t) + #> user=User(hobbies=['reading']) + + my_user.hobbies = [1] + try: + t = Transaction(user=my_user) # (2)! + except ValidationError as e: + print(e) + ''' + 1 validation error for Transaction + user.hobbies.0 + Input should be a valid string [type=string_type, input_value=1, input_type=int] + ''' + + my_sub_user = SubUser(hobbies=['scuba diving'], sins=['lying']) + t = Transaction(user=my_sub_user) + print(t) # (3)! + #> user=User(hobbies=['scuba diving']) + ``` + + 1. `revalidate_instances` is set to `'always'`. + 2. The model is revalidated, since `revalidate_instances` is set to `'always'`. + 3. Using `'never'` we would have gotten `user=SubUser(hobbies=['scuba diving'], sins=['lying'])`. + + It's also possible to set `revalidate_instances` to `'subclass-instances'` to only revalidate instances + of subclasses of the model. + + ```py + from typing import List + + from pydantic import BaseModel + + class User(BaseModel, revalidate_instances='subclass-instances'): # (1)! + hobbies: List[str] + + class SubUser(User): + sins: List[str] + + class Transaction(BaseModel): + user: User + + my_user = User(hobbies=['reading']) + t = Transaction(user=my_user) + print(t) + #> user=User(hobbies=['reading']) + + my_user.hobbies = [1] + t = Transaction(user=my_user) # (2)! 
+ print(t) + #> user=User(hobbies=[1]) + + my_sub_user = SubUser(hobbies=['scuba diving'], sins=['lying']) + t = Transaction(user=my_sub_user) + print(t) # (3)! + #> user=User(hobbies=['scuba diving']) + ``` + + 1. `revalidate_instances` is set to `'subclass-instances'`. + 2. This is not revalidated, since `my_user` is not a subclass of `User`. + 3. Using `'never'` we would have gotten `user=SubUser(hobbies=['scuba diving'], sins=['lying'])`. + """ + + ser_json_timedelta: Literal['iso8601', 'float'] + """ + The format of JSON serialized timedeltas. Accepts the string values of `'iso8601'` and + `'float'`. Defaults to `'iso8601'`. + + - `'iso8601'` will serialize timedeltas to ISO 8601 durations. + - `'float'` will serialize timedeltas to the total number of seconds. + """ + + ser_json_bytes: Literal['utf8', 'base64'] + """ + The encoding of JSON serialized bytes. Accepts the string values of `'utf8'` and `'base64'`. + Defaults to `'utf8'`. + + - `'utf8'` will serialize bytes to UTF-8 strings. + - `'base64'` will serialize bytes to URL safe base64 strings. + """ + + ser_json_inf_nan: Literal['null', 'constants'] + """ + The encoding of JSON serialized infinity and NaN float values. Accepts the string values of `'null'` and `'constants'`. + Defaults to `'null'`. + + - `'null'` will serialize infinity and NaN values as `null`. + - `'constants'` will serialize infinity and NaN values as `Infinity` and `NaN`. + """ + + # whether to validate default values during validation, default False + validate_default: bool + """Whether to validate default values during validation. Defaults to `False`.""" + + validate_return: bool + """whether to validate the return value from call validators. Defaults to `False`.""" + + protected_namespaces: tuple[str, ...] + """ + A `tuple` of strings that prevent model to have field which conflict with them. + Defaults to `('model_', )`). + + Pydantic prevents collisions between model attributes and `BaseModel`'s own methods by + namespacing them with the prefix `model_`. + + ```py + import warnings + + from pydantic import BaseModel + + warnings.filterwarnings('error') # Raise warnings as errors + + try: + + class Model(BaseModel): + model_prefixed_field: str + + except UserWarning as e: + print(e) + ''' + Field "model_prefixed_field" has conflict with protected namespace "model_". + + You may be able to resolve this warning by setting `model_config['protected_namespaces'] = ()`. + ''' + ``` + + You can customize this behavior using the `protected_namespaces` setting: + + ```py + import warnings + + from pydantic import BaseModel, ConfigDict + + warnings.filterwarnings('error') # Raise warnings as errors + + try: + + class Model(BaseModel): + model_prefixed_field: str + also_protect_field: str + + model_config = ConfigDict( + protected_namespaces=('protect_me_', 'also_protect_') + ) + + except UserWarning as e: + print(e) + ''' + Field "also_protect_field" has conflict with protected namespace "also_protect_". + + You may be able to resolve this warning by setting `model_config['protected_namespaces'] = ('protect_me_',)`. + ''' + ``` + + While Pydantic will only emit a warning when an item is in a protected namespace but does not actually have a collision, + an error _is_ raised if there is an actual collision with an existing attribute: + + ```py + from pydantic import BaseModel + + try: + + class Model(BaseModel): + model_validate: str + + except NameError as e: + print(e) + ''' + Field "model_validate" conflicts with member > of protected namespace "model_". 
+ ''' + ``` + """ + + hide_input_in_errors: bool + """ + Whether to hide inputs when printing errors. Defaults to `False`. + + Pydantic shows the input value and type when it raises `ValidationError` during the validation. + + ```py + from pydantic import BaseModel, ValidationError + + class Model(BaseModel): + a: str + + try: + Model(a=123) + except ValidationError as e: + print(e) + ''' + 1 validation error for Model + a + Input should be a valid string [type=string_type, input_value=123, input_type=int] + ''' + ``` + + You can hide the input value and type by setting the `hide_input_in_errors` config to `True`. + + ```py + from pydantic import BaseModel, ConfigDict, ValidationError + + class Model(BaseModel): + a: str + model_config = ConfigDict(hide_input_in_errors=True) + + try: + Model(a=123) + except ValidationError as e: + print(e) + ''' + 1 validation error for Model + a + Input should be a valid string [type=string_type] + ''' + ``` + """ + + defer_build: bool + """ + Whether to defer model validator and serializer construction until the first model validation. + + This can be useful to avoid the overhead of building models which are only + used nested within other models, or when you want to manually define type namespace via + [`Model.model_rebuild(_types_namespace=...)`][pydantic.BaseModel.model_rebuild]. Defaults to False. + """ + + plugin_settings: dict[str, object] | None + """A `dict` of settings for plugins. Defaults to `None`. + + See [Pydantic Plugins](../concepts/plugins.md) for details. + """ + + schema_generator: type[_GenerateSchema] | None + """ + A custom core schema generator class to use when generating JSON schemas. + Useful if you want to change the way types are validated across an entire model/schema. Defaults to `None`. + + The `GenerateSchema` interface is subject to change, currently only the `string_schema` method is public. + + See [#6737](https://github.com/pydantic/pydantic/pull/6737) for details. + """ + + json_schema_serialization_defaults_required: bool + """ + Whether fields with default values should be marked as required in the serialization schema. Defaults to `False`. + + This ensures that the serialization schema will reflect the fact a field with a default will always be present + when serializing the model, even though it is not required for validation. + + However, there are scenarios where this may be undesirable — in particular, if you want to share the schema + between validation and serialization, and don't mind fields with defaults being marked as not required during + serialization. See [#7209](https://github.com/pydantic/pydantic/issues/7209) for more details. + + ```py + from pydantic import BaseModel, ConfigDict + + class Model(BaseModel): + a: str = 'a' + + model_config = ConfigDict(json_schema_serialization_defaults_required=True) + + print(Model.model_json_schema(mode='validation')) + ''' + { + 'properties': {'a': {'default': 'a', 'title': 'A', 'type': 'string'}}, + 'title': 'Model', + 'type': 'object', + } + ''' + print(Model.model_json_schema(mode='serialization')) + ''' + { + 'properties': {'a': {'default': 'a', 'title': 'A', 'type': 'string'}}, + 'required': ['a'], + 'title': 'Model', + 'type': 'object', + } + ''' + ``` + """ + + json_schema_mode_override: Literal['validation', 'serialization', None] + """ + If not `None`, the specified mode will be used to generate the JSON schema regardless of what `mode` was passed to + the function call. Defaults to `None`. 
+ + This provides a way to force the JSON schema generation to reflect a specific mode, e.g., to always use the + validation schema. + + It can be useful when using frameworks (such as FastAPI) that may generate different schemas for validation + and serialization that must both be referenced from the same schema; when this happens, we automatically append + `-Input` to the definition reference for the validation schema and `-Output` to the definition reference for the + serialization schema. By specifying a `json_schema_mode_override` though, this prevents the conflict between + the validation and serialization schemas (since both will use the specified schema), and so prevents the suffixes + from being added to the definition references. + + ```py + from pydantic import BaseModel, ConfigDict, Json + + class Model(BaseModel): + a: Json[int] # requires a string to validate, but will dump an int + + print(Model.model_json_schema(mode='serialization')) + ''' + { + 'properties': {'a': {'title': 'A', 'type': 'integer'}}, + 'required': ['a'], + 'title': 'Model', + 'type': 'object', + } + ''' + + class ForceInputModel(Model): + # the following ensures that even with mode='serialization', we + # will get the schema that would be generated for validation. + model_config = ConfigDict(json_schema_mode_override='validation') + + print(ForceInputModel.model_json_schema(mode='serialization')) + ''' + { + 'properties': { + 'a': { + 'contentMediaType': 'application/json', + 'contentSchema': {'type': 'integer'}, + 'title': 'A', + 'type': 'string', + } + }, + 'required': ['a'], + 'title': 'ForceInputModel', + 'type': 'object', + } + ''' + ``` + """ + + coerce_numbers_to_str: bool + """ + If `True`, enables automatic coercion of any `Number` type to `str` in "lax" (non-strict) mode. Defaults to `False`. + + Pydantic doesn't allow number types (`int`, `float`, `Decimal`) to be coerced as type `str` by default. + + ```py + from decimal import Decimal + + from pydantic import BaseModel, ConfigDict, ValidationError + + class Model(BaseModel): + value: str + + try: + print(Model(value=42)) + except ValidationError as e: + print(e) + ''' + 1 validation error for Model + value + Input should be a valid string [type=string_type, input_value=42, input_type=int] + ''' + + class Model(BaseModel): + model_config = ConfigDict(coerce_numbers_to_str=True) + + value: str + + repr(Model(value=42).value) + #> "42" + repr(Model(value=42.13).value) + #> "42.13" + repr(Model(value=Decimal('42.13')).value) + #> "42.13" + ``` + """ + + regex_engine: Literal['rust-regex', 'python-re'] + """ + The regex engine to be used for pattern validation. + Defaults to `'rust-regex'`. + + - `rust-regex` uses the [`regex`](https://docs.rs/regex) Rust crate, + which is non-backtracking and therefore more DDoS resistant, but does not support all regex features. + - `python-re` use the [`re`](https://docs.python.org/3/library/re.html) module, + which supports all regex features, but may be slower. 
+ + ```py + from pydantic import BaseModel, ConfigDict, Field, ValidationError + + class Model(BaseModel): + model_config = ConfigDict(regex_engine='python-re') + + value: str = Field(pattern=r'^abc(?=def)') + + print(Model(value='abcdef').value) + #> abcdef + + try: + print(Model(value='abxyzcdef')) + except ValidationError as e: + print(e) + ''' + 1 validation error for Model + value + String should match pattern '^abc(?=def)' [type=string_pattern_mismatch, input_value='abxyzcdef', input_type=str] + ''' + ``` + """ + + validation_error_cause: bool + """ + If `True`, Python exceptions that were part of a validation failure will be shown as an exception group as a cause. Can be useful for debugging. Defaults to `False`. + + Note: + Python 3.10 and older don't support exception groups natively. <=3.10, backport must be installed: `pip install exceptiongroup`. + + Note: + The structure of validation errors are likely to change in future Pydantic versions. Pydantic offers no guarantees about their structure. Should be used for visual traceback debugging only. + """ + + use_attribute_docstrings: bool + ''' + Whether docstrings of attributes (bare string literals immediately following the attribute declaration) + should be used for field descriptions. Defaults to `False`. + + ```py + from pydantic import BaseModel, ConfigDict, Field + + + class Model(BaseModel): + model_config = ConfigDict(use_attribute_docstrings=True) + + x: str + """ + Example of an attribute docstring + """ + + y: int = Field(description="Description in Field") + """ + Description in Field overrides attribute docstring + """ + + + print(Model.model_fields["x"].description) + # > Example of an attribute docstring + print(Model.model_fields["y"].description) + # > Description in Field + ``` + This requires the source code of the class to be available at runtime. + + !!! warning "Usage with `TypedDict`" + Due to current limitations, attribute docstrings detection may not work as expected when using `TypedDict` + (in particular when multiple `TypedDict` classes have the same name in the same source file). The behavior + can be different depending on the Python version used. + ''' + + cache_strings: bool | Literal['all', 'keys', 'none'] + """ + Whether to cache strings to avoid constructing new Python objects. Defaults to True. + + Enabling this setting should significantly improve validation performance while increasing memory usage slightly. + + - `True` or `'all'` (the default): cache all strings + - `'keys'`: cache only dictionary keys + - `False` or `'none'`: no caching + + !!! note + `True` or `'all'` is required to cache strings during general validation because + validators don't know if they're in a key or a value. + + !!! tip + If repeated strings are rare, it's recommended to use `'keys'` or `'none'` to reduce memory usage, + as the performance difference is minimal if repeated strings are rare. + """ + + +_TypeT = TypeVar('_TypeT', bound=type) + + +def with_config(config: ConfigDict) -> Callable[[_TypeT], _TypeT]: + """Usage docs: https://docs.pydantic.dev/2.7/concepts/config/#configuration-with-dataclass-from-the-standard-library-or-typeddict + + A convenience decorator to set a [Pydantic configuration](config.md) on a `TypedDict` or a `dataclass` from the standard library. + + Although the configuration can be set using the `__pydantic_config__` attribute, it does not play well with type checkers, + especially with `TypedDict`. + + !!! 
example "Usage" + + ```py + from typing_extensions import TypedDict + + from pydantic import ConfigDict, TypeAdapter, with_config + + @with_config(ConfigDict(str_to_lower=True)) + class Model(TypedDict): + x: str + + ta = TypeAdapter(Model) + + print(ta.validate_python({'x': 'ABC'})) + #> {'x': 'abc'} + ``` + """ + + def inner(TypedDictClass: _TypeT, /) -> _TypeT: + TypedDictClass.__pydantic_config__ = config + return TypedDictClass + + return inner + + +__getattr__ = getattr_migration(__name__) diff --git a/venv/lib/python3.10/site-packages/pydantic/dataclasses.py b/venv/lib/python3.10/site-packages/pydantic/dataclasses.py new file mode 100644 index 0000000000000000000000000000000000000000..c00de3216b83f9568d846f18d30b86515ff7db5e --- /dev/null +++ b/venv/lib/python3.10/site-packages/pydantic/dataclasses.py @@ -0,0 +1,327 @@ +"""Provide an enhanced dataclass that performs validation.""" +from __future__ import annotations as _annotations + +import dataclasses +import sys +import types +from typing import TYPE_CHECKING, Any, Callable, Generic, NoReturn, TypeVar, overload + +from typing_extensions import Literal, TypeGuard, dataclass_transform + +from ._internal import _config, _decorators, _typing_extra +from ._internal import _dataclasses as _pydantic_dataclasses +from ._migration import getattr_migration +from .config import ConfigDict +from .fields import Field, FieldInfo + +if TYPE_CHECKING: + from ._internal._dataclasses import PydanticDataclass + +__all__ = 'dataclass', 'rebuild_dataclass' + +_T = TypeVar('_T') + +if sys.version_info >= (3, 10): + + @dataclass_transform(field_specifiers=(dataclasses.field, Field)) + @overload + def dataclass( + *, + init: Literal[False] = False, + repr: bool = True, + eq: bool = True, + order: bool = False, + unsafe_hash: bool = False, + frozen: bool = False, + config: ConfigDict | type[object] | None = None, + validate_on_init: bool | None = None, + kw_only: bool = ..., + slots: bool = ..., + ) -> Callable[[type[_T]], type[PydanticDataclass]]: # type: ignore + ... + + @dataclass_transform(field_specifiers=(dataclasses.field, Field)) + @overload + def dataclass( + _cls: type[_T], # type: ignore + *, + init: Literal[False] = False, + repr: bool = True, + eq: bool = True, + order: bool = False, + unsafe_hash: bool = False, + frozen: bool = False, + config: ConfigDict | type[object] | None = None, + validate_on_init: bool | None = None, + kw_only: bool = ..., + slots: bool = ..., + ) -> type[PydanticDataclass]: + ... + +else: + + @dataclass_transform(field_specifiers=(dataclasses.field, Field)) + @overload + def dataclass( + *, + init: Literal[False] = False, + repr: bool = True, + eq: bool = True, + order: bool = False, + unsafe_hash: bool = False, + frozen: bool = False, + config: ConfigDict | type[object] | None = None, + validate_on_init: bool | None = None, + ) -> Callable[[type[_T]], type[PydanticDataclass]]: # type: ignore + ... + + @dataclass_transform(field_specifiers=(dataclasses.field, Field)) + @overload + def dataclass( + _cls: type[_T], # type: ignore + *, + init: Literal[False] = False, + repr: bool = True, + eq: bool = True, + order: bool = False, + unsafe_hash: bool = False, + frozen: bool = False, + config: ConfigDict | type[object] | None = None, + validate_on_init: bool | None = None, + ) -> type[PydanticDataclass]: + ... 
+ + +@dataclass_transform(field_specifiers=(dataclasses.field, Field)) +def dataclass( # noqa: C901 + _cls: type[_T] | None = None, + *, + init: Literal[False] = False, + repr: bool = True, + eq: bool = True, + order: bool = False, + unsafe_hash: bool = False, + frozen: bool = False, + config: ConfigDict | type[object] | None = None, + validate_on_init: bool | None = None, + kw_only: bool = False, + slots: bool = False, +) -> Callable[[type[_T]], type[PydanticDataclass]] | type[PydanticDataclass]: + """Usage docs: https://docs.pydantic.dev/2.7/concepts/dataclasses/ + + A decorator used to create a Pydantic-enhanced dataclass, similar to the standard Python `dataclass`, + but with added validation. + + This function should be used similarly to `dataclasses.dataclass`. + + Args: + _cls: The target `dataclass`. + init: Included for signature compatibility with `dataclasses.dataclass`, and is passed through to + `dataclasses.dataclass` when appropriate. If specified, must be set to `False`, as pydantic inserts its + own `__init__` function. + repr: A boolean indicating whether to include the field in the `__repr__` output. + eq: Determines if a `__eq__` method should be generated for the class. + order: Determines if comparison magic methods should be generated, such as `__lt__`, but not `__eq__`. + unsafe_hash: Determines if a `__hash__` method should be included in the class, as in `dataclasses.dataclass`. + frozen: Determines if the generated class should be a 'frozen' `dataclass`, which does not allow its + attributes to be modified after it has been initialized. + config: The Pydantic config to use for the `dataclass`. + validate_on_init: A deprecated parameter included for backwards compatibility; in V2, all Pydantic dataclasses + are validated on init. + kw_only: Determines if `__init__` method parameters must be specified by keyword only. Defaults to `False`. + slots: Determines if the generated class should be a 'slots' `dataclass`, which does not allow the addition of + new attributes after instantiation. + + Returns: + A decorator that accepts a class as its argument and returns a Pydantic `dataclass`. + + Raises: + AssertionError: Raised if `init` is not `False` or `validate_on_init` is `False`. + """ + assert init is False, 'pydantic.dataclasses.dataclass only supports init=False' + assert validate_on_init is not False, 'validate_on_init=False is no longer supported' + + if sys.version_info >= (3, 10): + kwargs = dict(kw_only=kw_only, slots=slots) + else: + kwargs = {} + + def make_pydantic_fields_compatible(cls: type[Any]) -> None: + """Make sure that stdlib `dataclasses` understands `Field` kwargs like `kw_only` + To do that, we simply change + `x: int = pydantic.Field(..., kw_only=True)` + into + `x: int = dataclasses.field(default=pydantic.Field(..., kw_only=True), kw_only=True)` + """ + for annotation_cls in cls.__mro__: + # In Python < 3.9, `__annotations__` might not be present if there are no fields. + # we therefore need to use `getattr` to avoid an `AttributeError`. + annotations = getattr(annotation_cls, '__annotations__', []) + for field_name in annotations: + field_value = getattr(cls, field_name, None) + # Process only if this is an instance of `FieldInfo`. + if not isinstance(field_value, FieldInfo): + continue + + # Initialize arguments for the standard `dataclasses.field`. 
+ field_args: dict = {'default': field_value} + + # Handle `kw_only` for Python 3.10+ + if sys.version_info >= (3, 10) and field_value.kw_only: + field_args['kw_only'] = True + + # Set `repr` attribute if it's explicitly specified to be not `True`. + if field_value.repr is not True: + field_args['repr'] = field_value.repr + + setattr(cls, field_name, dataclasses.field(**field_args)) + # In Python 3.8, dataclasses checks cls.__dict__['__annotations__'] for annotations, + # so we must make sure it's initialized before we add to it. + if cls.__dict__.get('__annotations__') is None: + cls.__annotations__ = {} + cls.__annotations__[field_name] = annotations[field_name] + + def create_dataclass(cls: type[Any]) -> type[PydanticDataclass]: + """Create a Pydantic dataclass from a regular dataclass. + + Args: + cls: The class to create the Pydantic dataclass from. + + Returns: + A Pydantic dataclass. + """ + original_cls = cls + + config_dict = config + if config_dict is None: + # if not explicitly provided, read from the type + cls_config = getattr(cls, '__pydantic_config__', None) + if cls_config is not None: + config_dict = cls_config + config_wrapper = _config.ConfigWrapper(config_dict) + decorators = _decorators.DecoratorInfos.build(cls) + + # Keep track of the original __doc__ so that we can restore it after applying the dataclasses decorator + # Otherwise, classes with no __doc__ will have their signature added into the JSON schema description, + # since dataclasses.dataclass will set this as the __doc__ + original_doc = cls.__doc__ + + if _pydantic_dataclasses.is_builtin_dataclass(cls): + # Don't preserve the docstring for vanilla dataclasses, as it may include the signature + # This matches v1 behavior, and there was an explicit test for it + original_doc = None + + # We don't want to add validation to the existing std lib dataclass, so we will subclass it + # If the class is generic, we need to make sure the subclass also inherits from Generic + # with all the same parameters. + bases = (cls,) + if issubclass(cls, Generic): + generic_base = Generic[cls.__parameters__] # type: ignore + bases = bases + (generic_base,) + cls = types.new_class(cls.__name__, bases) + + make_pydantic_fields_compatible(cls) + + cls = dataclasses.dataclass( # type: ignore[call-overload] + cls, + # the value of init here doesn't affect anything except that it makes it easier to generate a signature + init=True, + repr=repr, + eq=eq, + order=order, + unsafe_hash=unsafe_hash, + frozen=frozen, + **kwargs, + ) + + cls.__pydantic_decorators__ = decorators # type: ignore + cls.__doc__ = original_doc + cls.__module__ = original_cls.__module__ + cls.__qualname__ = original_cls.__qualname__ + pydantic_complete = _pydantic_dataclasses.complete_dataclass( + cls, config_wrapper, raise_errors=False, types_namespace=None + ) + cls.__pydantic_complete__ = pydantic_complete # type: ignore + return cls + + if _cls is None: + return create_dataclass + + return create_dataclass(_cls) + + +__getattr__ = getattr_migration(__name__) + +if (3, 8) <= sys.version_info < (3, 11): + # Monkeypatch dataclasses.InitVar so that typing doesn't error if it occurs as a type when evaluating type hints + # Starting in 3.11, typing.get_type_hints will not raise an error if the retrieved type hints are not callable. + + def _call_initvar(*args: Any, **kwargs: Any) -> NoReturn: + """This function does nothing but raise an error that is as similar as possible to what you'd get + if you were to try calling `InitVar[int]()` without this monkeypatch. 
The whole purpose is just + to ensure typing._type_check does not error if the type hint evaluates to `InitVar[]`. + """ + raise TypeError("'InitVar' object is not callable") + + dataclasses.InitVar.__call__ = _call_initvar + + +def rebuild_dataclass( + cls: type[PydanticDataclass], + *, + force: bool = False, + raise_errors: bool = True, + _parent_namespace_depth: int = 2, + _types_namespace: dict[str, Any] | None = None, +) -> bool | None: + """Try to rebuild the pydantic-core schema for the dataclass. + + This may be necessary when one of the annotations is a ForwardRef which could not be resolved during + the initial attempt to build the schema, and automatic rebuilding fails. + + This is analogous to `BaseModel.model_rebuild`. + + Args: + cls: The class to rebuild the pydantic-core schema for. + force: Whether to force the rebuilding of the schema, defaults to `False`. + raise_errors: Whether to raise errors, defaults to `True`. + _parent_namespace_depth: The depth level of the parent namespace, defaults to 2. + _types_namespace: The types namespace, defaults to `None`. + + Returns: + Returns `None` if the schema is already "complete" and rebuilding was not required. + If rebuilding _was_ required, returns `True` if rebuilding was successful, otherwise `False`. + """ + if not force and cls.__pydantic_complete__: + return None + else: + if _types_namespace is not None: + types_namespace: dict[str, Any] | None = _types_namespace.copy() + else: + if _parent_namespace_depth > 0: + frame_parent_ns = _typing_extra.parent_frame_namespace(parent_depth=_parent_namespace_depth) or {} + # Note: we may need to add something similar to cls.__pydantic_parent_namespace__ from BaseModel + # here when implementing handling of recursive generics. See BaseModel.model_rebuild for reference. + types_namespace = frame_parent_ns + else: + types_namespace = {} + + types_namespace = _typing_extra.get_cls_types_namespace(cls, types_namespace) + return _pydantic_dataclasses.complete_dataclass( + cls, + _config.ConfigWrapper(cls.__pydantic_config__, check=False), + raise_errors=raise_errors, + types_namespace=types_namespace, + ) + + +def is_pydantic_dataclass(__cls: type[Any]) -> TypeGuard[type[PydanticDataclass]]: + """Whether a class is a pydantic dataclass. + + Args: + __cls: The class. + + Returns: + `True` if the class is a pydantic dataclass, `False` otherwise. 
+ """ + return dataclasses.is_dataclass(__cls) and '__pydantic_validator__' in __cls.__dict__ diff --git a/venv/lib/python3.10/site-packages/pydantic/datetime_parse.py b/venv/lib/python3.10/site-packages/pydantic/datetime_parse.py new file mode 100644 index 0000000000000000000000000000000000000000..902219df7cdd011de195a377ea37f9a270708998 --- /dev/null +++ b/venv/lib/python3.10/site-packages/pydantic/datetime_parse.py @@ -0,0 +1,4 @@ +"""The `datetime_parse` module is a backport module from V1.""" +from ._migration import getattr_migration + +__getattr__ = getattr_migration(__name__) diff --git a/venv/lib/python3.10/site-packages/pydantic/decorator.py b/venv/lib/python3.10/site-packages/pydantic/decorator.py new file mode 100644 index 0000000000000000000000000000000000000000..c3643468d9935d2d071aeb95d88686811f1cb7de --- /dev/null +++ b/venv/lib/python3.10/site-packages/pydantic/decorator.py @@ -0,0 +1,4 @@ +"""The `decorator` module is a backport module from V1.""" +from ._migration import getattr_migration + +__getattr__ = getattr_migration(__name__) diff --git a/venv/lib/python3.10/site-packages/pydantic/env_settings.py b/venv/lib/python3.10/site-packages/pydantic/env_settings.py new file mode 100644 index 0000000000000000000000000000000000000000..662f59005a09a1934155f9ae6ebab9a2d129d33f --- /dev/null +++ b/venv/lib/python3.10/site-packages/pydantic/env_settings.py @@ -0,0 +1,4 @@ +"""The `env_settings` module is a backport module from V1.""" +from ._migration import getattr_migration + +__getattr__ = getattr_migration(__name__) diff --git a/venv/lib/python3.10/site-packages/pydantic/error_wrappers.py b/venv/lib/python3.10/site-packages/pydantic/error_wrappers.py new file mode 100644 index 0000000000000000000000000000000000000000..5144eeee41e12d208ff8a582aa57e7dc96c3a93e --- /dev/null +++ b/venv/lib/python3.10/site-packages/pydantic/error_wrappers.py @@ -0,0 +1,4 @@ +"""The `error_wrappers` module is a backport module from V1.""" +from ._migration import getattr_migration + +__getattr__ = getattr_migration(__name__) diff --git a/venv/lib/python3.10/site-packages/pydantic/errors.py b/venv/lib/python3.10/site-packages/pydantic/errors.py new file mode 100644 index 0000000000000000000000000000000000000000..e4fadd8cfa61baed42e624fd8b88e7e3aa92b668 --- /dev/null +++ b/venv/lib/python3.10/site-packages/pydantic/errors.py @@ -0,0 +1,153 @@ +"""Pydantic-specific errors.""" +from __future__ import annotations as _annotations + +import re + +from typing_extensions import Literal, Self + +from ._migration import getattr_migration +from .version import version_short + +__all__ = ( + 'PydanticUserError', + 'PydanticUndefinedAnnotation', + 'PydanticImportError', + 'PydanticSchemaGenerationError', + 'PydanticInvalidForJsonSchema', + 'PydanticErrorCodes', +) + +# We use this URL to allow for future flexibility about how we host the docs, while allowing for Pydantic +# code in the while with "old" URLs to still work. +# 'u' refers to "user errors" - e.g. errors caused by developers using pydantic, as opposed to validation errors. 
+DEV_ERROR_DOCS_URL = f'https://errors.pydantic.dev/{version_short()}/u/' +PydanticErrorCodes = Literal[ + 'class-not-fully-defined', + 'custom-json-schema', + 'decorator-missing-field', + 'discriminator-no-field', + 'discriminator-alias-type', + 'discriminator-needs-literal', + 'discriminator-alias', + 'discriminator-validator', + 'callable-discriminator-no-tag', + 'typed-dict-version', + 'model-field-overridden', + 'model-field-missing-annotation', + 'config-both', + 'removed-kwargs', + 'invalid-for-json-schema', + 'json-schema-already-used', + 'base-model-instantiated', + 'undefined-annotation', + 'schema-for-unknown-type', + 'import-error', + 'create-model-field-definitions', + 'create-model-config-base', + 'validator-no-fields', + 'validator-invalid-fields', + 'validator-instance-method', + 'root-validator-pre-skip', + 'model-serializer-instance-method', + 'validator-field-config-info', + 'validator-v1-signature', + 'validator-signature', + 'field-serializer-signature', + 'model-serializer-signature', + 'multiple-field-serializers', + 'invalid_annotated_type', + 'type-adapter-config-unused', + 'root-model-extra', + 'unevaluable-type-annotation', + 'dataclass-init-false-extra-allow', + 'clashing-init-and-init-var', + 'model-config-invalid-field-name', +] + + +class PydanticErrorMixin: + """A mixin class for common functionality shared by all Pydantic-specific errors. + + Attributes: + message: A message describing the error. + code: An optional error code from PydanticErrorCodes enum. + """ + + def __init__(self, message: str, *, code: PydanticErrorCodes | None) -> None: + self.message = message + self.code = code + + def __str__(self) -> str: + if self.code is None: + return self.message + else: + return f'{self.message}\n\nFor further information visit {DEV_ERROR_DOCS_URL}{self.code}' + + +class PydanticUserError(PydanticErrorMixin, TypeError): + """An error raised due to incorrect use of Pydantic.""" + + +class PydanticUndefinedAnnotation(PydanticErrorMixin, NameError): + """A subclass of `NameError` raised when handling undefined annotations during `CoreSchema` generation. + + Attributes: + name: Name of the error. + message: Description of the error. + """ + + def __init__(self, name: str, message: str) -> None: + self.name = name + super().__init__(message=message, code='undefined-annotation') + + @classmethod + def from_name_error(cls, name_error: NameError) -> Self: + """Convert a `NameError` to a `PydanticUndefinedAnnotation` error. + + Args: + name_error: `NameError` to be converted. + + Returns: + Converted `PydanticUndefinedAnnotation` error. + """ + try: + name = name_error.name # type: ignore # python > 3.10 + except AttributeError: + name = re.search(r".*'(.+?)'", str(name_error)).group(1) # type: ignore[union-attr] + return cls(name=name, message=str(name_error)) + + +class PydanticImportError(PydanticErrorMixin, ImportError): + """An error raised when an import fails due to module changes between V1 and V2. + + Attributes: + message: Description of the error. + """ + + def __init__(self, message: str) -> None: + super().__init__(message, code='import-error') + + +class PydanticSchemaGenerationError(PydanticUserError): + """An error raised during failures to generate a `CoreSchema` for some type. + + Attributes: + message: Description of the error. 
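As a brief illustration (a sketch, not part of the vendored module): the `code` passed to these errors is appended to the message as a documentation link built from the URL above.

```python
from pydantic import PydanticUserError

err = PydanticUserError('`regex` is removed. use `pattern` instead', code='removed-kwargs')
print(str(err))
# `regex` is removed. use `pattern` instead
#
# For further information visit https://errors.pydantic.dev/<version>/u/removed-kwargs
```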
+ """ + + def __init__(self, message: str) -> None: + super().__init__(message, code='schema-for-unknown-type') + + +class PydanticInvalidForJsonSchema(PydanticUserError): + """An error raised during failures to generate a JSON schema for some `CoreSchema`. + + Attributes: + message: Description of the error. + """ + + def __init__(self, message: str) -> None: + super().__init__(message, code='invalid-for-json-schema') + + +__getattr__ = getattr_migration(__name__) diff --git a/venv/lib/python3.10/site-packages/pydantic/fields.py b/venv/lib/python3.10/site-packages/pydantic/fields.py new file mode 100644 index 0000000000000000000000000000000000000000..312bd92921bdfb5e58b97c662e92bb51cb133c87 --- /dev/null +++ b/venv/lib/python3.10/site-packages/pydantic/fields.py @@ -0,0 +1,1237 @@ +"""Defining fields on models.""" +from __future__ import annotations as _annotations + +import dataclasses +import inspect +import sys +import typing +from copy import copy +from dataclasses import Field as DataclassField +from functools import cached_property +from typing import Any, ClassVar +from warnings import warn + +import annotated_types +import typing_extensions +from pydantic_core import PydanticUndefined +from typing_extensions import Literal, TypeAlias, Unpack, deprecated + +from . import types +from ._internal import _decorators, _fields, _generics, _internal_dataclass, _repr, _typing_extra, _utils +from .aliases import AliasChoices, AliasPath +from .config import JsonDict +from .errors import PydanticUserError +from .warnings import PydanticDeprecatedSince20 + +if typing.TYPE_CHECKING: + from ._internal._repr import ReprArgs +else: + # See PyCharm issues https://youtrack.jetbrains.com/issue/PY-21915 + # and https://youtrack.jetbrains.com/issue/PY-51428 + DeprecationWarning = PydanticDeprecatedSince20 + +__all__ = 'Field', 'PrivateAttr', 'computed_field' + + +_Unset: Any = PydanticUndefined + +if sys.version_info >= (3, 13): + import warnings + + Deprecated: TypeAlias = warnings.deprecated | deprecated +else: + Deprecated: TypeAlias = deprecated + + +class _FromFieldInfoInputs(typing_extensions.TypedDict, total=False): + """This class exists solely to add type checking for the `**kwargs` in `FieldInfo.from_field`.""" + + annotation: type[Any] | None + default_factory: typing.Callable[[], Any] | None + alias: str | None + alias_priority: int | None + validation_alias: str | AliasPath | AliasChoices | None + serialization_alias: str | None + title: str | None + description: str | None + examples: list[Any] | None + exclude: bool | None + gt: float | None + ge: float | None + lt: float | None + le: float | None + multiple_of: float | None + strict: bool | None + min_length: int | None + max_length: int | None + pattern: str | typing.Pattern[str] | None + allow_inf_nan: bool | None + max_digits: int | None + decimal_places: int | None + union_mode: Literal['smart', 'left_to_right'] | None + discriminator: str | types.Discriminator | None + deprecated: Deprecated | str | bool | None + json_schema_extra: JsonDict | typing.Callable[[JsonDict], None] | None + frozen: bool | None + validate_default: bool | None + repr: bool + init: bool | None + init_var: bool | None + kw_only: bool | None + coerce_numbers_to_str: bool | None + + +class _FieldInfoInputs(_FromFieldInfoInputs, total=False): + """This class exists solely to add type checking for the `**kwargs` in `FieldInfo.__init__`.""" + + default: Any + + +class FieldInfo(_repr.Representation): + """This class holds information about a field. 
+ + `FieldInfo` is used for any field definition regardless of whether the [`Field()`][pydantic.fields.Field] + function is explicitly used. + + !!! warning + You generally shouldn't be creating `FieldInfo` directly, you'll only need to use it when accessing + [`BaseModel`][pydantic.main.BaseModel] `.model_fields` internals. + + Attributes: + annotation: The type annotation of the field. + default: The default value of the field. + default_factory: The factory function used to construct the default for the field. + alias: The alias name of the field. + alias_priority: The priority of the field's alias. + validation_alias: The validation alias of the field. + serialization_alias: The serialization alias of the field. + title: The title of the field. + description: The description of the field. + examples: List of examples of the field. + exclude: Whether to exclude the field from the model serialization. + discriminator: Field name or Discriminator for discriminating the type in a tagged union. + deprecated: A deprecation message, an instance of `warnings.deprecated` or the `typing_extensions.deprecated` backport, + or a boolean. If `True`, a default deprecation message will be emitted when accessing the field. + json_schema_extra: A dict or callable to provide extra JSON schema properties. + frozen: Whether the field is frozen. + validate_default: Whether to validate the default value of the field. + repr: Whether to include the field in representation of the model. + init: Whether the field should be included in the constructor of the dataclass. + init_var: Whether the field should _only_ be included in the constructor of the dataclass, and not stored. + kw_only: Whether the field should be a keyword-only argument in the constructor of the dataclass. + metadata: List of metadata constraints. 
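For orientation (an illustrative sketch, not part of the vendored source; `Item` is an invented model): these attributes are what you see when inspecting `model_fields`.

```python
from pydantic import BaseModel, Field


class Item(BaseModel):
    name: str = Field(description='Display name', min_length=1)
    price: float = 0.0


name_info = Item.model_fields['name']
print(name_info.description)    # Display name
print(name_info.is_required())  # True -- no default was given
print(name_info.metadata)       # [MinLen(min_length=1)]
print(Item.model_fields['price'].default)  # 0.0
```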
+ """ + + annotation: type[Any] | None + default: Any + default_factory: typing.Callable[[], Any] | None + alias: str | None + alias_priority: int | None + validation_alias: str | AliasPath | AliasChoices | None + serialization_alias: str | None + title: str | None + description: str | None + examples: list[Any] | None + exclude: bool | None + discriminator: str | types.Discriminator | None + deprecated: Deprecated | str | bool | None + json_schema_extra: JsonDict | typing.Callable[[JsonDict], None] | None + frozen: bool | None + validate_default: bool | None + repr: bool + init: bool | None + init_var: bool | None + kw_only: bool | None + metadata: list[Any] + + __slots__ = ( + 'annotation', + 'default', + 'default_factory', + 'alias', + 'alias_priority', + 'validation_alias', + 'serialization_alias', + 'title', + 'description', + 'examples', + 'exclude', + 'discriminator', + 'deprecated', + 'json_schema_extra', + 'frozen', + 'validate_default', + 'repr', + 'init', + 'init_var', + 'kw_only', + 'metadata', + '_attributes_set', + ) + + # used to convert kwargs to metadata/constraints, + # None has a special meaning - these items are collected into a `PydanticGeneralMetadata` + metadata_lookup: ClassVar[dict[str, typing.Callable[[Any], Any] | None]] = { + 'strict': types.Strict, + 'gt': annotated_types.Gt, + 'ge': annotated_types.Ge, + 'lt': annotated_types.Lt, + 'le': annotated_types.Le, + 'multiple_of': annotated_types.MultipleOf, + 'min_length': annotated_types.MinLen, + 'max_length': annotated_types.MaxLen, + 'pattern': None, + 'allow_inf_nan': None, + 'max_digits': None, + 'decimal_places': None, + 'union_mode': None, + 'coerce_numbers_to_str': None, + } + + def __init__(self, **kwargs: Unpack[_FieldInfoInputs]) -> None: + """This class should generally not be initialized directly; instead, use the `pydantic.fields.Field` function + or one of the constructor classmethods. + + See the signature of `pydantic.fields.Field` for more details about the expected arguments. 
+ """ + self._attributes_set = {k: v for k, v in kwargs.items() if v is not _Unset} + kwargs = {k: _DefaultValues.get(k) if v is _Unset else v for k, v in kwargs.items()} # type: ignore + self.annotation, annotation_metadata = self._extract_metadata(kwargs.get('annotation')) + + default = kwargs.pop('default', PydanticUndefined) + if default is Ellipsis: + self.default = PydanticUndefined + else: + self.default = default + + self.default_factory = kwargs.pop('default_factory', None) + + if self.default is not PydanticUndefined and self.default_factory is not None: + raise TypeError('cannot specify both default and default_factory') + + self.title = kwargs.pop('title', None) + self.alias = kwargs.pop('alias', None) + self.validation_alias = kwargs.pop('validation_alias', None) + self.serialization_alias = kwargs.pop('serialization_alias', None) + alias_is_set = any(alias is not None for alias in (self.alias, self.validation_alias, self.serialization_alias)) + self.alias_priority = kwargs.pop('alias_priority', None) or 2 if alias_is_set else None + self.description = kwargs.pop('description', None) + self.examples = kwargs.pop('examples', None) + self.exclude = kwargs.pop('exclude', None) + self.discriminator = kwargs.pop('discriminator', None) + # For compatibility with FastAPI<=0.110.0, we preserve the existing value if it is not overridden + self.deprecated = kwargs.pop('deprecated', getattr(self, 'deprecated', None)) + self.repr = kwargs.pop('repr', True) + self.json_schema_extra = kwargs.pop('json_schema_extra', None) + self.validate_default = kwargs.pop('validate_default', None) + self.frozen = kwargs.pop('frozen', None) + # currently only used on dataclasses + self.init = kwargs.pop('init', None) + self.init_var = kwargs.pop('init_var', None) + self.kw_only = kwargs.pop('kw_only', None) + + self.metadata = self._collect_metadata(kwargs) + annotation_metadata # type: ignore + + @staticmethod + def from_field(default: Any = PydanticUndefined, **kwargs: Unpack[_FromFieldInfoInputs]) -> FieldInfo: + """Create a new `FieldInfo` object with the `Field` function. + + Args: + default: The default value for the field. Defaults to Undefined. + **kwargs: Additional arguments dictionary. + + Raises: + TypeError: If 'annotation' is passed as a keyword argument. + + Returns: + A new FieldInfo object with the given parameters. + + Example: + This is how you can create a field with default value like this: + + ```python + import pydantic + + class MyModel(pydantic.BaseModel): + foo: int = pydantic.Field(4) + ``` + """ + if 'annotation' in kwargs: + raise TypeError('"annotation" is not permitted as a Field keyword argument') + return FieldInfo(default=default, **kwargs) + + @staticmethod + def from_annotation(annotation: type[Any]) -> FieldInfo: + """Creates a `FieldInfo` instance from a bare annotation. + + This function is used internally to create a `FieldInfo` from a bare annotation like this: + + ```python + import pydantic + + class MyModel(pydantic.BaseModel): + foo: int # <-- like this + ``` + + We also account for the case where the annotation can be an instance of `Annotated` and where + one of the (not first) arguments in `Annotated` is an instance of `FieldInfo`, e.g.: + + ```python + import annotated_types + from typing_extensions import Annotated + + import pydantic + + class MyModel(pydantic.BaseModel): + foo: Annotated[int, annotated_types.Gt(42)] + bar: Annotated[int, pydantic.Field(gt=42)] + ``` + + Args: + annotation: An annotation object. 
+ + Returns: + An instance of the field metadata. + """ + final = False + if _typing_extra.is_finalvar(annotation): + final = True + if annotation is not typing_extensions.Final: + annotation = typing_extensions.get_args(annotation)[0] + + if _typing_extra.is_annotated(annotation): + first_arg, *extra_args = typing_extensions.get_args(annotation) + if _typing_extra.is_finalvar(first_arg): + final = True + field_info_annotations = [a for a in extra_args if isinstance(a, FieldInfo)] + field_info = FieldInfo.merge_field_infos(*field_info_annotations, annotation=first_arg) + if field_info: + new_field_info = copy(field_info) + new_field_info.annotation = first_arg + new_field_info.frozen = final or field_info.frozen + metadata: list[Any] = [] + for a in extra_args: + if _typing_extra.is_deprecated_instance(a): + new_field_info.deprecated = a.message + elif not isinstance(a, FieldInfo): + metadata.append(a) + else: + metadata.extend(a.metadata) + new_field_info.metadata = metadata + return new_field_info + + return FieldInfo(annotation=annotation, frozen=final or None) + + @staticmethod + def from_annotated_attribute(annotation: type[Any], default: Any) -> FieldInfo: + """Create `FieldInfo` from an annotation with a default value. + + This is used in cases like the following: + + ```python + import annotated_types + from typing_extensions import Annotated + + import pydantic + + class MyModel(pydantic.BaseModel): + foo: int = 4 # <-- like this + bar: Annotated[int, annotated_types.Gt(4)] = 4 # <-- or this + spam: Annotated[int, pydantic.Field(gt=4)] = 4 # <-- or this + ``` + + Args: + annotation: The type annotation of the field. + default: The default value of the field. + + Returns: + A field object with the passed values. + """ + if annotation is default: + raise PydanticUserError( + 'Error when building FieldInfo from annotated attribute. 
' + "Make sure you don't have any field name clashing with a type annotation ", + code='unevaluable-type-annotation', + ) + + final = False + if _typing_extra.is_finalvar(annotation): + final = True + if annotation is not typing_extensions.Final: + annotation = typing_extensions.get_args(annotation)[0] + + if isinstance(default, FieldInfo): + default.annotation, annotation_metadata = FieldInfo._extract_metadata(annotation) + default.metadata += annotation_metadata + default = default.merge_field_infos( + *[x for x in annotation_metadata if isinstance(x, FieldInfo)], default, annotation=default.annotation + ) + default.frozen = final or default.frozen + return default + elif isinstance(default, dataclasses.Field): + init_var = False + if annotation is dataclasses.InitVar: + init_var = True + annotation = Any + elif isinstance(annotation, dataclasses.InitVar): + init_var = True + annotation = annotation.type + pydantic_field = FieldInfo._from_dataclass_field(default) + pydantic_field.annotation, annotation_metadata = FieldInfo._extract_metadata(annotation) + pydantic_field.metadata += annotation_metadata + pydantic_field = pydantic_field.merge_field_infos( + *[x for x in annotation_metadata if isinstance(x, FieldInfo)], + pydantic_field, + annotation=pydantic_field.annotation, + ) + pydantic_field.frozen = final or pydantic_field.frozen + pydantic_field.init_var = init_var + pydantic_field.init = getattr(default, 'init', None) + pydantic_field.kw_only = getattr(default, 'kw_only', None) + return pydantic_field + else: + if _typing_extra.is_annotated(annotation): + first_arg, *extra_args = typing_extensions.get_args(annotation) + field_infos = [a for a in extra_args if isinstance(a, FieldInfo)] + field_info = FieldInfo.merge_field_infos(*field_infos, annotation=first_arg, default=default) + metadata: list[Any] = [] + for a in extra_args: + if _typing_extra.is_deprecated_instance(a): + field_info.deprecated = a.message + elif not isinstance(a, FieldInfo): + metadata.append(a) + else: + metadata.extend(a.metadata) + field_info.metadata = metadata + return field_info + + return FieldInfo(annotation=annotation, default=default, frozen=final or None) + + @staticmethod + def merge_field_infos(*field_infos: FieldInfo, **overrides: Any) -> FieldInfo: + """Merge `FieldInfo` instances keeping only explicitly set attributes. + + Later `FieldInfo` instances override earlier ones. + + Returns: + FieldInfo: A merged FieldInfo instance. 
+ """ + flattened_field_infos: list[FieldInfo] = [] + for field_info in field_infos: + flattened_field_infos.extend(x for x in field_info.metadata if isinstance(x, FieldInfo)) + flattened_field_infos.append(field_info) + field_infos = tuple(flattened_field_infos) + if len(field_infos) == 1: + # No merging necessary, but we still need to make a copy and apply the overrides + field_info = copy(field_infos[0]) + field_info._attributes_set.update(overrides) + + default_override = overrides.pop('default', PydanticUndefined) + if default_override is Ellipsis: + default_override = PydanticUndefined + if default_override is not PydanticUndefined: + field_info.default = default_override + + for k, v in overrides.items(): + setattr(field_info, k, v) + return field_info # type: ignore + + new_kwargs: dict[str, Any] = {} + metadata = {} + for field_info in field_infos: + new_kwargs.update(field_info._attributes_set) + for x in field_info.metadata: + if not isinstance(x, FieldInfo): + metadata[type(x)] = x + new_kwargs.update(overrides) + field_info = FieldInfo(**new_kwargs) + field_info.metadata = list(metadata.values()) + return field_info + + @staticmethod + def _from_dataclass_field(dc_field: DataclassField[Any]) -> FieldInfo: + """Return a new `FieldInfo` instance from a `dataclasses.Field` instance. + + Args: + dc_field: The `dataclasses.Field` instance to convert. + + Returns: + The corresponding `FieldInfo` instance. + + Raises: + TypeError: If any of the `FieldInfo` kwargs does not match the `dataclass.Field` kwargs. + """ + default = dc_field.default + if default is dataclasses.MISSING: + default = PydanticUndefined + + if dc_field.default_factory is dataclasses.MISSING: + default_factory: typing.Callable[[], Any] | None = None + else: + default_factory = dc_field.default_factory + + # use the `Field` function so in correct kwargs raise the correct `TypeError` + dc_field_metadata = {k: v for k, v in dc_field.metadata.items() if k in _FIELD_ARG_NAMES} + return Field(default=default, default_factory=default_factory, repr=dc_field.repr, **dc_field_metadata) + + @staticmethod + def _extract_metadata(annotation: type[Any] | None) -> tuple[type[Any] | None, list[Any]]: + """Tries to extract metadata/constraints from an annotation if it uses `Annotated`. + + Args: + annotation: The type hint annotation for which metadata has to be extracted. + + Returns: + A tuple containing the extracted metadata type and the list of extra arguments. + """ + if annotation is not None: + if _typing_extra.is_annotated(annotation): + first_arg, *extra_args = typing_extensions.get_args(annotation) + return first_arg, list(extra_args) + + return annotation, [] + + @staticmethod + def _collect_metadata(kwargs: dict[str, Any]) -> list[Any]: + """Collect annotations from kwargs. + + Args: + kwargs: Keyword arguments passed to the function. + + Returns: + A list of metadata objects - a combination of `annotated_types.BaseMetadata` and + `PydanticMetadata`. 
+ """ + metadata: list[Any] = [] + general_metadata = {} + for key, value in list(kwargs.items()): + try: + marker = FieldInfo.metadata_lookup[key] + except KeyError: + continue + + del kwargs[key] + if value is not None: + if marker is None: + general_metadata[key] = value + else: + metadata.append(marker(value)) + if general_metadata: + metadata.append(_fields.pydantic_general_metadata(**general_metadata)) + return metadata + + @property + def deprecation_message(self) -> str | None: + """The deprecation message to be emitted, or `None` if not set.""" + if self.deprecated is None: + return None + if isinstance(self.deprecated, bool): + return 'deprecated' if self.deprecated else None + return self.deprecated if isinstance(self.deprecated, str) else self.deprecated.message + + def get_default(self, *, call_default_factory: bool = False) -> Any: + """Get the default value. + + We expose an option for whether to call the default_factory (if present), as calling it may + result in side effects that we want to avoid. However, there are times when it really should + be called (namely, when instantiating a model via `model_construct`). + + Args: + call_default_factory: Whether to call the default_factory or not. Defaults to `False`. + + Returns: + The default value, calling the default factory if requested or `None` if not set. + """ + if self.default_factory is None: + return _utils.smart_deepcopy(self.default) + elif call_default_factory: + return self.default_factory() + else: + return None + + def is_required(self) -> bool: + """Check if the field is required (i.e., does not have a default value or factory). + + Returns: + `True` if the field is required, `False` otherwise. + """ + return self.default is PydanticUndefined and self.default_factory is None + + def rebuild_annotation(self) -> Any: + """Attempts to rebuild the original annotation for use in function signatures. + + If metadata is present, it adds it to the original annotation using + `Annotated`. Otherwise, it returns the original annotation as-is. + + Note that because the metadata has been flattened, the original annotation + may not be reconstructed exactly as originally provided, e.g. if the original + type had unrecognized annotations, or was annotated with a call to `pydantic.Field`. + + Returns: + The rebuilt annotation. + """ + if not self.metadata: + return self.annotation + else: + # Annotated arguments must be a tuple + return typing_extensions.Annotated[(self.annotation, *self.metadata)] # type: ignore + + def apply_typevars_map(self, typevars_map: dict[Any, Any] | None, types_namespace: dict[str, Any] | None) -> None: + """Apply a `typevars_map` to the annotation. + + This method is used when analyzing parametrized generic types to replace typevars with their concrete types. + + This method applies the `typevars_map` to the annotation in place. + + Args: + typevars_map: A dictionary mapping type variables to their concrete types. + types_namespace (dict | None): A dictionary containing related types to the annotated type. + + See Also: + pydantic._internal._generics.replace_types is used for replacing the typevars with + their concrete types. 
+ """ + annotation = _typing_extra.eval_type_lenient(self.annotation, types_namespace) + self.annotation = _generics.replace_types(annotation, typevars_map) + + def __repr_args__(self) -> ReprArgs: + yield 'annotation', _repr.PlainRepr(_repr.display_as_type(self.annotation)) + yield 'required', self.is_required() + + for s in self.__slots__: + if s == '_attributes_set': + continue + if s == 'annotation': + continue + elif s == 'metadata' and not self.metadata: + continue + elif s == 'repr' and self.repr is True: + continue + if s == 'frozen' and self.frozen is False: + continue + if s == 'validation_alias' and self.validation_alias == self.alias: + continue + if s == 'serialization_alias' and self.serialization_alias == self.alias: + continue + if s == 'default' and self.default is not PydanticUndefined: + yield 'default', self.default + elif s == 'default_factory' and self.default_factory is not None: + yield 'default_factory', _repr.PlainRepr(_repr.display_as_type(self.default_factory)) + else: + value = getattr(self, s) + if value is not None and value is not PydanticUndefined: + yield s, value + + +class _EmptyKwargs(typing_extensions.TypedDict): + """This class exists solely to ensure that type checking warns about passing `**extra` in `Field`.""" + + +_DefaultValues = dict( + default=..., + default_factory=None, + alias=None, + alias_priority=None, + validation_alias=None, + serialization_alias=None, + title=None, + description=None, + examples=None, + exclude=None, + discriminator=None, + json_schema_extra=None, + frozen=None, + validate_default=None, + repr=True, + init=None, + init_var=None, + kw_only=None, + pattern=None, + strict=None, + gt=None, + ge=None, + lt=None, + le=None, + multiple_of=None, + allow_inf_nan=None, + max_digits=None, + decimal_places=None, + min_length=None, + max_length=None, + coerce_numbers_to_str=None, +) + + +def Field( # noqa: C901 + default: Any = PydanticUndefined, + *, + default_factory: typing.Callable[[], Any] | None = _Unset, + alias: str | None = _Unset, + alias_priority: int | None = _Unset, + validation_alias: str | AliasPath | AliasChoices | None = _Unset, + serialization_alias: str | None = _Unset, + title: str | None = _Unset, + description: str | None = _Unset, + examples: list[Any] | None = _Unset, + exclude: bool | None = _Unset, + discriminator: str | types.Discriminator | None = _Unset, + deprecated: Deprecated | str | bool | None = _Unset, + json_schema_extra: JsonDict | typing.Callable[[JsonDict], None] | None = _Unset, + frozen: bool | None = _Unset, + validate_default: bool | None = _Unset, + repr: bool = _Unset, + init: bool | None = _Unset, + init_var: bool | None = _Unset, + kw_only: bool | None = _Unset, + pattern: str | typing.Pattern[str] | None = _Unset, + strict: bool | None = _Unset, + coerce_numbers_to_str: bool | None = _Unset, + gt: float | None = _Unset, + ge: float | None = _Unset, + lt: float | None = _Unset, + le: float | None = _Unset, + multiple_of: float | None = _Unset, + allow_inf_nan: bool | None = _Unset, + max_digits: int | None = _Unset, + decimal_places: int | None = _Unset, + min_length: int | None = _Unset, + max_length: int | None = _Unset, + union_mode: Literal['smart', 'left_to_right'] = _Unset, + **extra: Unpack[_EmptyKwargs], +) -> Any: + """Usage docs: https://docs.pydantic.dev/2.7/concepts/fields + + Create a field for objects that can be configured. + + Used to provide extra information about a field, either for the model schema or complex validation. 
Some arguments + apply only to number fields (`int`, `float`, `Decimal`) and some apply only to `str`. + + Note: + - Any `_Unset` objects will be replaced by the corresponding value defined in the `_DefaultValues` dictionary. If a key for the `_Unset` object is not found in the `_DefaultValues` dictionary, it will default to `None` + + Args: + default: Default value if the field is not set. + default_factory: A callable to generate the default value, such as :func:`~datetime.utcnow`. + alias: The name to use for the attribute when validating or serializing by alias. + This is often used for things like converting between snake and camel case. + alias_priority: Priority of the alias. This affects whether an alias generator is used. + validation_alias: Like `alias`, but only affects validation, not serialization. + serialization_alias: Like `alias`, but only affects serialization, not validation. + title: Human-readable title. + description: Human-readable description. + examples: Example values for this field. + exclude: Whether to exclude the field from the model serialization. + discriminator: Field name or Discriminator for discriminating the type in a tagged union. + deprecated: A deprecation message, an instance of `warnings.deprecated` or the `typing_extensions.deprecated` backport, + or a boolean. If `True`, a default deprecation message will be emitted when accessing the field. + json_schema_extra: A dict or callable to provide extra JSON schema properties. + frozen: Whether the field is frozen. If true, attempts to change the value on an instance will raise an error. + validate_default: If `True`, apply validation to the default value every time you create an instance. + Otherwise, for performance reasons, the default value of the field is trusted and not validated. + repr: A boolean indicating whether to include the field in the `__repr__` output. + init: Whether the field should be included in the constructor of the dataclass. + (Only applies to dataclasses.) + init_var: Whether the field should _only_ be included in the constructor of the dataclass. + (Only applies to dataclasses.) + kw_only: Whether the field should be a keyword-only argument in the constructor of the dataclass. + (Only applies to dataclasses.) + coerce_numbers_to_str: Whether to enable coercion of any `Number` type to `str` (not applicable in `strict` mode). + strict: If `True`, strict validation is applied to the field. + See [Strict Mode](../concepts/strict_mode.md) for details. + gt: Greater than. If set, value must be greater than this. Only applicable to numbers. + ge: Greater than or equal. If set, value must be greater than or equal to this. Only applicable to numbers. + lt: Less than. If set, value must be less than this. Only applicable to numbers. + le: Less than or equal. If set, value must be less than or equal to this. Only applicable to numbers. + multiple_of: Value must be a multiple of this. Only applicable to numbers. + min_length: Minimum length for iterables. + max_length: Maximum length for iterables. + pattern: Pattern for strings (a regular expression). + allow_inf_nan: Allow `inf`, `-inf`, `nan`. Only applicable to numbers. + max_digits: Maximum number of allow digits for strings. + decimal_places: Maximum number of decimal places allowed for numbers. + union_mode: The strategy to apply when validating a union. Can be `smart` (the default), or `left_to_right`. + See [Union Mode](standard_library_types.md#union-mode) for details. 
+ extra: (Deprecated) Extra fields that will be included in the JSON schema. + + !!! warning Deprecated + The `extra` kwargs is deprecated. Use `json_schema_extra` instead. + + Returns: + A new [`FieldInfo`][pydantic.fields.FieldInfo]. The return annotation is `Any` so `Field` can be used on + type-annotated fields without causing a type error. + """ + # Check deprecated and removed params from V1. This logic should eventually be removed. + const = extra.pop('const', None) # type: ignore + if const is not None: + raise PydanticUserError('`const` is removed, use `Literal` instead', code='removed-kwargs') + + min_items = extra.pop('min_items', None) # type: ignore + if min_items is not None: + warn('`min_items` is deprecated and will be removed, use `min_length` instead', DeprecationWarning) + if min_length in (None, _Unset): + min_length = min_items # type: ignore + + max_items = extra.pop('max_items', None) # type: ignore + if max_items is not None: + warn('`max_items` is deprecated and will be removed, use `max_length` instead', DeprecationWarning) + if max_length in (None, _Unset): + max_length = max_items # type: ignore + + unique_items = extra.pop('unique_items', None) # type: ignore + if unique_items is not None: + raise PydanticUserError( + ( + '`unique_items` is removed, use `Set` instead' + '(this feature is discussed in https://github.com/pydantic/pydantic-core/issues/296)' + ), + code='removed-kwargs', + ) + + allow_mutation = extra.pop('allow_mutation', None) # type: ignore + if allow_mutation is not None: + warn('`allow_mutation` is deprecated and will be removed. use `frozen` instead', DeprecationWarning) + if allow_mutation is False: + frozen = True + + regex = extra.pop('regex', None) # type: ignore + if regex is not None: + raise PydanticUserError('`regex` is removed. use `pattern` instead', code='removed-kwargs') + + if isinstance(pattern, typing.Pattern): + pattern = pattern.pattern + + if extra: + warn( + 'Using extra keyword arguments on `Field` is deprecated and will be removed.' + ' Use `json_schema_extra` instead.' + f' (Extra keys: {", ".join(k.__repr__() for k in extra.keys())})', + DeprecationWarning, + ) + if not json_schema_extra or json_schema_extra is _Unset: + json_schema_extra = extra # type: ignore + + if ( + validation_alias + and validation_alias is not _Unset + and not isinstance(validation_alias, (str, AliasChoices, AliasPath)) + ): + raise TypeError('Invalid `validation_alias` type. it should be `str`, `AliasChoices`, or `AliasPath`') + + if serialization_alias in (_Unset, None) and isinstance(alias, str): + serialization_alias = alias + + if validation_alias in (_Unset, None): + validation_alias = alias + + include = extra.pop('include', None) # type: ignore + if include is not None: + warn('`include` is deprecated and does nothing. 
It will be removed, use `exclude` instead', DeprecationWarning) + + return FieldInfo.from_field( + default, + default_factory=default_factory, + alias=alias, + alias_priority=alias_priority, + validation_alias=validation_alias, + serialization_alias=serialization_alias, + title=title, + description=description, + examples=examples, + exclude=exclude, + discriminator=discriminator, + deprecated=deprecated, + json_schema_extra=json_schema_extra, + frozen=frozen, + pattern=pattern, + validate_default=validate_default, + repr=repr, + init=init, + init_var=init_var, + kw_only=kw_only, + coerce_numbers_to_str=coerce_numbers_to_str, + strict=strict, + gt=gt, + ge=ge, + lt=lt, + le=le, + multiple_of=multiple_of, + min_length=min_length, + max_length=max_length, + allow_inf_nan=allow_inf_nan, + max_digits=max_digits, + decimal_places=decimal_places, + union_mode=union_mode, + ) + + +_FIELD_ARG_NAMES = set(inspect.signature(Field).parameters) +_FIELD_ARG_NAMES.remove('extra') # do not include the varkwargs parameter + + +class ModelPrivateAttr(_repr.Representation): + """A descriptor for private attributes in class models. + + !!! warning + You generally shouldn't be creating `ModelPrivateAttr` instances directly, instead use + `pydantic.fields.PrivateAttr`. (This is similar to `FieldInfo` vs. `Field`.) + + Attributes: + default: The default value of the attribute if not provided. + default_factory: A callable function that generates the default value of the + attribute if not provided. + """ + + __slots__ = 'default', 'default_factory' + + def __init__( + self, default: Any = PydanticUndefined, *, default_factory: typing.Callable[[], Any] | None = None + ) -> None: + self.default = default + self.default_factory = default_factory + + if not typing.TYPE_CHECKING: + # We put `__getattr__` in a non-TYPE_CHECKING block because otherwise, mypy allows arbitrary attribute access + + def __getattr__(self, item: str) -> Any: + """This function improves compatibility with custom descriptors by ensuring delegation happens + as expected when the default value of a private attribute is a descriptor. + """ + if item in {'__get__', '__set__', '__delete__'}: + if hasattr(self.default, item): + return getattr(self.default, item) + raise AttributeError(f'{type(self).__name__!r} object has no attribute {item!r}') + + def __set_name__(self, cls: type[Any], name: str) -> None: + """Preserve `__set_name__` protocol defined in https://peps.python.org/pep-0487.""" + if self.default is PydanticUndefined: + return + if not hasattr(self.default, '__set_name__'): + return + set_name = self.default.__set_name__ + if callable(set_name): + set_name(cls, name) + + def get_default(self) -> Any: + """Retrieve the default value of the object. + + If `self.default_factory` is `None`, the method will return a deep copy of the `self.default` object. + + If `self.default_factory` is not `None`, it will call `self.default_factory` and return the value returned. + + Returns: + The default value of the object. 
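To make the intent concrete (an illustrative sketch, not part of the module; `Session` is an invented model): private attributes sit outside validation and serialization entirely.

```python
from pydantic import BaseModel, PrivateAttr


class Session(BaseModel):
    user: str
    _token: str = PrivateAttr(default='not-a-real-token')


s = Session(user='ada')
print(s._token)        # not-a-real-token
print(s.model_dump())  # {'user': 'ada'} -- private attributes are never serialized
```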
+ """ + return _utils.smart_deepcopy(self.default) if self.default_factory is None else self.default_factory() + + def __eq__(self, other: Any) -> bool: + return isinstance(other, self.__class__) and (self.default, self.default_factory) == ( + other.default, + other.default_factory, + ) + + +def PrivateAttr( + default: Any = PydanticUndefined, + *, + default_factory: typing.Callable[[], Any] | None = None, +) -> Any: + """Usage docs: https://docs.pydantic.dev/2.7/concepts/models/#private-model-attributes + + Indicates that an attribute is intended for private use and not handled during normal validation/serialization. + + Private attributes are not validated by Pydantic, so it's up to you to ensure they are used in a type-safe manner. + + Private attributes are stored in `__private_attributes__` on the model. + + Args: + default: The attribute's default value. Defaults to Undefined. + default_factory: Callable that will be + called when a default value is needed for this attribute. + If both `default` and `default_factory` are set, an error will be raised. + + Returns: + An instance of [`ModelPrivateAttr`][pydantic.fields.ModelPrivateAttr] class. + + Raises: + ValueError: If both `default` and `default_factory` are set. + """ + if default is not PydanticUndefined and default_factory is not None: + raise TypeError('cannot specify both default and default_factory') + + return ModelPrivateAttr( + default, + default_factory=default_factory, + ) + + +@dataclasses.dataclass(**_internal_dataclass.slots_true) +class ComputedFieldInfo: + """A container for data from `@computed_field` so that we can access it while building the pydantic-core schema. + + Attributes: + decorator_repr: A class variable representing the decorator string, '@computed_field'. + wrapped_property: The wrapped computed field property. + return_type: The type of the computed field property's return value. + alias: The alias of the property to be used during serialization. + alias_priority: The priority of the alias. This affects whether an alias generator is used. + title: Title of the computed field to include in the serialization JSON schema. + description: Description of the computed field to include in the serialization JSON schema. + deprecated: A deprecation message, an instance of `warnings.deprecated` or the `typing_extensions.deprecated` backport, + or a boolean. If `True`, a default deprecation message will be emitted when accessing the field. + examples: Example values of the computed field to include in the serialization JSON schema. + json_schema_extra: A dict or callable to provide extra JSON schema properties. + repr: A boolean indicating whether to include the field in the __repr__ output. 
+ """ + + decorator_repr: ClassVar[str] = '@computed_field' + wrapped_property: property + return_type: Any + alias: str | None + alias_priority: int | None + title: str | None + description: str | None + deprecated: Deprecated | str | bool | None + examples: list[Any] | None + json_schema_extra: JsonDict | typing.Callable[[JsonDict], None] | None + repr: bool + + @property + def deprecation_message(self) -> str | None: + """The deprecation message to be emitted, or `None` if not set.""" + if self.deprecated is None: + return None + if isinstance(self.deprecated, bool): + return 'deprecated' if self.deprecated else None + return self.deprecated if isinstance(self.deprecated, str) else self.deprecated.message + + +def _wrapped_property_is_private(property_: cached_property | property) -> bool: # type: ignore + """Returns true if provided property is private, False otherwise.""" + wrapped_name: str = '' + + if isinstance(property_, property): + wrapped_name = getattr(property_.fget, '__name__', '') + elif isinstance(property_, cached_property): # type: ignore + wrapped_name = getattr(property_.func, '__name__', '') # type: ignore + + return wrapped_name.startswith('_') and not wrapped_name.startswith('__') + + +# this should really be `property[T], cached_property[T]` but property is not generic unlike cached_property +# See https://github.com/python/typing/issues/985 and linked issues +PropertyT = typing.TypeVar('PropertyT') + + +@typing.overload +def computed_field( + *, + alias: str | None = None, + alias_priority: int | None = None, + title: str | None = None, + description: str | None = None, + deprecated: Deprecated | str | bool | None = None, + examples: list[Any] | None = None, + json_schema_extra: JsonDict | typing.Callable[[JsonDict], None] | None = None, + repr: bool = True, + return_type: Any = PydanticUndefined, +) -> typing.Callable[[PropertyT], PropertyT]: + ... + + +@typing.overload +def computed_field(__func: PropertyT) -> PropertyT: + ... + + +def computed_field( + func: PropertyT | None = None, + /, + *, + alias: str | None = None, + alias_priority: int | None = None, + title: str | None = None, + description: str | None = None, + deprecated: Deprecated | str | bool | None = None, + examples: list[Any] | None = None, + json_schema_extra: JsonDict | typing.Callable[[JsonDict], None] | None = None, + repr: bool | None = None, + return_type: Any = PydanticUndefined, +) -> PropertyT | typing.Callable[[PropertyT], PropertyT]: + """Usage docs: https://docs.pydantic.dev/2.7/concepts/fields#the-computed_field-decorator + + Decorator to include `property` and `cached_property` when serializing models or dataclasses. + + This is useful for fields that are computed from other fields, or for fields that are expensive to compute and should be cached. + + ```py + from pydantic import BaseModel, computed_field + + class Rectangle(BaseModel): + width: int + length: int + + @computed_field + @property + def area(self) -> int: + return self.width * self.length + + print(Rectangle(width=3, length=2).model_dump()) + #> {'width': 3, 'length': 2, 'area': 6} + ``` + + If applied to functions not yet decorated with `@property` or `@cached_property`, the function is + automatically wrapped with `property`. Although this is more concise, you will lose IntelliSense in your IDE, + and confuse static type checkers, thus explicit use of `@property` is recommended. + + !!! 
warning "Mypy Warning" + Even with the `@property` or `@cached_property` applied to your function before `@computed_field`, + mypy may throw a `Decorated property not supported` error. + See [mypy issue #1362](https://github.com/python/mypy/issues/1362), for more information. + To avoid this error message, add `# type: ignore[misc]` to the `@computed_field` line. + + [pyright](https://github.com/microsoft/pyright) supports `@computed_field` without error. + + ```py + import random + + from pydantic import BaseModel, computed_field + + class Square(BaseModel): + width: float + + @computed_field + def area(self) -> float: # converted to a `property` by `computed_field` + return round(self.width**2, 2) + + @area.setter + def area(self, new_area: float) -> None: + self.width = new_area**0.5 + + @computed_field(alias='the magic number', repr=False) + def random_number(self) -> int: + return random.randint(0, 1_000) + + square = Square(width=1.3) + + # `random_number` does not appear in representation + print(repr(square)) + #> Square(width=1.3, area=1.69) + + print(square.random_number) + #> 3 + + square.area = 4 + + print(square.model_dump_json(by_alias=True)) + #> {"width":2.0,"area":4.0,"the magic number":3} + ``` + + !!! warning "Overriding with `computed_field`" + You can't override a field from a parent class with a `computed_field` in the child class. + `mypy` complains about this behavior if allowed, and `dataclasses` doesn't allow this pattern either. + See the example below: + + ```py + from pydantic import BaseModel, computed_field + + class Parent(BaseModel): + a: str + + try: + + class Child(Parent): + @computed_field + @property + def a(self) -> str: + return 'new a' + + except ValueError as e: + print(repr(e)) + #> ValueError("you can't override a field with a computed field") + ``` + + Private properties decorated with `@computed_field` have `repr=False` by default. + + ```py + from functools import cached_property + + from pydantic import BaseModel, computed_field + + class Model(BaseModel): + foo: int + + @computed_field + @cached_property + def _private_cached_property(self) -> int: + return -self.foo + + @computed_field + @property + def _private_property(self) -> int: + return -self.foo + + m = Model(foo=1) + print(repr(m)) + #> M(foo=1) + ``` + + Args: + func: the function to wrap. + alias: alias to use when serializing this computed field, only used when `by_alias=True` + alias_priority: priority of the alias. This affects whether an alias generator is used + title: Title to use when including this computed field in JSON Schema + description: Description to use when including this computed field in JSON Schema, defaults to the function's + docstring + deprecated: A deprecation message (or an instance of `warnings.deprecated` or the `typing_extensions.deprecated` backport). + to be emitted when accessing the field. Or a boolean. This will automatically be set if the property is decorated with the + `deprecated` decorator. + examples: Example values to use when including this computed field in JSON Schema + json_schema_extra: A dict or callable to provide extra JSON schema properties. + repr: whether to include this computed field in model repr. + Default is `False` for private properties and `True` for public properties. + return_type: optional return for serialization logic to expect when serializing to JSON, if included + this must be correct, otherwise a `TypeError` is raised. 
+ If you don't include a return type Any is used, which does runtime introspection to handle arbitrary + objects. + + Returns: + A proxy wrapper for the property. + """ + + def dec(f: Any) -> Any: + nonlocal description, deprecated, return_type, alias_priority + unwrapped = _decorators.unwrap_wrapped_function(f) + + if description is None and unwrapped.__doc__: + description = inspect.cleandoc(unwrapped.__doc__) + + if deprecated is None and hasattr(unwrapped, '__deprecated__'): + deprecated = unwrapped.__deprecated__ + + # if the function isn't already decorated with `@property` (or another descriptor), then we wrap it now + f = _decorators.ensure_property(f) + alias_priority = (alias_priority or 2) if alias is not None else None + + if repr is None: + repr_: bool = not _wrapped_property_is_private(property_=f) + else: + repr_ = repr + + dec_info = ComputedFieldInfo( + f, + return_type, + alias, + alias_priority, + title, + description, + deprecated, + examples, + json_schema_extra, + repr_, + ) + return _decorators.PydanticDescriptorProxy(f, dec_info) + + if func is None: + return dec + else: + return dec(func) diff --git a/venv/lib/python3.10/site-packages/pydantic/functional_serializers.py b/venv/lib/python3.10/site-packages/pydantic/functional_serializers.py new file mode 100644 index 0000000000000000000000000000000000000000..1a9e55636372dd463c29ae5e01bf60019b0529a3 --- /dev/null +++ b/venv/lib/python3.10/site-packages/pydantic/functional_serializers.py @@ -0,0 +1,399 @@ +"""This module contains related classes and functions for serialization.""" +from __future__ import annotations + +import dataclasses +from functools import partialmethod +from typing import TYPE_CHECKING, Any, Callable, TypeVar, Union, overload + +from pydantic_core import PydanticUndefined, core_schema +from pydantic_core import core_schema as _core_schema +from typing_extensions import Annotated, Literal, TypeAlias + +from . import PydanticUndefinedAnnotation +from ._internal import _decorators, _internal_dataclass +from .annotated_handlers import GetCoreSchemaHandler + + +@dataclasses.dataclass(**_internal_dataclass.slots_true, frozen=True) +class PlainSerializer: + """Plain serializers use a function to modify the output of serialization. + + This is particularly helpful when you want to customize the serialization for annotated types. + Consider an input of `list`, which will be serialized into a space-delimited string. + + ```python + from typing import List + + from typing_extensions import Annotated + + from pydantic import BaseModel, PlainSerializer + + CustomStr = Annotated[ + List, PlainSerializer(lambda x: ' '.join(x), return_type=str) + ] + + class StudentModel(BaseModel): + courses: CustomStr + + student = StudentModel(courses=['Math', 'Chemistry', 'English']) + print(student.model_dump()) + #> {'courses': 'Math Chemistry English'} + ``` + + Attributes: + func: The serializer function. + return_type: The return type for the function. If omitted it will be inferred from the type annotation. + when_used: Determines when this serializer should be used. Accepts a string with values `'always'`, + `'unless-none'`, `'json'`, and `'json-unless-none'`. Defaults to 'always'. + """ + + func: core_schema.SerializerFunction + return_type: Any = PydanticUndefined + when_used: Literal['always', 'unless-none', 'json', 'json-unless-none'] = 'always' + + def __get_pydantic_core_schema__(self, source_type: Any, handler: GetCoreSchemaHandler) -> core_schema.CoreSchema: + """Gets the Pydantic core schema. 
+ + Args: + source_type: The source type. + handler: The `GetCoreSchemaHandler` instance. + + Returns: + The Pydantic core schema. + """ + schema = handler(source_type) + try: + return_type = _decorators.get_function_return_type( + self.func, self.return_type, handler._get_types_namespace() + ) + except NameError as e: + raise PydanticUndefinedAnnotation.from_name_error(e) from e + return_schema = None if return_type is PydanticUndefined else handler.generate_schema(return_type) + schema['serialization'] = core_schema.plain_serializer_function_ser_schema( + function=self.func, + info_arg=_decorators.inspect_annotated_serializer(self.func, 'plain'), + return_schema=return_schema, + when_used=self.when_used, + ) + return schema + + +@dataclasses.dataclass(**_internal_dataclass.slots_true, frozen=True) +class WrapSerializer: + """Wrap serializers receive the raw inputs along with a handler function that applies the standard serialization + logic, and can modify the resulting value before returning it as the final output of serialization. + + For example, here's a scenario in which a wrap serializer transforms timezones to UTC **and** utilizes the existing `datetime` serialization logic. + + ```python + from datetime import datetime, timezone + from typing import Any, Dict + + from typing_extensions import Annotated + + from pydantic import BaseModel, WrapSerializer + + class EventDatetime(BaseModel): + start: datetime + end: datetime + + def convert_to_utc(value: Any, handler, info) -> Dict[str, datetime]: + # Note that `helper` can actually help serialize the `value` for further custom serialization in case it's a subclass. + partial_result = handler(value, info) + if info.mode == 'json': + return { + k: datetime.fromisoformat(v).astimezone(timezone.utc) + for k, v in partial_result.items() + } + return {k: v.astimezone(timezone.utc) for k, v in partial_result.items()} + + UTCEventDatetime = Annotated[EventDatetime, WrapSerializer(convert_to_utc)] + + class EventModel(BaseModel): + event_datetime: UTCEventDatetime + + dt = EventDatetime( + start='2024-01-01T07:00:00-08:00', end='2024-01-03T20:00:00+06:00' + ) + event = EventModel(event_datetime=dt) + print(event.model_dump()) + ''' + { + 'event_datetime': { + 'start': datetime.datetime( + 2024, 1, 1, 15, 0, tzinfo=datetime.timezone.utc + ), + 'end': datetime.datetime( + 2024, 1, 3, 14, 0, tzinfo=datetime.timezone.utc + ), + } + } + ''' + + print(event.model_dump_json()) + ''' + {"event_datetime":{"start":"2024-01-01T15:00:00Z","end":"2024-01-03T14:00:00Z"}} + ''' + ``` + + Attributes: + func: The serializer function to be wrapped. + return_type: The return type for the function. If omitted it will be inferred from the type annotation. + when_used: Determines when this serializer should be used. Accepts a string with values `'always'`, + `'unless-none'`, `'json'`, and `'json-unless-none'`. Defaults to 'always'. + """ + + func: core_schema.WrapSerializerFunction + return_type: Any = PydanticUndefined + when_used: Literal['always', 'unless-none', 'json', 'json-unless-none'] = 'always' + + def __get_pydantic_core_schema__(self, source_type: Any, handler: GetCoreSchemaHandler) -> core_schema.CoreSchema: + """This method is used to get the Pydantic core schema of the class. + + Args: + source_type: Source type. + handler: Core schema handler. + + Returns: + The generated core schema of the class. 
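As a complementary sketch (illustrative; the `UpperStr` alias is invented): the `when_used` setting shared by both serializer classes restricts the custom function to particular dump modes.

```python
from typing_extensions import Annotated

from pydantic import BaseModel, PlainSerializer

# Applied only when dumping to JSON; python-mode dumps keep the original value.
UpperStr = Annotated[str, PlainSerializer(lambda v: v.upper(), return_type=str, when_used='json')]


class Model(BaseModel):
    name: UpperStr


m = Model(name='ada')
print(m.model_dump())       # {'name': 'ada'}
print(m.model_dump_json())  # {"name":"ADA"}
```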
+ """ + schema = handler(source_type) + try: + return_type = _decorators.get_function_return_type( + self.func, self.return_type, handler._get_types_namespace() + ) + except NameError as e: + raise PydanticUndefinedAnnotation.from_name_error(e) from e + return_schema = None if return_type is PydanticUndefined else handler.generate_schema(return_type) + schema['serialization'] = core_schema.wrap_serializer_function_ser_schema( + function=self.func, + info_arg=_decorators.inspect_annotated_serializer(self.func, 'wrap'), + return_schema=return_schema, + when_used=self.when_used, + ) + return schema + + +if TYPE_CHECKING: + _PartialClsOrStaticMethod: TypeAlias = Union[classmethod[Any, Any, Any], staticmethod[Any, Any], partialmethod[Any]] + _PlainSerializationFunction = Union[_core_schema.SerializerFunction, _PartialClsOrStaticMethod] + _WrapSerializationFunction = Union[_core_schema.WrapSerializerFunction, _PartialClsOrStaticMethod] + _PlainSerializeMethodType = TypeVar('_PlainSerializeMethodType', bound=_PlainSerializationFunction) + _WrapSerializeMethodType = TypeVar('_WrapSerializeMethodType', bound=_WrapSerializationFunction) + + +@overload +def field_serializer( + field: str, + /, + *fields: str, + return_type: Any = ..., + when_used: Literal['always', 'unless-none', 'json', 'json-unless-none'] = ..., + check_fields: bool | None = ..., +) -> Callable[[_PlainSerializeMethodType], _PlainSerializeMethodType]: + ... + + +@overload +def field_serializer( + field: str, + /, + *fields: str, + mode: Literal['plain'], + return_type: Any = ..., + when_used: Literal['always', 'unless-none', 'json', 'json-unless-none'] = ..., + check_fields: bool | None = ..., +) -> Callable[[_PlainSerializeMethodType], _PlainSerializeMethodType]: + ... + + +@overload +def field_serializer( + field: str, + /, + *fields: str, + mode: Literal['wrap'], + return_type: Any = ..., + when_used: Literal['always', 'unless-none', 'json', 'json-unless-none'] = ..., + check_fields: bool | None = ..., +) -> Callable[[_WrapSerializeMethodType], _WrapSerializeMethodType]: + ... + + +def field_serializer( + *fields: str, + mode: Literal['plain', 'wrap'] = 'plain', + return_type: Any = PydanticUndefined, + when_used: Literal['always', 'unless-none', 'json', 'json-unless-none'] = 'always', + check_fields: bool | None = None, +) -> Callable[[Any], Any]: + """Decorator that enables custom field serialization. + + In the below example, a field of type `set` is used to mitigate duplication. A `field_serializer` is used to serialize the data as a sorted list. + + ```python + from typing import Set + + from pydantic import BaseModel, field_serializer + + class StudentModel(BaseModel): + name: str = 'Jane' + courses: Set[str] + + @field_serializer('courses', when_used='json') + def serialize_courses_in_order(courses: Set[str]): + return sorted(courses) + + student = StudentModel(courses={'Math', 'Chemistry', 'English'}) + print(student.model_dump_json()) + #> {"name":"Jane","courses":["Chemistry","English","Math"]} + ``` + + See [Custom serializers](../concepts/serialization.md#custom-serializers) for more information. + + Four signatures are supported: + + - `(self, value: Any, info: FieldSerializationInfo)` + - `(self, value: Any, nxt: SerializerFunctionWrapHandler, info: FieldSerializationInfo)` + - `(value: Any, info: SerializationInfo)` + - `(value: Any, nxt: SerializerFunctionWrapHandler, info: SerializationInfo)` + + Args: + fields: Which field(s) the method should be called on. + mode: The serialization mode. 
+ + - `plain` means the function will be called instead of the default serialization logic, + - `wrap` means the function will be called with an argument to optionally call the + default serialization logic. + return_type: Optional return type for the function, if omitted it will be inferred from the type annotation. + when_used: Determines the serializer will be used for serialization. + check_fields: Whether to check that the fields actually exist on the model. + + Returns: + The decorator function. + """ + + def dec( + f: Callable[..., Any] | staticmethod[Any, Any] | classmethod[Any, Any, Any], + ) -> _decorators.PydanticDescriptorProxy[Any]: + dec_info = _decorators.FieldSerializerDecoratorInfo( + fields=fields, + mode=mode, + return_type=return_type, + when_used=when_used, + check_fields=check_fields, + ) + return _decorators.PydanticDescriptorProxy(f, dec_info) + + return dec + + +FuncType = TypeVar('FuncType', bound=Callable[..., Any]) + + +@overload +def model_serializer(__f: FuncType) -> FuncType: + ... + + +@overload +def model_serializer( + *, + mode: Literal['plain', 'wrap'] = ..., + when_used: Literal['always', 'unless-none', 'json', 'json-unless-none'] = 'always', + return_type: Any = ..., +) -> Callable[[FuncType], FuncType]: + ... + + +def model_serializer( + f: Callable[..., Any] | None = None, + /, + *, + mode: Literal['plain', 'wrap'] = 'plain', + when_used: Literal['always', 'unless-none', 'json', 'json-unless-none'] = 'always', + return_type: Any = PydanticUndefined, +) -> Callable[[Any], Any]: + """Decorator that enables custom model serialization. + + This is useful when a model need to be serialized in a customized manner, allowing for flexibility beyond just specific fields. + + An example would be to serialize temperature to the same temperature scale, such as degrees Celsius. + + ```python + from typing import Literal + + from pydantic import BaseModel, model_serializer + + class TemperatureModel(BaseModel): + unit: Literal['C', 'F'] + value: int + + @model_serializer() + def serialize_model(self): + if self.unit == 'F': + return {'unit': 'C', 'value': int((self.value - 32) / 1.8)} + return {'unit': self.unit, 'value': self.value} + + temperature = TemperatureModel(unit='F', value=212) + print(temperature.model_dump()) + #> {'unit': 'C', 'value': 100} + ``` + + See [Custom serializers](../concepts/serialization.md#custom-serializers) for more information. + + Args: + f: The function to be decorated. + mode: The serialization mode. + + - `'plain'` means the function will be called instead of the default serialization logic + - `'wrap'` means the function will be called with an argument to optionally call the default + serialization logic. + when_used: Determines when this serializer should be used. + return_type: The return type for the function. If omitted it will be inferred from the type annotation. + + Returns: + The decorator function. + """ + + def dec(f: Callable[..., Any]) -> _decorators.PydanticDescriptorProxy[Any]: + dec_info = _decorators.ModelSerializerDecoratorInfo(mode=mode, return_type=return_type, when_used=when_used) + return _decorators.PydanticDescriptorProxy(f, dec_info) + + if f is None: + return dec + else: + return dec(f) # type: ignore + + +AnyType = TypeVar('AnyType') + + +if TYPE_CHECKING: + SerializeAsAny = Annotated[AnyType, ...] # SerializeAsAny[list[str]] will be treated by type checkers as list[str] + """Force serialization to ignore whatever is defined in the schema and instead ask the object + itself how it should be serialized. 
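As a supplementary sketch (the `User`/`add_kind` names are illustrative, not part of the diffed source), `model_serializer(mode='wrap')` lets a method run the default model serialization and then adjust the result, assuming pydantic v2:

```python
from pydantic import BaseModel, model_serializer

class User(BaseModel):
    name: str

    @model_serializer(mode='wrap')
    def add_kind(self, handler):
        data = handler(self)   # run the default model serialization first
        data['kind'] = 'user'  # then enrich the result
        return data

print(User(name='ana').model_dump())
#> {'name': 'ana', 'kind': 'user'}
```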
+ In particular, this means that when model subclasses are serialized, fields present in the subclass + but not in the original schema will be included. + """ +else: + + @dataclasses.dataclass(**_internal_dataclass.slots_true) + class SerializeAsAny: # noqa: D101 + def __class_getitem__(cls, item: Any) -> Any: + return Annotated[item, SerializeAsAny()] + + def __get_pydantic_core_schema__( + self, source_type: Any, handler: GetCoreSchemaHandler + ) -> core_schema.CoreSchema: + schema = handler(source_type) + schema_to_update = schema + while schema_to_update['type'] == 'definitions': + schema_to_update = schema_to_update.copy() + schema_to_update = schema_to_update['schema'] + schema_to_update['serialization'] = core_schema.wrap_serializer_function_ser_schema( + lambda x, h: h(x), schema=core_schema.any_schema() + ) + return schema + + __hash__ = object.__hash__ diff --git a/venv/lib/python3.10/site-packages/pydantic/functional_validators.py b/venv/lib/python3.10/site-packages/pydantic/functional_validators.py new file mode 100644 index 0000000000000000000000000000000000000000..fc3ad71740178fcc2de39d953f61d7a44928f4fa --- /dev/null +++ b/venv/lib/python3.10/site-packages/pydantic/functional_validators.py @@ -0,0 +1,709 @@ +"""This module contains related classes and functions for validation.""" + +from __future__ import annotations as _annotations + +import dataclasses +import sys +from functools import partialmethod +from types import FunctionType +from typing import TYPE_CHECKING, Any, Callable, TypeVar, Union, cast, overload + +from pydantic_core import core_schema +from pydantic_core import core_schema as _core_schema +from typing_extensions import Annotated, Literal, TypeAlias + +from . import GetCoreSchemaHandler as _GetCoreSchemaHandler +from ._internal import _core_metadata, _decorators, _generics, _internal_dataclass +from .annotated_handlers import GetCoreSchemaHandler +from .errors import PydanticUserError + +if sys.version_info < (3, 11): + from typing_extensions import Protocol +else: + from typing import Protocol + +_inspect_validator = _decorators.inspect_validator + + +@dataclasses.dataclass(frozen=True, **_internal_dataclass.slots_true) +class AfterValidator: + """Usage docs: https://docs.pydantic.dev/2.7/concepts/validators/#annotated-validators + + A metadata class that indicates that a validation should be applied **after** the inner validation logic. + + Attributes: + func: The validator function. 
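A short sketch (illustrative model names, not from the diffed file) of the `SerializeAsAny` behavior described above: the subclass-only field is kept in the dump because serialization asks the runtime object rather than the declared schema, assuming pydantic v2:

```python
from pydantic import BaseModel, SerializeAsAny

class User(BaseModel):
    name: str

class AdminUser(User):
    level: int

class Team(BaseModel):
    owner: SerializeAsAny[User]  # serialize from the runtime type, not the declared schema

team = Team(owner=AdminUser(name='ana', level=3))
print(team.model_dump())
#> {'owner': {'name': 'ana', 'level': 3}}
```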
+ + Example: + ```py + from typing_extensions import Annotated + + from pydantic import AfterValidator, BaseModel, ValidationError + + MyInt = Annotated[int, AfterValidator(lambda v: v + 1)] + + class Model(BaseModel): + a: MyInt + + print(Model(a=1).a) + #> 2 + + try: + Model(a='a') + except ValidationError as e: + print(e.json(indent=2)) + ''' + [ + { + "type": "int_parsing", + "loc": [ + "a" + ], + "msg": "Input should be a valid integer, unable to parse string as an integer", + "input": "a", + "url": "https://errors.pydantic.dev/2/v/int_parsing" + } + ] + ''' + ``` + """ + + func: core_schema.NoInfoValidatorFunction | core_schema.WithInfoValidatorFunction + + def __get_pydantic_core_schema__(self, source_type: Any, handler: _GetCoreSchemaHandler) -> core_schema.CoreSchema: + schema = handler(source_type) + info_arg = _inspect_validator(self.func, 'after') + if info_arg: + func = cast(core_schema.WithInfoValidatorFunction, self.func) + return core_schema.with_info_after_validator_function(func, schema=schema, field_name=handler.field_name) + else: + func = cast(core_schema.NoInfoValidatorFunction, self.func) + return core_schema.no_info_after_validator_function(func, schema=schema) + + +@dataclasses.dataclass(frozen=True, **_internal_dataclass.slots_true) +class BeforeValidator: + """Usage docs: https://docs.pydantic.dev/2.7/concepts/validators/#annotated-validators + + A metadata class that indicates that a validation should be applied **before** the inner validation logic. + + Attributes: + func: The validator function. + + Example: + ```py + from typing_extensions import Annotated + + from pydantic import BaseModel, BeforeValidator + + MyInt = Annotated[int, BeforeValidator(lambda v: v + 1)] + + class Model(BaseModel): + a: MyInt + + print(Model(a=1).a) + #> 2 + + try: + Model(a='a') + except TypeError as e: + print(e) + #> can only concatenate str (not "int") to str + ``` + """ + + func: core_schema.NoInfoValidatorFunction | core_schema.WithInfoValidatorFunction + + def __get_pydantic_core_schema__(self, source_type: Any, handler: _GetCoreSchemaHandler) -> core_schema.CoreSchema: + schema = handler(source_type) + info_arg = _inspect_validator(self.func, 'before') + if info_arg: + func = cast(core_schema.WithInfoValidatorFunction, self.func) + return core_schema.with_info_before_validator_function(func, schema=schema, field_name=handler.field_name) + else: + func = cast(core_schema.NoInfoValidatorFunction, self.func) + return core_schema.no_info_before_validator_function(func, schema=schema) + + +@dataclasses.dataclass(frozen=True, **_internal_dataclass.slots_true) +class PlainValidator: + """Usage docs: https://docs.pydantic.dev/2.7/concepts/validators/#annotated-validators + + A metadata class that indicates that a validation should be applied **instead** of the inner validation logic. + + Attributes: + func: The validator function. 
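A minimal sketch (illustrative `CleanInt` alias, not part of the diffed source) combining `BeforeValidator` and `AfterValidator` around the core `int` validation, assuming pydantic v2:

```python
from typing_extensions import Annotated

from pydantic import AfterValidator, BaseModel, BeforeValidator

# the before validator cleans the raw input, int parsing runs in the middle,
# and the after validator post-processes the validated int
CleanInt = Annotated[
    int,
    BeforeValidator(lambda v: v.strip() if isinstance(v, str) else v),
    AfterValidator(lambda v: v * 2),
]

class Model(BaseModel):
    n: CleanInt

print(Model(n=' 21 ').n)
#> 42
```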
+ + Example: + ```py + from typing_extensions import Annotated + + from pydantic import BaseModel, PlainValidator + + MyInt = Annotated[int, PlainValidator(lambda v: int(v) + 1)] + + class Model(BaseModel): + a: MyInt + + print(Model(a='1').a) + #> 2 + ``` + """ + + func: core_schema.NoInfoValidatorFunction | core_schema.WithInfoValidatorFunction + + def __get_pydantic_core_schema__(self, source_type: Any, handler: _GetCoreSchemaHandler) -> core_schema.CoreSchema: + # Note that for some valid uses of PlainValidator, it is not possible to generate a core schema for the + # source_type, so calling `handler(source_type)` will error, which prevents us from generating a proper + # serialization schema. To work around this for use cases that will not involve serialization, we simply + # catch any PydanticSchemaGenerationError that may be raised while attempting to build the serialization schema + # and abort any attempts to handle special serialization. + from pydantic import PydanticSchemaGenerationError + + try: + schema = handler(source_type) + serialization = core_schema.wrap_serializer_function_ser_schema(function=lambda v, h: h(v), schema=schema) + except PydanticSchemaGenerationError: + serialization = None + + info_arg = _inspect_validator(self.func, 'plain') + if info_arg: + func = cast(core_schema.WithInfoValidatorFunction, self.func) + return core_schema.with_info_plain_validator_function( + func, field_name=handler.field_name, serialization=serialization + ) + else: + func = cast(core_schema.NoInfoValidatorFunction, self.func) + return core_schema.no_info_plain_validator_function(func, serialization=serialization) + + +@dataclasses.dataclass(frozen=True, **_internal_dataclass.slots_true) +class WrapValidator: + """Usage docs: https://docs.pydantic.dev/2.7/concepts/validators/#annotated-validators + + A metadata class that indicates that a validation should be applied **around** the inner validation logic. + + Attributes: + func: The validator function. + + ```py + from datetime import datetime + + from typing_extensions import Annotated + + from pydantic import BaseModel, ValidationError, WrapValidator + + def validate_timestamp(v, handler): + if v == 'now': + # we don't want to bother with further validation, just return the new value + return datetime.now() + try: + return handler(v) + except ValidationError: + # validation failed, in this case we want to return a default value + return datetime(2000, 1, 1) + + MyTimestamp = Annotated[datetime, WrapValidator(validate_timestamp)] + + class Model(BaseModel): + a: MyTimestamp + + print(Model(a='now').a) + #> 2032-01-02 03:04:05.000006 + print(Model(a='invalid').a) + #> 2000-01-01 00:00:00 + ``` + """ + + func: core_schema.NoInfoWrapValidatorFunction | core_schema.WithInfoWrapValidatorFunction + + def __get_pydantic_core_schema__(self, source_type: Any, handler: _GetCoreSchemaHandler) -> core_schema.CoreSchema: + schema = handler(source_type) + info_arg = _inspect_validator(self.func, 'wrap') + if info_arg: + func = cast(core_schema.WithInfoWrapValidatorFunction, self.func) + return core_schema.with_info_wrap_validator_function(func, schema=schema, field_name=handler.field_name) + else: + func = cast(core_schema.NoInfoWrapValidatorFunction, self.func) + return core_schema.no_info_wrap_validator_function(func, schema=schema) + + +if TYPE_CHECKING: + + class _OnlyValueValidatorClsMethod(Protocol): + def __call__(self, cls: Any, value: Any, /) -> Any: + ... 
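Another small sketch (the `HexInt`/`Color` names are illustrative) of `PlainValidator` replacing the inner validation logic entirely, here to accept hexadecimal strings, assuming pydantic v2:

```python
from typing_extensions import Annotated

from pydantic import BaseModel, PlainValidator

# the plain validator runs instead of int validation, so hex strings are accepted
HexInt = Annotated[int, PlainValidator(lambda v: int(v, 16) if isinstance(v, str) else int(v))]

class Color(BaseModel):
    red: HexInt

print(Color(red='0xff').red)
#> 255
```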
+ + class _V2ValidatorClsMethod(Protocol): + def __call__(self, cls: Any, value: Any, info: _core_schema.ValidationInfo, /) -> Any: + ... + + class _V2WrapValidatorClsMethod(Protocol): + def __call__( + self, + cls: Any, + value: Any, + handler: _core_schema.ValidatorFunctionWrapHandler, + info: _core_schema.ValidationInfo, + /, + ) -> Any: + ... + + _V2Validator = Union[ + _V2ValidatorClsMethod, + _core_schema.WithInfoValidatorFunction, + _OnlyValueValidatorClsMethod, + _core_schema.NoInfoValidatorFunction, + ] + + _V2WrapValidator = Union[ + _V2WrapValidatorClsMethod, + _core_schema.WithInfoWrapValidatorFunction, + ] + + _PartialClsOrStaticMethod: TypeAlias = Union[classmethod[Any, Any, Any], staticmethod[Any, Any], partialmethod[Any]] + + _V2BeforeAfterOrPlainValidatorType = TypeVar( + '_V2BeforeAfterOrPlainValidatorType', + _V2Validator, + _PartialClsOrStaticMethod, + ) + _V2WrapValidatorType = TypeVar('_V2WrapValidatorType', _V2WrapValidator, _PartialClsOrStaticMethod) + + +@overload +def field_validator( + field: str, + /, + *fields: str, + mode: Literal['before', 'after', 'plain'] = ..., + check_fields: bool | None = ..., +) -> Callable[[_V2BeforeAfterOrPlainValidatorType], _V2BeforeAfterOrPlainValidatorType]: + ... + + +@overload +def field_validator( + field: str, + /, + *fields: str, + mode: Literal['wrap'], + check_fields: bool | None = ..., +) -> Callable[[_V2WrapValidatorType], _V2WrapValidatorType]: + ... + + +FieldValidatorModes: TypeAlias = Literal['before', 'after', 'wrap', 'plain'] + + +def field_validator( + field: str, + /, + *fields: str, + mode: FieldValidatorModes = 'after', + check_fields: bool | None = None, +) -> Callable[[Any], Any]: + """Usage docs: https://docs.pydantic.dev/2.7/concepts/validators/#field-validators + + Decorate methods on the class indicating that they should be used to validate fields. + + Example usage: + ```py + from typing import Any + + from pydantic import ( + BaseModel, + ValidationError, + field_validator, + ) + + class Model(BaseModel): + a: str + + @field_validator('a') + @classmethod + def ensure_foobar(cls, v: Any): + if 'foobar' not in v: + raise ValueError('"foobar" not found in a') + return v + + print(repr(Model(a='this is foobar good'))) + #> Model(a='this is foobar good') + + try: + Model(a='snap') + except ValidationError as exc_info: + print(exc_info) + ''' + 1 validation error for Model + a + Value error, "foobar" not found in a [type=value_error, input_value='snap', input_type=str] + ''' + ``` + + For more in depth examples, see [Field Validators](../concepts/validators.md#field-validators). + + Args: + field: The first field the `field_validator` should be called on; this is separate + from `fields` to ensure an error is raised if you don't pass at least one. + *fields: Additional field(s) the `field_validator` should be called on. + mode: Specifies whether to validate the fields before or after validation. + check_fields: Whether to check that the fields actually exist on the model. + + Returns: + A decorator that can be used to decorate a function to be used as a field_validator. + + Raises: + PydanticUserError: + - If `@field_validator` is used bare (with no fields). + - If the args passed to `@field_validator` as fields are not strings. + - If `@field_validator` applied to instance methods. + """ + if isinstance(field, FunctionType): + raise PydanticUserError( + '`@field_validator` should be used with fields and keyword arguments, not bare. ' + "E.g. 
usage should be `@validator('', ...)`", + code='validator-no-fields', + ) + fields = field, *fields + if not all(isinstance(field, str) for field in fields): + raise PydanticUserError( + '`@field_validator` fields should be passed as separate string args. ' + "E.g. usage should be `@validator('', '', ...)`", + code='validator-invalid-fields', + ) + + def dec( + f: Callable[..., Any] | staticmethod[Any, Any] | classmethod[Any, Any, Any], + ) -> _decorators.PydanticDescriptorProxy[Any]: + if _decorators.is_instance_method_from_sig(f): + raise PydanticUserError( + '`@field_validator` cannot be applied to instance methods', code='validator-instance-method' + ) + + # auto apply the @classmethod decorator + f = _decorators.ensure_classmethod_based_on_signature(f) + + dec_info = _decorators.FieldValidatorDecoratorInfo(fields=fields, mode=mode, check_fields=check_fields) + return _decorators.PydanticDescriptorProxy(f, dec_info) + + return dec + + +_ModelType = TypeVar('_ModelType') +_ModelTypeCo = TypeVar('_ModelTypeCo', covariant=True) + + +class ModelWrapValidatorHandler(_core_schema.ValidatorFunctionWrapHandler, Protocol[_ModelTypeCo]): + """@model_validator decorated function handler argument type. This is used when `mode='wrap'`.""" + + def __call__( # noqa: D102 + self, + value: Any, + outer_location: str | int | None = None, + /, + ) -> _ModelTypeCo: # pragma: no cover + ... + + +class ModelWrapValidatorWithoutInfo(Protocol[_ModelType]): + """A @model_validator decorated function signature. + This is used when `mode='wrap'` and the function does not have info argument. + """ + + def __call__( # noqa: D102 + self, + cls: type[_ModelType], + # this can be a dict, a model instance + # or anything else that gets passed to validate_python + # thus validators _must_ handle all cases + value: Any, + handler: ModelWrapValidatorHandler[_ModelType], + /, + ) -> _ModelType: + ... + + +class ModelWrapValidator(Protocol[_ModelType]): + """A @model_validator decorated function signature. This is used when `mode='wrap'`.""" + + def __call__( # noqa: D102 + self, + cls: type[_ModelType], + # this can be a dict, a model instance + # or anything else that gets passed to validate_python + # thus validators _must_ handle all cases + value: Any, + handler: ModelWrapValidatorHandler[_ModelType], + info: _core_schema.ValidationInfo, + /, + ) -> _ModelType: + ... + + +class FreeModelBeforeValidatorWithoutInfo(Protocol): + """A @model_validator decorated function signature. + This is used when `mode='before'` and the function does not have info argument. + """ + + def __call__( # noqa: D102 + self, + # this can be a dict, a model instance + # or anything else that gets passed to validate_python + # thus validators _must_ handle all cases + value: Any, + /, + ) -> Any: + ... + + +class ModelBeforeValidatorWithoutInfo(Protocol): + """A @model_validator decorated function signature. + This is used when `mode='before'` and the function does not have info argument. + """ + + def __call__( # noqa: D102 + self, + cls: Any, + # this can be a dict, a model instance + # or anything else that gets passed to validate_python + # thus validators _must_ handle all cases + value: Any, + /, + ) -> Any: + ... + + +class FreeModelBeforeValidator(Protocol): + """A `@model_validator` decorated function signature. 
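A supplementary sketch (illustrative `Point` model, not from the diffed file) of `field_validator` applied to several fields with `mode='before'`, assuming pydantic v2:

```python
from pydantic import BaseModel, field_validator

class Point(BaseModel):
    x: float
    y: float

    @field_validator('x', 'y', mode='before')
    @classmethod
    def strip_px_suffix(cls, v):
        # accept inputs like '3.5px' by dropping the unit before float validation
        if isinstance(v, str) and v.endswith('px'):
            return v[:-2]
        return v

print(Point(x='3.5px', y=2))
#> x=3.5 y=2.0
```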
This is used when `mode='before'`.""" + + def __call__( # noqa: D102 + self, + # this can be a dict, a model instance + # or anything else that gets passed to validate_python + # thus validators _must_ handle all cases + value: Any, + info: _core_schema.ValidationInfo, + /, + ) -> Any: + ... + + +class ModelBeforeValidator(Protocol): + """A `@model_validator` decorated function signature. This is used when `mode='before'`.""" + + def __call__( # noqa: D102 + self, + cls: Any, + # this can be a dict, a model instance + # or anything else that gets passed to validate_python + # thus validators _must_ handle all cases + value: Any, + info: _core_schema.ValidationInfo, + /, + ) -> Any: + ... + + +ModelAfterValidatorWithoutInfo = Callable[[_ModelType], _ModelType] +"""A `@model_validator` decorated function signature. This is used when `mode='after'` and the function does not +have info argument. +""" + +ModelAfterValidator = Callable[[_ModelType, _core_schema.ValidationInfo], _ModelType] +"""A `@model_validator` decorated function signature. This is used when `mode='after'`.""" + +_AnyModelWrapValidator = Union[ModelWrapValidator[_ModelType], ModelWrapValidatorWithoutInfo[_ModelType]] +_AnyModeBeforeValidator = Union[ + FreeModelBeforeValidator, ModelBeforeValidator, FreeModelBeforeValidatorWithoutInfo, ModelBeforeValidatorWithoutInfo +] +_AnyModelAfterValidator = Union[ModelAfterValidator[_ModelType], ModelAfterValidatorWithoutInfo[_ModelType]] + + +@overload +def model_validator( + *, + mode: Literal['wrap'], +) -> Callable[ + [_AnyModelWrapValidator[_ModelType]], _decorators.PydanticDescriptorProxy[_decorators.ModelValidatorDecoratorInfo] +]: + ... + + +@overload +def model_validator( + *, + mode: Literal['before'], +) -> Callable[[_AnyModeBeforeValidator], _decorators.PydanticDescriptorProxy[_decorators.ModelValidatorDecoratorInfo]]: + ... + + +@overload +def model_validator( + *, + mode: Literal['after'], +) -> Callable[ + [_AnyModelAfterValidator[_ModelType]], _decorators.PydanticDescriptorProxy[_decorators.ModelValidatorDecoratorInfo] +]: + ... + + +def model_validator( + *, + mode: Literal['wrap', 'before', 'after'], +) -> Any: + """Usage docs: https://docs.pydantic.dev/2.7/concepts/validators/#model-validators + + Decorate model methods for validation purposes. + + Example usage: + ```py + from typing_extensions import Self + + from pydantic import BaseModel, ValidationError, model_validator + + class Square(BaseModel): + width: float + height: float + + @model_validator(mode='after') + def verify_square(self) -> Self: + if self.width != self.height: + raise ValueError('width and height do not match') + return self + + s = Square(width=1, height=1) + print(repr(s)) + #> Square(width=1.0, height=1.0) + + try: + Square(width=1, height=2) + except ValidationError as e: + print(e) + ''' + 1 validation error for Square + Value error, width and height do not match [type=value_error, input_value={'width': 1, 'height': 2}, input_type=dict] + ''' + ``` + + For more in depth examples, see [Model Validators](../concepts/validators.md#model-validators). + + Args: + mode: A required string literal that specifies the validation mode. + It can be one of the following: 'wrap', 'before', or 'after'. + + Returns: + A decorator that can be used to decorate a function to be used as a model validator. 
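A short sketch (the `Signup`/`unwrap_payload` names are illustrative, not part of the diffed source) of `model_validator(mode='before')` normalizing raw input before field validation, assuming pydantic v2:

```python
from typing import Any

from pydantic import BaseModel, model_validator

class Signup(BaseModel):
    email: str

    @model_validator(mode='before')
    @classmethod
    def unwrap_payload(cls, data: Any) -> Any:
        # accept both {'email': ...} and {'payload': {'email': ...}}
        if isinstance(data, dict) and 'payload' in data:
            return data['payload']
        return data

print(Signup.model_validate({'payload': {'email': 'a@b.co'}}))
#> email='a@b.co'
```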
+ """ + + def dec(f: Any) -> _decorators.PydanticDescriptorProxy[Any]: + # auto apply the @classmethod decorator + f = _decorators.ensure_classmethod_based_on_signature(f) + dec_info = _decorators.ModelValidatorDecoratorInfo(mode=mode) + return _decorators.PydanticDescriptorProxy(f, dec_info) + + return dec + + +AnyType = TypeVar('AnyType') + + +if TYPE_CHECKING: + # If we add configurable attributes to IsInstance, we'd probably need to stop hiding it from type checkers like this + InstanceOf = Annotated[AnyType, ...] # `IsInstance[Sequence]` will be recognized by type checkers as `Sequence` + +else: + + @dataclasses.dataclass(**_internal_dataclass.slots_true) + class InstanceOf: + '''Generic type for annotating a type that is an instance of a given class. + + Example: + ```py + from pydantic import BaseModel, InstanceOf + + class Foo: + ... + + class Bar(BaseModel): + foo: InstanceOf[Foo] + + Bar(foo=Foo()) + try: + Bar(foo=42) + except ValidationError as e: + print(e) + """ + [ + │ { + │ │ 'type': 'is_instance_of', + │ │ 'loc': ('foo',), + │ │ 'msg': 'Input should be an instance of Foo', + │ │ 'input': 42, + │ │ 'ctx': {'class': 'Foo'}, + │ │ 'url': 'https://errors.pydantic.dev/0.38.0/v/is_instance_of' + │ } + ] + """ + ``` + ''' + + @classmethod + def __class_getitem__(cls, item: AnyType) -> AnyType: + return Annotated[item, cls()] + + @classmethod + def __get_pydantic_core_schema__(cls, source: Any, handler: GetCoreSchemaHandler) -> core_schema.CoreSchema: + from pydantic import PydanticSchemaGenerationError + + # use the generic _origin_ as the second argument to isinstance when appropriate + instance_of_schema = core_schema.is_instance_schema(_generics.get_origin(source) or source) + + try: + # Try to generate the "standard" schema, which will be used when loading from JSON + original_schema = handler(source) + except PydanticSchemaGenerationError: + # If that fails, just produce a schema that can validate from python + return instance_of_schema + else: + # Use the "original" approach to serialization + instance_of_schema['serialization'] = core_schema.wrap_serializer_function_ser_schema( + function=lambda v, h: h(v), schema=original_schema + ) + return core_schema.json_or_python_schema(python_schema=instance_of_schema, json_schema=original_schema) + + __hash__ = object.__hash__ + + +if TYPE_CHECKING: + SkipValidation = Annotated[AnyType, ...] # SkipValidation[list[str]] will be treated by type checkers as list[str] +else: + + @dataclasses.dataclass(**_internal_dataclass.slots_true) + class SkipValidation: + """If this is applied as an annotation (e.g., via `x: Annotated[int, SkipValidation]`), validation will be + skipped. You can also use `SkipValidation[int]` as a shorthand for `Annotated[int, SkipValidation]`. + + This can be useful if you want to use a type annotation for documentation/IDE/type-checking purposes, + and know that it is safe to skip validation for one or more of the fields. + + Because this converts the validation schema to `any_schema`, subsequent annotation-applied transformations + may not have the expected effects. Therefore, when used, this annotation should generally be the final + annotation applied to a type. 
+ """ + + def __class_getitem__(cls, item: Any) -> Any: + return Annotated[item, SkipValidation()] + + @classmethod + def __get_pydantic_core_schema__(cls, source: Any, handler: GetCoreSchemaHandler) -> core_schema.CoreSchema: + original_schema = handler(source) + metadata = _core_metadata.build_metadata_dict(js_annotation_functions=[lambda _c, h: h(original_schema)]) + return core_schema.any_schema( + metadata=metadata, + serialization=core_schema.wrap_serializer_function_ser_schema( + function=lambda v, h: h(v), schema=original_schema + ), + ) + + __hash__ = object.__hash__ diff --git a/venv/lib/python3.10/site-packages/pydantic/generics.py b/venv/lib/python3.10/site-packages/pydantic/generics.py new file mode 100644 index 0000000000000000000000000000000000000000..5f6f7f7a9df3cad597ae39752f5f4e87eef896d3 --- /dev/null +++ b/venv/lib/python3.10/site-packages/pydantic/generics.py @@ -0,0 +1,4 @@ +"""The `generics` module is a backport module from V1.""" +from ._migration import getattr_migration + +__getattr__ = getattr_migration(__name__) diff --git a/venv/lib/python3.10/site-packages/pydantic/json.py b/venv/lib/python3.10/site-packages/pydantic/json.py new file mode 100644 index 0000000000000000000000000000000000000000..020fb6d201d63284c82fc40095351c826c94f52c --- /dev/null +++ b/venv/lib/python3.10/site-packages/pydantic/json.py @@ -0,0 +1,4 @@ +"""The `json` module is a backport module from V1.""" +from ._migration import getattr_migration + +__getattr__ = getattr_migration(__name__) diff --git a/venv/lib/python3.10/site-packages/pydantic/json_schema.py b/venv/lib/python3.10/site-packages/pydantic/json_schema.py new file mode 100644 index 0000000000000000000000000000000000000000..4b0e0132636fbe47cb3620761ef77eb0818173b3 --- /dev/null +++ b/venv/lib/python3.10/site-packages/pydantic/json_schema.py @@ -0,0 +1,2493 @@ +""" +Usage docs: https://docs.pydantic.dev/2.5/concepts/json_schema/ + +The `json_schema` module contains classes and functions to allow the way [JSON Schema](https://json-schema.org/) +is generated to be customized. + +In general you shouldn't need to use this module directly; instead, you can use +[`BaseModel.model_json_schema`][pydantic.BaseModel.model_json_schema] and +[`TypeAdapter.json_schema`][pydantic.TypeAdapter.json_schema]. +""" +from __future__ import annotations as _annotations + +import dataclasses +import inspect +import math +import re +import warnings +from collections import defaultdict +from copy import deepcopy +from dataclasses import is_dataclass +from enum import Enum +from typing import ( + TYPE_CHECKING, + Any, + Callable, + Counter, + Dict, + Hashable, + Iterable, + NewType, + Sequence, + Tuple, + TypeVar, + Union, + cast, +) + +import pydantic_core +from pydantic_core import CoreSchema, PydanticOmit, core_schema, to_jsonable_python +from pydantic_core.core_schema import ComputedField +from typing_extensions import Annotated, Literal, TypeAlias, assert_never, deprecated, final + +from pydantic.warnings import PydanticDeprecatedSince26 + +from ._internal import ( + _config, + _core_metadata, + _core_utils, + _decorators, + _internal_dataclass, + _mock_val_ser, + _schema_generation_shared, + _typing_extra, +) +from .annotated_handlers import GetJsonSchemaHandler +from .config import JsonDict, JsonSchemaExtraCallable, JsonValue +from .errors import PydanticInvalidForJsonSchema, PydanticSchemaGenerationError, PydanticUserError + +if TYPE_CHECKING: + from . 
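A minimal sketch (not from the diffed file) of the recommended entry point mentioned in the module docstring, `TypeAdapter.json_schema`, which builds a schema for arbitrary types rather than only `BaseModel` subclasses, assuming pydantic v2:

```python
from typing import List

from pydantic import TypeAdapter

print(TypeAdapter(List[int]).json_schema())
#> {'items': {'type': 'integer'}, 'type': 'array'}
```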
import ConfigDict + from ._internal._core_utils import CoreSchemaField, CoreSchemaOrField + from ._internal._dataclasses import PydanticDataclass + from ._internal._schema_generation_shared import GetJsonSchemaFunction + from .main import BaseModel + + +CoreSchemaOrFieldType = Literal[core_schema.CoreSchemaType, core_schema.CoreSchemaFieldType] +""" +A type alias for defined schema types that represents a union of +`core_schema.CoreSchemaType` and +`core_schema.CoreSchemaFieldType`. +""" + +JsonSchemaValue = Dict[str, Any] +""" +A type alias for a JSON schema value. This is a dictionary of string keys to arbitrary JSON values. +""" + +JsonSchemaMode = Literal['validation', 'serialization'] +""" +A type alias that represents the mode of a JSON schema; either 'validation' or 'serialization'. + +For some types, the inputs to validation differ from the outputs of serialization. For example, +computed fields will only be present when serializing, and should not be provided when +validating. This flag provides a way to indicate whether you want the JSON schema required +for validation inputs, or that will be matched by serialization outputs. +""" + +_MODE_TITLE_MAPPING: dict[JsonSchemaMode, str] = {'validation': 'Input', 'serialization': 'Output'} + + +@deprecated( + '`update_json_schema` is deprecated, use a simple `my_dict.update(update_dict)` call instead.', + category=None, +) +def update_json_schema(schema: JsonSchemaValue, updates: dict[str, Any]) -> JsonSchemaValue: + """Update a JSON schema in-place by providing a dictionary of updates. + + This function sets the provided key-value pairs in the schema and returns the updated schema. + + Args: + schema: The JSON schema to update. + updates: A dictionary of key-value pairs to set in the schema. + + Returns: + The updated JSON schema. + """ + schema.update(updates) + return schema + + +JsonSchemaWarningKind = Literal['skipped-choice', 'non-serializable-default'] +""" +A type alias representing the kinds of warnings that can be emitted during JSON schema generation. + +See [`GenerateJsonSchema.render_warning_message`][pydantic.json_schema.GenerateJsonSchema.render_warning_message] +for more details. +""" + + +class PydanticJsonSchemaWarning(UserWarning): + """This class is used to emit warnings produced during JSON schema generation. + See the [`GenerateJsonSchema.emit_warning`][pydantic.json_schema.GenerateJsonSchema.emit_warning] and + [`GenerateJsonSchema.render_warning_message`][pydantic.json_schema.GenerateJsonSchema.render_warning_message] + methods for more details; these can be overridden to control warning behavior. + """ + + +# ##### JSON Schema Generation ##### +DEFAULT_REF_TEMPLATE = '#/$defs/{model}' +"""The default format string used to generate reference names.""" + +# There are three types of references relevant to building JSON schemas: +# 1. core_schema "ref" values; these are not exposed as part of the JSON schema +# * these might look like the fully qualified path of a model, its id, or something similar +CoreRef = NewType('CoreRef', str) +# 2. keys of the "definitions" object that will eventually go into the JSON schema +# * by default, these look like "MyModel", though may change in the presence of collisions +# * eventually, we may want to make it easier to modify the way these names are generated +DefsRef = NewType('DefsRef', str) +# 3. 
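A supplementary sketch (the `Product` model is illustrative, not part of the diffed source) of the `'validation'` vs `'serialization'` modes described above: computed fields only exist on output, so they appear only in the serialization schema, assuming pydantic v2:

```python
from pydantic import BaseModel, computed_field

class Product(BaseModel):
    price: float

    @computed_field
    @property
    def price_with_tax(self) -> float:
        return round(self.price * 1.2, 2)

print(sorted(Product.model_json_schema(mode='validation')['properties']))
#> ['price']
print(sorted(Product.model_json_schema(mode='serialization')['properties']))
#> ['price', 'price_with_tax']
```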
the values corresponding to the "$ref" key in the schema +# * By default, these look like "#/$defs/MyModel", as in {"$ref": "#/$defs/MyModel"} +JsonRef = NewType('JsonRef', str) + +CoreModeRef = Tuple[CoreRef, JsonSchemaMode] +JsonSchemaKeyT = TypeVar('JsonSchemaKeyT', bound=Hashable) + + +@dataclasses.dataclass(**_internal_dataclass.slots_true) +class _DefinitionsRemapping: + defs_remapping: dict[DefsRef, DefsRef] + json_remapping: dict[JsonRef, JsonRef] + + @staticmethod + def from_prioritized_choices( + prioritized_choices: dict[DefsRef, list[DefsRef]], + defs_to_json: dict[DefsRef, JsonRef], + definitions: dict[DefsRef, JsonSchemaValue], + ) -> _DefinitionsRemapping: + """ + This function should produce a remapping that replaces complex DefsRef with the simpler ones from the + prioritized_choices such that applying the name remapping would result in an equivalent JSON schema. + """ + # We need to iteratively simplify the definitions until we reach a fixed point. + # The reason for this is that outer definitions may reference inner definitions that get simplified + # into an equivalent reference, and the outer definitions won't be equivalent until we've simplified + # the inner definitions. + copied_definitions = deepcopy(definitions) + definitions_schema = {'$defs': copied_definitions} + for _iter in range(100): # prevent an infinite loop in the case of a bug, 100 iterations should be enough + # For every possible remapped DefsRef, collect all schemas that that DefsRef might be used for: + schemas_for_alternatives: dict[DefsRef, list[JsonSchemaValue]] = defaultdict(list) + for defs_ref in copied_definitions: + alternatives = prioritized_choices[defs_ref] + for alternative in alternatives: + schemas_for_alternatives[alternative].append(copied_definitions[defs_ref]) + + # Deduplicate the schemas for each alternative; the idea is that we only want to remap to a new DefsRef + # if it introduces no ambiguity, i.e., there is only one distinct schema for that DefsRef. 
+ for defs_ref, schemas in schemas_for_alternatives.items(): + schemas_for_alternatives[defs_ref] = _deduplicate_schemas(schemas_for_alternatives[defs_ref]) + + # Build the remapping + defs_remapping: dict[DefsRef, DefsRef] = {} + json_remapping: dict[JsonRef, JsonRef] = {} + for original_defs_ref in definitions: + alternatives = prioritized_choices[original_defs_ref] + # Pick the first alternative that has only one schema, since that means there is no collision + remapped_defs_ref = next(x for x in alternatives if len(schemas_for_alternatives[x]) == 1) + defs_remapping[original_defs_ref] = remapped_defs_ref + json_remapping[defs_to_json[original_defs_ref]] = defs_to_json[remapped_defs_ref] + remapping = _DefinitionsRemapping(defs_remapping, json_remapping) + new_definitions_schema = remapping.remap_json_schema({'$defs': copied_definitions}) + if definitions_schema == new_definitions_schema: + # We've reached the fixed point + return remapping + definitions_schema = new_definitions_schema + + raise PydanticInvalidForJsonSchema('Failed to simplify the JSON schema definitions') + + def remap_defs_ref(self, ref: DefsRef) -> DefsRef: + return self.defs_remapping.get(ref, ref) + + def remap_json_ref(self, ref: JsonRef) -> JsonRef: + return self.json_remapping.get(ref, ref) + + def remap_json_schema(self, schema: Any) -> Any: + """ + Recursively update the JSON schema replacing all $refs + """ + if isinstance(schema, str): + # Note: this may not really be a JsonRef; we rely on having no collisions between JsonRefs and other strings + return self.remap_json_ref(JsonRef(schema)) + elif isinstance(schema, list): + return [self.remap_json_schema(item) for item in schema] + elif isinstance(schema, dict): + for key, value in schema.items(): + if key == '$ref' and isinstance(value, str): + schema['$ref'] = self.remap_json_ref(JsonRef(value)) + elif key == '$defs': + schema['$defs'] = { + self.remap_defs_ref(DefsRef(key)): self.remap_json_schema(value) + for key, value in schema['$defs'].items() + } + else: + schema[key] = self.remap_json_schema(value) + return schema + + +class GenerateJsonSchema: + """Usage docs: https://docs.pydantic.dev/2.7/concepts/json_schema/#customizing-the-json-schema-generation-process + + A class for generating JSON schemas. + + This class generates JSON schemas based on configured parameters. The default schema dialect + is [https://json-schema.org/draft/2020-12/schema](https://json-schema.org/draft/2020-12/schema). + The class uses `by_alias` to configure how fields with + multiple names are handled and `ref_template` to format reference names. + + Attributes: + schema_dialect: The JSON schema dialect used to generate the schema. See + [Declaring a Dialect](https://json-schema.org/understanding-json-schema/reference/schema.html#id4) + in the JSON Schema documentation for more information about dialects. + ignored_warning_kinds: Warnings to ignore when generating the schema. `self.render_warning_message` will + do nothing if its argument `kind` is in `ignored_warning_kinds`; + this value can be modified on subclasses to easily control which warnings are emitted. + by_alias: Whether to use field aliases when generating the schema. + ref_template: The format string used when generating reference names. + core_to_json_refs: A mapping of core refs to JSON refs. + core_to_defs_refs: A mapping of core refs to definition refs. + defs_to_core_refs: A mapping of definition refs to core refs. + json_to_defs_refs: A mapping of JSON refs to definition refs. 
+ definitions: Definitions in the schema. + + Args: + by_alias: Whether to use field aliases in the generated schemas. + ref_template: The format string to use when generating reference names. + + Raises: + JsonSchemaError: If the instance of the class is inadvertently re-used after generating a schema. + """ + + schema_dialect = 'https://json-schema.org/draft/2020-12/schema' + + # `self.render_warning_message` will do nothing if its argument `kind` is in `ignored_warning_kinds`; + # this value can be modified on subclasses to easily control which warnings are emitted + ignored_warning_kinds: set[JsonSchemaWarningKind] = {'skipped-choice'} + + def __init__(self, by_alias: bool = True, ref_template: str = DEFAULT_REF_TEMPLATE): + self.by_alias = by_alias + self.ref_template = ref_template + + self.core_to_json_refs: dict[CoreModeRef, JsonRef] = {} + self.core_to_defs_refs: dict[CoreModeRef, DefsRef] = {} + self.defs_to_core_refs: dict[DefsRef, CoreModeRef] = {} + self.json_to_defs_refs: dict[JsonRef, DefsRef] = {} + + self.definitions: dict[DefsRef, JsonSchemaValue] = {} + self._config_wrapper_stack = _config.ConfigWrapperStack(_config.ConfigWrapper({})) + + self._mode: JsonSchemaMode = 'validation' + + # The following includes a mapping of a fully-unique defs ref choice to a list of preferred + # alternatives, which are generally simpler, such as only including the class name. + # At the end of schema generation, we use these to produce a JSON schema with more human-readable + # definitions, which would also work better in a generated OpenAPI client, etc. + self._prioritized_defsref_choices: dict[DefsRef, list[DefsRef]] = {} + self._collision_counter: dict[str, int] = defaultdict(int) + self._collision_index: dict[str, int] = {} + + self._schema_type_to_method = self.build_schema_type_to_method() + + # When we encounter definitions we need to try to build them immediately + # so that they are available schemas that reference them + # But it's possible that CoreSchema was never going to be used + # (e.g. because the CoreSchema that references short circuits is JSON schema generation without needing + # the reference) so instead of failing altogether if we can't build a definition we + # store the error raised and re-throw it if we end up needing that def + self._core_defs_invalid_for_json_schema: dict[DefsRef, PydanticInvalidForJsonSchema] = {} + + # This changes to True after generating a schema, to prevent issues caused by accidental re-use + # of a single instance of a schema generator + self._used = False + + @property + def _config(self) -> _config.ConfigWrapper: + return self._config_wrapper_stack.tail + + @property + def mode(self) -> JsonSchemaMode: + if self._config.json_schema_mode_override is not None: + return self._config.json_schema_mode_override + else: + return self._mode + + def build_schema_type_to_method( + self, + ) -> dict[CoreSchemaOrFieldType, Callable[[CoreSchemaOrField], JsonSchemaValue]]: + """Builds a dictionary mapping fields to methods for generating JSON schemas. + + Returns: + A dictionary containing the mapping of `CoreSchemaOrFieldType` to a handler method. + + Raises: + TypeError: If no method has been defined for generating a JSON schema for a given pydantic core schema type. 
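A short sketch (illustrative `Item`/`Order` models, not from the diffed file) of the `ref_template` parameter discussed above, which controls how `$ref` strings are rendered (the default is `'#/$defs/{model}'`), assuming pydantic v2:

```python
from pydantic import BaseModel

class Item(BaseModel):
    name: str

class Order(BaseModel):
    item: Item

schema = Order.model_json_schema(ref_template='#/components/schemas/{model}')
print(schema['properties']['item']['$ref'])
#> #/components/schemas/Item
```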
+ """ + mapping: dict[CoreSchemaOrFieldType, Callable[[CoreSchemaOrField], JsonSchemaValue]] = {} + core_schema_types: list[CoreSchemaOrFieldType] = _typing_extra.all_literal_values( + CoreSchemaOrFieldType # type: ignore + ) + for key in core_schema_types: + method_name = f"{key.replace('-', '_')}_schema" + try: + mapping[key] = getattr(self, method_name) + except AttributeError as e: # pragma: no cover + raise TypeError( + f'No method for generating JsonSchema for core_schema.type={key!r} ' + f'(expected: {type(self).__name__}.{method_name})' + ) from e + return mapping + + def generate_definitions( + self, inputs: Sequence[tuple[JsonSchemaKeyT, JsonSchemaMode, core_schema.CoreSchema]] + ) -> tuple[dict[tuple[JsonSchemaKeyT, JsonSchemaMode], JsonSchemaValue], dict[DefsRef, JsonSchemaValue]]: + """Generates JSON schema definitions from a list of core schemas, pairing the generated definitions with a + mapping that links the input keys to the definition references. + + Args: + inputs: A sequence of tuples, where: + + - The first element is a JSON schema key type. + - The second element is the JSON mode: either 'validation' or 'serialization'. + - The third element is a core schema. + + Returns: + A tuple where: + + - The first element is a dictionary whose keys are tuples of JSON schema key type and JSON mode, and + whose values are the JSON schema corresponding to that pair of inputs. (These schemas may have + JsonRef references to definitions that are defined in the second returned element.) + - The second element is a dictionary whose keys are definition references for the JSON schemas + from the first returned element, and whose values are the actual JSON schema definitions. + + Raises: + PydanticUserError: Raised if the JSON schema generator has already been used to generate a JSON schema. + """ + if self._used: + raise PydanticUserError( + 'This JSON schema generator has already been used to generate a JSON schema. ' + f'You must create a new instance of {type(self).__name__} to generate a new JSON schema.', + code='json-schema-already-used', + ) + + for key, mode, schema in inputs: + self._mode = mode + self.generate_inner(schema) + + definitions_remapping = self._build_definitions_remapping() + + json_schemas_map: dict[tuple[JsonSchemaKeyT, JsonSchemaMode], DefsRef] = {} + for key, mode, schema in inputs: + self._mode = mode + json_schema = self.generate_inner(schema) + json_schemas_map[(key, mode)] = definitions_remapping.remap_json_schema(json_schema) + + json_schema = {'$defs': self.definitions} + json_schema = definitions_remapping.remap_json_schema(json_schema) + self._used = True + return json_schemas_map, _sort_json_schema(json_schema['$defs']) # type: ignore + + def generate(self, schema: CoreSchema, mode: JsonSchemaMode = 'validation') -> JsonSchemaValue: + """Generates a JSON schema for a specified schema in a specified mode. + + Args: + schema: A Pydantic model. + mode: The mode in which to generate the schema. Defaults to 'validation'. + + Returns: + A JSON schema representing the specified schema. + + Raises: + PydanticUserError: If the JSON schema generator has already been used to generate a JSON schema. + """ + self._mode = mode + if self._used: + raise PydanticUserError( + 'This JSON schema generator has already been used to generate a JSON schema. 
' + f'You must create a new instance of {type(self).__name__} to generate a new JSON schema.', + code='json-schema-already-used', + ) + + json_schema: JsonSchemaValue = self.generate_inner(schema) + json_ref_counts = self.get_json_ref_counts(json_schema) + + # Remove the top-level $ref if present; note that the _generate method already ensures there are no sibling keys + ref = cast(JsonRef, json_schema.get('$ref')) + while ref is not None: # may need to unpack multiple levels + ref_json_schema = self.get_schema_from_definitions(ref) + if json_ref_counts[ref] > 1 or ref_json_schema is None: + # Keep the ref, but use an allOf to remove the top level $ref + json_schema = {'allOf': [{'$ref': ref}]} + else: + # "Unpack" the ref since this is the only reference + json_schema = ref_json_schema.copy() # copy to prevent recursive dict reference + json_ref_counts[ref] -= 1 + ref = cast(JsonRef, json_schema.get('$ref')) + + self._garbage_collect_definitions(json_schema) + definitions_remapping = self._build_definitions_remapping() + + if self.definitions: + json_schema['$defs'] = self.definitions + + json_schema = definitions_remapping.remap_json_schema(json_schema) + + # For now, we will not set the $schema key. However, if desired, this can be easily added by overriding + # this method and adding the following line after a call to super().generate(schema): + # json_schema['$schema'] = self.schema_dialect + + self._used = True + return _sort_json_schema(json_schema) + + def generate_inner(self, schema: CoreSchemaOrField) -> JsonSchemaValue: # noqa: C901 + """Generates a JSON schema for a given core schema. + + Args: + schema: The given core schema. + + Returns: + The generated JSON schema. + """ + # If a schema with the same CoreRef has been handled, just return a reference to it + # Note that this assumes that it will _never_ be the case that the same CoreRef is used + # on types that should have different JSON schemas + if 'ref' in schema: + core_ref = CoreRef(schema['ref']) # type: ignore[typeddict-item] + core_mode_ref = (core_ref, self.mode) + if core_mode_ref in self.core_to_defs_refs and self.core_to_defs_refs[core_mode_ref] in self.definitions: + return {'$ref': self.core_to_json_refs[core_mode_ref]} + + # Generate the JSON schema, accounting for the json_schema_override and core_schema_override + metadata_handler = _core_metadata.CoreMetadataHandler(schema) + + def populate_defs(core_schema: CoreSchema, json_schema: JsonSchemaValue) -> JsonSchemaValue: + if 'ref' in core_schema: + core_ref = CoreRef(core_schema['ref']) # type: ignore[typeddict-item] + defs_ref, ref_json_schema = self.get_cache_defs_ref_schema(core_ref) + json_ref = JsonRef(ref_json_schema['$ref']) + self.json_to_defs_refs[json_ref] = defs_ref + # Replace the schema if it's not a reference to itself + # What we want to avoid is having the def be just a ref to itself + # which is what would happen if we blindly assigned any + if json_schema.get('$ref', None) != json_ref: + self.definitions[defs_ref] = json_schema + self._core_defs_invalid_for_json_schema.pop(defs_ref, None) + json_schema = ref_json_schema + return json_schema + + def convert_to_all_of(json_schema: JsonSchemaValue) -> JsonSchemaValue: + if '$ref' in json_schema and len(json_schema.keys()) > 1: + # technically you can't have any other keys next to a "$ref" + # but it's an easy mistake to make and not hard to correct automatically here + json_schema = json_schema.copy() + ref = json_schema.pop('$ref') + json_schema = {'allOf': [{'$ref': ref}], **json_schema} + 
return json_schema + + def handler_func(schema_or_field: CoreSchemaOrField) -> JsonSchemaValue: + """Generate a JSON schema based on the input schema. + + Args: + schema_or_field: The core schema to generate a JSON schema from. + + Returns: + The generated JSON schema. + + Raises: + TypeError: If an unexpected schema type is encountered. + """ + # Generate the core-schema-type-specific bits of the schema generation: + json_schema: JsonSchemaValue | None = None + if self.mode == 'serialization' and 'serialization' in schema_or_field: + ser_schema = schema_or_field['serialization'] # type: ignore + json_schema = self.ser_schema(ser_schema) + if json_schema is None: + if _core_utils.is_core_schema(schema_or_field) or _core_utils.is_core_schema_field(schema_or_field): + generate_for_schema_type = self._schema_type_to_method[schema_or_field['type']] + json_schema = generate_for_schema_type(schema_or_field) + else: + raise TypeError(f'Unexpected schema type: schema={schema_or_field}') + if _core_utils.is_core_schema(schema_or_field): + json_schema = populate_defs(schema_or_field, json_schema) + json_schema = convert_to_all_of(json_schema) + return json_schema + + current_handler = _schema_generation_shared.GenerateJsonSchemaHandler(self, handler_func) + + for js_modify_function in metadata_handler.metadata.get('pydantic_js_functions', ()): + + def new_handler_func( + schema_or_field: CoreSchemaOrField, + current_handler: GetJsonSchemaHandler = current_handler, + js_modify_function: GetJsonSchemaFunction = js_modify_function, + ) -> JsonSchemaValue: + json_schema = js_modify_function(schema_or_field, current_handler) + if _core_utils.is_core_schema(schema_or_field): + json_schema = populate_defs(schema_or_field, json_schema) + original_schema = current_handler.resolve_ref_schema(json_schema) + ref = json_schema.pop('$ref', None) + if ref and json_schema: + original_schema.update(json_schema) + return original_schema + + current_handler = _schema_generation_shared.GenerateJsonSchemaHandler(self, new_handler_func) + + for js_modify_function in metadata_handler.metadata.get('pydantic_js_annotation_functions', ()): + + def new_handler_func( + schema_or_field: CoreSchemaOrField, + current_handler: GetJsonSchemaHandler = current_handler, + js_modify_function: GetJsonSchemaFunction = js_modify_function, + ) -> JsonSchemaValue: + json_schema = js_modify_function(schema_or_field, current_handler) + if _core_utils.is_core_schema(schema_or_field): + json_schema = populate_defs(schema_or_field, json_schema) + json_schema = convert_to_all_of(json_schema) + return json_schema + + current_handler = _schema_generation_shared.GenerateJsonSchemaHandler(self, new_handler_func) + + json_schema = current_handler(schema) + if _core_utils.is_core_schema(schema): + json_schema = populate_defs(schema, json_schema) + json_schema = convert_to_all_of(json_schema) + return json_schema + + # ### Schema generation methods + def any_schema(self, schema: core_schema.AnySchema) -> JsonSchemaValue: + """Generates a JSON schema that matches any value. + + Args: + schema: The core schema. + + Returns: + The generated JSON schema. + """ + return {} + + def none_schema(self, schema: core_schema.NoneSchema) -> JsonSchemaValue: + """Generates a JSON schema that matches `None`. + + Args: + schema: The core schema. + + Returns: + The generated JSON schema. + """ + return {'type': 'null'} + + def bool_schema(self, schema: core_schema.BoolSchema) -> JsonSchemaValue: + """Generates a JSON schema that matches a bool value. 
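A supplementary sketch (the subclass and field names are illustrative, not part of the diffed source) of overriding one of the per-type hooks such as `none_schema` via a `GenerateJsonSchema` subclass, assuming pydantic v2:

```python
from pydantic import BaseModel
from pydantic.json_schema import GenerateJsonSchema

class AnnotatedNullSchema(GenerateJsonSchema):
    def none_schema(self, schema):
        # start from the default {'type': 'null'} and add extra information
        json_schema = super().none_schema(schema)
        json_schema['description'] = 'JSON null'
        return json_schema

class Model(BaseModel):
    nickname: str | None = None

print(Model.model_json_schema(schema_generator=AnnotatedNullSchema)['properties']['nickname'])
```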
+ + Args: + schema: The core schema. + + Returns: + The generated JSON schema. + """ + return {'type': 'boolean'} + + def int_schema(self, schema: core_schema.IntSchema) -> JsonSchemaValue: + """Generates a JSON schema that matches an int value. + + Args: + schema: The core schema. + + Returns: + The generated JSON schema. + """ + json_schema: dict[str, Any] = {'type': 'integer'} + self.update_with_validations(json_schema, schema, self.ValidationsMapping.numeric) + json_schema = {k: v for k, v in json_schema.items() if v not in {math.inf, -math.inf}} + return json_schema + + def float_schema(self, schema: core_schema.FloatSchema) -> JsonSchemaValue: + """Generates a JSON schema that matches a float value. + + Args: + schema: The core schema. + + Returns: + The generated JSON schema. + """ + json_schema: dict[str, Any] = {'type': 'number'} + self.update_with_validations(json_schema, schema, self.ValidationsMapping.numeric) + json_schema = {k: v for k, v in json_schema.items() if v not in {math.inf, -math.inf}} + return json_schema + + def decimal_schema(self, schema: core_schema.DecimalSchema) -> JsonSchemaValue: + """Generates a JSON schema that matches a decimal value. + + Args: + schema: The core schema. + + Returns: + The generated JSON schema. + """ + json_schema = self.str_schema(core_schema.str_schema()) + if self.mode == 'validation': + multiple_of = schema.get('multiple_of') + le = schema.get('le') + ge = schema.get('ge') + lt = schema.get('lt') + gt = schema.get('gt') + json_schema = { + 'anyOf': [ + self.float_schema( + core_schema.float_schema( + allow_inf_nan=schema.get('allow_inf_nan'), + multiple_of=None if multiple_of is None else float(multiple_of), + le=None if le is None else float(le), + ge=None if ge is None else float(ge), + lt=None if lt is None else float(lt), + gt=None if gt is None else float(gt), + ) + ), + json_schema, + ], + } + return json_schema + + def str_schema(self, schema: core_schema.StringSchema) -> JsonSchemaValue: + """Generates a JSON schema that matches a string value. + + Args: + schema: The core schema. + + Returns: + The generated JSON schema. + """ + json_schema = {'type': 'string'} + self.update_with_validations(json_schema, schema, self.ValidationsMapping.string) + return json_schema + + def bytes_schema(self, schema: core_schema.BytesSchema) -> JsonSchemaValue: + """Generates a JSON schema that matches a bytes value. + + Args: + schema: The core schema. + + Returns: + The generated JSON schema. + """ + json_schema = {'type': 'string', 'format': 'base64url' if self._config.ser_json_bytes == 'base64' else 'binary'} + self.update_with_validations(json_schema, schema, self.ValidationsMapping.bytes) + return json_schema + + def date_schema(self, schema: core_schema.DateSchema) -> JsonSchemaValue: + """Generates a JSON schema that matches a date value. + + Args: + schema: The core schema. + + Returns: + The generated JSON schema. + """ + json_schema = {'type': 'string', 'format': 'date'} + self.update_with_validations(json_schema, schema, self.ValidationsMapping.date) + return json_schema + + def time_schema(self, schema: core_schema.TimeSchema) -> JsonSchemaValue: + """Generates a JSON schema that matches a time value. + + Args: + schema: The core schema. + + Returns: + The generated JSON schema. + """ + return {'type': 'string', 'format': 'time'} + + def datetime_schema(self, schema: core_schema.DatetimeSchema) -> JsonSchemaValue: + """Generates a JSON schema that matches a datetime value. + + Args: + schema: The core schema. 
+ + Returns: + The generated JSON schema. + """ + return {'type': 'string', 'format': 'date-time'} + + def timedelta_schema(self, schema: core_schema.TimedeltaSchema) -> JsonSchemaValue: + """Generates a JSON schema that matches a timedelta value. + + Args: + schema: The core schema. + + Returns: + The generated JSON schema. + """ + if self._config.ser_json_timedelta == 'float': + return {'type': 'number'} + return {'type': 'string', 'format': 'duration'} + + def literal_schema(self, schema: core_schema.LiteralSchema) -> JsonSchemaValue: + """Generates a JSON schema that matches a literal value. + + Args: + schema: The core schema. + + Returns: + The generated JSON schema. + """ + expected = [v.value if isinstance(v, Enum) else v for v in schema['expected']] + # jsonify the expected values + expected = [to_jsonable_python(v) for v in expected] + + result: dict[str, Any] = {'enum': expected} + if len(expected) == 1: + result['const'] = expected[0] + + types = {type(e) for e in expected} + if types == {str}: + result['type'] = 'string' + elif types == {int}: + result['type'] = 'integer' + elif types == {float}: + result['type'] = 'numeric' + elif types == {bool}: + result['type'] = 'boolean' + elif types == {list}: + result['type'] = 'array' + elif types == {type(None)}: + result['type'] = 'null' + return result + + def enum_schema(self, schema: core_schema.EnumSchema) -> JsonSchemaValue: + """Generates a JSON schema that matches an Enum value. + + Args: + schema: The core schema. + + Returns: + The generated JSON schema. + """ + enum_type = schema['cls'] + description = None if not enum_type.__doc__ else inspect.cleandoc(enum_type.__doc__) + if ( + description == 'An enumeration.' + ): # This is the default value provided by enum.EnumMeta.__new__; don't use it + description = None + result: dict[str, Any] = {'title': enum_type.__name__, 'description': description} + result = {k: v for k, v in result.items() if v is not None} + + expected = [to_jsonable_python(v.value) for v in schema['members']] + + result['enum'] = expected + if len(expected) == 1: + result['const'] = expected[0] + + types = {type(e) for e in expected} + if isinstance(enum_type, str) or types == {str}: + result['type'] = 'string' + elif isinstance(enum_type, int) or types == {int}: + result['type'] = 'integer' + elif isinstance(enum_type, float) or types == {float}: + result['type'] = 'numeric' + elif types == {bool}: + result['type'] = 'boolean' + elif types == {list}: + result['type'] = 'array' + + return result + + def is_instance_schema(self, schema: core_schema.IsInstanceSchema) -> JsonSchemaValue: + """Handles JSON schema generation for a core schema that checks if a value is an instance of a class. + + Unless overridden in a subclass, this raises an error. + + Args: + schema: The core schema. + + Returns: + The generated JSON schema. + """ + return self.handle_invalid_for_json_schema(schema, f'core_schema.IsInstanceSchema ({schema["cls"]})') + + def is_subclass_schema(self, schema: core_schema.IsSubclassSchema) -> JsonSchemaValue: + """Handles JSON schema generation for a core schema that checks if a value is a subclass of a class. + + For backwards compatibility with v1, this does not raise an error, but can be overridden to change this. + + Args: + schema: The core schema. + + Returns: + The generated JSON schema. + """ + # Note: This is for compatibility with V1; you can override if you want different behavior. 
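A minimal usage sketch (not from the diffed file) of the `literal_schema` behavior above: a homogeneous `Literal` produces an `enum` plus an inferred `type`, assuming pydantic v2:

```python
from typing import Literal

from pydantic import TypeAdapter

print(TypeAdapter(Literal['red', 'green']).json_schema())
#> {'enum': ['red', 'green'], 'type': 'string'}
```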
+ return {} + + def callable_schema(self, schema: core_schema.CallableSchema) -> JsonSchemaValue: + """Generates a JSON schema that matches a callable value. + + Unless overridden in a subclass, this raises an error. + + Args: + schema: The core schema. + + Returns: + The generated JSON schema. + """ + return self.handle_invalid_for_json_schema(schema, 'core_schema.CallableSchema') + + def list_schema(self, schema: core_schema.ListSchema) -> JsonSchemaValue: + """Returns a schema that matches a list schema. + + Args: + schema: The core schema. + + Returns: + The generated JSON schema. + """ + items_schema = {} if 'items_schema' not in schema else self.generate_inner(schema['items_schema']) + json_schema = {'type': 'array', 'items': items_schema} + self.update_with_validations(json_schema, schema, self.ValidationsMapping.array) + return json_schema + + @deprecated('`tuple_positional_schema` is deprecated. Use `tuple_schema` instead.', category=None) + @final + def tuple_positional_schema(self, schema: core_schema.TupleSchema) -> JsonSchemaValue: + """Replaced by `tuple_schema`.""" + warnings.warn( + '`tuple_positional_schema` is deprecated. Use `tuple_schema` instead.', + PydanticDeprecatedSince26, + stacklevel=2, + ) + return self.tuple_schema(schema) + + @deprecated('`tuple_variable_schema` is deprecated. Use `tuple_schema` instead.', category=None) + @final + def tuple_variable_schema(self, schema: core_schema.TupleSchema) -> JsonSchemaValue: + """Replaced by `tuple_schema`.""" + warnings.warn( + '`tuple_variable_schema` is deprecated. Use `tuple_schema` instead.', + PydanticDeprecatedSince26, + stacklevel=2, + ) + return self.tuple_schema(schema) + + def tuple_schema(self, schema: core_schema.TupleSchema) -> JsonSchemaValue: + """Generates a JSON schema that matches a tuple schema e.g. `Tuple[int, + str, bool]` or `Tuple[int, ...]`. + + Args: + schema: The core schema. + + Returns: + The generated JSON schema. + """ + json_schema: JsonSchemaValue = {'type': 'array'} + if 'variadic_item_index' in schema: + variadic_item_index = schema['variadic_item_index'] + if variadic_item_index > 0: + json_schema['minItems'] = variadic_item_index + json_schema['prefixItems'] = [ + self.generate_inner(item) for item in schema['items_schema'][:variadic_item_index] + ] + if variadic_item_index + 1 == len(schema['items_schema']): + # if the variadic item is the last item, then represent it faithfully + json_schema['items'] = self.generate_inner(schema['items_schema'][variadic_item_index]) + else: + # otherwise, 'items' represents the schema for the variadic + # item plus the suffix, so just allow anything for simplicity + # for now + json_schema['items'] = True + else: + prefixItems = [self.generate_inner(item) for item in schema['items_schema']] + if prefixItems: + json_schema['prefixItems'] = prefixItems + json_schema['minItems'] = len(prefixItems) + json_schema['maxItems'] = len(prefixItems) + self.update_with_validations(json_schema, schema, self.ValidationsMapping.array) + return json_schema + + def set_schema(self, schema: core_schema.SetSchema) -> JsonSchemaValue: + """Generates a JSON schema that matches a set schema. + + Args: + schema: The core schema. + + Returns: + The generated JSON schema. + """ + return self._common_set_schema(schema) + + def frozenset_schema(self, schema: core_schema.FrozenSetSchema) -> JsonSchemaValue: + """Generates a JSON schema that matches a frozenset schema. + + Args: + schema: The core schema. + + Returns: + The generated JSON schema. 
+ """ + return self._common_set_schema(schema) + + def _common_set_schema(self, schema: core_schema.SetSchema | core_schema.FrozenSetSchema) -> JsonSchemaValue: + items_schema = {} if 'items_schema' not in schema else self.generate_inner(schema['items_schema']) + json_schema = {'type': 'array', 'uniqueItems': True, 'items': items_schema} + self.update_with_validations(json_schema, schema, self.ValidationsMapping.array) + return json_schema + + def generator_schema(self, schema: core_schema.GeneratorSchema) -> JsonSchemaValue: + """Returns a JSON schema that represents the provided GeneratorSchema. + + Args: + schema: The schema. + + Returns: + The generated JSON schema. + """ + items_schema = {} if 'items_schema' not in schema else self.generate_inner(schema['items_schema']) + json_schema = {'type': 'array', 'items': items_schema} + self.update_with_validations(json_schema, schema, self.ValidationsMapping.array) + return json_schema + + def dict_schema(self, schema: core_schema.DictSchema) -> JsonSchemaValue: + """Generates a JSON schema that matches a dict schema. + + Args: + schema: The core schema. + + Returns: + The generated JSON schema. + """ + json_schema: JsonSchemaValue = {'type': 'object'} + + keys_schema = self.generate_inner(schema['keys_schema']).copy() if 'keys_schema' in schema else {} + keys_pattern = keys_schema.pop('pattern', None) + + values_schema = self.generate_inner(schema['values_schema']).copy() if 'values_schema' in schema else {} + values_schema.pop('title', None) # don't give a title to the additionalProperties + if values_schema or keys_pattern is not None: # don't add additionalProperties if it's empty + if keys_pattern is None: + json_schema['additionalProperties'] = values_schema + else: + json_schema['patternProperties'] = {keys_pattern: values_schema} + + self.update_with_validations(json_schema, schema, self.ValidationsMapping.object) + return json_schema + + def _function_schema( + self, + schema: _core_utils.AnyFunctionSchema, + ) -> JsonSchemaValue: + if _core_utils.is_function_with_inner_schema(schema): + # This could be wrong if the function's mode is 'before', but in practice will often be right, and when it + # isn't, I think it would be hard to automatically infer what the desired schema should be. + return self.generate_inner(schema['schema']) + + # function-plain + return self.handle_invalid_for_json_schema( + schema, f'core_schema.PlainValidatorFunctionSchema ({schema["function"]})' + ) + + def function_before_schema(self, schema: core_schema.BeforeValidatorFunctionSchema) -> JsonSchemaValue: + """Generates a JSON schema that matches a function-before schema. + + Args: + schema: The core schema. + + Returns: + The generated JSON schema. + """ + return self._function_schema(schema) + + def function_after_schema(self, schema: core_schema.AfterValidatorFunctionSchema) -> JsonSchemaValue: + """Generates a JSON schema that matches a function-after schema. + + Args: + schema: The core schema. + + Returns: + The generated JSON schema. + """ + return self._function_schema(schema) + + def function_plain_schema(self, schema: core_schema.PlainValidatorFunctionSchema) -> JsonSchemaValue: + """Generates a JSON schema that matches a function-plain schema. + + Args: + schema: The core schema. + + Returns: + The generated JSON schema. + """ + return self._function_schema(schema) + + def function_wrap_schema(self, schema: core_schema.WrapValidatorFunctionSchema) -> JsonSchemaValue: + """Generates a JSON schema that matches a function-wrap schema. 
+ + Args: + schema: The core schema. + + Returns: + The generated JSON schema. + """ + return self._function_schema(schema) + + def default_schema(self, schema: core_schema.WithDefaultSchema) -> JsonSchemaValue: + """Generates a JSON schema that matches a schema with a default value. + + Args: + schema: The core schema. + + Returns: + The generated JSON schema. + """ + json_schema = self.generate_inner(schema['schema']) + + if 'default' not in schema: + return json_schema + default = schema['default'] + # Note: if you want to include the value returned by the default_factory, + # override this method and replace the code above with: + # if 'default' in schema: + # default = schema['default'] + # elif 'default_factory' in schema: + # default = schema['default_factory']() + # else: + # return json_schema + + try: + encoded_default = self.encode_default(default) + except pydantic_core.PydanticSerializationError: + self.emit_warning( + 'non-serializable-default', + f'Default value {default} is not JSON serializable; excluding default from JSON schema', + ) + # Return the inner schema, as though there was no default + return json_schema + + if '$ref' in json_schema: + # Since reference schemas do not support child keys, we wrap the reference schema in a single-case allOf: + return {'allOf': [json_schema], 'default': encoded_default} + else: + json_schema['default'] = encoded_default + return json_schema + + def nullable_schema(self, schema: core_schema.NullableSchema) -> JsonSchemaValue: + """Generates a JSON schema that matches a schema that allows null values. + + Args: + schema: The core schema. + + Returns: + The generated JSON schema. + """ + null_schema = {'type': 'null'} + inner_json_schema = self.generate_inner(schema['schema']) + + if inner_json_schema == null_schema: + return null_schema + else: + # Thanks to the equality check against `null_schema` above, I think 'oneOf' would also be valid here; + # I'll use 'anyOf' for now, but it could be changed it if it would work better with some external tooling + return self.get_flattened_anyof([inner_json_schema, null_schema]) + + def union_schema(self, schema: core_schema.UnionSchema) -> JsonSchemaValue: + """Generates a JSON schema that matches a schema that allows values matching any of the given schemas. + + Args: + schema: The core schema. + + Returns: + The generated JSON schema. + """ + generated: list[JsonSchemaValue] = [] + + choices = schema['choices'] + for choice in choices: + # choice will be a tuple if an explicit label was provided + choice_schema = choice[0] if isinstance(choice, tuple) else choice + try: + generated.append(self.generate_inner(choice_schema)) + except PydanticOmit: + continue + except PydanticInvalidForJsonSchema as exc: + self.emit_warning('skipped-choice', exc.message) + if len(generated) == 1: + return generated[0] + return self.get_flattened_anyof(generated) + + def tagged_union_schema(self, schema: core_schema.TaggedUnionSchema) -> JsonSchemaValue: + """Generates a JSON schema that matches a schema that allows values matching any of the given schemas, where + the schemas are tagged with a discriminator field that indicates which schema should be used to validate + the value. + + Args: + schema: The core schema. + + Returns: + The generated JSON schema. 
+ """ + generated: dict[str, JsonSchemaValue] = {} + for k, v in schema['choices'].items(): + if isinstance(k, Enum): + k = k.value + try: + # Use str(k) since keys must be strings for json; while not technically correct, + # it's the closest that can be represented in valid JSON + generated[str(k)] = self.generate_inner(v).copy() + except PydanticOmit: + continue + except PydanticInvalidForJsonSchema as exc: + self.emit_warning('skipped-choice', exc.message) + + one_of_choices = _deduplicate_schemas(generated.values()) + json_schema: JsonSchemaValue = {'oneOf': one_of_choices} + + # This reflects the v1 behavior; TODO: we should make it possible to exclude OpenAPI stuff from the JSON schema + openapi_discriminator = self._extract_discriminator(schema, one_of_choices) + if openapi_discriminator is not None: + json_schema['discriminator'] = { + 'propertyName': openapi_discriminator, + 'mapping': {k: v.get('$ref', v) for k, v in generated.items()}, + } + + return json_schema + + def _extract_discriminator( + self, schema: core_schema.TaggedUnionSchema, one_of_choices: list[JsonDict] + ) -> str | None: + """Extract a compatible OpenAPI discriminator from the schema and one_of choices that end up in the final + schema.""" + openapi_discriminator: str | None = None + + if isinstance(schema['discriminator'], str): + return schema['discriminator'] + + if isinstance(schema['discriminator'], list): + # If the discriminator is a single item list containing a string, that is equivalent to the string case + if len(schema['discriminator']) == 1 and isinstance(schema['discriminator'][0], str): + return schema['discriminator'][0] + # When an alias is used that is different from the field name, the discriminator will be a list of single + # str lists, one for the attribute and one for the actual alias. The logic here will work even if there is + # more than one possible attribute, and looks for whether a single alias choice is present as a documented + # property on all choices. If so, that property will be used as the OpenAPI discriminator. + for alias_path in schema['discriminator']: + if not isinstance(alias_path, list): + break # this means that the discriminator is not a list of alias paths + if len(alias_path) != 1: + continue # this means that the "alias" does not represent a single field + alias = alias_path[0] + if not isinstance(alias, str): + continue # this means that the "alias" does not represent a field + alias_is_present_on_all_choices = True + for choice in one_of_choices: + while '$ref' in choice: + assert isinstance(choice['$ref'], str) + choice = self.get_schema_from_definitions(JsonRef(choice['$ref'])) or {} + properties = choice.get('properties', {}) + if not isinstance(properties, dict) or alias not in properties: + alias_is_present_on_all_choices = False + break + if alias_is_present_on_all_choices: + openapi_discriminator = alias + break + return openapi_discriminator + + def chain_schema(self, schema: core_schema.ChainSchema) -> JsonSchemaValue: + """Generates a JSON schema that matches a core_schema.ChainSchema. + + When generating a schema for validation, we return the validation JSON schema for the first step in the chain. + For serialization, we return the serialization JSON schema for the last step in the chain. + + Args: + schema: The core schema. + + Returns: + The generated JSON schema. 
+ """ + step_index = 0 if self.mode == 'validation' else -1 # use first step for validation, last for serialization + return self.generate_inner(schema['steps'][step_index]) + + def lax_or_strict_schema(self, schema: core_schema.LaxOrStrictSchema) -> JsonSchemaValue: + """Generates a JSON schema that matches a schema that allows values matching either the lax schema or the + strict schema. + + Args: + schema: The core schema. + + Returns: + The generated JSON schema. + """ + # TODO: Need to read the default value off of model config or whatever + use_strict = schema.get('strict', False) # TODO: replace this default False + # If your JSON schema fails to generate it is probably + # because one of the following two branches failed. + if use_strict: + return self.generate_inner(schema['strict_schema']) + else: + return self.generate_inner(schema['lax_schema']) + + def json_or_python_schema(self, schema: core_schema.JsonOrPythonSchema) -> JsonSchemaValue: + """Generates a JSON schema that matches a schema that allows values matching either the JSON schema or the + Python schema. + + The JSON schema is used instead of the Python schema. If you want to use the Python schema, you should override + this method. + + Args: + schema: The core schema. + + Returns: + The generated JSON schema. + """ + return self.generate_inner(schema['json_schema']) + + def typed_dict_schema(self, schema: core_schema.TypedDictSchema) -> JsonSchemaValue: + """Generates a JSON schema that matches a schema that defines a typed dict. + + Args: + schema: The core schema. + + Returns: + The generated JSON schema. + """ + total = schema.get('total', True) + named_required_fields: list[tuple[str, bool, CoreSchemaField]] = [ + (name, self.field_is_required(field, total), field) + for name, field in schema['fields'].items() + if self.field_is_present(field) + ] + if self.mode == 'serialization': + named_required_fields.extend(self._name_required_computed_fields(schema.get('computed_fields', []))) + cls = _get_typed_dict_cls(schema) + config = _get_typed_dict_config(cls) + with self._config_wrapper_stack.push(config): + json_schema = self._named_required_fields_schema(named_required_fields) + + json_schema_extra = config.get('json_schema_extra') + extra = schema.get('extra_behavior') + if extra is None: + extra = config.get('extra', 'ignore') + + if cls is not None: + title = config.get('title') or cls.__name__ + json_schema = self._update_class_schema(json_schema, title, extra, cls, json_schema_extra) + else: + if extra == 'forbid': + json_schema['additionalProperties'] = False + elif extra == 'allow': + json_schema['additionalProperties'] = True + + return json_schema + + @staticmethod + def _name_required_computed_fields( + computed_fields: list[ComputedField], + ) -> list[tuple[str, bool, core_schema.ComputedField]]: + return [(field['property_name'], True, field) for field in computed_fields] + + def _named_required_fields_schema( + self, named_required_fields: Sequence[tuple[str, bool, CoreSchemaField]] + ) -> JsonSchemaValue: + properties: dict[str, JsonSchemaValue] = {} + required_fields: list[str] = [] + for name, required, field in named_required_fields: + if self.by_alias: + name = self._get_alias_name(field, name) + try: + field_json_schema = self.generate_inner(field).copy() + except PydanticOmit: + continue + if 'title' not in field_json_schema and self.field_title_should_be_set(field): + title = self.get_title_from_name(name) + field_json_schema['title'] = title + field_json_schema = 
self.handle_ref_overrides(field_json_schema) + properties[name] = field_json_schema + if required: + required_fields.append(name) + + json_schema = {'type': 'object', 'properties': properties} + if required_fields: + json_schema['required'] = required_fields + return json_schema + + def _get_alias_name(self, field: CoreSchemaField, name: str) -> str: + if field['type'] == 'computed-field': + alias: Any = field.get('alias', name) + elif self.mode == 'validation': + alias = field.get('validation_alias', name) + else: + alias = field.get('serialization_alias', name) + if isinstance(alias, str): + name = alias + elif isinstance(alias, list): + alias = cast('list[str] | str', alias) + for path in alias: + if isinstance(path, list) and len(path) == 1 and isinstance(path[0], str): + # Use the first valid single-item string path; the code that constructs the alias array + # should ensure the first such item is what belongs in the JSON schema + name = path[0] + break + else: + assert_never(alias) + return name + + def typed_dict_field_schema(self, schema: core_schema.TypedDictField) -> JsonSchemaValue: + """Generates a JSON schema that matches a schema that defines a typed dict field. + + Args: + schema: The core schema. + + Returns: + The generated JSON schema. + """ + return self.generate_inner(schema['schema']) + + def dataclass_field_schema(self, schema: core_schema.DataclassField) -> JsonSchemaValue: + """Generates a JSON schema that matches a schema that defines a dataclass field. + + Args: + schema: The core schema. + + Returns: + The generated JSON schema. + """ + return self.generate_inner(schema['schema']) + + def model_field_schema(self, schema: core_schema.ModelField) -> JsonSchemaValue: + """Generates a JSON schema that matches a schema that defines a model field. + + Args: + schema: The core schema. + + Returns: + The generated JSON schema. + """ + return self.generate_inner(schema['schema']) + + def computed_field_schema(self, schema: core_schema.ComputedField) -> JsonSchemaValue: + """Generates a JSON schema that matches a schema that defines a computed field. + + Args: + schema: The core schema. + + Returns: + The generated JSON schema. + """ + return self.generate_inner(schema['return_schema']) + + def model_schema(self, schema: core_schema.ModelSchema) -> JsonSchemaValue: + """Generates a JSON schema that matches a schema that defines a model. + + Args: + schema: The core schema. + + Returns: + The generated JSON schema. + """ + # We do not use schema['model'].model_json_schema() here + # because it could lead to inconsistent refs handling, etc. 
+ cls = cast('type[BaseModel]', schema['cls']) + config = cls.model_config + title = config.get('title') + + with self._config_wrapper_stack.push(config): + json_schema = self.generate_inner(schema['schema']) + + json_schema_extra = config.get('json_schema_extra') + if cls.__pydantic_root_model__: + root_json_schema_extra = cls.model_fields['root'].json_schema_extra + if json_schema_extra and root_json_schema_extra: + raise ValueError( + '"model_config[\'json_schema_extra\']" and "Field.json_schema_extra" on "RootModel.root"' + ' field must not be set simultaneously' + ) + if root_json_schema_extra: + json_schema_extra = root_json_schema_extra + + json_schema = self._update_class_schema(json_schema, title, config.get('extra', None), cls, json_schema_extra) + + return json_schema + + def _update_class_schema( + self, + json_schema: JsonSchemaValue, + title: str | None, + extra: Literal['allow', 'ignore', 'forbid'] | None, + cls: type[Any], + json_schema_extra: JsonDict | JsonSchemaExtraCallable | None, + ) -> JsonSchemaValue: + if '$ref' in json_schema: + schema_to_update = self.get_schema_from_definitions(JsonRef(json_schema['$ref'])) or json_schema + else: + schema_to_update = json_schema + + if title is not None: + # referenced_schema['title'] = title + schema_to_update.setdefault('title', title) + + if 'additionalProperties' not in schema_to_update: + if extra == 'allow': + schema_to_update['additionalProperties'] = True + elif extra == 'forbid': + schema_to_update['additionalProperties'] = False + + if isinstance(json_schema_extra, (staticmethod, classmethod)): + # In older versions of python, this is necessary to ensure staticmethod/classmethods are callable + json_schema_extra = json_schema_extra.__get__(cls) + + if isinstance(json_schema_extra, dict): + schema_to_update.update(json_schema_extra) + elif callable(json_schema_extra): + if len(inspect.signature(json_schema_extra).parameters) > 1: + json_schema_extra(schema_to_update, cls) # type: ignore + else: + json_schema_extra(schema_to_update) # type: ignore + elif json_schema_extra is not None: + raise ValueError( + f"model_config['json_schema_extra']={json_schema_extra} should be a dict, callable, or None" + ) + + return json_schema + + def resolve_schema_to_update(self, json_schema: JsonSchemaValue) -> JsonSchemaValue: + """Resolve a JsonSchemaValue to the non-ref schema if it is a $ref schema. + + Args: + json_schema: The schema to resolve. + + Returns: + The resolved schema. + """ + if '$ref' in json_schema: + schema_to_update = self.get_schema_from_definitions(JsonRef(json_schema['$ref'])) + if schema_to_update is None: + raise RuntimeError(f'Cannot update undefined schema for $ref={json_schema["$ref"]}') + return self.resolve_schema_to_update(schema_to_update) + else: + schema_to_update = json_schema + return schema_to_update + + def model_fields_schema(self, schema: core_schema.ModelFieldsSchema) -> JsonSchemaValue: + """Generates a JSON schema that matches a schema that defines a model's fields. + + Args: + schema: The core schema. + + Returns: + The generated JSON schema. 
+ """ + named_required_fields: list[tuple[str, bool, CoreSchemaField]] = [ + (name, self.field_is_required(field, total=True), field) + for name, field in schema['fields'].items() + if self.field_is_present(field) + ] + if self.mode == 'serialization': + named_required_fields.extend(self._name_required_computed_fields(schema.get('computed_fields', []))) + json_schema = self._named_required_fields_schema(named_required_fields) + extras_schema = schema.get('extras_schema', None) + if extras_schema is not None: + schema_to_update = self.resolve_schema_to_update(json_schema) + schema_to_update['additionalProperties'] = self.generate_inner(extras_schema) + return json_schema + + def field_is_present(self, field: CoreSchemaField) -> bool: + """Whether the field should be included in the generated JSON schema. + + Args: + field: The schema for the field itself. + + Returns: + `True` if the field should be included in the generated JSON schema, `False` otherwise. + """ + if self.mode == 'serialization': + # If you still want to include the field in the generated JSON schema, + # override this method and return True + return not field.get('serialization_exclude') + elif self.mode == 'validation': + return True + else: + assert_never(self.mode) + + def field_is_required( + self, + field: core_schema.ModelField | core_schema.DataclassField | core_schema.TypedDictField, + total: bool, + ) -> bool: + """Whether the field should be marked as required in the generated JSON schema. + (Note that this is irrelevant if the field is not present in the JSON schema.). + + Args: + field: The schema for the field itself. + total: Only applies to `TypedDictField`s. + Indicates if the `TypedDict` this field belongs to is total, in which case any fields that don't + explicitly specify `required=False` are required. + + Returns: + `True` if the field should be marked as required in the generated JSON schema, `False` otherwise. + """ + if self.mode == 'serialization' and self._config.json_schema_serialization_defaults_required: + return not field.get('serialization_exclude') + else: + if field['type'] == 'typed-dict-field': + return field.get('required', total) + else: + return field['schema']['type'] != 'default' + + def dataclass_args_schema(self, schema: core_schema.DataclassArgsSchema) -> JsonSchemaValue: + """Generates a JSON schema that matches a schema that defines a dataclass's constructor arguments. + + Args: + schema: The core schema. + + Returns: + The generated JSON schema. + """ + named_required_fields: list[tuple[str, bool, CoreSchemaField]] = [ + (field['name'], self.field_is_required(field, total=True), field) + for field in schema['fields'] + if self.field_is_present(field) + ] + if self.mode == 'serialization': + named_required_fields.extend(self._name_required_computed_fields(schema.get('computed_fields', []))) + return self._named_required_fields_schema(named_required_fields) + + def dataclass_schema(self, schema: core_schema.DataclassSchema) -> JsonSchemaValue: + """Generates a JSON schema that matches a schema that defines a dataclass. + + Args: + schema: The core schema. + + Returns: + The generated JSON schema. 
+ """ + cls = schema['cls'] + config: ConfigDict = getattr(cls, '__pydantic_config__', cast('ConfigDict', {})) + title = config.get('title') or cls.__name__ + + with self._config_wrapper_stack.push(config): + json_schema = self.generate_inner(schema['schema']).copy() + + json_schema_extra = config.get('json_schema_extra') + json_schema = self._update_class_schema(json_schema, title, config.get('extra', None), cls, json_schema_extra) + + # Dataclass-specific handling of description + if is_dataclass(cls) and not hasattr(cls, '__pydantic_validator__'): + # vanilla dataclass; don't use cls.__doc__ as it will contain the class signature by default + description = None + else: + description = None if cls.__doc__ is None else inspect.cleandoc(cls.__doc__) + if description: + json_schema['description'] = description + + return json_schema + + def arguments_schema(self, schema: core_schema.ArgumentsSchema) -> JsonSchemaValue: + """Generates a JSON schema that matches a schema that defines a function's arguments. + + Args: + schema: The core schema. + + Returns: + The generated JSON schema. + """ + metadata = _core_metadata.CoreMetadataHandler(schema).metadata + prefer_positional = metadata.get('pydantic_js_prefer_positional_arguments') + + arguments = schema['arguments_schema'] + kw_only_arguments = [a for a in arguments if a.get('mode') == 'keyword_only'] + kw_or_p_arguments = [a for a in arguments if a.get('mode') in {'positional_or_keyword', None}] + p_only_arguments = [a for a in arguments if a.get('mode') == 'positional_only'] + var_args_schema = schema.get('var_args_schema') + var_kwargs_schema = schema.get('var_kwargs_schema') + + if prefer_positional: + positional_possible = not kw_only_arguments and not var_kwargs_schema + if positional_possible: + return self.p_arguments_schema(p_only_arguments + kw_or_p_arguments, var_args_schema) + + keyword_possible = not p_only_arguments and not var_args_schema + if keyword_possible: + return self.kw_arguments_schema(kw_or_p_arguments + kw_only_arguments, var_kwargs_schema) + + if not prefer_positional: + positional_possible = not kw_only_arguments and not var_kwargs_schema + if positional_possible: + return self.p_arguments_schema(p_only_arguments + kw_or_p_arguments, var_args_schema) + + raise PydanticInvalidForJsonSchema( + 'Unable to generate JSON schema for arguments validator with positional-only and keyword-only arguments' + ) + + def kw_arguments_schema( + self, arguments: list[core_schema.ArgumentsParameter], var_kwargs_schema: CoreSchema | None + ) -> JsonSchemaValue: + """Generates a JSON schema that matches a schema that defines a function's keyword arguments. + + Args: + arguments: The core schema. + + Returns: + The generated JSON schema. + """ + properties: dict[str, JsonSchemaValue] = {} + required: list[str] = [] + for argument in arguments: + name = self.get_argument_name(argument) + argument_schema = self.generate_inner(argument['schema']).copy() + argument_schema['title'] = self.get_title_from_name(name) + properties[name] = argument_schema + + if argument['schema']['type'] != 'default': + # This assumes that if the argument has a default value, + # the inner schema must be of type WithDefaultSchema. 
+ # I believe this is true, but I am not 100% sure + required.append(name) + + json_schema: JsonSchemaValue = {'type': 'object', 'properties': properties} + if required: + json_schema['required'] = required + + if var_kwargs_schema: + additional_properties_schema = self.generate_inner(var_kwargs_schema) + if additional_properties_schema: + json_schema['additionalProperties'] = additional_properties_schema + else: + json_schema['additionalProperties'] = False + return json_schema + + def p_arguments_schema( + self, arguments: list[core_schema.ArgumentsParameter], var_args_schema: CoreSchema | None + ) -> JsonSchemaValue: + """Generates a JSON schema that matches a schema that defines a function's positional arguments. + + Args: + arguments: The core schema. + + Returns: + The generated JSON schema. + """ + prefix_items: list[JsonSchemaValue] = [] + min_items = 0 + + for argument in arguments: + name = self.get_argument_name(argument) + + argument_schema = self.generate_inner(argument['schema']).copy() + argument_schema['title'] = self.get_title_from_name(name) + prefix_items.append(argument_schema) + + if argument['schema']['type'] != 'default': + # This assumes that if the argument has a default value, + # the inner schema must be of type WithDefaultSchema. + # I believe this is true, but I am not 100% sure + min_items += 1 + + json_schema: JsonSchemaValue = {'type': 'array', 'prefixItems': prefix_items} + if min_items: + json_schema['minItems'] = min_items + + if var_args_schema: + items_schema = self.generate_inner(var_args_schema) + if items_schema: + json_schema['items'] = items_schema + else: + json_schema['maxItems'] = len(prefix_items) + + return json_schema + + def get_argument_name(self, argument: core_schema.ArgumentsParameter) -> str: + """Retrieves the name of an argument. + + Args: + argument: The core schema. + + Returns: + The name of the argument. + """ + name = argument['name'] + if self.by_alias: + alias = argument.get('alias') + if isinstance(alias, str): + name = alias + else: + pass # might want to do something else? + return name + + def call_schema(self, schema: core_schema.CallSchema) -> JsonSchemaValue: + """Generates a JSON schema that matches a schema that defines a function call. + + Args: + schema: The core schema. + + Returns: + The generated JSON schema. + """ + return self.generate_inner(schema['arguments_schema']) + + def custom_error_schema(self, schema: core_schema.CustomErrorSchema) -> JsonSchemaValue: + """Generates a JSON schema that matches a schema that defines a custom error. + + Args: + schema: The core schema. + + Returns: + The generated JSON schema. + """ + return self.generate_inner(schema['schema']) + + def json_schema(self, schema: core_schema.JsonSchema) -> JsonSchemaValue: + """Generates a JSON schema that matches a schema that defines a JSON object. + + Args: + schema: The core schema. + + Returns: + The generated JSON schema. + """ + content_core_schema = schema.get('schema') or core_schema.any_schema() + content_json_schema = self.generate_inner(content_core_schema) + if self.mode == 'validation': + return {'type': 'string', 'contentMediaType': 'application/json', 'contentSchema': content_json_schema} + else: + # self.mode == 'serialization' + return content_json_schema + + def url_schema(self, schema: core_schema.UrlSchema) -> JsonSchemaValue: + """Generates a JSON schema that matches a schema that defines a URL. + + Args: + schema: The core schema. + + Returns: + The generated JSON schema. 
+ """ + json_schema = {'type': 'string', 'format': 'uri', 'minLength': 1} + self.update_with_validations(json_schema, schema, self.ValidationsMapping.string) + return json_schema + + def multi_host_url_schema(self, schema: core_schema.MultiHostUrlSchema) -> JsonSchemaValue: + """Generates a JSON schema that matches a schema that defines a URL that can be used with multiple hosts. + + Args: + schema: The core schema. + + Returns: + The generated JSON schema. + """ + # Note: 'multi-host-uri' is a custom/pydantic-specific format, not part of the JSON Schema spec + json_schema = {'type': 'string', 'format': 'multi-host-uri', 'minLength': 1} + self.update_with_validations(json_schema, schema, self.ValidationsMapping.string) + return json_schema + + def uuid_schema(self, schema: core_schema.UuidSchema) -> JsonSchemaValue: + """Generates a JSON schema that matches a UUID. + + Args: + schema: The core schema. + + Returns: + The generated JSON schema. + """ + return {'type': 'string', 'format': 'uuid'} + + def definitions_schema(self, schema: core_schema.DefinitionsSchema) -> JsonSchemaValue: + """Generates a JSON schema that matches a schema that defines a JSON object with definitions. + + Args: + schema: The core schema. + + Returns: + The generated JSON schema. + """ + for definition in schema['definitions']: + try: + self.generate_inner(definition) + except PydanticInvalidForJsonSchema as e: + core_ref: CoreRef = CoreRef(definition['ref']) # type: ignore + self._core_defs_invalid_for_json_schema[self.get_defs_ref((core_ref, self.mode))] = e + continue + return self.generate_inner(schema['schema']) + + def definition_ref_schema(self, schema: core_schema.DefinitionReferenceSchema) -> JsonSchemaValue: + """Generates a JSON schema that matches a schema that references a definition. + + Args: + schema: The core schema. + + Returns: + The generated JSON schema. + """ + core_ref = CoreRef(schema['schema_ref']) + _, ref_json_schema = self.get_cache_defs_ref_schema(core_ref) + return ref_json_schema + + def ser_schema( + self, schema: core_schema.SerSchema | core_schema.IncExSeqSerSchema | core_schema.IncExDictSerSchema + ) -> JsonSchemaValue | None: + """Generates a JSON schema that matches a schema that defines a serialized object. + + Args: + schema: The core schema. + + Returns: + The generated JSON schema. + """ + schema_type = schema['type'] + if schema_type == 'function-plain' or schema_type == 'function-wrap': + # PlainSerializerFunctionSerSchema or WrapSerializerFunctionSerSchema + return_schema = schema.get('return_schema') + if return_schema is not None: + return self.generate_inner(return_schema) + elif schema_type == 'format' or schema_type == 'to-string': + # FormatSerSchema or ToStringSerSchema + return self.str_schema(core_schema.str_schema()) + elif schema['type'] == 'model': + # ModelSerSchema + return self.generate_inner(schema['schema']) + return None + + # ### Utility methods + + def get_title_from_name(self, name: str) -> str: + """Retrieves a title from a name. + + Args: + name: The name to retrieve a title from. + + Returns: + The title. + """ + return name.title().replace('_', ' ') + + def field_title_should_be_set(self, schema: CoreSchemaOrField) -> bool: + """Returns true if a field with the given schema should have a title set based on the field name. + + Intuitively, we want this to return true for schemas that wouldn't otherwise provide their own title + (e.g., int, float, str), and false for those that would (e.g., BaseModel subclasses). 
+ + Args: + schema: The schema to check. + + Returns: + `True` if the field should have a title set, `False` otherwise. + """ + if _core_utils.is_core_schema_field(schema): + if schema['type'] == 'computed-field': + field_schema = schema['return_schema'] + else: + field_schema = schema['schema'] + return self.field_title_should_be_set(field_schema) + + elif _core_utils.is_core_schema(schema): + if schema.get('ref'): # things with refs, such as models and enums, should not have titles set + return False + if schema['type'] in {'default', 'nullable', 'definitions'}: + return self.field_title_should_be_set(schema['schema']) # type: ignore[typeddict-item] + if _core_utils.is_function_with_inner_schema(schema): + return self.field_title_should_be_set(schema['schema']) + if schema['type'] == 'definition-ref': + # Referenced schemas should not have titles set for the same reason + # schemas with refs should not + return False + return True # anything else should have title set + + else: + raise PydanticInvalidForJsonSchema(f'Unexpected schema type: schema={schema}') # pragma: no cover + + def normalize_name(self, name: str) -> str: + """Normalizes a name to be used as a key in a dictionary. + + Args: + name: The name to normalize. + + Returns: + The normalized name. + """ + return re.sub(r'[^a-zA-Z0-9.\-_]', '_', name).replace('.', '__') + + def get_defs_ref(self, core_mode_ref: CoreModeRef) -> DefsRef: + """Override this method to change the way that definitions keys are generated from a core reference. + + Args: + core_mode_ref: The core reference. + + Returns: + The definitions key. + """ + # Split the core ref into "components"; generic origins and arguments are each separate components + core_ref, mode = core_mode_ref + components = re.split(r'([\][,])', core_ref) + # Remove IDs from each component + components = [x.rsplit(':', 1)[0] for x in components] + core_ref_no_id = ''.join(components) + # Remove everything before the last period from each "component" + components = [re.sub(r'(?:[^.[\]]+\.)+((?:[^.[\]]+))', r'\1', x) for x in components] + short_ref = ''.join(components) + + mode_title = _MODE_TITLE_MAPPING[mode] + + # It is important that the generated defs_ref values be such that at least one choice will not + # be generated for any other core_ref. 
Currently, this should be the case because we include + # the id of the source type in the core_ref + name = DefsRef(self.normalize_name(short_ref)) + name_mode = DefsRef(self.normalize_name(short_ref) + f'-{mode_title}') + module_qualname = DefsRef(self.normalize_name(core_ref_no_id)) + module_qualname_mode = DefsRef(f'{module_qualname}-{mode_title}') + module_qualname_id = DefsRef(self.normalize_name(core_ref)) + occurrence_index = self._collision_index.get(module_qualname_id) + if occurrence_index is None: + self._collision_counter[module_qualname] += 1 + occurrence_index = self._collision_index[module_qualname_id] = self._collision_counter[module_qualname] + + module_qualname_occurrence = DefsRef(f'{module_qualname}__{occurrence_index}') + module_qualname_occurrence_mode = DefsRef(f'{module_qualname_mode}__{occurrence_index}') + + self._prioritized_defsref_choices[module_qualname_occurrence_mode] = [ + name, + name_mode, + module_qualname, + module_qualname_mode, + module_qualname_occurrence, + module_qualname_occurrence_mode, + ] + + return module_qualname_occurrence_mode + + def get_cache_defs_ref_schema(self, core_ref: CoreRef) -> tuple[DefsRef, JsonSchemaValue]: + """This method wraps the get_defs_ref method with some cache-lookup/population logic, + and returns both the produced defs_ref and the JSON schema that will refer to the right definition. + + Args: + core_ref: The core reference to get the definitions reference for. + + Returns: + A tuple of the definitions reference and the JSON schema that will refer to it. + """ + core_mode_ref = (core_ref, self.mode) + maybe_defs_ref = self.core_to_defs_refs.get(core_mode_ref) + if maybe_defs_ref is not None: + json_ref = self.core_to_json_refs[core_mode_ref] + return maybe_defs_ref, {'$ref': json_ref} + + defs_ref = self.get_defs_ref(core_mode_ref) + + # populate the ref translation mappings + self.core_to_defs_refs[core_mode_ref] = defs_ref + self.defs_to_core_refs[defs_ref] = core_mode_ref + + json_ref = JsonRef(self.ref_template.format(model=defs_ref)) + self.core_to_json_refs[core_mode_ref] = json_ref + self.json_to_defs_refs[json_ref] = defs_ref + ref_json_schema = {'$ref': json_ref} + return defs_ref, ref_json_schema + + def handle_ref_overrides(self, json_schema: JsonSchemaValue) -> JsonSchemaValue: + """It is not valid for a schema with a top-level $ref to have sibling keys. + + During our own schema generation, we treat sibling keys as overrides to the referenced schema, + but this is not how the official JSON schema spec works. + + Because of this, we first remove any sibling keys that are redundant with the referenced schema, then if + any remain, we transform the schema from a top-level '$ref' to use allOf to move the $ref out of the top level. + (See bottom of https://swagger.io/docs/specification/using-ref/ for a reference about this behavior) + """ + if '$ref' in json_schema: + # prevent modifications to the input; this copy may be safe to drop if there is significant overhead + json_schema = json_schema.copy() + + referenced_json_schema = self.get_schema_from_definitions(JsonRef(json_schema['$ref'])) + if referenced_json_schema is None: + # This can happen when building schemas for models with not-yet-defined references. + # It may be a good idea to do a recursive pass at the end of the generation to remove + # any redundant override keys. 
+ if len(json_schema) > 1: + # Make it an allOf to at least resolve the sibling keys issue + json_schema = json_schema.copy() + json_schema.setdefault('allOf', []) + json_schema['allOf'].append({'$ref': json_schema['$ref']}) + del json_schema['$ref'] + + return json_schema + for k, v in list(json_schema.items()): + if k == '$ref': + continue + if k in referenced_json_schema and referenced_json_schema[k] == v: + del json_schema[k] # redundant key + if len(json_schema) > 1: + # There is a remaining "override" key, so we need to move $ref out of the top level + json_ref = JsonRef(json_schema['$ref']) + del json_schema['$ref'] + assert 'allOf' not in json_schema # this should never happen, but just in case + json_schema['allOf'] = [{'$ref': json_ref}] + + return json_schema + + def get_schema_from_definitions(self, json_ref: JsonRef) -> JsonSchemaValue | None: + def_ref = self.json_to_defs_refs[json_ref] + if def_ref in self._core_defs_invalid_for_json_schema: + raise self._core_defs_invalid_for_json_schema[def_ref] + return self.definitions.get(def_ref, None) + + def encode_default(self, dft: Any) -> Any: + """Encode a default value to a JSON-serializable value. + + This is used to encode default values for fields in the generated JSON schema. + + Args: + dft: The default value to encode. + + Returns: + The encoded default value. + """ + from .type_adapter import TypeAdapter, _type_has_config + + config = self._config + try: + default = ( + dft + if _type_has_config(type(dft)) + else TypeAdapter(type(dft), config=config.config_dict).dump_python(dft, mode='json') + ) + except PydanticSchemaGenerationError: + raise pydantic_core.PydanticSerializationError(f'Unable to encode default value {dft}') + + return pydantic_core.to_jsonable_python( + default, + timedelta_mode=config.ser_json_timedelta, + bytes_mode=config.ser_json_bytes, + ) + + def update_with_validations( + self, json_schema: JsonSchemaValue, core_schema: CoreSchema, mapping: dict[str, str] + ) -> None: + """Update the json_schema with the corresponding validations specified in the core_schema, + using the provided mapping to translate keys in core_schema to the appropriate keys for a JSON schema. + + Args: + json_schema: The JSON schema to update. + core_schema: The core schema to get the validations from. + mapping: A mapping from core_schema attribute names to the corresponding JSON schema attribute names. + """ + for core_key, json_schema_key in mapping.items(): + if core_key in core_schema: + json_schema[json_schema_key] = core_schema[core_key] + + class ValidationsMapping: + """This class just contains mappings from core_schema attribute names to the corresponding + JSON schema attribute names. While I suspect it is unlikely to be necessary, you can in + principle override this class in a subclass of GenerateJsonSchema (by inheriting from + GenerateJsonSchema.ValidationsMapping) to change these mappings. 
+ """ + + numeric = { + 'multiple_of': 'multipleOf', + 'le': 'maximum', + 'ge': 'minimum', + 'lt': 'exclusiveMaximum', + 'gt': 'exclusiveMinimum', + } + bytes = { + 'min_length': 'minLength', + 'max_length': 'maxLength', + } + string = { + 'min_length': 'minLength', + 'max_length': 'maxLength', + 'pattern': 'pattern', + } + array = { + 'min_length': 'minItems', + 'max_length': 'maxItems', + } + object = { + 'min_length': 'minProperties', + 'max_length': 'maxProperties', + } + date = { + 'le': 'maximum', + 'ge': 'minimum', + 'lt': 'exclusiveMaximum', + 'gt': 'exclusiveMinimum', + } + + def get_flattened_anyof(self, schemas: list[JsonSchemaValue]) -> JsonSchemaValue: + members = [] + for schema in schemas: + if len(schema) == 1 and 'anyOf' in schema: + members.extend(schema['anyOf']) + else: + members.append(schema) + members = _deduplicate_schemas(members) + if len(members) == 1: + return members[0] + return {'anyOf': members} + + def get_json_ref_counts(self, json_schema: JsonSchemaValue) -> dict[JsonRef, int]: + """Get all values corresponding to the key '$ref' anywhere in the json_schema.""" + json_refs: dict[JsonRef, int] = Counter() + + def _add_json_refs(schema: Any) -> None: + if isinstance(schema, dict): + if '$ref' in schema: + json_ref = JsonRef(schema['$ref']) + if not isinstance(json_ref, str): + return # in this case, '$ref' might have been the name of a property + already_visited = json_ref in json_refs + json_refs[json_ref] += 1 + if already_visited: + return # prevent recursion on a definition that was already visited + defs_ref = self.json_to_defs_refs[json_ref] + if defs_ref in self._core_defs_invalid_for_json_schema: + raise self._core_defs_invalid_for_json_schema[defs_ref] + _add_json_refs(self.definitions[defs_ref]) + + for v in schema.values(): + _add_json_refs(v) + elif isinstance(schema, list): + for v in schema: + _add_json_refs(v) + + _add_json_refs(json_schema) + return json_refs + + def handle_invalid_for_json_schema(self, schema: CoreSchemaOrField, error_info: str) -> JsonSchemaValue: + raise PydanticInvalidForJsonSchema(f'Cannot generate a JsonSchema for {error_info}') + + def emit_warning(self, kind: JsonSchemaWarningKind, detail: str) -> None: + """This method simply emits PydanticJsonSchemaWarnings based on handling in the `warning_message` method.""" + message = self.render_warning_message(kind, detail) + if message is not None: + warnings.warn(message, PydanticJsonSchemaWarning) + + def render_warning_message(self, kind: JsonSchemaWarningKind, detail: str) -> str | None: + """This method is responsible for ignoring warnings as desired, and for formatting the warning messages. + + You can override the value of `ignored_warning_kinds` in a subclass of GenerateJsonSchema + to modify what warnings are generated. If you want more control, you can override this method; + just return None in situations where you don't want warnings to be emitted. + + Args: + kind: The kind of warning to render. It can be one of the following: + + - 'skipped-choice': A choice field was skipped because it had no valid choices. + - 'non-serializable-default': A default value was skipped because it was not JSON-serializable. + detail: A string with additional details about the warning. + + Returns: + The formatted warning message, or `None` if no warning should be emitted. 
+ """ + if kind in self.ignored_warning_kinds: + return None + return f'{detail} [{kind}]' + + def _build_definitions_remapping(self) -> _DefinitionsRemapping: + defs_to_json: dict[DefsRef, JsonRef] = {} + for defs_refs in self._prioritized_defsref_choices.values(): + for defs_ref in defs_refs: + json_ref = JsonRef(self.ref_template.format(model=defs_ref)) + defs_to_json[defs_ref] = json_ref + + return _DefinitionsRemapping.from_prioritized_choices( + self._prioritized_defsref_choices, defs_to_json, self.definitions + ) + + def _garbage_collect_definitions(self, schema: JsonSchemaValue) -> None: + visited_defs_refs: set[DefsRef] = set() + unvisited_json_refs = _get_all_json_refs(schema) + while unvisited_json_refs: + next_json_ref = unvisited_json_refs.pop() + next_defs_ref = self.json_to_defs_refs[next_json_ref] + if next_defs_ref in visited_defs_refs: + continue + visited_defs_refs.add(next_defs_ref) + unvisited_json_refs.update(_get_all_json_refs(self.definitions[next_defs_ref])) + + self.definitions = {k: v for k, v in self.definitions.items() if k in visited_defs_refs} + + +# ##### Start JSON Schema Generation Functions ##### + + +def model_json_schema( + cls: type[BaseModel] | type[PydanticDataclass], + by_alias: bool = True, + ref_template: str = DEFAULT_REF_TEMPLATE, + schema_generator: type[GenerateJsonSchema] = GenerateJsonSchema, + mode: JsonSchemaMode = 'validation', +) -> dict[str, Any]: + """Utility function to generate a JSON Schema for a model. + + Args: + cls: The model class to generate a JSON Schema for. + by_alias: If `True` (the default), fields will be serialized according to their alias. + If `False`, fields will be serialized according to their attribute name. + ref_template: The template to use for generating JSON Schema references. + schema_generator: The class to use for generating the JSON Schema. + mode: The mode to use for generating the JSON Schema. It can be one of the following: + + - 'validation': Generate a JSON Schema for validating data. + - 'serialization': Generate a JSON Schema for serializing data. + + Returns: + The generated JSON Schema. + """ + from .main import BaseModel + + schema_generator_instance = schema_generator(by_alias=by_alias, ref_template=ref_template) + if isinstance(cls.__pydantic_validator__, _mock_val_ser.MockValSer): + cls.__pydantic_validator__.rebuild() + + if cls is BaseModel: + raise AttributeError('model_json_schema() must be called on a subclass of BaseModel, not BaseModel itself.') + assert '__pydantic_core_schema__' in cls.__dict__, 'this is a bug! please report it' + return schema_generator_instance.generate(cls.__pydantic_core_schema__, mode=mode) + + +def models_json_schema( + models: Sequence[tuple[type[BaseModel] | type[PydanticDataclass], JsonSchemaMode]], + *, + by_alias: bool = True, + title: str | None = None, + description: str | None = None, + ref_template: str = DEFAULT_REF_TEMPLATE, + schema_generator: type[GenerateJsonSchema] = GenerateJsonSchema, +) -> tuple[dict[tuple[type[BaseModel] | type[PydanticDataclass], JsonSchemaMode], JsonSchemaValue], JsonSchemaValue]: + """Utility function to generate a JSON Schema for multiple models. + + Args: + models: A sequence of tuples of the form (model, mode). + by_alias: Whether field aliases should be used as keys in the generated JSON Schema. + title: The title of the generated JSON Schema. + description: The description of the generated JSON Schema. + ref_template: The reference template to use for generating JSON Schema references. 
+ schema_generator: The schema generator to use for generating the JSON Schema. + + Returns: + A tuple where: + - The first element is a dictionary whose keys are tuples of JSON schema key type and JSON mode, and + whose values are the JSON schema corresponding to that pair of inputs. (These schemas may have + JsonRef references to definitions that are defined in the second returned element.) + - The second element is a JSON schema containing all definitions referenced in the first returned + element, along with the optional title and description keys. + """ + for cls, _ in models: + if isinstance(cls.__pydantic_validator__, _mock_val_ser.MockValSer): + cls.__pydantic_validator__.rebuild() + + instance = schema_generator(by_alias=by_alias, ref_template=ref_template) + inputs = [(m, mode, m.__pydantic_core_schema__) for m, mode in models] + json_schemas_map, definitions = instance.generate_definitions(inputs) + + json_schema: dict[str, Any] = {} + if definitions: + json_schema['$defs'] = definitions + if title: + json_schema['title'] = title + if description: + json_schema['description'] = description + + return json_schemas_map, json_schema + + +# ##### End JSON Schema Generation Functions ##### + + +_HashableJsonValue: TypeAlias = Union[ + int, float, str, bool, None, Tuple['_HashableJsonValue', ...], Tuple[Tuple[str, '_HashableJsonValue'], ...] +] + + +def _deduplicate_schemas(schemas: Iterable[JsonDict]) -> list[JsonDict]: + return list({_make_json_hashable(schema): schema for schema in schemas}.values()) + + +def _make_json_hashable(value: JsonValue) -> _HashableJsonValue: + if isinstance(value, dict): + return tuple(sorted((k, _make_json_hashable(v)) for k, v in value.items())) + elif isinstance(value, list): + return tuple(_make_json_hashable(v) for v in value) + else: + return value + + +def _sort_json_schema(value: JsonSchemaValue, parent_key: str | None = None) -> JsonSchemaValue: + if isinstance(value, dict): + sorted_dict: dict[str, JsonSchemaValue] = {} + keys = value.keys() + if (parent_key != 'properties') and (parent_key != 'default'): + keys = sorted(keys) + for key in keys: + sorted_dict[key] = _sort_json_schema(value[key], parent_key=key) + return sorted_dict + elif isinstance(value, list): + sorted_list: list[JsonSchemaValue] = [] + for item in value: # type: ignore + sorted_list.append(_sort_json_schema(item, parent_key)) + return sorted_list # type: ignore + else: + return value + + +@dataclasses.dataclass(**_internal_dataclass.slots_true) +class WithJsonSchema: + """Usage docs: https://docs.pydantic.dev/2.7/concepts/json_schema/#withjsonschema-annotation + + Add this as an annotation on a field to override the (base) JSON schema that would be generated for that field. + This provides a way to set a JSON schema for types that would otherwise raise errors when producing a JSON schema, + such as Callable, or types that have an is-instance core schema, without needing to go so far as creating a + custom subclass of pydantic.json_schema.GenerateJsonSchema. + Note that any _modifications_ to the schema that would normally be made (such as setting the title for model fields) + will still be performed. + + If `mode` is set this will only apply to that schema generation mode, allowing you + to set different json schemas for validation and serialization. 
+ """ + + json_schema: JsonSchemaValue | None + mode: Literal['validation', 'serialization'] | None = None + + def __get_pydantic_json_schema__( + self, core_schema: core_schema.CoreSchema, handler: GetJsonSchemaHandler + ) -> JsonSchemaValue: + mode = self.mode or handler.mode + if mode != handler.mode: + return handler(core_schema) + if self.json_schema is None: + # This exception is handled in pydantic.json_schema.GenerateJsonSchema._named_required_fields_schema + raise PydanticOmit + else: + return self.json_schema + + def __hash__(self) -> int: + return hash(type(self.mode)) + + +@dataclasses.dataclass(**_internal_dataclass.slots_true) +class Examples: + """Add examples to a JSON schema. + + Examples should be a map of example names (strings) + to example values (any valid JSON). + + If `mode` is set this will only apply to that schema generation mode, + allowing you to add different examples for validation and serialization. + """ + + examples: dict[str, Any] + mode: Literal['validation', 'serialization'] | None = None + + def __get_pydantic_json_schema__( + self, core_schema: core_schema.CoreSchema, handler: GetJsonSchemaHandler + ) -> JsonSchemaValue: + mode = self.mode or handler.mode + json_schema = handler(core_schema) + if mode != handler.mode: + return json_schema + examples = json_schema.get('examples', {}) + examples.update(to_jsonable_python(self.examples)) + json_schema['examples'] = examples + return json_schema + + def __hash__(self) -> int: + return hash(type(self.mode)) + + +def _get_all_json_refs(item: Any) -> set[JsonRef]: + """Get all the definitions references from a JSON schema.""" + refs: set[JsonRef] = set() + if isinstance(item, dict): + for key, value in item.items(): + if key == '$ref' and isinstance(value, str): + # the isinstance check ensures that '$ref' isn't the name of a property, etc. + refs.add(JsonRef(value)) + elif isinstance(value, dict): + refs.update(_get_all_json_refs(value)) + elif isinstance(value, list): + for item in value: + refs.update(_get_all_json_refs(item)) + elif isinstance(item, list): + for item in item: + refs.update(_get_all_json_refs(item)) + return refs + + +AnyType = TypeVar('AnyType') + +if TYPE_CHECKING: + SkipJsonSchema = Annotated[AnyType, ...] +else: + + @dataclasses.dataclass(**_internal_dataclass.slots_true) + class SkipJsonSchema: + """Usage docs: https://docs.pydantic.dev/2.7/concepts/json_schema/#skipjsonschema-annotation + + Add this as an annotation on a field to skip generating a JSON schema for that field. + + Example: + ```py + from typing import Union + + from pydantic import BaseModel + from pydantic.json_schema import SkipJsonSchema + + from pprint import pprint + + + class Model(BaseModel): + a: Union[int, None] = None # (1)! + b: Union[int, SkipJsonSchema[None]] = None # (2)! + c: SkipJsonSchema[Union[int, None]] = None # (3)! + + + pprint(Model.model_json_schema()) + ''' + { + 'properties': { + 'a': { + 'anyOf': [ + {'type': 'integer'}, + {'type': 'null'} + ], + 'default': None, + 'title': 'A' + }, + 'b': { + 'default': None, + 'title': 'B', + 'type': 'integer' + } + }, + 'title': 'Model', + 'type': 'object' + } + ''' + ``` + + 1. The integer and null types are both included in the schema for `a`. + 2. The integer type is the only type included in the schema for `b`. + 3. The entirety of the `c` field is omitted from the schema. 
+ """ + + def __class_getitem__(cls, item: AnyType) -> AnyType: + return Annotated[item, cls()] + + def __get_pydantic_json_schema__( + self, core_schema: CoreSchema, handler: GetJsonSchemaHandler + ) -> JsonSchemaValue: + raise PydanticOmit + + def __hash__(self) -> int: + return hash(type(self)) + + +def _get_typed_dict_cls(schema: core_schema.TypedDictSchema) -> type[Any] | None: + metadata = _core_metadata.CoreMetadataHandler(schema).metadata + cls = metadata.get('pydantic_typed_dict_cls') + return cls + + +def _get_typed_dict_config(cls: type[Any] | None) -> ConfigDict: + if cls is not None: + try: + return _decorators.get_attribute_from_bases(cls, '__pydantic_config__') + except AttributeError: + pass + return {} diff --git a/venv/lib/python3.10/site-packages/pydantic/main.py b/venv/lib/python3.10/site-packages/pydantic/main.py new file mode 100644 index 0000000000000000000000000000000000000000..5d87f7c917a3835fbc22d9793252a120d1bde2a0 --- /dev/null +++ b/venv/lib/python3.10/site-packages/pydantic/main.py @@ -0,0 +1,1560 @@ +"""Logic for creating models.""" +from __future__ import annotations as _annotations + +import operator +import sys +import types +import typing +import warnings +from copy import copy, deepcopy +from typing import Any, ClassVar, Dict, Generator, Literal, Set, Tuple, TypeVar, Union + +import pydantic_core +import typing_extensions +from pydantic_core import PydanticUndefined +from typing_extensions import TypeAlias + +from ._internal import ( + _config, + _decorators, + _fields, + _forward_ref, + _generics, + _mock_val_ser, + _model_construction, + _repr, + _typing_extra, + _utils, +) +from ._migration import getattr_migration +from .aliases import AliasChoices, AliasPath +from .annotated_handlers import GetCoreSchemaHandler, GetJsonSchemaHandler +from .config import ConfigDict +from .errors import PydanticUndefinedAnnotation, PydanticUserError +from .json_schema import DEFAULT_REF_TEMPLATE, GenerateJsonSchema, JsonSchemaMode, JsonSchemaValue, model_json_schema +from .warnings import PydanticDeprecatedSince20 + +# Always define certain types that are needed to resolve method type hints/annotations +# (even when not type checking) via typing.get_type_hints. +Model = TypeVar('Model', bound='BaseModel') +TupleGenerator = Generator[Tuple[str, Any], None, None] +# should be `set[int] | set[str] | dict[int, IncEx] | dict[str, IncEx] | None`, but mypy can't cope +IncEx: TypeAlias = Union[Set[int], Set[str], Dict[int, Any], Dict[str, Any], None] + + +if typing.TYPE_CHECKING: + from inspect import Signature + from pathlib import Path + + from pydantic_core import CoreSchema, SchemaSerializer, SchemaValidator + from typing_extensions import Unpack + + from ._internal._utils import AbstractSetIntStr, MappingIntStrAny + from .deprecated.parse import Protocol as DeprecatedParseProtocol + from .fields import ComputedFieldInfo, FieldInfo, ModelPrivateAttr + from .fields import Field as _Field +else: + # See PyCharm issues https://youtrack.jetbrains.com/issue/PY-21915 + # and https://youtrack.jetbrains.com/issue/PY-51428 + DeprecationWarning = PydanticDeprecatedSince20 + +__all__ = 'BaseModel', 'create_model' + +_object_setattr = _model_construction.object_setattr + + +class BaseModel(metaclass=_model_construction.ModelMetaclass): + """Usage docs: https://docs.pydantic.dev/2.7/concepts/models/ + + A base class for creating Pydantic models. + + Attributes: + __class_vars__: The names of classvars defined on the model. 
+ __private_attributes__: Metadata about the private attributes of the model. + __signature__: The signature for instantiating the model. + + __pydantic_complete__: Whether model building is completed, or if there are still undefined fields. + __pydantic_core_schema__: The pydantic-core schema used to build the SchemaValidator and SchemaSerializer. + __pydantic_custom_init__: Whether the model has a custom `__init__` function. + __pydantic_decorators__: Metadata containing the decorators defined on the model. + This replaces `Model.__validators__` and `Model.__root_validators__` from Pydantic V1. + __pydantic_generic_metadata__: Metadata for generic models; contains data used for a similar purpose to + __args__, __origin__, __parameters__ in typing-module generics. May eventually be replaced by these. + __pydantic_parent_namespace__: Parent namespace of the model, used for automatic rebuilding of models. + __pydantic_post_init__: The name of the post-init method for the model, if defined. + __pydantic_root_model__: Whether the model is a `RootModel`. + __pydantic_serializer__: The pydantic-core SchemaSerializer used to dump instances of the model. + __pydantic_validator__: The pydantic-core SchemaValidator used to validate instances of the model. + + __pydantic_extra__: An instance attribute with the values of extra fields from validation when + `model_config['extra'] == 'allow'`. + __pydantic_fields_set__: An instance attribute with the names of fields explicitly set. + __pydantic_private__: Instance attribute with the values of private attributes set on the model instance. + """ + + if typing.TYPE_CHECKING: + # Here we provide annotations for the attributes of BaseModel. + # Many of these are populated by the metaclass, which is why this section is in a `TYPE_CHECKING` block. + # However, for the sake of easy review, we have included type annotations of all class and instance attributes + # of `BaseModel` here: + + # Class attributes + model_config: ClassVar[ConfigDict] + """ + Configuration for the model, should be a dictionary conforming to [`ConfigDict`][pydantic.config.ConfigDict]. + """ + + model_fields: ClassVar[dict[str, FieldInfo]] + """ + Metadata about the fields defined on the model, + mapping of field names to [`FieldInfo`][pydantic.fields.FieldInfo]. + + This replaces `Model.__fields__` from Pydantic V1. + """ + + model_computed_fields: ClassVar[dict[str, ComputedFieldInfo]] + """A dictionary of computed field names and their corresponding `ComputedFieldInfo` objects.""" + + __class_vars__: ClassVar[set[str]] + __private_attributes__: ClassVar[dict[str, ModelPrivateAttr]] + __signature__: ClassVar[Signature] + + __pydantic_complete__: ClassVar[bool] + __pydantic_core_schema__: ClassVar[CoreSchema] + __pydantic_custom_init__: ClassVar[bool] + __pydantic_decorators__: ClassVar[_decorators.DecoratorInfos] + __pydantic_generic_metadata__: ClassVar[_generics.PydanticGenericMetadata] + __pydantic_parent_namespace__: ClassVar[dict[str, Any] | None] + __pydantic_post_init__: ClassVar[None | Literal['model_post_init']] + __pydantic_root_model__: ClassVar[bool] + __pydantic_serializer__: ClassVar[SchemaSerializer] + __pydantic_validator__: ClassVar[SchemaValidator] + + # Instance attributes + # Note: we use the non-existent kwarg `init=False` in pydantic.fields.Field below so that @dataclass_transform + # doesn't think these are valid as keyword arguments to the class initializer. 
+ __pydantic_extra__: dict[str, Any] | None = _Field(init=False) # type: ignore + __pydantic_fields_set__: set[str] = _Field(init=False) # type: ignore + __pydantic_private__: dict[str, Any] | None = _Field(init=False) # type: ignore + + else: + # `model_fields` and `__pydantic_decorators__` must be set for + # pydantic._internal._generate_schema.GenerateSchema.model_schema to work for a plain BaseModel annotation + model_fields = {} + model_computed_fields = {} + + __pydantic_decorators__ = _decorators.DecoratorInfos() + __pydantic_parent_namespace__ = None + # Prevent `BaseModel` from being instantiated directly: + __pydantic_validator__ = _mock_val_ser.MockValSer( + 'Pydantic models should inherit from BaseModel, BaseModel cannot be instantiated directly', + val_or_ser='validator', + code='base-model-instantiated', + ) + __pydantic_serializer__ = _mock_val_ser.MockValSer( + 'Pydantic models should inherit from BaseModel, BaseModel cannot be instantiated directly', + val_or_ser='serializer', + code='base-model-instantiated', + ) + + __slots__ = '__dict__', '__pydantic_fields_set__', '__pydantic_extra__', '__pydantic_private__' + + model_config = ConfigDict() + __pydantic_complete__ = False + __pydantic_root_model__ = False + + def __init__(self, /, **data: Any) -> None: # type: ignore + """Create a new model by parsing and validating input data from keyword arguments. + + Raises [`ValidationError`][pydantic_core.ValidationError] if the input data cannot be + validated to form a valid model. + + `self` is explicitly positional-only to allow `self` as a field name. + """ + # `__tracebackhide__` tells pytest and some other tools to omit this function from tracebacks + __tracebackhide__ = True + self.__pydantic_validator__.validate_python(data, self_instance=self) + + # The following line sets a flag that we use to determine when `__init__` gets overridden by the user + __init__.__pydantic_base_init__ = True # pyright: ignore[reportFunctionMemberAccess] + + @property + def model_extra(self) -> dict[str, Any] | None: + """Get extra fields set during validation. + + Returns: + A dictionary of extra fields, or `None` if `config.extra` is not set to `"allow"`. + """ + return self.__pydantic_extra__ + + @property + def model_fields_set(self) -> set[str]: + """Returns the set of fields that have been explicitly set on this model instance. + + Returns: + A set of strings representing the fields that have been set, + i.e. that were not filled from defaults. + """ + return self.__pydantic_fields_set__ + + @classmethod + def model_construct(cls: type[Model], _fields_set: set[str] | None = None, **values: Any) -> Model: # noqa: C901 + """Creates a new instance of the `Model` class with validated data. + + Creates a new model setting `__dict__` and `__pydantic_fields_set__` from trusted or pre-validated data. + Default values are respected, but no other validation is performed. + + !!! note + `model_construct()` generally respects the `model_config.extra` setting on the provided model. + That is, if `model_config.extra == 'allow'`, then all extra passed values are added to the model instance's `__dict__` + and `__pydantic_extra__` fields. If `model_config.extra == 'ignore'` (the default), then all extra passed values are ignored. + Because no validation is performed with a call to `model_construct()`, having `model_config.extra == 'forbid'` does not result in + an error if extra values are passed, but they will be ignored. + + Args: + _fields_set: The set of field names accepted for the Model instance. 
+ values: Trusted or pre-validated data dictionary. + + Returns: + A new instance of the `Model` class with validated data. + """ + m = cls.__new__(cls) + fields_values: dict[str, Any] = {} + fields_set = set() + + for name, field in cls.model_fields.items(): + if field.alias is not None and field.alias in values: + fields_values[name] = values.pop(field.alias) + fields_set.add(name) + + if (name not in fields_set) and (field.validation_alias is not None): + validation_aliases: list[str | AliasPath] = ( + field.validation_alias.choices + if isinstance(field.validation_alias, AliasChoices) + else [field.validation_alias] + ) + + for alias in validation_aliases: + if isinstance(alias, str) and alias in values: + fields_values[name] = values.pop(alias) + fields_set.add(name) + break + elif isinstance(alias, AliasPath): + value = alias.search_dict_for_path(values) + if value is not PydanticUndefined: + fields_values[name] = value + fields_set.add(name) + break + + if name not in fields_set: + if name in values: + fields_values[name] = values.pop(name) + fields_set.add(name) + elif not field.is_required(): + fields_values[name] = field.get_default(call_default_factory=True) + if _fields_set is None: + _fields_set = fields_set + + _extra: dict[str, Any] | None = ( + {k: v for k, v in values.items()} if cls.model_config.get('extra') == 'allow' else None + ) + _object_setattr(m, '__dict__', fields_values) + _object_setattr(m, '__pydantic_fields_set__', _fields_set) + if not cls.__pydantic_root_model__: + _object_setattr(m, '__pydantic_extra__', _extra) + + if cls.__pydantic_post_init__: + m.model_post_init(None) + # update private attributes with values set + if hasattr(m, '__pydantic_private__') and m.__pydantic_private__ is not None: + for k, v in values.items(): + if k in m.__private_attributes__: + m.__pydantic_private__[k] = v + + elif not cls.__pydantic_root_model__: + # Note: if there are any private attributes, cls.__pydantic_post_init__ would exist + # Since it doesn't, that means that `__pydantic_private__` should be set to None + _object_setattr(m, '__pydantic_private__', None) + + return m + + def model_copy(self: Model, *, update: dict[str, Any] | None = None, deep: bool = False) -> Model: + """Usage docs: https://docs.pydantic.dev/2.7/concepts/serialization/#model_copy + + Returns a copy of the model. + + Args: + update: Values to change/add in the new model. Note: the data is not validated + before creating the new model. You should trust this data. + deep: Set to `True` to make a deep copy of the model. + + Returns: + New model instance. 
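As a concrete illustration of the `model_construct` contract documented above (defaults respected, no validation, field-set tracking), a minimal sketch with a hypothetical `User` model:

```py
from pydantic import BaseModel


class User(BaseModel):  # hypothetical example model
    id: int
    name: str = 'Jane Doe'


# No validation runs here, so the data must already be trusted.
user = User.model_construct(id=42)
print(user.name)              # 'Jane Doe' -- default applied
print(user.model_fields_set)  # {'id'} -- only explicitly provided fields
```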
+ """ + copied = self.__deepcopy__() if deep else self.__copy__() + if update: + if self.model_config.get('extra') == 'allow': + for k, v in update.items(): + if k in self.model_fields: + copied.__dict__[k] = v + else: + if copied.__pydantic_extra__ is None: + copied.__pydantic_extra__ = {} + copied.__pydantic_extra__[k] = v + else: + copied.__dict__.update(update) + copied.__pydantic_fields_set__.update(update.keys()) + return copied + + def model_dump( + self, + *, + mode: Literal['json', 'python'] | str = 'python', + include: IncEx = None, + exclude: IncEx = None, + context: dict[str, Any] | None = None, + by_alias: bool = False, + exclude_unset: bool = False, + exclude_defaults: bool = False, + exclude_none: bool = False, + round_trip: bool = False, + warnings: bool | Literal['none', 'warn', 'error'] = True, + serialize_as_any: bool = False, + ) -> dict[str, Any]: + """Usage docs: https://docs.pydantic.dev/2.7/concepts/serialization/#modelmodel_dump + + Generate a dictionary representation of the model, optionally specifying which fields to include or exclude. + + Args: + mode: The mode in which `to_python` should run. + If mode is 'json', the output will only contain JSON serializable types. + If mode is 'python', the output may contain non-JSON-serializable Python objects. + include: A set of fields to include in the output. + exclude: A set of fields to exclude from the output. + context: Additional context to pass to the serializer. + by_alias: Whether to use the field's alias in the dictionary key if defined. + exclude_unset: Whether to exclude fields that have not been explicitly set. + exclude_defaults: Whether to exclude fields that are set to their default value. + exclude_none: Whether to exclude fields that have a value of `None`. + round_trip: If True, dumped values should be valid as input for non-idempotent types such as Json[T]. + warnings: How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors, + "error" raises a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError]. + serialize_as_any: Whether to serialize fields with duck-typing serialization behavior. + + Returns: + A dictionary representation of the model. + """ + return self.__pydantic_serializer__.to_python( + self, + mode=mode, + by_alias=by_alias, + include=include, + exclude=exclude, + context=context, + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + exclude_none=exclude_none, + round_trip=round_trip, + warnings=warnings, + serialize_as_any=serialize_as_any, + ) + + def model_dump_json( + self, + *, + indent: int | None = None, + include: IncEx = None, + exclude: IncEx = None, + context: dict[str, Any] | None = None, + by_alias: bool = False, + exclude_unset: bool = False, + exclude_defaults: bool = False, + exclude_none: bool = False, + round_trip: bool = False, + warnings: bool | Literal['none', 'warn', 'error'] = True, + serialize_as_any: bool = False, + ) -> str: + """Usage docs: https://docs.pydantic.dev/2.7/concepts/serialization/#modelmodel_dump_json + + Generates a JSON representation of the model using Pydantic's `to_json` method. + + Args: + indent: Indentation to use in the JSON output. If None is passed, the output will be compact. + include: Field(s) to include in the JSON output. + exclude: Field(s) to exclude from the JSON output. + context: Additional context to pass to the serializer. + by_alias: Whether to serialize using field aliases. + exclude_unset: Whether to exclude fields that have not been explicitly set. 
+ exclude_defaults: Whether to exclude fields that are set to their default value. + exclude_none: Whether to exclude fields that have a value of `None`. + round_trip: If True, dumped values should be valid as input for non-idempotent types such as Json[T]. + warnings: How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors, + "error" raises a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError]. + serialize_as_any: Whether to serialize fields with duck-typing serialization behavior. + + Returns: + A JSON string representation of the model. + """ + return self.__pydantic_serializer__.to_json( + self, + indent=indent, + include=include, + exclude=exclude, + context=context, + by_alias=by_alias, + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + exclude_none=exclude_none, + round_trip=round_trip, + warnings=warnings, + serialize_as_any=serialize_as_any, + ).decode() + + @classmethod + def model_json_schema( + cls, + by_alias: bool = True, + ref_template: str = DEFAULT_REF_TEMPLATE, + schema_generator: type[GenerateJsonSchema] = GenerateJsonSchema, + mode: JsonSchemaMode = 'validation', + ) -> dict[str, Any]: + """Generates a JSON schema for a model class. + + Args: + by_alias: Whether to use attribute aliases or not. + ref_template: The reference template. + schema_generator: To override the logic used to generate the JSON schema, as a subclass of + `GenerateJsonSchema` with your desired modifications + mode: The mode in which to generate the schema. + + Returns: + The JSON schema for the given model class. + """ + return model_json_schema( + cls, by_alias=by_alias, ref_template=ref_template, schema_generator=schema_generator, mode=mode + ) + + @classmethod + def model_parametrized_name(cls, params: tuple[type[Any], ...]) -> str: + """Compute the class name for parametrizations of generic classes. + + This method can be overridden to achieve a custom naming scheme for generic BaseModels. + + Args: + params: Tuple of types of the class. Given a generic class + `Model` with 2 type variables and a concrete model `Model[str, int]`, + the value `(str, int)` would be passed to `params`. + + Returns: + String representing the new class where `params` are passed to `cls` as type variables. + + Raises: + TypeError: Raised when trying to generate concrete names for non-generic models. + """ + if not issubclass(cls, typing.Generic): + raise TypeError('Concrete names should only be generated for generic models.') + + # Any strings received should represent forward references, so we handle them specially below. + # If we eventually move toward wrapping them in a ForwardRef in __class_getitem__ in the future, + # we may be able to remove this special case. + param_names = [param if isinstance(param, str) else _repr.display_as_type(param) for param in params] + params_component = ', '.join(param_names) + return f'{cls.__name__}[{params_component}]' + + def model_post_init(self, __context: Any) -> None: + """Override this method to perform additional initialization after `__init__` and `model_construct`. + This is useful if you want to do some validation that requires the entire model to be initialized. + """ + pass + + @classmethod + def model_rebuild( + cls, + *, + force: bool = False, + raise_errors: bool = True, + _parent_namespace_depth: int = 2, + _types_namespace: dict[str, Any] | None = None, + ) -> bool | None: + """Try to rebuild the pydantic-core schema for the model. 
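The `model_parametrized_name` method above is easiest to see through generic model parametrization (handled by `__class_getitem__` further down in this file); the `Response` model here is hypothetical:

```py
from typing import Generic, TypeVar

from pydantic import BaseModel

T = TypeVar('T')


class Response(BaseModel, Generic[T]):  # hypothetical generic model
    data: T


IntResponse = Response[int]
print(IntResponse.__name__)        # 'Response[int]' -- from model_parametrized_name
print(IntResponse(data='3').data)  # 3 -- the parametrized field is validated as int
```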
+ + This may be necessary when one of the annotations is a ForwardRef which could not be resolved during + the initial attempt to build the schema, and automatic rebuilding fails. + + Args: + force: Whether to force the rebuilding of the model schema, defaults to `False`. + raise_errors: Whether to raise errors, defaults to `True`. + _parent_namespace_depth: The depth level of the parent namespace, defaults to 2. + _types_namespace: The types namespace, defaults to `None`. + + Returns: + Returns `None` if the schema is already "complete" and rebuilding was not required. + If rebuilding _was_ required, returns `True` if rebuilding was successful, otherwise `False`. + """ + if not force and cls.__pydantic_complete__: + return None + else: + if '__pydantic_core_schema__' in cls.__dict__: + delattr(cls, '__pydantic_core_schema__') # delete cached value to ensure full rebuild happens + if _types_namespace is not None: + types_namespace: dict[str, Any] | None = _types_namespace.copy() + else: + if _parent_namespace_depth > 0: + frame_parent_ns = _typing_extra.parent_frame_namespace(parent_depth=_parent_namespace_depth) or {} + cls_parent_ns = ( + _model_construction.unpack_lenient_weakvaluedict(cls.__pydantic_parent_namespace__) or {} + ) + types_namespace = {**cls_parent_ns, **frame_parent_ns} + cls.__pydantic_parent_namespace__ = _model_construction.build_lenient_weakvaluedict(types_namespace) + else: + types_namespace = _model_construction.unpack_lenient_weakvaluedict( + cls.__pydantic_parent_namespace__ + ) + + types_namespace = _typing_extra.get_cls_types_namespace(cls, types_namespace) + + # manually override defer_build so complete_model_class doesn't skip building the model again + config = {**cls.model_config, 'defer_build': False} + return _model_construction.complete_model_class( + cls, + cls.__name__, + _config.ConfigWrapper(config, check=False), + raise_errors=raise_errors, + types_namespace=types_namespace, + ) + + @classmethod + def model_validate( + cls: type[Model], + obj: Any, + *, + strict: bool | None = None, + from_attributes: bool | None = None, + context: dict[str, Any] | None = None, + ) -> Model: + """Validate a pydantic model instance. + + Args: + obj: The object to validate. + strict: Whether to enforce types strictly. + from_attributes: Whether to extract data from object attributes. + context: Additional context to pass to the validator. + + Raises: + ValidationError: If the object could not be validated. + + Returns: + The validated model instance. + """ + # `__tracebackhide__` tells pytest and some other tools to omit this function from tracebacks + __tracebackhide__ = True + return cls.__pydantic_validator__.validate_python( + obj, strict=strict, from_attributes=from_attributes, context=context + ) + + @classmethod + def model_validate_json( + cls: type[Model], + json_data: str | bytes | bytearray, + *, + strict: bool | None = None, + context: dict[str, Any] | None = None, + ) -> Model: + """Usage docs: https://docs.pydantic.dev/2.7/concepts/json/#json-parsing + + Validate the given JSON data against the Pydantic model. + + Args: + json_data: The JSON data to validate. + strict: Whether to enforce types strictly. + context: Extra variables to pass to the validator. + + Returns: + The validated Pydantic model. + + Raises: + ValueError: If `json_data` is not a JSON string. 
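To ground the validation classmethods documented above, a minimal sketch with a hypothetical `Item` model:

```py
from pydantic import BaseModel, ValidationError


class Item(BaseModel):  # hypothetical example model
    name: str
    price: float


# model_validate takes Python objects; model_validate_json parses raw JSON directly.
item = Item.model_validate({'name': 'pen', 'price': '1.5'})
item2 = Item.model_validate_json('{"name": "pen", "price": 1.5}')

try:
    Item.model_validate({'name': 'pen'})
except ValidationError as exc:
    print(exc.error_count())  # 1 -- 'price' is missing
```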
+ """ + # `__tracebackhide__` tells pytest and some other tools to omit this function from tracebacks + __tracebackhide__ = True + return cls.__pydantic_validator__.validate_json(json_data, strict=strict, context=context) + + @classmethod + def model_validate_strings( + cls: type[Model], + obj: Any, + *, + strict: bool | None = None, + context: dict[str, Any] | None = None, + ) -> Model: + """Validate the given object contains string data against the Pydantic model. + + Args: + obj: The object contains string data to validate. + strict: Whether to enforce types strictly. + context: Extra variables to pass to the validator. + + Returns: + The validated Pydantic model. + """ + # `__tracebackhide__` tells pytest and some other tools to omit this function from tracebacks + __tracebackhide__ = True + return cls.__pydantic_validator__.validate_strings(obj, strict=strict, context=context) + + @classmethod + def __get_pydantic_core_schema__(cls, source: type[BaseModel], handler: GetCoreSchemaHandler, /) -> CoreSchema: + """Hook into generating the model's CoreSchema. + + Args: + source: The class we are generating a schema for. + This will generally be the same as the `cls` argument if this is a classmethod. + handler: Call into Pydantic's internal JSON schema generation. + A callable that calls into Pydantic's internal CoreSchema generation logic. + + Returns: + A `pydantic-core` `CoreSchema`. + """ + # Only use the cached value from this _exact_ class; we don't want one from a parent class + # This is why we check `cls.__dict__` and don't use `cls.__pydantic_core_schema__` or similar. + if '__pydantic_core_schema__' in cls.__dict__: + # Due to the way generic classes are built, it's possible that an invalid schema may be temporarily + # set on generic classes. I think we could resolve this to ensure that we get proper schema caching + # for generics, but for simplicity for now, we just always rebuild if the class has a generic origin. + if not cls.__pydantic_generic_metadata__['origin']: + return cls.__pydantic_core_schema__ + + return handler(source) + + @classmethod + def __get_pydantic_json_schema__( + cls, + core_schema: CoreSchema, + handler: GetJsonSchemaHandler, + /, + ) -> JsonSchemaValue: + """Hook into generating the model's JSON schema. + + Args: + core_schema: A `pydantic-core` CoreSchema. + You can ignore this argument and call the handler with a new CoreSchema, + wrap this CoreSchema (`{'type': 'nullable', 'schema': current_schema}`), + or just call the handler with the original schema. + handler: Call into Pydantic's internal JSON schema generation. + This will raise a `pydantic.errors.PydanticInvalidForJsonSchema` if JSON schema + generation fails. + Since this gets called by `BaseModel.model_json_schema` you can override the + `schema_generator` argument to that function to change JSON schema generation globally + for a type. + + Returns: + A JSON schema, as a Python object. + """ + return handler(core_schema) + + @classmethod + def __pydantic_init_subclass__(cls, **kwargs: Any) -> None: + """This is intended to behave just like `__init_subclass__`, but is called by `ModelMetaclass` + only after the class is actually fully initialized. In particular, attributes like `model_fields` will + be present when this is called. 
+ + This is necessary because `__init_subclass__` will always be called by `type.__new__`, + and it would require a prohibitively large refactor to the `ModelMetaclass` to ensure that + `type.__new__` was called in such a manner that the class would already be sufficiently initialized. + + This will receive the same `kwargs` that would be passed to the standard `__init_subclass__`, namely, + any kwargs passed to the class definition that aren't used internally by pydantic. + + Args: + **kwargs: Any keyword arguments passed to the class definition that aren't used internally + by pydantic. + """ + pass + + def __class_getitem__( + cls, typevar_values: type[Any] | tuple[type[Any], ...] + ) -> type[BaseModel] | _forward_ref.PydanticRecursiveRef: + cached = _generics.get_cached_generic_type_early(cls, typevar_values) + if cached is not None: + return cached + + if cls is BaseModel: + raise TypeError('Type parameters should be placed on typing.Generic, not BaseModel') + if not hasattr(cls, '__parameters__'): + raise TypeError(f'{cls} cannot be parametrized because it does not inherit from typing.Generic') + if not cls.__pydantic_generic_metadata__['parameters'] and typing.Generic not in cls.__bases__: + raise TypeError(f'{cls} is not a generic class') + + if not isinstance(typevar_values, tuple): + typevar_values = (typevar_values,) + _generics.check_parameters_count(cls, typevar_values) + + # Build map from generic typevars to passed params + typevars_map: dict[_typing_extra.TypeVarType, type[Any]] = dict( + zip(cls.__pydantic_generic_metadata__['parameters'], typevar_values) + ) + + if _utils.all_identical(typevars_map.keys(), typevars_map.values()) and typevars_map: + submodel = cls # if arguments are equal to parameters it's the same object + _generics.set_cached_generic_type(cls, typevar_values, submodel) + else: + parent_args = cls.__pydantic_generic_metadata__['args'] + if not parent_args: + args = typevar_values + else: + args = tuple(_generics.replace_types(arg, typevars_map) for arg in parent_args) + + origin = cls.__pydantic_generic_metadata__['origin'] or cls + model_name = origin.model_parametrized_name(args) + params = tuple( + {param: None for param in _generics.iter_contained_typevars(typevars_map.values())} + ) # use dict as ordered set + + with _generics.generic_recursion_self_type(origin, args) as maybe_self_type: + if maybe_self_type is not None: + return maybe_self_type + + cached = _generics.get_cached_generic_type_late(cls, typevar_values, origin, args) + if cached is not None: + return cached + + # Attempt to rebuild the origin in case new types have been defined + try: + # depth 3 gets you above this __class_getitem__ call + origin.model_rebuild(_parent_namespace_depth=3) + except PydanticUndefinedAnnotation: + # It's okay if it fails, it just means there are still undefined types + # that could be evaluated later. 
+ # TODO: Make sure validation fails if there are still undefined types, perhaps using MockValidator + pass + + submodel = _generics.create_generic_submodel(model_name, origin, args, params) + + # Update cache + _generics.set_cached_generic_type(cls, typevar_values, submodel, origin, args) + + return submodel + + def __copy__(self: Model) -> Model: + """Returns a shallow copy of the model.""" + cls = type(self) + m = cls.__new__(cls) + _object_setattr(m, '__dict__', copy(self.__dict__)) + _object_setattr(m, '__pydantic_extra__', copy(self.__pydantic_extra__)) + _object_setattr(m, '__pydantic_fields_set__', copy(self.__pydantic_fields_set__)) + + if not hasattr(self, '__pydantic_private__') or self.__pydantic_private__ is None: + _object_setattr(m, '__pydantic_private__', None) + else: + _object_setattr( + m, + '__pydantic_private__', + {k: v for k, v in self.__pydantic_private__.items() if v is not PydanticUndefined}, + ) + + return m + + def __deepcopy__(self: Model, memo: dict[int, Any] | None = None) -> Model: + """Returns a deep copy of the model.""" + cls = type(self) + m = cls.__new__(cls) + _object_setattr(m, '__dict__', deepcopy(self.__dict__, memo=memo)) + _object_setattr(m, '__pydantic_extra__', deepcopy(self.__pydantic_extra__, memo=memo)) + # This next line doesn't need a deepcopy because __pydantic_fields_set__ is a set[str], + # and attempting a deepcopy would be marginally slower. + _object_setattr(m, '__pydantic_fields_set__', copy(self.__pydantic_fields_set__)) + + if not hasattr(self, '__pydantic_private__') or self.__pydantic_private__ is None: + _object_setattr(m, '__pydantic_private__', None) + else: + _object_setattr( + m, + '__pydantic_private__', + deepcopy({k: v for k, v in self.__pydantic_private__.items() if v is not PydanticUndefined}, memo=memo), + ) + + return m + + if not typing.TYPE_CHECKING: + # We put `__getattr__` in a non-TYPE_CHECKING block because otherwise, mypy allows arbitrary attribute access + # The same goes for __setattr__ and __delattr__, see: https://github.com/pydantic/pydantic/issues/8643 + + def __getattr__(self, item: str) -> Any: + private_attributes = object.__getattribute__(self, '__private_attributes__') + if item in private_attributes: + attribute = private_attributes[item] + if hasattr(attribute, '__get__'): + return attribute.__get__(self, type(self)) # type: ignore + + try: + # Note: self.__pydantic_private__ cannot be None if self.__private_attributes__ has items + return self.__pydantic_private__[item] # type: ignore + except KeyError as exc: + raise AttributeError(f'{type(self).__name__!r} object has no attribute {item!r}') from exc + else: + # `__pydantic_extra__` can fail to be set if the model is not yet fully initialized. + # See `BaseModel.__repr_args__` for more details + try: + pydantic_extra = object.__getattribute__(self, '__pydantic_extra__') + except AttributeError: + pydantic_extra = None + + if pydantic_extra: + try: + return pydantic_extra[item] + except KeyError as exc: + raise AttributeError(f'{type(self).__name__!r} object has no attribute {item!r}') from exc + else: + if hasattr(self.__class__, item): + return super().__getattribute__(item) # Raises AttributeError if appropriate + else: + # this is the current error + raise AttributeError(f'{type(self).__name__!r} object has no attribute {item!r}') + + def __setattr__(self, name: str, value: Any) -> None: + if name in self.__class_vars__: + raise AttributeError( + f'{name!r} is a ClassVar of `{self.__class__.__name__}` and cannot be set on an instance. 
' + f'If you want to set a value on the class, use `{self.__class__.__name__}.{name} = value`.' + ) + elif not _fields.is_valid_field_name(name): + if self.__pydantic_private__ is None or name not in self.__private_attributes__: + _object_setattr(self, name, value) + else: + attribute = self.__private_attributes__[name] + if hasattr(attribute, '__set__'): + attribute.__set__(self, value) # type: ignore + else: + self.__pydantic_private__[name] = value + return + + self._check_frozen(name, value) + + attr = getattr(self.__class__, name, None) + if isinstance(attr, property): + attr.__set__(self, value) + elif self.model_config.get('validate_assignment', None): + self.__pydantic_validator__.validate_assignment(self, name, value) + elif self.model_config.get('extra') != 'allow' and name not in self.model_fields: + # TODO - matching error + raise ValueError(f'"{self.__class__.__name__}" object has no field "{name}"') + elif self.model_config.get('extra') == 'allow' and name not in self.model_fields: + if self.model_extra and name in self.model_extra: + self.__pydantic_extra__[name] = value # type: ignore + else: + try: + getattr(self, name) + except AttributeError: + # attribute does not already exist on instance, so put it in extra + self.__pydantic_extra__[name] = value # type: ignore + else: + # attribute _does_ already exist on instance, and was not in extra, so update it + _object_setattr(self, name, value) + else: + self.__dict__[name] = value + self.__pydantic_fields_set__.add(name) + + def __delattr__(self, item: str) -> Any: + if item in self.__private_attributes__: + attribute = self.__private_attributes__[item] + if hasattr(attribute, '__delete__'): + attribute.__delete__(self) # type: ignore + return + + try: + # Note: self.__pydantic_private__ cannot be None if self.__private_attributes__ has items + del self.__pydantic_private__[item] # type: ignore + return + except KeyError as exc: + raise AttributeError(f'{type(self).__name__!r} object has no attribute {item!r}') from exc + + self._check_frozen(item, None) + + if item in self.model_fields: + object.__delattr__(self, item) + elif self.__pydantic_extra__ is not None and item in self.__pydantic_extra__: + del self.__pydantic_extra__[item] + else: + try: + object.__delattr__(self, item) + except AttributeError: + raise AttributeError(f'{type(self).__name__!r} object has no attribute {item!r}') + + def _check_frozen(self, name: str, value: Any) -> None: + if self.model_config.get('frozen', None): + typ = 'frozen_instance' + elif getattr(self.model_fields.get(name), 'frozen', False): + typ = 'frozen_field' + else: + return + error: pydantic_core.InitErrorDetails = { + 'type': typ, + 'loc': (name,), + 'input': value, + } + raise pydantic_core.ValidationError.from_exception_data(self.__class__.__name__, [error]) + + def __getstate__(self) -> dict[Any, Any]: + private = self.__pydantic_private__ + if private: + private = {k: v for k, v in private.items() if v is not PydanticUndefined} + return { + '__dict__': self.__dict__, + '__pydantic_extra__': self.__pydantic_extra__, + '__pydantic_fields_set__': self.__pydantic_fields_set__, + '__pydantic_private__': private, + } + + def __setstate__(self, state: dict[Any, Any]) -> None: + _object_setattr(self, '__pydantic_fields_set__', state['__pydantic_fields_set__']) + _object_setattr(self, '__pydantic_extra__', state['__pydantic_extra__']) + _object_setattr(self, '__pydantic_private__', state['__pydantic_private__']) + _object_setattr(self, '__dict__', state['__dict__']) + + if not 
typing.TYPE_CHECKING: + + def __eq__(self, other: Any) -> bool: + if isinstance(other, BaseModel): + # When comparing instances of generic types for equality, as long as all field values are equal, + # only require their generic origin types to be equal, rather than exact type equality. + # This prevents headaches like MyGeneric(x=1) != MyGeneric[Any](x=1). + self_type = self.__pydantic_generic_metadata__['origin'] or self.__class__ + other_type = other.__pydantic_generic_metadata__['origin'] or other.__class__ + + # Perform common checks first + if not ( + self_type == other_type + and getattr(self, '__pydantic_private__', None) == getattr(other, '__pydantic_private__', None) + and self.__pydantic_extra__ == other.__pydantic_extra__ + ): + return False + + # We only want to compare pydantic fields but ignoring fields is costly. + # We'll perform a fast check first, and fallback only when needed + # See GH-7444 and GH-7825 for rationale and a performance benchmark + + # First, do the fast (and sometimes faulty) __dict__ comparison + if self.__dict__ == other.__dict__: + # If the check above passes, then pydantic fields are equal, we can return early + return True + + # We don't want to trigger unnecessary costly filtering of __dict__ on all unequal objects, so we return + # early if there are no keys to ignore (we would just return False later on anyway) + model_fields = type(self).model_fields.keys() + if self.__dict__.keys() <= model_fields and other.__dict__.keys() <= model_fields: + return False + + # If we reach here, there are non-pydantic-fields keys, mapped to unequal values, that we need to ignore + # Resort to costly filtering of the __dict__ objects + # We use operator.itemgetter because it is much faster than dict comprehensions + # NOTE: Contrary to standard python class and instances, when the Model class has a default value for an + # attribute and the model instance doesn't have a corresponding attribute, accessing the missing attribute + # raises an error in BaseModel.__getattr__ instead of returning the class attribute + # So we can use operator.itemgetter() instead of operator.attrgetter() + getter = operator.itemgetter(*model_fields) if model_fields else lambda _: _utils._SENTINEL + try: + return getter(self.__dict__) == getter(other.__dict__) + except KeyError: + # In rare cases (such as when using the deprecated BaseModel.copy() method), + # the __dict__ may not contain all model fields, which is how we can get here. + # getter(self.__dict__) is much faster than any 'safe' method that accounts + # for missing keys, and wrapping it in a `try` doesn't slow things down much + # in the common case. + self_fields_proxy = _utils.SafeGetItemProxy(self.__dict__) + other_fields_proxy = _utils.SafeGetItemProxy(other.__dict__) + return getter(self_fields_proxy) == getter(other_fields_proxy) + + # other instance is not a BaseModel + else: + return NotImplemented # delegate to the other item in the comparison + + if typing.TYPE_CHECKING: + # We put `__init_subclass__` in a TYPE_CHECKING block because, even though we want the type-checking benefits + # described in the signature of `__init_subclass__` below, we don't want to modify the default behavior of + # subclass initialization. + + def __init_subclass__(cls, **kwargs: Unpack[ConfigDict]): + """This signature is included purely to help type-checkers check arguments to class declaration, which + provides a way to conveniently set model_config key/value pairs. 
+ + ```py + from pydantic import BaseModel + + class MyModel(BaseModel, extra='allow'): + ... + ``` + + However, this may be deceiving, since the _actual_ calls to `__init_subclass__` will not receive any + of the config arguments, and will only receive any keyword arguments passed during class initialization + that are _not_ expected keys in ConfigDict. (This is due to the way `ModelMetaclass.__new__` works.) + + Args: + **kwargs: Keyword arguments passed to the class definition, which set model_config + + Note: + You may want to override `__pydantic_init_subclass__` instead, which behaves similarly but is called + *after* the class is fully initialized. + """ + + def __iter__(self) -> TupleGenerator: + """So `dict(model)` works.""" + yield from [(k, v) for (k, v) in self.__dict__.items() if not k.startswith('_')] + extra = self.__pydantic_extra__ + if extra: + yield from extra.items() + + def __repr__(self) -> str: + return f'{self.__repr_name__()}({self.__repr_str__(", ")})' + + def __repr_args__(self) -> _repr.ReprArgs: + for k, v in self.__dict__.items(): + field = self.model_fields.get(k) + if field and field.repr: + yield k, v + + # `__pydantic_extra__` can fail to be set if the model is not yet fully initialized. + # This can happen if a `ValidationError` is raised during initialization and the instance's + # repr is generated as part of the exception handling. Therefore, we use `getattr` here + # with a fallback, even though the type hints indicate the attribute will always be present. + try: + pydantic_extra = object.__getattribute__(self, '__pydantic_extra__') + except AttributeError: + pydantic_extra = None + + if pydantic_extra is not None: + yield from ((k, v) for k, v in pydantic_extra.items()) + yield from ((k, getattr(self, k)) for k, v in self.model_computed_fields.items() if v.repr) + + # take logic from `_repr.Representation` without the side effects of inheritance, see #5740 + __repr_name__ = _repr.Representation.__repr_name__ + __repr_str__ = _repr.Representation.__repr_str__ + __pretty__ = _repr.Representation.__pretty__ + __rich_repr__ = _repr.Representation.__rich_repr__ + + def __str__(self) -> str: + return self.__repr_str__(' ') + + # ##### Deprecated methods from v1 ##### + @property + @typing_extensions.deprecated( + 'The `__fields__` attribute is deprecated, use `model_fields` instead.', category=None + ) + def __fields__(self) -> dict[str, FieldInfo]: + warnings.warn( + 'The `__fields__` attribute is deprecated, use `model_fields` instead.', category=PydanticDeprecatedSince20 + ) + return self.model_fields + + @property + @typing_extensions.deprecated( + 'The `__fields_set__` attribute is deprecated, use `model_fields_set` instead.', + category=None, + ) + def __fields_set__(self) -> set[str]: + warnings.warn( + 'The `__fields_set__` attribute is deprecated, use `model_fields_set` instead.', + category=PydanticDeprecatedSince20, + ) + return self.__pydantic_fields_set__ + + @typing_extensions.deprecated('The `dict` method is deprecated; use `model_dump` instead.', category=None) + def dict( # noqa: D102 + self, + *, + include: IncEx = None, + exclude: IncEx = None, + by_alias: bool = False, + exclude_unset: bool = False, + exclude_defaults: bool = False, + exclude_none: bool = False, + ) -> typing.Dict[str, Any]: # noqa UP006 + warnings.warn('The `dict` method is deprecated; use `model_dump` instead.', category=PydanticDeprecatedSince20) + return self.model_dump( + include=include, + exclude=exclude, + by_alias=by_alias, + exclude_unset=exclude_unset, + 
exclude_defaults=exclude_defaults, + exclude_none=exclude_none, + ) + + @typing_extensions.deprecated('The `json` method is deprecated; use `model_dump_json` instead.', category=None) + def json( # noqa: D102 + self, + *, + include: IncEx = None, + exclude: IncEx = None, + by_alias: bool = False, + exclude_unset: bool = False, + exclude_defaults: bool = False, + exclude_none: bool = False, + encoder: typing.Callable[[Any], Any] | None = PydanticUndefined, # type: ignore[assignment] + models_as_dict: bool = PydanticUndefined, # type: ignore[assignment] + **dumps_kwargs: Any, + ) -> str: + warnings.warn( + 'The `json` method is deprecated; use `model_dump_json` instead.', category=PydanticDeprecatedSince20 + ) + if encoder is not PydanticUndefined: + raise TypeError('The `encoder` argument is no longer supported; use field serializers instead.') + if models_as_dict is not PydanticUndefined: + raise TypeError('The `models_as_dict` argument is no longer supported; use a model serializer instead.') + if dumps_kwargs: + raise TypeError('`dumps_kwargs` keyword arguments are no longer supported.') + return self.model_dump_json( + include=include, + exclude=exclude, + by_alias=by_alias, + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + exclude_none=exclude_none, + ) + + @classmethod + @typing_extensions.deprecated('The `parse_obj` method is deprecated; use `model_validate` instead.', category=None) + def parse_obj(cls: type[Model], obj: Any) -> Model: # noqa: D102 + warnings.warn( + 'The `parse_obj` method is deprecated; use `model_validate` instead.', category=PydanticDeprecatedSince20 + ) + return cls.model_validate(obj) + + @classmethod + @typing_extensions.deprecated( + 'The `parse_raw` method is deprecated; if your data is JSON use `model_validate_json`, ' + 'otherwise load the data then use `model_validate` instead.', + category=None, + ) + def parse_raw( # noqa: D102 + cls: type[Model], + b: str | bytes, + *, + content_type: str | None = None, + encoding: str = 'utf8', + proto: DeprecatedParseProtocol | None = None, + allow_pickle: bool = False, + ) -> Model: # pragma: no cover + warnings.warn( + 'The `parse_raw` method is deprecated; if your data is JSON use `model_validate_json`, ' + 'otherwise load the data then use `model_validate` instead.', + category=PydanticDeprecatedSince20, + ) + from .deprecated import parse + + try: + obj = parse.load_str_bytes( + b, + proto=proto, + content_type=content_type, + encoding=encoding, + allow_pickle=allow_pickle, + ) + except (ValueError, TypeError) as exc: + import json + + # try to match V1 + if isinstance(exc, UnicodeDecodeError): + type_str = 'value_error.unicodedecode' + elif isinstance(exc, json.JSONDecodeError): + type_str = 'value_error.jsondecode' + elif isinstance(exc, ValueError): + type_str = 'value_error' + else: + type_str = 'type_error' + + # ctx is missing here, but since we've added `input` to the error, we're not pretending it's the same + error: pydantic_core.InitErrorDetails = { + # The type: ignore on the next line is to ignore the requirement of LiteralString + 'type': pydantic_core.PydanticCustomError(type_str, str(exc)), # type: ignore + 'loc': ('__root__',), + 'input': b, + } + raise pydantic_core.ValidationError.from_exception_data(cls.__name__, [error]) + return cls.model_validate(obj) + + @classmethod + @typing_extensions.deprecated( + 'The `parse_file` method is deprecated; load the data from file, then if your data is JSON ' + 'use `model_validate_json`, otherwise `model_validate` instead.', + 
category=None, + ) + def parse_file( # noqa: D102 + cls: type[Model], + path: str | Path, + *, + content_type: str | None = None, + encoding: str = 'utf8', + proto: DeprecatedParseProtocol | None = None, + allow_pickle: bool = False, + ) -> Model: + warnings.warn( + 'The `parse_file` method is deprecated; load the data from file, then if your data is JSON ' + 'use `model_validate_json`, otherwise `model_validate` instead.', + category=PydanticDeprecatedSince20, + ) + from .deprecated import parse + + obj = parse.load_file( + path, + proto=proto, + content_type=content_type, + encoding=encoding, + allow_pickle=allow_pickle, + ) + return cls.parse_obj(obj) + + @classmethod + @typing_extensions.deprecated( + 'The `from_orm` method is deprecated; set ' + "`model_config['from_attributes']=True` and use `model_validate` instead.", + category=None, + ) + def from_orm(cls: type[Model], obj: Any) -> Model: # noqa: D102 + warnings.warn( + 'The `from_orm` method is deprecated; set ' + "`model_config['from_attributes']=True` and use `model_validate` instead.", + category=PydanticDeprecatedSince20, + ) + if not cls.model_config.get('from_attributes', None): + raise PydanticUserError( + 'You must set the config attribute `from_attributes=True` to use from_orm', code=None + ) + return cls.model_validate(obj) + + @classmethod + @typing_extensions.deprecated('The `construct` method is deprecated; use `model_construct` instead.', category=None) + def construct(cls: type[Model], _fields_set: set[str] | None = None, **values: Any) -> Model: # noqa: D102 + warnings.warn( + 'The `construct` method is deprecated; use `model_construct` instead.', category=PydanticDeprecatedSince20 + ) + return cls.model_construct(_fields_set=_fields_set, **values) + + @typing_extensions.deprecated( + 'The `copy` method is deprecated; use `model_copy` instead. ' + 'See the docstring of `BaseModel.copy` for details about how to handle `include` and `exclude`.', + category=None, + ) + def copy( + self: Model, + *, + include: AbstractSetIntStr | MappingIntStrAny | None = None, + exclude: AbstractSetIntStr | MappingIntStrAny | None = None, + update: typing.Dict[str, Any] | None = None, # noqa UP006 + deep: bool = False, + ) -> Model: # pragma: no cover + """Returns a copy of the model. + + !!! warning "Deprecated" + This method is now deprecated; use `model_copy` instead. + + If you need `include` or `exclude`, use: + + ```py + data = self.model_dump(include=include, exclude=exclude, round_trip=True) + data = {**data, **(update or {})} + copied = self.model_validate(data) + ``` + + Args: + include: Optional set or mapping specifying which fields to include in the copied model. + exclude: Optional set or mapping specifying which fields to exclude in the copied model. + update: Optional dictionary of field-value pairs to override field values in the copied model. + deep: If True, the values of fields that are Pydantic models will be deep-copied. + + Returns: + A copy of the model with included, excluded and updated fields as specified. + """ + warnings.warn( + 'The `copy` method is deprecated; use `model_copy` instead. 
' + 'See the docstring of `BaseModel.copy` for details about how to handle `include` and `exclude`.', + category=PydanticDeprecatedSince20, + ) + from .deprecated import copy_internals + + values = dict( + copy_internals._iter( + self, to_dict=False, by_alias=False, include=include, exclude=exclude, exclude_unset=False + ), + **(update or {}), + ) + if self.__pydantic_private__ is None: + private = None + else: + private = {k: v for k, v in self.__pydantic_private__.items() if v is not PydanticUndefined} + + if self.__pydantic_extra__ is None: + extra: dict[str, Any] | None = None + else: + extra = self.__pydantic_extra__.copy() + for k in list(self.__pydantic_extra__): + if k not in values: # k was in the exclude + extra.pop(k) + for k in list(values): + if k in self.__pydantic_extra__: # k must have come from extra + extra[k] = values.pop(k) + + # new `__pydantic_fields_set__` can have unset optional fields with a set value in `update` kwarg + if update: + fields_set = self.__pydantic_fields_set__ | update.keys() + else: + fields_set = set(self.__pydantic_fields_set__) + + # removing excluded fields from `__pydantic_fields_set__` + if exclude: + fields_set -= set(exclude) + + return copy_internals._copy_and_set_values(self, values, fields_set, extra, private, deep=deep) + + @classmethod + @typing_extensions.deprecated('The `schema` method is deprecated; use `model_json_schema` instead.', category=None) + def schema( # noqa: D102 + cls, by_alias: bool = True, ref_template: str = DEFAULT_REF_TEMPLATE + ) -> typing.Dict[str, Any]: # noqa UP006 + warnings.warn( + 'The `schema` method is deprecated; use `model_json_schema` instead.', category=PydanticDeprecatedSince20 + ) + return cls.model_json_schema(by_alias=by_alias, ref_template=ref_template) + + @classmethod + @typing_extensions.deprecated( + 'The `schema_json` method is deprecated; use `model_json_schema` and json.dumps instead.', + category=None, + ) + def schema_json( # noqa: D102 + cls, *, by_alias: bool = True, ref_template: str = DEFAULT_REF_TEMPLATE, **dumps_kwargs: Any + ) -> str: # pragma: no cover + warnings.warn( + 'The `schema_json` method is deprecated; use `model_json_schema` and json.dumps instead.', + category=PydanticDeprecatedSince20, + ) + import json + + from .deprecated.json import pydantic_encoder + + return json.dumps( + cls.model_json_schema(by_alias=by_alias, ref_template=ref_template), + default=pydantic_encoder, + **dumps_kwargs, + ) + + @classmethod + @typing_extensions.deprecated('The `validate` method is deprecated; use `model_validate` instead.', category=None) + def validate(cls: type[Model], value: Any) -> Model: # noqa: D102 + warnings.warn( + 'The `validate` method is deprecated; use `model_validate` instead.', category=PydanticDeprecatedSince20 + ) + return cls.model_validate(value) + + @classmethod + @typing_extensions.deprecated( + 'The `update_forward_refs` method is deprecated; use `model_rebuild` instead.', + category=None, + ) + def update_forward_refs(cls, **localns: Any) -> None: # noqa: D102 + warnings.warn( + 'The `update_forward_refs` method is deprecated; use `model_rebuild` instead.', + category=PydanticDeprecatedSince20, + ) + if localns: # pragma: no cover + raise TypeError('`localns` arguments are not longer accepted.') + cls.model_rebuild(force=True) + + @typing_extensions.deprecated( + 'The private method `_iter` will be removed and should no longer be used.', category=None + ) + def _iter(self, *args: Any, **kwargs: Any) -> Any: + warnings.warn( + 'The private method `_iter` will be 
removed and should no longer be used.', + category=PydanticDeprecatedSince20, + ) + from .deprecated import copy_internals + + return copy_internals._iter(self, *args, **kwargs) + + @typing_extensions.deprecated( + 'The private method `_copy_and_set_values` will be removed and should no longer be used.', + category=None, + ) + def _copy_and_set_values(self, *args: Any, **kwargs: Any) -> Any: + warnings.warn( + 'The private method `_copy_and_set_values` will be removed and should no longer be used.', + category=PydanticDeprecatedSince20, + ) + from .deprecated import copy_internals + + return copy_internals._copy_and_set_values(self, *args, **kwargs) + + @classmethod + @typing_extensions.deprecated( + 'The private method `_get_value` will be removed and should no longer be used.', + category=None, + ) + def _get_value(cls, *args: Any, **kwargs: Any) -> Any: + warnings.warn( + 'The private method `_get_value` will be removed and should no longer be used.', + category=PydanticDeprecatedSince20, + ) + from .deprecated import copy_internals + + return copy_internals._get_value(cls, *args, **kwargs) + + @typing_extensions.deprecated( + 'The private method `_calculate_keys` will be removed and should no longer be used.', + category=None, + ) + def _calculate_keys(self, *args: Any, **kwargs: Any) -> Any: + warnings.warn( + 'The private method `_calculate_keys` will be removed and should no longer be used.', + category=PydanticDeprecatedSince20, + ) + from .deprecated import copy_internals + + return copy_internals._calculate_keys(self, *args, **kwargs) + + +@typing.overload +def create_model( + __model_name: str, + *, + __config__: ConfigDict | None = None, + __doc__: str | None = None, + __base__: None = None, + __module__: str = __name__, + __validators__: dict[str, classmethod] | None = None, + __cls_kwargs__: dict[str, Any] | None = None, + **field_definitions: Any, +) -> type[BaseModel]: + ... + + +@typing.overload +def create_model( + __model_name: str, + *, + __config__: ConfigDict | None = None, + __doc__: str | None = None, + __base__: type[Model] | tuple[type[Model], ...], + __module__: str = __name__, + __validators__: dict[str, classmethod] | None = None, + __cls_kwargs__: dict[str, Any] | None = None, + **field_definitions: Any, +) -> type[Model]: + ... + + +def create_model( # noqa: C901 + __model_name: str, + *, + __config__: ConfigDict | None = None, + __doc__: str | None = None, + __base__: type[Model] | tuple[type[Model], ...] | None = None, + __module__: str | None = None, + __validators__: dict[str, classmethod] | None = None, + __cls_kwargs__: dict[str, Any] | None = None, + __slots__: tuple[str, ...] | None = None, + **field_definitions: Any, +) -> type[Model]: + """Usage docs: https://docs.pydantic.dev/2.7/concepts/models/#dynamic-model-creation + + Dynamically creates and returns a new Pydantic model, in other words, `create_model` dynamically creates a + subclass of [`BaseModel`][pydantic.BaseModel]. + + Args: + __model_name: The name of the newly created model. + __config__: The configuration of the new model. + __doc__: The docstring of the new model. + __base__: The base class or classes for the new model. + __module__: The name of the module that the model belongs to; + if `None`, the value is taken from `sys._getframe(1)` + __validators__: A dictionary of methods that validate fields. + __cls_kwargs__: A dictionary of keyword arguments for class creation, such as `metaclass`. + __slots__: Deprecated. Should not be passed to `create_model`. 
+ **field_definitions: Attributes of the new model. They should be passed in the format: + `=(, )`, `=(, )`, or `typing.Annotated[, ]`. + Any additional metadata in `typing.Annotated[, , ...]` will be ignored. + + Returns: + The new [model][pydantic.BaseModel]. + + Raises: + PydanticUserError: If `__base__` and `__config__` are both passed. + """ + if __slots__ is not None: + # __slots__ will be ignored from here on + warnings.warn('__slots__ should not be passed to create_model', RuntimeWarning) + + if __base__ is not None: + if __config__ is not None: + raise PydanticUserError( + 'to avoid confusion `__config__` and `__base__` cannot be used together', + code='create-model-config-base', + ) + if not isinstance(__base__, tuple): + __base__ = (__base__,) + else: + __base__ = (typing.cast(typing.Type['Model'], BaseModel),) + + __cls_kwargs__ = __cls_kwargs__ or {} + + fields = {} + annotations = {} + + for f_name, f_def in field_definitions.items(): + if not _fields.is_valid_field_name(f_name): + warnings.warn(f'fields may not start with an underscore, ignoring "{f_name}"', RuntimeWarning) + if isinstance(f_def, tuple): + f_def = typing.cast('tuple[str, Any]', f_def) + try: + f_annotation, f_value = f_def + except ValueError as e: + raise PydanticUserError( + 'Field definitions should be a `(, )`.', + code='create-model-field-definitions', + ) from e + + elif _typing_extra.is_annotated(f_def): + (f_annotation, f_value, *_) = typing_extensions.get_args( + f_def + ) # first two input are expected from Annotated, refer to https://docs.python.org/3/library/typing.html#typing.Annotated + from .fields import FieldInfo + + if not isinstance(f_value, FieldInfo): + raise PydanticUserError( + 'Field definitions should be a Annotated[, ]', + code='create-model-field-definitions', + ) + + else: + f_annotation, f_value = None, f_def + + if f_annotation: + annotations[f_name] = f_annotation + fields[f_name] = f_value + + if __module__ is None: + f = sys._getframe(1) + __module__ = f.f_globals['__name__'] + + namespace: dict[str, Any] = {'__annotations__': annotations, '__module__': __module__} + if __doc__: + namespace.update({'__doc__': __doc__}) + if __validators__: + namespace.update(__validators__) + namespace.update(fields) + if __config__: + namespace['model_config'] = _config.ConfigWrapper(__config__).config_dict + resolved_bases = types.resolve_bases(__base__) + meta, ns, kwds = types.prepare_class(__model_name, resolved_bases, kwds=__cls_kwargs__) + if resolved_bases is not __base__: + ns['__orig_bases__'] = __base__ + namespace.update(ns) + + return meta( + __model_name, + resolved_bases, + namespace, + __pydantic_reset_parent_namespace__=False, + _create_model_module=__module__, + **kwds, + ) + + +__getattr__ = getattr_migration(__name__) diff --git a/venv/lib/python3.10/site-packages/pydantic/mypy.py b/venv/lib/python3.10/site-packages/pydantic/mypy.py new file mode 100644 index 0000000000000000000000000000000000000000..1a71bfe208be4d79194203ba56509e30fe739c97 --- /dev/null +++ b/venv/lib/python3.10/site-packages/pydantic/mypy.py @@ -0,0 +1,1284 @@ +"""This module includes classes and functions designed specifically for use with the mypy plugin.""" + +from __future__ import annotations + +import sys +from configparser import ConfigParser +from typing import Any, Callable, Iterator + +from mypy.errorcodes import ErrorCode +from mypy.expandtype import expand_type, expand_type_by_instance +from mypy.nodes import ( + ARG_NAMED, + ARG_NAMED_OPT, + ARG_OPT, + ARG_POS, + ARG_STAR2, + MDEF, + 
Argument, + AssignmentStmt, + Block, + CallExpr, + ClassDef, + Context, + Decorator, + DictExpr, + EllipsisExpr, + Expression, + FuncDef, + IfStmt, + JsonDict, + MemberExpr, + NameExpr, + PassStmt, + PlaceholderNode, + RefExpr, + Statement, + StrExpr, + SymbolTableNode, + TempNode, + TypeAlias, + TypeInfo, + Var, +) +from mypy.options import Options +from mypy.plugin import ( + CheckerPluginInterface, + ClassDefContext, + FunctionContext, + MethodContext, + Plugin, + ReportConfigContext, + SemanticAnalyzerPluginInterface, +) +from mypy.plugins import dataclasses +from mypy.plugins.common import ( + deserialize_and_fixup_type, +) +from mypy.semanal import set_callable_name +from mypy.server.trigger import make_wildcard_trigger +from mypy.state import state +from mypy.typeops import map_type_from_supertype +from mypy.types import ( + AnyType, + CallableType, + Instance, + NoneType, + Overloaded, + Type, + TypeOfAny, + TypeType, + TypeVarType, + UnionType, + get_proper_type, +) +from mypy.typevars import fill_typevars +from mypy.util import get_unique_redefinition_name +from mypy.version import __version__ as mypy_version + +from pydantic._internal import _fields +from pydantic.version import parse_mypy_version + +try: + from mypy.types import TypeVarDef # type: ignore[attr-defined] +except ImportError: # pragma: no cover + # Backward-compatible with TypeVarDef from Mypy 0.930. + from mypy.types import TypeVarType as TypeVarDef + +CONFIGFILE_KEY = 'pydantic-mypy' +METADATA_KEY = 'pydantic-mypy-metadata' +BASEMODEL_FULLNAME = 'pydantic.main.BaseModel' +BASESETTINGS_FULLNAME = 'pydantic_settings.main.BaseSettings' +ROOT_MODEL_FULLNAME = 'pydantic.root_model.RootModel' +MODEL_METACLASS_FULLNAME = 'pydantic._internal._model_construction.ModelMetaclass' +FIELD_FULLNAME = 'pydantic.fields.Field' +DATACLASS_FULLNAME = 'pydantic.dataclasses.dataclass' +MODEL_VALIDATOR_FULLNAME = 'pydantic.functional_validators.model_validator' +DECORATOR_FULLNAMES = { + 'pydantic.functional_validators.field_validator', + 'pydantic.functional_validators.model_validator', + 'pydantic.functional_serializers.serializer', + 'pydantic.functional_serializers.model_serializer', + 'pydantic.deprecated.class_validators.validator', + 'pydantic.deprecated.class_validators.root_validator', +} + + +MYPY_VERSION_TUPLE = parse_mypy_version(mypy_version) +BUILTINS_NAME = 'builtins' if MYPY_VERSION_TUPLE >= (0, 930) else '__builtins__' + +# Increment version if plugin changes and mypy caches should be invalidated +__version__ = 2 + + +def plugin(version: str) -> type[Plugin]: + """`version` is the mypy version string. + + We might want to use this to print a warning if the mypy version being used is + newer, or especially older, than we expect (or need). + + Args: + version: The mypy version string. + + Return: + The Pydantic mypy plugin type. 
+ """ + return PydanticPlugin + + +class PydanticPlugin(Plugin): + """The Pydantic mypy plugin.""" + + def __init__(self, options: Options) -> None: + self.plugin_config = PydanticPluginConfig(options) + self._plugin_data = self.plugin_config.to_data() + super().__init__(options) + + def get_base_class_hook(self, fullname: str) -> Callable[[ClassDefContext], bool] | None: + """Update Pydantic model class.""" + sym = self.lookup_fully_qualified(fullname) + if sym and isinstance(sym.node, TypeInfo): # pragma: no branch + # No branching may occur if the mypy cache has not been cleared + if any(base.fullname == BASEMODEL_FULLNAME for base in sym.node.mro): + return self._pydantic_model_class_maker_callback + return None + + def get_metaclass_hook(self, fullname: str) -> Callable[[ClassDefContext], None] | None: + """Update Pydantic `ModelMetaclass` definition.""" + if fullname == MODEL_METACLASS_FULLNAME: + return self._pydantic_model_metaclass_marker_callback + return None + + def get_function_hook(self, fullname: str) -> Callable[[FunctionContext], Type] | None: + """Adjust the return type of the `Field` function.""" + sym = self.lookup_fully_qualified(fullname) + if sym and sym.fullname == FIELD_FULLNAME: + return self._pydantic_field_callback + return None + + def get_method_hook(self, fullname: str) -> Callable[[MethodContext], Type] | None: + """Adjust return type of `from_orm` method call.""" + if fullname.endswith('.from_orm'): + return from_attributes_callback + return None + + def get_class_decorator_hook(self, fullname: str) -> Callable[[ClassDefContext], None] | None: + """Mark pydantic.dataclasses as dataclass. + + Mypy version 1.1.1 added support for `@dataclass_transform` decorator. + """ + if fullname == DATACLASS_FULLNAME and MYPY_VERSION_TUPLE < (1, 1): + return dataclasses.dataclass_class_maker_callback # type: ignore[return-value] + return None + + def report_config_data(self, ctx: ReportConfigContext) -> dict[str, Any]: + """Return all plugin config data. + + Used by mypy to determine if cache needs to be discarded. + """ + return self._plugin_data + + def _pydantic_model_class_maker_callback(self, ctx: ClassDefContext) -> bool: + transformer = PydanticModelTransformer(ctx.cls, ctx.reason, ctx.api, self.plugin_config) + return transformer.transform() + + def _pydantic_model_metaclass_marker_callback(self, ctx: ClassDefContext) -> None: + """Reset dataclass_transform_spec attribute of ModelMetaclass. + + Let the plugin handle it. This behavior can be disabled + if 'debug_dataclass_transform' is set to True', for testing purposes. + """ + if self.plugin_config.debug_dataclass_transform: + return + info_metaclass = ctx.cls.info.declared_metaclass + assert info_metaclass, "callback not passed from 'get_metaclass_hook'" + if getattr(info_metaclass.type, 'dataclass_transform_spec', None): + info_metaclass.type.dataclass_transform_spec = None + + def _pydantic_field_callback(self, ctx: FunctionContext) -> Type: + """Extract the type of the `default` argument from the Field function, and use it as the return type. + + In particular: + * Check whether the default and default_factory argument is specified. + * Output an error if both are specified. + * Retrieve the type of the argument which is specified, and use it as return type for the function. 
+ """ + default_any_type = ctx.default_return_type + + assert ctx.callee_arg_names[0] == 'default', '"default" is no longer first argument in Field()' + assert ctx.callee_arg_names[1] == 'default_factory', '"default_factory" is no longer second argument in Field()' + default_args = ctx.args[0] + default_factory_args = ctx.args[1] + + if default_args and default_factory_args: + error_default_and_default_factory_specified(ctx.api, ctx.context) + return default_any_type + + if default_args: + default_type = ctx.arg_types[0][0] + default_arg = default_args[0] + + # Fallback to default Any type if the field is required + if not isinstance(default_arg, EllipsisExpr): + return default_type + + elif default_factory_args: + default_factory_type = ctx.arg_types[1][0] + + # Functions which use `ParamSpec` can be overloaded, exposing the callable's types as a parameter + # Pydantic calls the default factory without any argument, so we retrieve the first item + if isinstance(default_factory_type, Overloaded): + default_factory_type = default_factory_type.items[0] + + if isinstance(default_factory_type, CallableType): + ret_type = default_factory_type.ret_type + # mypy doesn't think `ret_type` has `args`, you'd think mypy should know, + # add this check in case it varies by version + args = getattr(ret_type, 'args', None) + if args: + if all(isinstance(arg, TypeVarType) for arg in args): + # Looks like the default factory is a type like `list` or `dict`, replace all args with `Any` + ret_type.args = tuple(default_any_type for _ in args) # type: ignore[attr-defined] + return ret_type + + return default_any_type + + +class PydanticPluginConfig: + """A Pydantic mypy plugin config holder. + + Attributes: + init_forbid_extra: Whether to add a `**kwargs` at the end of the generated `__init__` signature. + init_typed: Whether to annotate fields in the generated `__init__`. + warn_required_dynamic_aliases: Whether to raise required dynamic aliases error. + debug_dataclass_transform: Whether to not reset `dataclass_transform_spec` attribute + of `ModelMetaclass` for testing purposes. 
+ """ + + __slots__ = ( + 'init_forbid_extra', + 'init_typed', + 'warn_required_dynamic_aliases', + 'debug_dataclass_transform', + ) + init_forbid_extra: bool + init_typed: bool + warn_required_dynamic_aliases: bool + debug_dataclass_transform: bool # undocumented + + def __init__(self, options: Options) -> None: + if options.config_file is None: # pragma: no cover + return + + toml_config = parse_toml(options.config_file) + if toml_config is not None: + config = toml_config.get('tool', {}).get('pydantic-mypy', {}) + for key in self.__slots__: + setting = config.get(key, False) + if not isinstance(setting, bool): + raise ValueError(f'Configuration value must be a boolean for key: {key}') + setattr(self, key, setting) + else: + plugin_config = ConfigParser() + plugin_config.read(options.config_file) + for key in self.__slots__: + setting = plugin_config.getboolean(CONFIGFILE_KEY, key, fallback=False) + setattr(self, key, setting) + + def to_data(self) -> dict[str, Any]: + """Returns a dict of config names to their values.""" + return {key: getattr(self, key) for key in self.__slots__} + + +def from_attributes_callback(ctx: MethodContext) -> Type: + """Raise an error if from_attributes is not enabled.""" + model_type: Instance + ctx_type = ctx.type + if isinstance(ctx_type, TypeType): + ctx_type = ctx_type.item + if isinstance(ctx_type, CallableType) and isinstance(ctx_type.ret_type, Instance): + model_type = ctx_type.ret_type # called on the class + elif isinstance(ctx_type, Instance): + model_type = ctx_type # called on an instance (unusual, but still valid) + else: # pragma: no cover + detail = f'ctx.type: {ctx_type} (of type {ctx_type.__class__.__name__})' + error_unexpected_behavior(detail, ctx.api, ctx.context) + return ctx.default_return_type + pydantic_metadata = model_type.type.metadata.get(METADATA_KEY) + if pydantic_metadata is None: + return ctx.default_return_type + from_attributes = pydantic_metadata.get('config', {}).get('from_attributes') + if from_attributes is not True: + error_from_attributes(model_type.type.name, ctx.api, ctx.context) + return ctx.default_return_type + + +class PydanticModelField: + """Based on mypy.plugins.dataclasses.DataclassAttribute.""" + + def __init__( + self, + name: str, + alias: str | None, + has_dynamic_alias: bool, + has_default: bool, + line: int, + column: int, + type: Type | None, + info: TypeInfo, + ): + self.name = name + self.alias = alias + self.has_dynamic_alias = has_dynamic_alias + self.has_default = has_default + self.line = line + self.column = column + self.type = type + self.info = info + + def to_argument( + self, + current_info: TypeInfo, + typed: bool, + force_optional: bool, + use_alias: bool, + api: SemanticAnalyzerPluginInterface, + ) -> Argument: + """Based on mypy.plugins.dataclasses.DataclassAttribute.to_argument.""" + variable = self.to_var(current_info, api, use_alias) + type_annotation = self.expand_type(current_info, api) if typed else AnyType(TypeOfAny.explicit) + return Argument( + variable=variable, + type_annotation=type_annotation, + initializer=None, + kind=ARG_NAMED_OPT if force_optional or self.has_default else ARG_NAMED, + ) + + def expand_type(self, current_info: TypeInfo, api: SemanticAnalyzerPluginInterface) -> Type | None: + """Based on mypy.plugins.dataclasses.DataclassAttribute.expand_type.""" + # The getattr in the next line is used to prevent errors in legacy versions of mypy without this attribute + if self.type is not None and getattr(self.info, 'self_type', None) is not None: + # In general, it is 
not safe to call `expand_type()` during semantic analyzis, + # however this plugin is called very late, so all types should be fully ready. + # Also, it is tricky to avoid eager expansion of Self types here (e.g. because + # we serialize attributes). + with state.strict_optional_set(api.options.strict_optional): + return expand_type(self.type, {self.info.self_type.id: fill_typevars(current_info)}) + return self.type + + def to_var(self, current_info: TypeInfo, api: SemanticAnalyzerPluginInterface, use_alias: bool) -> Var: + """Based on mypy.plugins.dataclasses.DataclassAttribute.to_var.""" + if use_alias and self.alias is not None: + name = self.alias + else: + name = self.name + + return Var(name, self.expand_type(current_info, api)) + + def serialize(self) -> JsonDict: + """Based on mypy.plugins.dataclasses.DataclassAttribute.serialize.""" + assert self.type + return { + 'name': self.name, + 'alias': self.alias, + 'has_dynamic_alias': self.has_dynamic_alias, + 'has_default': self.has_default, + 'line': self.line, + 'column': self.column, + 'type': self.type.serialize(), + } + + @classmethod + def deserialize(cls, info: TypeInfo, data: JsonDict, api: SemanticAnalyzerPluginInterface) -> PydanticModelField: + """Based on mypy.plugins.dataclasses.DataclassAttribute.deserialize.""" + data = data.copy() + typ = deserialize_and_fixup_type(data.pop('type'), api) + return cls(type=typ, info=info, **data) + + def expand_typevar_from_subtype(self, sub_type: TypeInfo, api: SemanticAnalyzerPluginInterface) -> None: + """Expands type vars in the context of a subtype when an attribute is inherited + from a generic super type. + """ + if self.type is not None: + with state.strict_optional_set(api.options.strict_optional): + self.type = map_type_from_supertype(self.type, sub_type, self.info) + + +class PydanticModelClassVar: + """Based on mypy.plugins.dataclasses.DataclassAttribute. + + ClassVars are ignored by subclasses. + + Attributes: + name: the ClassVar name + """ + + def __init__(self, name): + self.name = name + + @classmethod + def deserialize(cls, data: JsonDict) -> PydanticModelClassVar: + """Based on mypy.plugins.dataclasses.DataclassAttribute.deserialize.""" + data = data.copy() + return cls(**data) + + def serialize(self) -> JsonDict: + """Based on mypy.plugins.dataclasses.DataclassAttribute.serialize.""" + return { + 'name': self.name, + } + + +class PydanticModelTransformer: + """Transform the BaseModel subclass according to the plugin settings. + + Attributes: + tracked_config_fields: A set of field configs that the plugin has to track their value. + """ + + tracked_config_fields: set[str] = { + 'extra', + 'frozen', + 'from_attributes', + 'populate_by_name', + 'alias_generator', + } + + def __init__( + self, + cls: ClassDef, + reason: Expression | Statement, + api: SemanticAnalyzerPluginInterface, + plugin_config: PydanticPluginConfig, + ) -> None: + self._cls = cls + self._reason = reason + self._api = api + + self.plugin_config = plugin_config + + def transform(self) -> bool: + """Configures the BaseModel subclass according to the plugin settings. 
+ + In particular: + + * determines the model config and fields, + * adds a fields-aware signature for the initializer and construct methods + * freezes the class if frozen = True + * stores the fields, config, and if the class is settings in the mypy metadata for access by subclasses + """ + info = self._cls.info + is_root_model = any(ROOT_MODEL_FULLNAME in base.fullname for base in info.mro[:-1]) + config = self.collect_config() + fields, class_vars = self.collect_fields_and_class_vars(config, is_root_model) + if fields is None or class_vars is None: + # Some definitions are not ready. We need another pass. + return False + for field in fields: + if field.type is None: + return False + + is_settings = any(base.fullname == BASESETTINGS_FULLNAME for base in info.mro[:-1]) + self.add_initializer(fields, config, is_settings, is_root_model) + self.add_model_construct_method(fields, config, is_settings) + self.set_frozen(fields, self._api, frozen=config.frozen is True) + + self.adjust_decorator_signatures() + + info.metadata[METADATA_KEY] = { + 'fields': {field.name: field.serialize() for field in fields}, + 'class_vars': {class_var.name: class_var.serialize() for class_var in class_vars}, + 'config': config.get_values_dict(), + } + + return True + + def adjust_decorator_signatures(self) -> None: + """When we decorate a function `f` with `pydantic.validator(...)`, `pydantic.field_validator` + or `pydantic.serializer(...)`, mypy sees `f` as a regular method taking a `self` instance, + even though pydantic internally wraps `f` with `classmethod` if necessary. + + Teach mypy this by marking any function whose outermost decorator is a `validator()`, + `field_validator()` or `serializer()` call as a `classmethod`. + """ + for name, sym in self._cls.info.names.items(): + if isinstance(sym.node, Decorator): + first_dec = sym.node.original_decorators[0] + if ( + isinstance(first_dec, CallExpr) + and isinstance(first_dec.callee, NameExpr) + and first_dec.callee.fullname in DECORATOR_FULLNAMES + # @model_validator(mode="after") is an exception, it expects a regular method + and not ( + first_dec.callee.fullname == MODEL_VALIDATOR_FULLNAME + and any( + first_dec.arg_names[i] == 'mode' and isinstance(arg, StrExpr) and arg.value == 'after' + for i, arg in enumerate(first_dec.args) + ) + ) + ): + # TODO: Only do this if the first argument of the decorated function is `cls` + sym.node.func.is_class = True + + def collect_config(self) -> ModelConfigData: # noqa: C901 (ignore complexity) + """Collects the values of the config attributes that are used by the plugin, accounting for parent classes.""" + cls = self._cls + config = ModelConfigData() + + has_config_kwargs = False + has_config_from_namespace = False + + # Handle `class MyModel(BaseModel, =, ...):` + for name, expr in cls.keywords.items(): + config_data = self.get_config_update(name, expr) + if config_data: + has_config_kwargs = True + config.update(config_data) + + # Handle `model_config` + stmt: Statement | None = None + for stmt in cls.defs.body: + if not isinstance(stmt, (AssignmentStmt, ClassDef)): + continue + + if isinstance(stmt, AssignmentStmt): + lhs = stmt.lvalues[0] + if not isinstance(lhs, NameExpr) or lhs.name != 'model_config': + continue + + if isinstance(stmt.rvalue, CallExpr): # calls to `dict` or `ConfigDict` + for arg_name, arg in zip(stmt.rvalue.arg_names, stmt.rvalue.args): + if arg_name is None: + continue + config.update(self.get_config_update(arg_name, arg, lax_extra=True)) + elif isinstance(stmt.rvalue, DictExpr): # dict 
literals + for key_expr, value_expr in stmt.rvalue.items: + if not isinstance(key_expr, StrExpr): + continue + config.update(self.get_config_update(key_expr.value, value_expr)) + + elif isinstance(stmt, ClassDef): + if stmt.name != 'Config': # 'deprecated' Config-class + continue + for substmt in stmt.defs.body: + if not isinstance(substmt, AssignmentStmt): + continue + lhs = substmt.lvalues[0] + if not isinstance(lhs, NameExpr): + continue + config.update(self.get_config_update(lhs.name, substmt.rvalue)) + + if has_config_kwargs: + self._api.fail( + 'Specifying config in two places is ambiguous, use either Config attribute or class kwargs', + cls, + ) + break + + has_config_from_namespace = True + + if has_config_kwargs or has_config_from_namespace: + if ( + stmt + and config.has_alias_generator + and not config.populate_by_name + and self.plugin_config.warn_required_dynamic_aliases + ): + error_required_dynamic_aliases(self._api, stmt) + + for info in cls.info.mro[1:]: # 0 is the current class + if METADATA_KEY not in info.metadata: + continue + + # Each class depends on the set of fields in its ancestors + self._api.add_plugin_dependency(make_wildcard_trigger(info.fullname)) + for name, value in info.metadata[METADATA_KEY]['config'].items(): + config.setdefault(name, value) + return config + + def collect_fields_and_class_vars( + self, model_config: ModelConfigData, is_root_model: bool + ) -> tuple[list[PydanticModelField] | None, list[PydanticModelClassVar] | None]: + """Collects the fields for the model, accounting for parent classes.""" + cls = self._cls + + # First, collect fields and ClassVars belonging to any class in the MRO, ignoring duplicates. + # + # We iterate through the MRO in reverse because attrs defined in the parent must appear + # earlier in the attributes list than attrs defined in the child. See: + # https://docs.python.org/3/library/dataclasses.html#inheritance + # + # However, we also want fields defined in the subtype to override ones defined + # in the parent. We can implement this via a dict without disrupting the attr order + # because dicts preserve insertion order in Python 3.7+. + found_fields: dict[str, PydanticModelField] = {} + found_class_vars: dict[str, PydanticModelClassVar] = {} + for info in reversed(cls.info.mro[1:-1]): # 0 is the current class, -2 is BaseModel, -1 is object + # if BASEMODEL_METADATA_TAG_KEY in info.metadata and BASEMODEL_METADATA_KEY not in info.metadata: + # # We haven't processed the base class yet. Need another pass. + # return None, None + if METADATA_KEY not in info.metadata: + continue + + # Each class depends on the set of attributes in its dataclass ancestors. + self._api.add_plugin_dependency(make_wildcard_trigger(info.fullname)) + + for name, data in info.metadata[METADATA_KEY]['fields'].items(): + field = PydanticModelField.deserialize(info, data, self._api) + # (The following comment comes directly from the dataclasses plugin) + # TODO: We shouldn't be performing type operations during the main + # semantic analysis pass, since some TypeInfo attributes might + # still be in flux. This should be performed in a later phase. 
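                # An illustrative sketch (hypothetical model names) of why the typevar expansion
                # below matters:
                #
                #     from typing import Generic, TypeVar
                #     from pydantic import BaseModel
                #
                #     T = TypeVar('T')
                #
                #     class Box(BaseModel, Generic[T]):
                #         content: T
                #
                #     class IntBox(Box[int]):
                #         pass
                #
                # The `content` field deserialized from Box's metadata still refers to `T`;
                # expanding it in the context of IntBox maps it to `int`, so the synthesized
                # signatures are typed correctly for the subclass.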
+ field.expand_typevar_from_subtype(cls.info, self._api) + found_fields[name] = field + + sym_node = cls.info.names.get(name) + if sym_node and sym_node.node and not isinstance(sym_node.node, Var): + self._api.fail( + 'BaseModel field may only be overridden by another field', + sym_node.node, + ) + # Collect ClassVars + for name, data in info.metadata[METADATA_KEY]['class_vars'].items(): + found_class_vars[name] = PydanticModelClassVar.deserialize(data) + + # Second, collect fields and ClassVars belonging to the current class. + current_field_names: set[str] = set() + current_class_vars_names: set[str] = set() + for stmt in self._get_assignment_statements_from_block(cls.defs): + maybe_field = self.collect_field_or_class_var_from_stmt(stmt, model_config, found_class_vars) + if isinstance(maybe_field, PydanticModelField): + lhs = stmt.lvalues[0] + if is_root_model and lhs.name != 'root': + error_extra_fields_on_root_model(self._api, stmt) + else: + current_field_names.add(lhs.name) + found_fields[lhs.name] = maybe_field + elif isinstance(maybe_field, PydanticModelClassVar): + lhs = stmt.lvalues[0] + current_class_vars_names.add(lhs.name) + found_class_vars[lhs.name] = maybe_field + + return list(found_fields.values()), list(found_class_vars.values()) + + def _get_assignment_statements_from_if_statement(self, stmt: IfStmt) -> Iterator[AssignmentStmt]: + for body in stmt.body: + if not body.is_unreachable: + yield from self._get_assignment_statements_from_block(body) + if stmt.else_body is not None and not stmt.else_body.is_unreachable: + yield from self._get_assignment_statements_from_block(stmt.else_body) + + def _get_assignment_statements_from_block(self, block: Block) -> Iterator[AssignmentStmt]: + for stmt in block.body: + if isinstance(stmt, AssignmentStmt): + yield stmt + elif isinstance(stmt, IfStmt): + yield from self._get_assignment_statements_from_if_statement(stmt) + + def collect_field_or_class_var_from_stmt( # noqa C901 + self, stmt: AssignmentStmt, model_config: ModelConfigData, class_vars: dict[str, PydanticModelClassVar] + ) -> PydanticModelField | PydanticModelClassVar | None: + """Get pydantic model field from statement. + + Args: + stmt: The statement. + model_config: Configuration settings for the model. + class_vars: ClassVars already known to be defined on the model. + + Returns: + A pydantic model field if it could find the field in statement. Otherwise, `None`. 
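            For example (illustrative only): `x: int = 1` produces a field, `x: ClassVar[int] = 1`
            produces a class var, an unannotated `x = 1` triggers the untyped-field error, and a reused
            validator such as `check_x = field_validator('x')(check)` is skipped.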
+ """ + cls = self._cls + + lhs = stmt.lvalues[0] + if not isinstance(lhs, NameExpr) or not _fields.is_valid_field_name(lhs.name) or lhs.name == 'model_config': + return None + + if not stmt.new_syntax: + if ( + isinstance(stmt.rvalue, CallExpr) + and isinstance(stmt.rvalue.callee, CallExpr) + and isinstance(stmt.rvalue.callee.callee, NameExpr) + and stmt.rvalue.callee.callee.fullname in DECORATOR_FULLNAMES + ): + # This is a (possibly-reused) validator or serializer, not a field + # In particular, it looks something like: my_validator = validator('my_field')(f) + # Eventually, we may want to attempt to respect model_config['ignored_types'] + return None + + if lhs.name in class_vars: + # Class vars are not fields and are not required to be annotated + return None + + # The assignment does not have an annotation, and it's not anything else we recognize + error_untyped_fields(self._api, stmt) + return None + + lhs = stmt.lvalues[0] + if not isinstance(lhs, NameExpr): + return None + + if not _fields.is_valid_field_name(lhs.name) or lhs.name == 'model_config': + return None + + sym = cls.info.names.get(lhs.name) + if sym is None: # pragma: no cover + # This is likely due to a star import (see the dataclasses plugin for a more detailed explanation) + # This is the same logic used in the dataclasses plugin + return None + + node = sym.node + if isinstance(node, PlaceholderNode): # pragma: no cover + # See the PlaceholderNode docstring for more detail about how this can occur + # Basically, it is an edge case when dealing with complex import logic + + # The dataclasses plugin now asserts this cannot happen, but I'd rather not error if it does.. + return None + + if isinstance(node, TypeAlias): + self._api.fail( + 'Type aliases inside BaseModel definitions are not supported at runtime', + node, + ) + # Skip processing this node. This doesn't match the runtime behaviour, + # but the only alternative would be to modify the SymbolTable, + # and it's a little hairy to do that in a plugin. + return None + + if not isinstance(node, Var): # pragma: no cover + # Don't know if this edge case still happens with the `is_valid_field` check above + # but better safe than sorry + + # The dataclasses plugin now asserts this cannot happen, but I'd rather not error if it does.. + return None + + # x: ClassVar[int] is not a field + if node.is_classvar: + return PydanticModelClassVar(lhs.name) + + # x: InitVar[int] is not supported in BaseModel + node_type = get_proper_type(node.type) + if isinstance(node_type, Instance) and node_type.type.fullname == 'dataclasses.InitVar': + self._api.fail( + 'InitVar is not supported in BaseModel', + node, + ) + + has_default = self.get_has_default(stmt) + + if sym.type is None and node.is_final and node.is_inferred: + # This follows the logic from the dataclasses plugin. The following comment is taken verbatim: + # + # This is a special case, assignment like x: Final = 42 is classified + # annotated above, but mypy strips the `Final` turning it into x = 42. + # We do not support inferred types in dataclasses, so we can try inferring + # type for simple literals, and otherwise require an explicit type + # argument for Final[...]. + typ = self._api.analyze_simple_literal_type(stmt.rvalue, is_final=True) + if typ: + node.type = typ + else: + self._api.fail( + 'Need type argument for Final[...] 
with non-literal default in BaseModel', + stmt, + ) + node.type = AnyType(TypeOfAny.from_error) + + alias, has_dynamic_alias = self.get_alias_info(stmt) + if has_dynamic_alias and not model_config.populate_by_name and self.plugin_config.warn_required_dynamic_aliases: + error_required_dynamic_aliases(self._api, stmt) + + init_type = self._infer_dataclass_attr_init_type(sym, lhs.name, stmt) + return PydanticModelField( + name=lhs.name, + has_dynamic_alias=has_dynamic_alias, + has_default=has_default, + alias=alias, + line=stmt.line, + column=stmt.column, + type=init_type, + info=cls.info, + ) + + def _infer_dataclass_attr_init_type(self, sym: SymbolTableNode, name: str, context: Context) -> Type | None: + """Infer __init__ argument type for an attribute. + + In particular, possibly use the signature of __set__. + """ + default = sym.type + if sym.implicit: + return default + t = get_proper_type(sym.type) + + # Perform a simple-minded inference from the signature of __set__, if present. + # We can't use mypy.checkmember here, since this plugin runs before type checking. + # We only support some basic scanerios here, which is hopefully sufficient for + # the vast majority of use cases. + if not isinstance(t, Instance): + return default + setter = t.type.get('__set__') + if setter: + if isinstance(setter.node, FuncDef): + super_info = t.type.get_containing_type_info('__set__') + assert super_info + if setter.type: + setter_type = get_proper_type(map_type_from_supertype(setter.type, t.type, super_info)) + else: + return AnyType(TypeOfAny.unannotated) + if isinstance(setter_type, CallableType) and setter_type.arg_kinds == [ + ARG_POS, + ARG_POS, + ARG_POS, + ]: + return expand_type_by_instance(setter_type.arg_types[2], t) + else: + self._api.fail(f'Unsupported signature for "__set__" in "{t.type.name}"', context) + else: + self._api.fail(f'Unsupported "__set__" in "{t.type.name}"', context) + + return default + + def add_initializer( + self, fields: list[PydanticModelField], config: ModelConfigData, is_settings: bool, is_root_model: bool + ) -> None: + """Adds a fields-aware `__init__` method to the class. + + The added `__init__` will be annotated with types vs. all `Any` depending on the plugin settings. 
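        For a hypothetical `class User(BaseModel)` with fields `id: int` and `name: str = 'x'`, the
        generated method is roughly `def __init__(__pydantic_self__, *, id: int, name: str = ...) -> None`
        when `init_typed` is enabled (plus a trailing `**kwargs: Any` unless extra keyword arguments are
        forbidden); with `init_typed` off, the same arguments are annotated as `Any`.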
+ """ + if '__init__' in self._cls.info.names and not self._cls.info.names['__init__'].plugin_generated: + return # Don't generate an __init__ if one already exists + + typed = self.plugin_config.init_typed + use_alias = config.populate_by_name is not True + requires_dynamic_aliases = bool(config.has_alias_generator and not config.populate_by_name) + args = self.get_field_arguments( + fields, + typed=typed, + requires_dynamic_aliases=requires_dynamic_aliases, + use_alias=use_alias, + is_settings=is_settings, + ) + + if is_root_model and MYPY_VERSION_TUPLE <= (1, 0, 1): + # convert root argument to positional argument + # This is needed because mypy support for `dataclass_transform` isn't complete on 1.0.1 + args[0].kind = ARG_POS if args[0].kind == ARG_NAMED else ARG_OPT + + if is_settings: + base_settings_node = self._api.lookup_fully_qualified(BASESETTINGS_FULLNAME).node + if '__init__' in base_settings_node.names: + base_settings_init_node = base_settings_node.names['__init__'].node + if base_settings_init_node is not None and base_settings_init_node.type is not None: + func_type = base_settings_init_node.type + for arg_idx, arg_name in enumerate(func_type.arg_names): + if arg_name.startswith('__') or not arg_name.startswith('_'): + continue + analyzed_variable_type = self._api.anal_type(func_type.arg_types[arg_idx]) + variable = Var(arg_name, analyzed_variable_type) + args.append(Argument(variable, analyzed_variable_type, None, ARG_OPT)) + + if not self.should_init_forbid_extra(fields, config): + var = Var('kwargs') + args.append(Argument(var, AnyType(TypeOfAny.explicit), None, ARG_STAR2)) + + add_method(self._api, self._cls, '__init__', args=args, return_type=NoneType()) + + def add_model_construct_method( + self, fields: list[PydanticModelField], config: ModelConfigData, is_settings: bool + ) -> None: + """Adds a fully typed `model_construct` classmethod to the class. + + Similar to the fields-aware __init__ method, but always uses the field names (not aliases), + and does not treat settings fields as optional. + """ + set_str = self._api.named_type(f'{BUILTINS_NAME}.set', [self._api.named_type(f'{BUILTINS_NAME}.str')]) + optional_set_str = UnionType([set_str, NoneType()]) + fields_set_argument = Argument(Var('_fields_set', optional_set_str), optional_set_str, None, ARG_OPT) + with state.strict_optional_set(self._api.options.strict_optional): + args = self.get_field_arguments( + fields, typed=True, requires_dynamic_aliases=False, use_alias=False, is_settings=is_settings + ) + if not self.should_init_forbid_extra(fields, config): + var = Var('kwargs') + args.append(Argument(var, AnyType(TypeOfAny.explicit), None, ARG_STAR2)) + + args = [fields_set_argument] + args + + add_method( + self._api, + self._cls, + 'model_construct', + args=args, + return_type=fill_typevars(self._cls.info), + is_classmethod=True, + ) + + def set_frozen(self, fields: list[PydanticModelField], api: SemanticAnalyzerPluginInterface, frozen: bool) -> None: + """Marks all fields as properties so that attempts to set them trigger mypy errors. + + This is the same approach used by the attrs and dataclasses plugins. 
+ """ + info = self._cls.info + for field in fields: + sym_node = info.names.get(field.name) + if sym_node is not None: + var = sym_node.node + if isinstance(var, Var): + var.is_property = frozen + elif isinstance(var, PlaceholderNode) and not self._api.final_iteration: + # See https://github.com/pydantic/pydantic/issues/5191 to hit this branch for test coverage + self._api.defer() + else: # pragma: no cover + # I don't know whether it's possible to hit this branch, but I've added it for safety + try: + var_str = str(var) + except TypeError: + # This happens for PlaceholderNode; perhaps it will happen for other types in the future.. + var_str = repr(var) + detail = f'sym_node.node: {var_str} (of type {var.__class__})' + error_unexpected_behavior(detail, self._api, self._cls) + else: + var = field.to_var(info, api, use_alias=False) + var.info = info + var.is_property = frozen + var._fullname = info.fullname + '.' + var.name + info.names[var.name] = SymbolTableNode(MDEF, var) + + def get_config_update(self, name: str, arg: Expression, lax_extra: bool = False) -> ModelConfigData | None: + """Determines the config update due to a single kwarg in the ConfigDict definition. + + Warns if a tracked config attribute is set to a value the plugin doesn't know how to interpret (e.g., an int) + """ + if name not in self.tracked_config_fields: + return None + if name == 'extra': + if isinstance(arg, StrExpr): + forbid_extra = arg.value == 'forbid' + elif isinstance(arg, MemberExpr): + forbid_extra = arg.name == 'forbid' + else: + if not lax_extra: + # Only emit an error for other types of `arg` (e.g., `NameExpr`, `ConditionalExpr`, etc.) when + # reading from a config class, etc. If a ConfigDict is used, then we don't want to emit an error + # because you'll get type checking from the ConfigDict itself. + # + # It would be nice if we could introspect the types better otherwise, but I don't know what the API + # is to evaluate an expr into its type and then check if that type is compatible with the expected + # type. Note that you can still get proper type checking via: `model_config = ConfigDict(...)`, just + # if you don't use an explicit string, the plugin won't be able to infer whether extra is forbidden. 
+ error_invalid_config_value(name, self._api, arg) + return None + return ModelConfigData(forbid_extra=forbid_extra) + if name == 'alias_generator': + has_alias_generator = True + if isinstance(arg, NameExpr) and arg.fullname == 'builtins.None': + has_alias_generator = False + return ModelConfigData(has_alias_generator=has_alias_generator) + if isinstance(arg, NameExpr) and arg.fullname in ('builtins.True', 'builtins.False'): + return ModelConfigData(**{name: arg.fullname == 'builtins.True'}) + error_invalid_config_value(name, self._api, arg) + return None + + @staticmethod + def get_has_default(stmt: AssignmentStmt) -> bool: + """Returns a boolean indicating whether the field defined in `stmt` is a required field.""" + expr = stmt.rvalue + if isinstance(expr, TempNode): + # TempNode means annotation-only, so has no default + return False + if isinstance(expr, CallExpr) and isinstance(expr.callee, RefExpr) and expr.callee.fullname == FIELD_FULLNAME: + # The "default value" is a call to `Field`; at this point, the field has a default if and only if: + # * there is a positional argument that is not `...` + # * there is a keyword argument named "default" that is not `...` + # * there is a "default_factory" that is not `None` + for arg, name in zip(expr.args, expr.arg_names): + # If name is None, then this arg is the default because it is the only positional argument. + if name is None or name == 'default': + return arg.__class__ is not EllipsisExpr + if name == 'default_factory': + return not (isinstance(arg, NameExpr) and arg.fullname == 'builtins.None') + return False + # Has no default if the "default value" is Ellipsis (i.e., `field_name: Annotation = ...`) + return not isinstance(expr, EllipsisExpr) + + @staticmethod + def get_alias_info(stmt: AssignmentStmt) -> tuple[str | None, bool]: + """Returns a pair (alias, has_dynamic_alias), extracted from the declaration of the field defined in `stmt`. + + `has_dynamic_alias` is True if and only if an alias is provided, but not as a string literal. + If `has_dynamic_alias` is True, `alias` will be None. + """ + expr = stmt.rvalue + if isinstance(expr, TempNode): + # TempNode means annotation-only + return None, False + + if not ( + isinstance(expr, CallExpr) and isinstance(expr.callee, RefExpr) and expr.callee.fullname == FIELD_FULLNAME + ): + # Assigned value is not a call to pydantic.fields.Field + return None, False + + for i, arg_name in enumerate(expr.arg_names): + if arg_name != 'alias': + continue + arg = expr.args[i] + if isinstance(arg, StrExpr): + return arg.value, False + else: + return None, True + return None, False + + def get_field_arguments( + self, + fields: list[PydanticModelField], + typed: bool, + use_alias: bool, + requires_dynamic_aliases: bool, + is_settings: bool, + ) -> list[Argument]: + """Helper function used during the construction of the `__init__` and `model_construct` method signatures. + + Returns a list of mypy Argument instances for use in the generated signatures. + """ + info = self._cls.info + arguments = [ + field.to_argument( + info, + typed=typed, + force_optional=requires_dynamic_aliases or is_settings, + use_alias=use_alias, + api=self._api, + ) + for field in fields + if not (use_alias and field.has_dynamic_alias) + ] + return arguments + + def should_init_forbid_extra(self, fields: list[PydanticModelField], config: ModelConfigData) -> bool: + """Indicates whether the generated `__init__` should get a `**kwargs` at the end of its signature. 
+ + We disallow arbitrary kwargs if the extra config setting is "forbid", or if the plugin config says to, + *unless* a required dynamic alias is present (since then we can't determine a valid signature). + """ + if not config.populate_by_name: + if self.is_dynamic_alias_present(fields, bool(config.has_alias_generator)): + return False + if config.forbid_extra: + return True + return self.plugin_config.init_forbid_extra + + @staticmethod + def is_dynamic_alias_present(fields: list[PydanticModelField], has_alias_generator: bool) -> bool: + """Returns whether any fields on the model have a "dynamic alias", i.e., an alias that cannot be + determined during static analysis. + """ + for field in fields: + if field.has_dynamic_alias: + return True + if has_alias_generator: + for field in fields: + if field.alias is None: + return True + return False + + +class ModelConfigData: + """Pydantic mypy plugin model config class.""" + + def __init__( + self, + forbid_extra: bool | None = None, + frozen: bool | None = None, + from_attributes: bool | None = None, + populate_by_name: bool | None = None, + has_alias_generator: bool | None = None, + ): + self.forbid_extra = forbid_extra + self.frozen = frozen + self.from_attributes = from_attributes + self.populate_by_name = populate_by_name + self.has_alias_generator = has_alias_generator + + def get_values_dict(self) -> dict[str, Any]: + """Returns a dict of Pydantic model config names to their values. + + It includes the config if config value is not `None`. + """ + return {k: v for k, v in self.__dict__.items() if v is not None} + + def update(self, config: ModelConfigData | None) -> None: + """Update Pydantic model config values.""" + if config is None: + return + for k, v in config.get_values_dict().items(): + setattr(self, k, v) + + def setdefault(self, key: str, value: Any) -> None: + """Set default value for Pydantic model config if config value is `None`.""" + if getattr(self, key) is None: + setattr(self, key, value) + + +ERROR_ORM = ErrorCode('pydantic-orm', 'Invalid from_attributes call', 'Pydantic') +ERROR_CONFIG = ErrorCode('pydantic-config', 'Invalid config value', 'Pydantic') +ERROR_ALIAS = ErrorCode('pydantic-alias', 'Dynamic alias disallowed', 'Pydantic') +ERROR_UNEXPECTED = ErrorCode('pydantic-unexpected', 'Unexpected behavior', 'Pydantic') +ERROR_UNTYPED = ErrorCode('pydantic-field', 'Untyped field disallowed', 'Pydantic') +ERROR_FIELD_DEFAULTS = ErrorCode('pydantic-field', 'Invalid Field defaults', 'Pydantic') +ERROR_EXTRA_FIELD_ROOT_MODEL = ErrorCode('pydantic-field', 'Extra field on RootModel subclass', 'Pydantic') + + +def error_from_attributes(model_name: str, api: CheckerPluginInterface, context: Context) -> None: + """Emits an error when the model does not have `from_attributes=True`.""" + api.fail(f'"{model_name}" does not have from_attributes=True', context, code=ERROR_ORM) + + +def error_invalid_config_value(name: str, api: SemanticAnalyzerPluginInterface, context: Context) -> None: + """Emits an error when the config value is invalid.""" + api.fail(f'Invalid value for "Config.{name}"', context, code=ERROR_CONFIG) + + +def error_required_dynamic_aliases(api: SemanticAnalyzerPluginInterface, context: Context) -> None: + """Emits required dynamic aliases error. + + This will be called when `warn_required_dynamic_aliases=True`. 
+ """ + api.fail('Required dynamic aliases disallowed', context, code=ERROR_ALIAS) + + +def error_unexpected_behavior( + detail: str, api: CheckerPluginInterface | SemanticAnalyzerPluginInterface, context: Context +) -> None: # pragma: no cover + """Emits unexpected behavior error.""" + # Can't think of a good way to test this, but I confirmed it renders as desired by adding to a non-error path + link = 'https://github.com/pydantic/pydantic/issues/new/choose' + full_message = f'The pydantic mypy plugin ran into unexpected behavior: {detail}\n' + full_message += f'Please consider reporting this bug at {link} so we can try to fix it!' + api.fail(full_message, context, code=ERROR_UNEXPECTED) + + +def error_untyped_fields(api: SemanticAnalyzerPluginInterface, context: Context) -> None: + """Emits an error when there is an untyped field in the model.""" + api.fail('Untyped fields disallowed', context, code=ERROR_UNTYPED) + + +def error_extra_fields_on_root_model(api: CheckerPluginInterface, context: Context) -> None: + """Emits an error when there is more than just a root field defined for a subclass of RootModel.""" + api.fail('Only `root` is allowed as a field of a `RootModel`', context, code=ERROR_EXTRA_FIELD_ROOT_MODEL) + + +def error_default_and_default_factory_specified(api: CheckerPluginInterface, context: Context) -> None: + """Emits an error when `Field` has both `default` and `default_factory` together.""" + api.fail('Field default and default_factory cannot be specified together', context, code=ERROR_FIELD_DEFAULTS) + + +def add_method( + api: SemanticAnalyzerPluginInterface | CheckerPluginInterface, + cls: ClassDef, + name: str, + args: list[Argument], + return_type: Type, + self_type: Type | None = None, + tvar_def: TypeVarDef | None = None, + is_classmethod: bool = False, +) -> None: + """Very closely related to `mypy.plugins.common.add_method_to_class`, with a few pydantic-specific changes.""" + info = cls.info + + # First remove any previously generated methods with the same name + # to avoid clashes and problems in the semantic analyzer. + if name in info.names: + sym = info.names[name] + if sym.plugin_generated and isinstance(sym.node, FuncDef): + cls.defs.body.remove(sym.node) # pragma: no cover + + if isinstance(api, SemanticAnalyzerPluginInterface): + function_type = api.named_type('builtins.function') + else: + function_type = api.named_generic_type('builtins.function', []) + + if is_classmethod: + self_type = self_type or TypeType(fill_typevars(info)) + first = [Argument(Var('_cls'), self_type, None, ARG_POS, True)] + else: + self_type = self_type or fill_typevars(info) + # `self` is positional *ONLY* here, but this can't be expressed + # fully in the mypy internal API. ARG_POS is the closest we can get. + # Using ARG_POS will, however, give mypy errors if a `self` field + # is present on a model: + # + # Name "self" already defined (possibly by an import) [no-redef] + # + # As a workaround, we give this argument a name that will + # never conflict. By its positional nature, this name will not + # be used or exposed to users. + first = [Argument(Var('__pydantic_self__'), self_type, None, ARG_POS)] + args = first + args + + arg_types, arg_names, arg_kinds = [], [], [] + for arg in args: + assert arg.type_annotation, 'All arguments must be fully typed.' 
+ arg_types.append(arg.type_annotation) + arg_names.append(arg.variable.name) + arg_kinds.append(arg.kind) + + signature = CallableType(arg_types, arg_kinds, arg_names, return_type, function_type) + if tvar_def: + signature.variables = [tvar_def] + + func = FuncDef(name, args, Block([PassStmt()])) + func.info = info + func.type = set_callable_name(signature, func) + func.is_class = is_classmethod + func._fullname = info.fullname + '.' + name + func.line = info.line + + # NOTE: we would like the plugin generated node to dominate, but we still + # need to keep any existing definitions so they get semantically analyzed. + if name in info.names: + # Get a nice unique name instead. + r_name = get_unique_redefinition_name(name, info.names) + info.names[r_name] = info.names[name] + + # Add decorator for is_classmethod + # The dataclasses plugin claims this is unnecessary for classmethods, but not including it results in a + # signature incompatible with the superclass, which causes mypy errors to occur for every subclass of BaseModel. + if is_classmethod: + func.is_decorated = True + v = Var(name, func.type) + v.info = info + v._fullname = func._fullname + v.is_classmethod = True + dec = Decorator(func, [NameExpr('classmethod')], v) + dec.line = info.line + sym = SymbolTableNode(MDEF, dec) + else: + sym = SymbolTableNode(MDEF, func) + sym.plugin_generated = True + info.names[name] = sym + + info.defn.defs.body.append(func) + + +def parse_toml(config_file: str) -> dict[str, Any] | None: + """Returns a dict of config keys to values. + + It reads configs from toml file and returns `None` if the file is not a toml file. + """ + if not config_file.endswith('.toml'): + return None + + if sys.version_info >= (3, 11): + import tomllib as toml_ + else: + try: + import tomli as toml_ + except ImportError: # pragma: no cover + import warnings + + warnings.warn('No TOML parser installed, cannot read configuration from `pyproject.toml`.') + return None + + with open(config_file, 'rb') as rf: + return toml_.load(rf) diff --git a/venv/lib/python3.10/site-packages/pydantic/networks.py b/venv/lib/python3.10/site-packages/pydantic/networks.py new file mode 100644 index 0000000000000000000000000000000000000000..d037a7217145cacea18aab291eb72dc8fb8fe04b --- /dev/null +++ b/venv/lib/python3.10/site-packages/pydantic/networks.py @@ -0,0 +1,754 @@ +"""The networks module contains types for common network-related fields.""" +from __future__ import annotations as _annotations + +import dataclasses as _dataclasses +import re +from importlib.metadata import version +from ipaddress import IPv4Address, IPv4Interface, IPv4Network, IPv6Address, IPv6Interface, IPv6Network +from typing import TYPE_CHECKING, Any + +from pydantic_core import MultiHostUrl, PydanticCustomError, Url, core_schema +from typing_extensions import Annotated, Self, TypeAlias + +from ._internal import _fields, _repr, _schema_generation_shared +from ._migration import getattr_migration +from .annotated_handlers import GetCoreSchemaHandler +from .json_schema import JsonSchemaValue + +if TYPE_CHECKING: + import email_validator + + NetworkType: TypeAlias = 'str | bytes | int | tuple[str | bytes | int, str | int]' + +else: + email_validator = None + + +__all__ = [ + 'AnyUrl', + 'AnyHttpUrl', + 'FileUrl', + 'FtpUrl', + 'HttpUrl', + 'WebsocketUrl', + 'AnyWebsocketUrl', + 'UrlConstraints', + 'EmailStr', + 'NameEmail', + 'IPvAnyAddress', + 'IPvAnyInterface', + 'IPvAnyNetwork', + 'PostgresDsn', + 'CockroachDsn', + 'AmqpDsn', + 'RedisDsn', + 'MongoDsn', + 'KafkaDsn', + 
'NatsDsn', + 'validate_email', + 'MySQLDsn', + 'MariaDBDsn', + 'ClickHouseDsn', +] + + +@_dataclasses.dataclass +class UrlConstraints(_fields.PydanticMetadata): + """Url constraints. + + Attributes: + max_length: The maximum length of the url. Defaults to `None`. + allowed_schemes: The allowed schemes. Defaults to `None`. + host_required: Whether the host is required. Defaults to `None`. + default_host: The default host. Defaults to `None`. + default_port: The default port. Defaults to `None`. + default_path: The default path. Defaults to `None`. + """ + + max_length: int | None = None + allowed_schemes: list[str] | None = None + host_required: bool | None = None + default_host: str | None = None + default_port: int | None = None + default_path: str | None = None + + def __hash__(self) -> int: + return hash( + ( + self.max_length, + tuple(self.allowed_schemes) if self.allowed_schemes is not None else None, + self.host_required, + self.default_host, + self.default_port, + self.default_path, + ) + ) + + +AnyUrl = Url +"""Base type for all URLs. + +* Any scheme allowed +* Top-level domain (TLD) not required +* Host required + +Assuming an input URL of `http://samuel:pass@example.com:8000/the/path/?query=here#fragment=is;this=bit`, +the types export the following properties: + +- `scheme`: the URL scheme (`http`), always set. +- `host`: the URL host (`example.com`), always set. +- `username`: optional username if included (`samuel`). +- `password`: optional password if included (`pass`). +- `port`: optional port (`8000`). +- `path`: optional path (`/the/path/`). +- `query`: optional URL query (for example, `GET` arguments or "search string", such as `query=here`). +- `fragment`: optional fragment (`fragment=is;this=bit`). +""" +AnyHttpUrl = Annotated[Url, UrlConstraints(allowed_schemes=['http', 'https'])] +"""A type that will accept any http or https URL. + +* TLD not required +* Host required +""" +HttpUrl = Annotated[Url, UrlConstraints(max_length=2083, allowed_schemes=['http', 'https'])] +"""A type that will accept any http or https URL. + +* TLD required +* Host required +* Max length 2083 + +```py +from pydantic import BaseModel, HttpUrl, ValidationError + +class MyModel(BaseModel): + url: HttpUrl + +m = MyModel(url='http://www.example.com') # (1)! +print(m.url) +#> http://www.example.com/ + +try: + MyModel(url='ftp://invalid.url') +except ValidationError as e: + print(e) + ''' + 1 validation error for MyModel + url + URL scheme should be 'http' or 'https' [type=url_scheme, input_value='ftp://invalid.url', input_type=str] + ''' + +try: + MyModel(url='not a url') +except ValidationError as e: + print(e) + ''' + 1 validation error for MyModel + url + Input should be a valid URL, relative URL without a base [type=url_parsing, input_value='not a url', input_type=str] + ''' +``` + +1. Note: mypy would prefer `m = MyModel(url=HttpUrl('http://www.example.com'))`, but Pydantic will convert the string to an HttpUrl instance anyway. + +"International domains" (e.g. 
a URL where the host or TLD includes non-ascii characters) will be encoded via +[punycode](https://en.wikipedia.org/wiki/Punycode) (see +[this article](https://www.xudongz.com/blog/2017/idn-phishing/) for a good description of why this is important): + +```py +from pydantic import BaseModel, HttpUrl + +class MyModel(BaseModel): + url: HttpUrl + +m1 = MyModel(url='http://puny£code.com') +print(m1.url) +#> http://xn--punycode-eja.com/ +m2 = MyModel(url='https://www.аррӏе.com/') +print(m2.url) +#> https://www.xn--80ak6aa92e.com/ +m3 = MyModel(url='https://www.example.珠宝/') +print(m3.url) +#> https://www.example.xn--pbt977c/ +``` + + +!!! warning "Underscores in Hostnames" + In Pydantic, underscores are allowed in all parts of a domain except the TLD. + Technically this might be wrong - in theory the hostname cannot have underscores, but subdomains can. + + To explain this; consider the following two cases: + + - `exam_ple.co.uk`: the hostname is `exam_ple`, which should not be allowed since it contains an underscore. + - `foo_bar.example.com` the hostname is `example`, which should be allowed since the underscore is in the subdomain. + + Without having an exhaustive list of TLDs, it would be impossible to differentiate between these two. Therefore + underscores are allowed, but you can always do further validation in a validator if desired. + + Also, Chrome, Firefox, and Safari all currently accept `http://exam_ple.com` as a URL, so we're in good + (or at least big) company. +""" +AnyWebsocketUrl = Annotated[Url, UrlConstraints(allowed_schemes=['ws', 'wss'])] +"""A type that will accept any ws or wss URL. + +* TLD not required +* Host required +""" +WebsocketUrl = Annotated[Url, UrlConstraints(max_length=2083, allowed_schemes=['ws', 'wss'])] +"""A type that will accept any ws or wss URL. + +* TLD required +* Host required +* Max length 2083 +""" +FileUrl = Annotated[Url, UrlConstraints(allowed_schemes=['file'])] +"""A type that will accept any file URL. + +* Host not required +""" +FtpUrl = Annotated[Url, UrlConstraints(allowed_schemes=['ftp'])] +"""A type that will accept ftp URL. + +* TLD not required +* Host required +""" +PostgresDsn = Annotated[ + MultiHostUrl, + UrlConstraints( + host_required=True, + allowed_schemes=[ + 'postgres', + 'postgresql', + 'postgresql+asyncpg', + 'postgresql+pg8000', + 'postgresql+psycopg', + 'postgresql+psycopg2', + 'postgresql+psycopg2cffi', + 'postgresql+py-postgresql', + 'postgresql+pygresql', + ], + ), +] +"""A type that will accept any Postgres DSN. 
+ +* User info required +* TLD not required +* Host required +* Supports multiple hosts + +If further validation is required, these properties can be used by validators to enforce specific behaviour: + +```py +from pydantic import ( + BaseModel, + HttpUrl, + PostgresDsn, + ValidationError, + field_validator, +) + +class MyModel(BaseModel): + url: HttpUrl + +m = MyModel(url='http://www.example.com') + +# the repr() method for a url will display all properties of the url +print(repr(m.url)) +#> Url('http://www.example.com/') +print(m.url.scheme) +#> http +print(m.url.host) +#> www.example.com +print(m.url.port) +#> 80 + +class MyDatabaseModel(BaseModel): + db: PostgresDsn + + @field_validator('db') + def check_db_name(cls, v): + assert v.path and len(v.path) > 1, 'database must be provided' + return v + +m = MyDatabaseModel(db='postgres://user:pass@localhost:5432/foobar') +print(m.db) +#> postgres://user:pass@localhost:5432/foobar + +try: + MyDatabaseModel(db='postgres://user:pass@localhost:5432') +except ValidationError as e: + print(e) + ''' + 1 validation error for MyDatabaseModel + db + Assertion failed, database must be provided + assert (None) + + where None = MultiHostUrl('postgres://user:pass@localhost:5432').path [type=assertion_error, input_value='postgres://user:pass@localhost:5432', input_type=str] + ''' +``` +""" + +CockroachDsn = Annotated[ + Url, + UrlConstraints( + host_required=True, + allowed_schemes=[ + 'cockroachdb', + 'cockroachdb+psycopg2', + 'cockroachdb+asyncpg', + ], + ), +] +"""A type that will accept any Cockroach DSN. + +* User info required +* TLD not required +* Host required +""" +AmqpDsn = Annotated[Url, UrlConstraints(allowed_schemes=['amqp', 'amqps'])] +"""A type that will accept any AMQP DSN. + +* User info required +* TLD not required +* Host required +""" +RedisDsn = Annotated[ + Url, + UrlConstraints(allowed_schemes=['redis', 'rediss'], default_host='localhost', default_port=6379, default_path='/0'), +] +"""A type that will accept any Redis DSN. + +* User info required +* TLD not required +* Host required (e.g., `rediss://:pass@localhost`) +""" +MongoDsn = Annotated[MultiHostUrl, UrlConstraints(allowed_schemes=['mongodb', 'mongodb+srv'], default_port=27017)] +"""A type that will accept any MongoDB DSN. + +* User info not required +* Database name not required +* Port not required +* User info may be passed without user part (e.g., `mongodb://mongodb0.example.com:27017`). +""" +KafkaDsn = Annotated[Url, UrlConstraints(allowed_schemes=['kafka'], default_host='localhost', default_port=9092)] +"""A type that will accept any Kafka DSN. + +* User info required +* TLD not required +* Host required +""" +NatsDsn = Annotated[ + MultiHostUrl, UrlConstraints(allowed_schemes=['nats', 'tls', 'ws'], default_host='localhost', default_port=4222) +] +"""A type that will accept any NATS DSN. + +NATS is a connective technology built for the ever increasingly hyper-connected world. +It is a single technology that enables applications to securely communicate across +any combination of cloud vendors, on-premise, edge, web and mobile, and devices. +More: https://nats.io +""" +MySQLDsn = Annotated[ + Url, + UrlConstraints( + allowed_schemes=[ + 'mysql', + 'mysql+mysqlconnector', + 'mysql+aiomysql', + 'mysql+asyncmy', + 'mysql+mysqldb', + 'mysql+pymysql', + 'mysql+cymysql', + 'mysql+pyodbc', + ], + default_port=3306, + ), +] +"""A type that will accept any MySQL DSN. 
+ +* User info required +* TLD not required +* Host required +""" +MariaDBDsn = Annotated[ + Url, + UrlConstraints( + allowed_schemes=['mariadb', 'mariadb+mariadbconnector', 'mariadb+pymysql'], + default_port=3306, + ), +] +"""A type that will accept any MariaDB DSN. + +* User info required +* TLD not required +* Host required +""" +ClickHouseDsn = Annotated[ + Url, + UrlConstraints( + allowed_schemes=['clickhouse+native', 'clickhouse+asynch'], + default_host='localhost', + default_port=9000, + ), +] +"""A type that will accept any ClickHouse DSN. + +* User info required +* TLD not required +* Host required +""" + + +def import_email_validator() -> None: + global email_validator + try: + import email_validator + except ImportError as e: + raise ImportError('email-validator is not installed, run `pip install pydantic[email]`') from e + if not version('email-validator').partition('.')[0] == '2': + raise ImportError('email-validator version >= 2.0 required, run pip install -U email-validator') + + +if TYPE_CHECKING: + EmailStr = Annotated[str, ...] +else: + + class EmailStr: + """ + Info: + To use this type, you need to install the optional + [`email-validator`](https://github.com/JoshData/python-email-validator) package: + + ```bash + pip install email-validator + ``` + + Validate email addresses. + + ```py + from pydantic import BaseModel, EmailStr + + class Model(BaseModel): + email: EmailStr + + print(Model(email='contact@mail.com')) + #> email='contact@mail.com' + ``` + """ # noqa: D212 + + @classmethod + def __get_pydantic_core_schema__( + cls, + _source: type[Any], + _handler: GetCoreSchemaHandler, + ) -> core_schema.CoreSchema: + import_email_validator() + return core_schema.no_info_after_validator_function(cls._validate, core_schema.str_schema()) + + @classmethod + def __get_pydantic_json_schema__( + cls, core_schema: core_schema.CoreSchema, handler: _schema_generation_shared.GetJsonSchemaHandler + ) -> JsonSchemaValue: + field_schema = handler(core_schema) + field_schema.update(type='string', format='email') + return field_schema + + @classmethod + def _validate(cls, input_value: str, /) -> str: + return validate_email(input_value)[1] + + +class NameEmail(_repr.Representation): + """ + Info: + To use this type, you need to install the optional + [`email-validator`](https://github.com/JoshData/python-email-validator) package: + + ```bash + pip install email-validator + ``` + + Validate a name and email address combination, as specified by + [RFC 5322](https://datatracker.ietf.org/doc/html/rfc5322#section-3.4). + + The `NameEmail` has two properties: `name` and `email`. + In case the `name` is not provided, it's inferred from the email address. 
+ + ```py + from pydantic import BaseModel, NameEmail + + class User(BaseModel): + email: NameEmail + + user = User(email='Fred Bloggs ') + print(user.email) + #> Fred Bloggs + print(user.email.name) + #> Fred Bloggs + + user = User(email='fred.bloggs@example.com') + print(user.email) + #> fred.bloggs + print(user.email.name) + #> fred.bloggs + ``` + """ # noqa: D212 + + __slots__ = 'name', 'email' + + def __init__(self, name: str, email: str): + self.name = name + self.email = email + + def __eq__(self, other: Any) -> bool: + return isinstance(other, NameEmail) and (self.name, self.email) == (other.name, other.email) + + @classmethod + def __get_pydantic_json_schema__( + cls, core_schema: core_schema.CoreSchema, handler: _schema_generation_shared.GetJsonSchemaHandler + ) -> JsonSchemaValue: + field_schema = handler(core_schema) + field_schema.update(type='string', format='name-email') + return field_schema + + @classmethod + def __get_pydantic_core_schema__( + cls, + _source: type[Any], + _handler: GetCoreSchemaHandler, + ) -> core_schema.CoreSchema: + import_email_validator() + + return core_schema.no_info_after_validator_function( + cls._validate, + core_schema.json_or_python_schema( + json_schema=core_schema.str_schema(), + python_schema=core_schema.union_schema( + [core_schema.is_instance_schema(cls), core_schema.str_schema()], + custom_error_type='name_email_type', + custom_error_message='Input is not a valid NameEmail', + ), + serialization=core_schema.to_string_ser_schema(), + ), + ) + + @classmethod + def _validate(cls, input_value: Self | str, /) -> Self: + if isinstance(input_value, cls): + return input_value + else: + name, email = validate_email(input_value) + return cls(name, email) + + def __str__(self) -> str: + if '@' in self.name: + return f'"{self.name}" <{self.email}>' + + return f'{self.name} <{self.email}>' + + +class IPvAnyAddress: + """Validate an IPv4 or IPv6 address. 
+ + ```py + from pydantic import BaseModel + from pydantic.networks import IPvAnyAddress + + class IpModel(BaseModel): + ip: IPvAnyAddress + + print(IpModel(ip='127.0.0.1')) + #> ip=IPv4Address('127.0.0.1') + + try: + IpModel(ip='http://www.example.com') + except ValueError as e: + print(e.errors()) + ''' + [ + { + 'type': 'ip_any_address', + 'loc': ('ip',), + 'msg': 'value is not a valid IPv4 or IPv6 address', + 'input': 'http://www.example.com', + } + ] + ''' + ``` + """ + + __slots__ = () + + def __new__(cls, value: Any) -> IPv4Address | IPv6Address: + """Validate an IPv4 or IPv6 address.""" + try: + return IPv4Address(value) + except ValueError: + pass + + try: + return IPv6Address(value) + except ValueError: + raise PydanticCustomError('ip_any_address', 'value is not a valid IPv4 or IPv6 address') + + @classmethod + def __get_pydantic_json_schema__( + cls, core_schema: core_schema.CoreSchema, handler: _schema_generation_shared.GetJsonSchemaHandler + ) -> JsonSchemaValue: + field_schema = {} + field_schema.update(type='string', format='ipvanyaddress') + return field_schema + + @classmethod + def __get_pydantic_core_schema__( + cls, + _source: type[Any], + _handler: GetCoreSchemaHandler, + ) -> core_schema.CoreSchema: + return core_schema.no_info_plain_validator_function( + cls._validate, serialization=core_schema.to_string_ser_schema() + ) + + @classmethod + def _validate(cls, input_value: Any, /) -> IPv4Address | IPv6Address: + return cls(input_value) # type: ignore[return-value] + + +class IPvAnyInterface: + """Validate an IPv4 or IPv6 interface.""" + + __slots__ = () + + def __new__(cls, value: NetworkType) -> IPv4Interface | IPv6Interface: + """Validate an IPv4 or IPv6 interface.""" + try: + return IPv4Interface(value) + except ValueError: + pass + + try: + return IPv6Interface(value) + except ValueError: + raise PydanticCustomError('ip_any_interface', 'value is not a valid IPv4 or IPv6 interface') + + @classmethod + def __get_pydantic_json_schema__( + cls, core_schema: core_schema.CoreSchema, handler: _schema_generation_shared.GetJsonSchemaHandler + ) -> JsonSchemaValue: + field_schema = {} + field_schema.update(type='string', format='ipvanyinterface') + return field_schema + + @classmethod + def __get_pydantic_core_schema__( + cls, + _source: type[Any], + _handler: GetCoreSchemaHandler, + ) -> core_schema.CoreSchema: + return core_schema.no_info_plain_validator_function( + cls._validate, serialization=core_schema.to_string_ser_schema() + ) + + @classmethod + def _validate(cls, input_value: NetworkType, /) -> IPv4Interface | IPv6Interface: + return cls(input_value) # type: ignore[return-value] + + +class IPvAnyNetwork: + """Validate an IPv4 or IPv6 network.""" + + __slots__ = () + + def __new__(cls, value: NetworkType) -> IPv4Network | IPv6Network: + """Validate an IPv4 or IPv6 network.""" + # Assume IP Network is defined with a default value for `strict` argument. + # Define your own class if you want to specify network address check strictness. 
+ try: + return IPv4Network(value) + except ValueError: + pass + + try: + return IPv6Network(value) + except ValueError: + raise PydanticCustomError('ip_any_network', 'value is not a valid IPv4 or IPv6 network') + + @classmethod + def __get_pydantic_json_schema__( + cls, core_schema: core_schema.CoreSchema, handler: _schema_generation_shared.GetJsonSchemaHandler + ) -> JsonSchemaValue: + field_schema = {} + field_schema.update(type='string', format='ipvanynetwork') + return field_schema + + @classmethod + def __get_pydantic_core_schema__( + cls, + _source: type[Any], + _handler: GetCoreSchemaHandler, + ) -> core_schema.CoreSchema: + return core_schema.no_info_plain_validator_function( + cls._validate, serialization=core_schema.to_string_ser_schema() + ) + + @classmethod + def _validate(cls, input_value: NetworkType, /) -> IPv4Network | IPv6Network: + return cls(input_value) # type: ignore[return-value] + + +def _build_pretty_email_regex() -> re.Pattern[str]: + name_chars = r'[\w!#$%&\'*+\-/=?^_`{|}~]' + unquoted_name_group = rf'((?:{name_chars}+\s+)*{name_chars}+)' + quoted_name_group = r'"((?:[^"]|\")+)"' + email_group = r'<\s*(.+)\s*>' + return re.compile(rf'\s*(?:{unquoted_name_group}|{quoted_name_group})?\s*{email_group}\s*') + + +pretty_email_regex = _build_pretty_email_regex() + +MAX_EMAIL_LENGTH = 2048 +"""Maximum length for an email. +A somewhat arbitrary but very generous number compared to what is allowed by most implementations. +""" + + +def validate_email(value: str) -> tuple[str, str]: + """Email address validation using [email-validator](https://pypi.org/project/email-validator/). + + Note: + Note that: + + * Raw IP address (literal) domain parts are not allowed. + * `"John Doe "` style "pretty" email addresses are processed. + * Spaces are striped from the beginning and end of addresses, but no error is raised. 
+ """ + if email_validator is None: + import_email_validator() + + if len(value) > MAX_EMAIL_LENGTH: + raise PydanticCustomError( + 'value_error', + 'value is not a valid email address: {reason}', + {'reason': f'Length must not exceed {MAX_EMAIL_LENGTH} characters'}, + ) + + m = pretty_email_regex.fullmatch(value) + name: str | None = None + if m: + unquoted_name, quoted_name, value = m.groups() + name = unquoted_name or quoted_name + + email = value.strip() + + try: + parts = email_validator.validate_email(email, check_deliverability=False) + except email_validator.EmailNotValidError as e: + raise PydanticCustomError( + 'value_error', 'value is not a valid email address: {reason}', {'reason': str(e.args[0])} + ) from e + + email = parts.normalized + assert email is not None + name = name or parts.local_part + return name, email + + +__getattr__ = getattr_migration(__name__) diff --git a/venv/lib/python3.10/site-packages/pydantic/parse.py b/venv/lib/python3.10/site-packages/pydantic/parse.py new file mode 100644 index 0000000000000000000000000000000000000000..ceee6342ba566197574e32601c44a3111a6caa7a --- /dev/null +++ b/venv/lib/python3.10/site-packages/pydantic/parse.py @@ -0,0 +1,4 @@ +"""The `parse` module is a backport module from V1.""" +from ._migration import getattr_migration + +__getattr__ = getattr_migration(__name__) diff --git a/venv/lib/python3.10/site-packages/pydantic/py.typed b/venv/lib/python3.10/site-packages/pydantic/py.typed new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/venv/lib/python3.10/site-packages/pydantic/root_model.py b/venv/lib/python3.10/site-packages/pydantic/root_model.py new file mode 100644 index 0000000000000000000000000000000000000000..c579642b85df31221f87246e5c7132596ba8a73b --- /dev/null +++ b/venv/lib/python3.10/site-packages/pydantic/root_model.py @@ -0,0 +1,155 @@ +"""RootModel class and type definitions.""" + +from __future__ import annotations as _annotations + +import typing +from copy import copy, deepcopy + +from pydantic_core import PydanticUndefined + +from . import PydanticUserError +from ._internal import _model_construction, _repr +from .main import BaseModel, _object_setattr + +if typing.TYPE_CHECKING: + from typing import Any + + from typing_extensions import Literal, dataclass_transform + + from .fields import Field as PydanticModelField + + # dataclass_transform could be applied to RootModel directly, but `ModelMetaclass`'s dataclass_transform + # takes priority (at least with pyright). We trick type checkers into thinking we apply dataclass_transform + # on a new metaclass. + @dataclass_transform(kw_only_default=False, field_specifiers=(PydanticModelField,)) + class _RootModelMetaclass(_model_construction.ModelMetaclass): + ... +else: + _RootModelMetaclass = _model_construction.ModelMetaclass + +__all__ = ('RootModel',) + +Model = typing.TypeVar('Model', bound='BaseModel') +RootModelRootType = typing.TypeVar('RootModelRootType') + + +class RootModel(BaseModel, typing.Generic[RootModelRootType], metaclass=_RootModelMetaclass): + """Usage docs: https://docs.pydantic.dev/2.7/concepts/models/#rootmodel-and-custom-root-types + + A Pydantic `BaseModel` for the root object of the model. + + Attributes: + root: The root object of the model. + __pydantic_root_model__: Whether the model is a RootModel. + __pydantic_private__: Private fields in the model. + __pydantic_extra__: Extra fields in the model. 
+ + """ + + __pydantic_root_model__ = True + __pydantic_private__ = None + __pydantic_extra__ = None + + root: RootModelRootType + + def __init_subclass__(cls, **kwargs): + extra = cls.model_config.get('extra') + if extra is not None: + raise PydanticUserError( + "`RootModel` does not support setting `model_config['extra']`", code='root-model-extra' + ) + super().__init_subclass__(**kwargs) + + def __init__(self, /, root: RootModelRootType = PydanticUndefined, **data) -> None: # type: ignore + __tracebackhide__ = True + if data: + if root is not PydanticUndefined: + raise ValueError( + '"RootModel.__init__" accepts either a single positional argument or arbitrary keyword arguments' + ) + root = data # type: ignore + self.__pydantic_validator__.validate_python(root, self_instance=self) + + __init__.__pydantic_base_init__ = True # pyright: ignore[reportFunctionMemberAccess] + + @classmethod + def model_construct(cls: type[Model], root: RootModelRootType, _fields_set: set[str] | None = None) -> Model: # type: ignore + """Create a new model using the provided root object and update fields set. + + Args: + root: The root object of the model. + _fields_set: The set of fields to be updated. + + Returns: + The new model. + + Raises: + NotImplemented: If the model is not a subclass of `RootModel`. + """ + return super().model_construct(root=root, _fields_set=_fields_set) + + def __getstate__(self) -> dict[Any, Any]: + return { + '__dict__': self.__dict__, + '__pydantic_fields_set__': self.__pydantic_fields_set__, + } + + def __setstate__(self, state: dict[Any, Any]) -> None: + _object_setattr(self, '__pydantic_fields_set__', state['__pydantic_fields_set__']) + _object_setattr(self, '__dict__', state['__dict__']) + + def __copy__(self: Model) -> Model: + """Returns a shallow copy of the model.""" + cls = type(self) + m = cls.__new__(cls) + _object_setattr(m, '__dict__', copy(self.__dict__)) + _object_setattr(m, '__pydantic_fields_set__', copy(self.__pydantic_fields_set__)) + return m + + def __deepcopy__(self: Model, memo: dict[int, Any] | None = None) -> Model: + """Returns a deep copy of the model.""" + cls = type(self) + m = cls.__new__(cls) + _object_setattr(m, '__dict__', deepcopy(self.__dict__, memo=memo)) + # This next line doesn't need a deepcopy because __pydantic_fields_set__ is a set[str], + # and attempting a deepcopy would be marginally slower. + _object_setattr(m, '__pydantic_fields_set__', copy(self.__pydantic_fields_set__)) + return m + + if typing.TYPE_CHECKING: + + def model_dump( # type: ignore + self, + *, + mode: Literal['json', 'python'] | str = 'python', + include: Any = None, + exclude: Any = None, + context: dict[str, Any] | None = None, + by_alias: bool = False, + exclude_unset: bool = False, + exclude_defaults: bool = False, + exclude_none: bool = False, + round_trip: bool = False, + warnings: bool | Literal['none', 'warn', 'error'] = True, + serialize_as_any: bool = False, + ) -> Any: + """This method is included just to get a more accurate return type for type checkers. + It is included in this `if TYPE_CHECKING:` block since no override is actually necessary. + + See the documentation of `BaseModel.model_dump` for more details about the arguments. + + Generally, this method will have a return type of `RootModelRootType`, assuming that `RootModelRootType` is + not a `BaseModel` subclass. If `RootModelRootType` is a `BaseModel` subclass, then the return + type will likely be `dict[str, Any]`, as `model_dump` calls are recursive. 
The return type could + even be something different, in the case of a custom serializer. + Thus, `Any` is used here to catch all of these cases. + """ + ... + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, RootModel): + return NotImplemented + return self.model_fields['root'].annotation == other.model_fields['root'].annotation and super().__eq__(other) + + def __repr_args__(self) -> _repr.ReprArgs: + yield 'root', self.root diff --git a/venv/lib/python3.10/site-packages/pydantic/schema.py b/venv/lib/python3.10/site-packages/pydantic/schema.py new file mode 100644 index 0000000000000000000000000000000000000000..e290aed9ac770342ed89172ea3a6680a1a764b69 --- /dev/null +++ b/venv/lib/python3.10/site-packages/pydantic/schema.py @@ -0,0 +1,4 @@ +"""The `schema` module is a backport module from V1.""" +from ._migration import getattr_migration + +__getattr__ = getattr_migration(__name__) diff --git a/venv/lib/python3.10/site-packages/pydantic/tools.py b/venv/lib/python3.10/site-packages/pydantic/tools.py new file mode 100644 index 0000000000000000000000000000000000000000..8e317c9257cb8c9bdac22572db1c9bfe094b4131 --- /dev/null +++ b/venv/lib/python3.10/site-packages/pydantic/tools.py @@ -0,0 +1,4 @@ +"""The `tools` module is a backport module from V1.""" +from ._migration import getattr_migration + +__getattr__ = getattr_migration(__name__) diff --git a/venv/lib/python3.10/site-packages/pydantic/type_adapter.py b/venv/lib/python3.10/site-packages/pydantic/type_adapter.py new file mode 100644 index 0000000000000000000000000000000000000000..e055a380ad369518741d4910c7c6fd5fda3437f2 --- /dev/null +++ b/venv/lib/python3.10/site-packages/pydantic/type_adapter.py @@ -0,0 +1,473 @@ +"""Type adapter specification.""" +from __future__ import annotations as _annotations + +import sys +from dataclasses import is_dataclass +from typing import TYPE_CHECKING, Any, Dict, Generic, Iterable, Set, TypeVar, Union, cast, final, overload + +from pydantic_core import CoreSchema, SchemaSerializer, SchemaValidator, Some +from typing_extensions import Literal, get_args, is_typeddict + +from pydantic.errors import PydanticUserError +from pydantic.main import BaseModel + +from ._internal import _config, _generate_schema, _typing_extra +from .config import ConfigDict +from .json_schema import ( + DEFAULT_REF_TEMPLATE, + GenerateJsonSchema, + JsonSchemaKeyT, + JsonSchemaMode, + JsonSchemaValue, +) +from .plugin._schema_validator import create_schema_validator + +T = TypeVar('T') + + +if TYPE_CHECKING: + # should be `set[int] | set[str] | dict[int, IncEx] | dict[str, IncEx] | None`, but mypy can't cope + IncEx = Union[Set[int], Set[str], Dict[int, Any], Dict[str, Any]] + + +def _get_schema(type_: Any, config_wrapper: _config.ConfigWrapper, parent_depth: int) -> CoreSchema: + """`BaseModel` uses its own `__module__` to find out where it was defined + and then looks for symbols to resolve forward references in those globals. + On the other hand this function can be called with arbitrary objects, + including type aliases, where `__module__` (always `typing.py`) is not useful. + So instead we look at the globals in our parent stack frame. + + This works for the case where this function is called in a module that + has the target of forward references in its scope, but + does not always work for more complex cases. 
+ + For example, take the following: + + a.py + ```python + from typing import Dict, List + + IntList = List[int] + OuterDict = Dict[str, 'IntList'] + ``` + + b.py + ```python test="skip" + from a import OuterDict + + from pydantic import TypeAdapter + + IntList = int # replaces the symbol the forward reference is looking for + v = TypeAdapter(OuterDict) + v({'x': 1}) # should fail but doesn't + ``` + + If `OuterDict` were a `BaseModel`, this would work because it would resolve + the forward reference within the `a.py` namespace. + But `TypeAdapter(OuterDict)` can't determine what module `OuterDict` came from. + + In other words, the assumption that _all_ forward references exist in the + module we are being called from is not technically always true. + Although most of the time it is and it works fine for recursive models and such, + `BaseModel`'s behavior isn't perfect either and _can_ break in similar ways, + so there is no right or wrong between the two. + + But at the very least this behavior is _subtly_ different from `BaseModel`'s. + """ + local_ns = _typing_extra.parent_frame_namespace(parent_depth=parent_depth) + global_ns = sys._getframe(max(parent_depth - 1, 1)).f_globals.copy() + global_ns.update(local_ns or {}) + gen = _generate_schema.GenerateSchema(config_wrapper, types_namespace=global_ns, typevars_map={}) + schema = gen.generate_schema(type_) + schema = gen.clean_schema(schema) + return schema + + +def _getattr_no_parents(obj: Any, attribute: str) -> Any: + """Returns the attribute value without attempting to look up attributes from parent types.""" + if hasattr(obj, '__dict__'): + try: + return obj.__dict__[attribute] + except KeyError: + pass + + slots = getattr(obj, '__slots__', None) + if slots is not None and attribute in slots: + return getattr(obj, attribute) + else: + raise AttributeError(attribute) + + +def _type_has_config(type_: Any) -> bool: + """Returns whether the type has config.""" + try: + return issubclass(type_, BaseModel) or is_dataclass(type_) or is_typeddict(type_) + except TypeError: + # type is not a class + return False + + +@final +class TypeAdapter(Generic[T]): + """Usage docs: https://docs.pydantic.dev/2.7/concepts/type_adapter/ + + Type adapters provide a flexible way to perform validation and serialization based on a Python type. + + A `TypeAdapter` instance exposes some of the functionality from `BaseModel` instance methods + for types that do not have such methods (such as dataclasses, primitive types, and more). + + **Note:** `TypeAdapter` instances are not types, and cannot be used as type annotations for fields. + + Attributes: + core_schema: The core schema for the type. + validator (SchemaValidator): The schema validator for the type. + serializer: The schema serializer for the type. + """ + + @overload + def __init__( + self: TypeAdapter[T], + type: type[T], + *, + config: ConfigDict | None = ..., + _parent_depth: int = ..., + module: str | None = ..., + ) -> None: + ... + + # This second overload is for unsupported special forms (such as Annotated, Union, etc.) + # Currently there is no way to type this correctly + # See https://github.com/python/typing/pull/1618 + @overload + def __init__( + self: TypeAdapter[Any], + type: Any, + *, + config: ConfigDict | None = ..., + _parent_depth: int = ..., + module: str | None = ..., + ) -> None: + ... + + def __init__( + self, + type: Any, + *, + config: ConfigDict | None = None, + _parent_depth: int = 2, + module: str | None = None, + ) -> None: + """Initializes the TypeAdapter object. 
+ + Args: + type: The type associated with the `TypeAdapter`. + config: Configuration for the `TypeAdapter`, should be a dictionary conforming to [`ConfigDict`][pydantic.config.ConfigDict]. + _parent_depth: depth at which to search the parent namespace to construct the local namespace. + module: The module that passes to plugin if provided. + + !!! note + You cannot use the `config` argument when instantiating a `TypeAdapter` if the type you're using has its own + config that cannot be overridden (ex: `BaseModel`, `TypedDict`, and `dataclass`). A + [`type-adapter-config-unused`](../errors/usage_errors.md#type-adapter-config-unused) error will be raised in this case. + + !!! note + The `_parent_depth` argument is named with an underscore to suggest its private nature and discourage use. + It may be deprecated in a minor version, so we only recommend using it if you're + comfortable with potential change in behavior / support. + + ??? tip "Compatibility with `mypy`" + Depending on the type used, `mypy` might raise an error when instantiating a `TypeAdapter`. As a workaround, you can explicitly + annotate your variable: + + ```py + from typing import Union + + from pydantic import TypeAdapter + + ta: TypeAdapter[Union[str, int]] = TypeAdapter(Union[str, int]) # type: ignore[arg-type] + ``` + + Returns: + A type adapter configured for the specified `type`. + """ + type_is_annotated: bool = _typing_extra.is_annotated(type) + annotated_type: Any = get_args(type)[0] if type_is_annotated else None + type_has_config: bool = _type_has_config(annotated_type if type_is_annotated else type) + + if type_has_config and config is not None: + raise PydanticUserError( + 'Cannot use `config` when the type is a BaseModel, dataclass or TypedDict.' + ' These types can have their own config and setting the config via the `config`' + ' parameter to TypeAdapter will not override it, thus the `config` you passed to' + ' TypeAdapter becomes meaningless, which is probably not what you want.', + code='type-adapter-config-unused', + ) + + config_wrapper = _config.ConfigWrapper(config) + + core_schema: CoreSchema + try: + core_schema = _getattr_no_parents(type, '__pydantic_core_schema__') + except AttributeError: + core_schema = _get_schema(type, config_wrapper, parent_depth=_parent_depth + 1) + + core_config = config_wrapper.core_config(None) + validator: SchemaValidator + try: + validator = _getattr_no_parents(type, '__pydantic_validator__') + except AttributeError: + if module is None: + f = sys._getframe(1) + module = cast(str, f.f_globals.get('__name__', '')) + validator = create_schema_validator( + core_schema, type, module, str(type), 'TypeAdapter', core_config, config_wrapper.plugin_settings + ) # type: ignore + + serializer: SchemaSerializer + try: + serializer = _getattr_no_parents(type, '__pydantic_serializer__') + except AttributeError: + serializer = SchemaSerializer(core_schema, core_config) + + self.core_schema = core_schema + self.validator = validator + self.serializer = serializer + + def validate_python( + self, + object: Any, + /, + *, + strict: bool | None = None, + from_attributes: bool | None = None, + context: dict[str, Any] | None = None, + ) -> T: + """Validate a Python object against the model. + + Args: + object: The Python object to validate against the model. + strict: Whether to strictly check types. + from_attributes: Whether to extract data from object attributes. + context: Additional context to pass to the validator. + + !!! 
note + When using `TypeAdapter` with a Pydantic `dataclass`, the use of the `from_attributes` + argument is not supported. + + Returns: + The validated object. + """ + return self.validator.validate_python(object, strict=strict, from_attributes=from_attributes, context=context) + + def validate_json( + self, data: str | bytes, /, *, strict: bool | None = None, context: dict[str, Any] | None = None + ) -> T: + """Usage docs: https://docs.pydantic.dev/2.7/concepts/json/#json-parsing + + Validate a JSON string or bytes against the model. + + Args: + data: The JSON data to validate against the model. + strict: Whether to strictly check types. + context: Additional context to use during validation. + + Returns: + The validated object. + """ + return self.validator.validate_json(data, strict=strict, context=context) + + def validate_strings(self, obj: Any, /, *, strict: bool | None = None, context: dict[str, Any] | None = None) -> T: + """Validate object contains string data against the model. + + Args: + obj: The object contains string data to validate. + strict: Whether to strictly check types. + context: Additional context to use during validation. + + Returns: + The validated object. + """ + return self.validator.validate_strings(obj, strict=strict, context=context) + + def get_default_value(self, *, strict: bool | None = None, context: dict[str, Any] | None = None) -> Some[T] | None: + """Get the default value for the wrapped type. + + Args: + strict: Whether to strictly check types. + context: Additional context to pass to the validator. + + Returns: + The default value wrapped in a `Some` if there is one or None if not. + """ + return self.validator.get_default_value(strict=strict, context=context) + + def dump_python( + self, + instance: T, + /, + *, + mode: Literal['json', 'python'] = 'python', + include: IncEx | None = None, + exclude: IncEx | None = None, + by_alias: bool = False, + exclude_unset: bool = False, + exclude_defaults: bool = False, + exclude_none: bool = False, + round_trip: bool = False, + warnings: bool | Literal['none', 'warn', 'error'] = True, + serialize_as_any: bool = False, + ) -> Any: + """Dump an instance of the adapted type to a Python object. + + Args: + instance: The Python object to serialize. + mode: The output format. + include: Fields to include in the output. + exclude: Fields to exclude from the output. + by_alias: Whether to use alias names for field names. + exclude_unset: Whether to exclude unset fields. + exclude_defaults: Whether to exclude fields with default values. + exclude_none: Whether to exclude fields with None values. + round_trip: Whether to output the serialized data in a way that is compatible with deserialization. + warnings: How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors, + "error" raises a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError]. + serialize_as_any: Whether to serialize fields with duck-typing serialization behavior. + + Returns: + The serialized object. 
+ """ + return self.serializer.to_python( + instance, + mode=mode, + by_alias=by_alias, + include=include, + exclude=exclude, + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + exclude_none=exclude_none, + round_trip=round_trip, + warnings=warnings, + serialize_as_any=serialize_as_any, + ) + + def dump_json( + self, + instance: T, + /, + *, + indent: int | None = None, + include: IncEx | None = None, + exclude: IncEx | None = None, + by_alias: bool = False, + exclude_unset: bool = False, + exclude_defaults: bool = False, + exclude_none: bool = False, + round_trip: bool = False, + warnings: bool | Literal['none', 'warn', 'error'] = True, + serialize_as_any: bool = False, + ) -> bytes: + """Usage docs: https://docs.pydantic.dev/2.7/concepts/json/#json-serialization + + Serialize an instance of the adapted type to JSON. + + Args: + instance: The instance to be serialized. + indent: Number of spaces for JSON indentation. + include: Fields to include. + exclude: Fields to exclude. + by_alias: Whether to use alias names for field names. + exclude_unset: Whether to exclude unset fields. + exclude_defaults: Whether to exclude fields with default values. + exclude_none: Whether to exclude fields with a value of `None`. + round_trip: Whether to serialize and deserialize the instance to ensure round-tripping. + warnings: How to handle serialization errors. False/"none" ignores them, True/"warn" logs errors, + "error" raises a [`PydanticSerializationError`][pydantic_core.PydanticSerializationError]. + serialize_as_any: Whether to serialize fields with duck-typing serialization behavior. + + Returns: + The JSON representation of the given instance as bytes. + """ + return self.serializer.to_json( + instance, + indent=indent, + include=include, + exclude=exclude, + by_alias=by_alias, + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + exclude_none=exclude_none, + round_trip=round_trip, + warnings=warnings, + serialize_as_any=serialize_as_any, + ) + + def json_schema( + self, + *, + by_alias: bool = True, + ref_template: str = DEFAULT_REF_TEMPLATE, + schema_generator: type[GenerateJsonSchema] = GenerateJsonSchema, + mode: JsonSchemaMode = 'validation', + ) -> dict[str, Any]: + """Generate a JSON schema for the adapted type. + + Args: + by_alias: Whether to use alias names for field names. + ref_template: The format string used for generating $ref strings. + schema_generator: The generator class used for creating the schema. + mode: The mode to use for schema generation. + + Returns: + The JSON schema for the model as a dictionary. + """ + schema_generator_instance = schema_generator(by_alias=by_alias, ref_template=ref_template) + return schema_generator_instance.generate(self.core_schema, mode=mode) + + @staticmethod + def json_schemas( + inputs: Iterable[tuple[JsonSchemaKeyT, JsonSchemaMode, TypeAdapter[Any]]], + /, + *, + by_alias: bool = True, + title: str | None = None, + description: str | None = None, + ref_template: str = DEFAULT_REF_TEMPLATE, + schema_generator: type[GenerateJsonSchema] = GenerateJsonSchema, + ) -> tuple[dict[tuple[JsonSchemaKeyT, JsonSchemaMode], JsonSchemaValue], JsonSchemaValue]: + """Generate a JSON schema including definitions from multiple type adapters. + + Args: + inputs: Inputs to schema generation. The first two items will form the keys of the (first) + output mapping; the type adapters will provide the core schemas that get converted into + definitions in the output JSON schema. + by_alias: Whether to use alias names. 
+ title: The title for the schema. + description: The description for the schema. + ref_template: The format string used for generating $ref strings. + schema_generator: The generator class used for creating the schema. + + Returns: + A tuple where: + + - The first element is a dictionary whose keys are tuples of JSON schema key type and JSON mode, and + whose values are the JSON schema corresponding to that pair of inputs. (These schemas may have + JsonRef references to definitions that are defined in the second returned element.) + - The second element is a JSON schema containing all definitions referenced in the first returned + element, along with the optional title and description keys. + + """ + schema_generator_instance = schema_generator(by_alias=by_alias, ref_template=ref_template) + + inputs_ = [(key, mode, adapter.core_schema) for key, mode, adapter in inputs] + + json_schemas_map, definitions = schema_generator_instance.generate_definitions(inputs_) + + json_schema: dict[str, Any] = {} + if definitions: + json_schema['$defs'] = definitions + if title: + json_schema['title'] = title + if description: + json_schema['description'] = description + + return json_schemas_map, json_schema diff --git a/venv/lib/python3.10/site-packages/pydantic/types.py b/venv/lib/python3.10/site-packages/pydantic/types.py new file mode 100644 index 0000000000000000000000000000000000000000..f1025a5102a71dcc63cf0d50fbfde99c224194d4 --- /dev/null +++ b/venv/lib/python3.10/site-packages/pydantic/types.py @@ -0,0 +1,3004 @@ +"""The types module contains custom types used by pydantic.""" +from __future__ import annotations as _annotations + +import base64 +import dataclasses as _dataclasses +import re +from datetime import date, datetime +from decimal import Decimal +from enum import Enum +from pathlib import Path +from types import ModuleType +from typing import ( + TYPE_CHECKING, + Any, + Callable, + ClassVar, + Dict, + FrozenSet, + Generic, + Hashable, + Iterator, + List, + Pattern, + Set, + TypeVar, + Union, + cast, + get_args, + get_origin, +) +from uuid import UUID + +import annotated_types +from annotated_types import BaseMetadata, MaxLen, MinLen +from pydantic_core import CoreSchema, PydanticCustomError, core_schema +from typing_extensions import Annotated, Literal, Protocol, TypeAlias, TypeAliasType, deprecated + +from ._internal import ( + _core_utils, + _fields, + _internal_dataclass, + _typing_extra, + _utils, + _validators, +) +from ._migration import getattr_migration +from .annotated_handlers import GetCoreSchemaHandler, GetJsonSchemaHandler +from .errors import PydanticUserError +from .json_schema import JsonSchemaValue +from .warnings import PydanticDeprecatedSince20 + +__all__ = ( + 'Strict', + 'StrictStr', + 'conbytes', + 'conlist', + 'conset', + 'confrozenset', + 'constr', + 'ImportString', + 'conint', + 'PositiveInt', + 'NegativeInt', + 'NonNegativeInt', + 'NonPositiveInt', + 'confloat', + 'PositiveFloat', + 'NegativeFloat', + 'NonNegativeFloat', + 'NonPositiveFloat', + 'FiniteFloat', + 'condecimal', + 'UUID1', + 'UUID3', + 'UUID4', + 'UUID5', + 'FilePath', + 'DirectoryPath', + 'NewPath', + 'Json', + 'Secret', + 'SecretStr', + 'SecretBytes', + 'StrictBool', + 'StrictBytes', + 'StrictInt', + 'StrictFloat', + 'PaymentCardNumber', + 'ByteSize', + 'PastDate', + 'FutureDate', + 'PastDatetime', + 'FutureDatetime', + 'condate', + 'AwareDatetime', + 'NaiveDatetime', + 'AllowInfNan', + 'EncoderProtocol', + 'EncodedBytes', + 'EncodedStr', + 'Base64Encoder', + 'Base64Bytes', + 'Base64Str', + 
'Base64UrlBytes', + 'Base64UrlStr', + 'GetPydanticSchema', + 'StringConstraints', + 'Tag', + 'Discriminator', + 'JsonValue', + 'OnErrorOmit', +) + + +T = TypeVar('T') + + +@_dataclasses.dataclass +class Strict(_fields.PydanticMetadata, BaseMetadata): + """Usage docs: https://docs.pydantic.dev/2.7/concepts/strict_mode/#strict-mode-with-annotated-strict + + A field metadata class to indicate that a field should be validated in strict mode. + + Attributes: + strict: Whether to validate the field in strict mode. + + Example: + ```python + from typing_extensions import Annotated + + from pydantic.types import Strict + + StrictBool = Annotated[bool, Strict()] + ``` + """ + + strict: bool = True + + def __hash__(self) -> int: + return hash(self.strict) + + +# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ BOOLEAN TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +StrictBool = Annotated[bool, Strict()] +"""A boolean that must be either ``True`` or ``False``.""" + +# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ INTEGER TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +def conint( + *, + strict: bool | None = None, + gt: int | None = None, + ge: int | None = None, + lt: int | None = None, + le: int | None = None, + multiple_of: int | None = None, +) -> type[int]: + """ + !!! warning "Discouraged" + This function is **discouraged** in favor of using + [`Annotated`](https://docs.python.org/3/library/typing.html#typing.Annotated) with + [`Field`][pydantic.fields.Field] instead. + + This function will be **deprecated** in Pydantic 3.0. + + The reason is that `conint` returns a type, which doesn't play well with static analysis tools. + + === ":x: Don't do this" + ```py + from pydantic import BaseModel, conint + + class Foo(BaseModel): + bar: conint(strict=True, gt=0) + ``` + + === ":white_check_mark: Do this" + ```py + from typing_extensions import Annotated + + from pydantic import BaseModel, Field + + class Foo(BaseModel): + bar: Annotated[int, Field(strict=True, gt=0)] + ``` + + A wrapper around `int` that allows for additional constraints. + + Args: + strict: Whether to validate the integer in strict mode. Defaults to `None`. + gt: The value must be greater than this. + ge: The value must be greater than or equal to this. + lt: The value must be less than this. + le: The value must be less than or equal to this. + multiple_of: The value must be a multiple of this. + + Returns: + The wrapped integer type. + + ```py + from pydantic import BaseModel, ValidationError, conint + + class ConstrainedExample(BaseModel): + constrained_int: conint(gt=1) + + m = ConstrainedExample(constrained_int=2) + print(repr(m)) + #> ConstrainedExample(constrained_int=2) + + try: + ConstrainedExample(constrained_int=0) + except ValidationError as e: + print(e.errors()) + ''' + [ + { + 'type': 'greater_than', + 'loc': ('constrained_int',), + 'msg': 'Input should be greater than 1', + 'input': 0, + 'ctx': {'gt': 1}, + 'url': 'https://errors.pydantic.dev/2/v/greater_than', + } + ] + ''' + ``` + + """ # noqa: D212 + return Annotated[ + int, + Strict(strict) if strict is not None else None, + annotated_types.Interval(gt=gt, ge=ge, lt=lt, le=le), + annotated_types.MultipleOf(multiple_of) if multiple_of is not None else None, + ] + + +PositiveInt = Annotated[int, annotated_types.Gt(0)] +"""An integer that must be greater than zero. 
+ +```py +from pydantic import BaseModel, PositiveInt, ValidationError + +class Model(BaseModel): + positive_int: PositiveInt + +m = Model(positive_int=1) +print(repr(m)) +#> Model(positive_int=1) + +try: + Model(positive_int=-1) +except ValidationError as e: + print(e.errors()) + ''' + [ + { + 'type': 'greater_than', + 'loc': ('positive_int',), + 'msg': 'Input should be greater than 0', + 'input': -1, + 'ctx': {'gt': 0}, + 'url': 'https://errors.pydantic.dev/2/v/greater_than', + } + ] + ''' +``` +""" +NegativeInt = Annotated[int, annotated_types.Lt(0)] +"""An integer that must be less than zero. + +```py +from pydantic import BaseModel, NegativeInt, ValidationError + +class Model(BaseModel): + negative_int: NegativeInt + +m = Model(negative_int=-1) +print(repr(m)) +#> Model(negative_int=-1) + +try: + Model(negative_int=1) +except ValidationError as e: + print(e.errors()) + ''' + [ + { + 'type': 'less_than', + 'loc': ('negative_int',), + 'msg': 'Input should be less than 0', + 'input': 1, + 'ctx': {'lt': 0}, + 'url': 'https://errors.pydantic.dev/2/v/less_than', + } + ] + ''' +``` +""" +NonPositiveInt = Annotated[int, annotated_types.Le(0)] +"""An integer that must be less than or equal to zero. + +```py +from pydantic import BaseModel, NonPositiveInt, ValidationError + +class Model(BaseModel): + non_positive_int: NonPositiveInt + +m = Model(non_positive_int=0) +print(repr(m)) +#> Model(non_positive_int=0) + +try: + Model(non_positive_int=1) +except ValidationError as e: + print(e.errors()) + ''' + [ + { + 'type': 'less_than_equal', + 'loc': ('non_positive_int',), + 'msg': 'Input should be less than or equal to 0', + 'input': 1, + 'ctx': {'le': 0}, + 'url': 'https://errors.pydantic.dev/2/v/less_than_equal', + } + ] + ''' +``` +""" +NonNegativeInt = Annotated[int, annotated_types.Ge(0)] +"""An integer that must be greater than or equal to zero. + +```py +from pydantic import BaseModel, NonNegativeInt, ValidationError + +class Model(BaseModel): + non_negative_int: NonNegativeInt + +m = Model(non_negative_int=0) +print(repr(m)) +#> Model(non_negative_int=0) + +try: + Model(non_negative_int=-1) +except ValidationError as e: + print(e.errors()) + ''' + [ + { + 'type': 'greater_than_equal', + 'loc': ('non_negative_int',), + 'msg': 'Input should be greater than or equal to 0', + 'input': -1, + 'ctx': {'ge': 0}, + 'url': 'https://errors.pydantic.dev/2/v/greater_than_equal', + } + ] + ''' +``` +""" +StrictInt = Annotated[int, Strict()] +"""An integer that must be validated in strict mode. + +```py +from pydantic import BaseModel, StrictInt, ValidationError + +class StrictIntModel(BaseModel): + strict_int: StrictInt + +try: + StrictIntModel(strict_int=3.14159) +except ValidationError as e: + print(e) + ''' + 1 validation error for StrictIntModel + strict_int + Input should be a valid integer [type=int_type, input_value=3.14159, input_type=float] + ''' +``` +""" + +# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ FLOAT TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +@_dataclasses.dataclass +class AllowInfNan(_fields.PydanticMetadata): + """A field metadata class to indicate that a field should allow ``-inf``, ``inf``, and ``nan``.""" + + allow_inf_nan: bool = True + + def __hash__(self) -> int: + return hash(self.allow_inf_nan) + + +def confloat( + *, + strict: bool | None = None, + gt: float | None = None, + ge: float | None = None, + lt: float | None = None, + le: float | None = None, + multiple_of: float | None = None, + allow_inf_nan: bool | None = None, +) -> type[float]: + """ + !!! 
warning "Discouraged" + This function is **discouraged** in favor of using + [`Annotated`](https://docs.python.org/3/library/typing.html#typing.Annotated) with + [`Field`][pydantic.fields.Field] instead. + + This function will be **deprecated** in Pydantic 3.0. + + The reason is that `confloat` returns a type, which doesn't play well with static analysis tools. + + === ":x: Don't do this" + ```py + from pydantic import BaseModel, confloat + + class Foo(BaseModel): + bar: confloat(strict=True, gt=0) + ``` + + === ":white_check_mark: Do this" + ```py + from typing_extensions import Annotated + + from pydantic import BaseModel, Field + + class Foo(BaseModel): + bar: Annotated[float, Field(strict=True, gt=0)] + ``` + + A wrapper around `float` that allows for additional constraints. + + Args: + strict: Whether to validate the float in strict mode. + gt: The value must be greater than this. + ge: The value must be greater than or equal to this. + lt: The value must be less than this. + le: The value must be less than or equal to this. + multiple_of: The value must be a multiple of this. + allow_inf_nan: Whether to allow `-inf`, `inf`, and `nan`. + + Returns: + The wrapped float type. + + ```py + from pydantic import BaseModel, ValidationError, confloat + + class ConstrainedExample(BaseModel): + constrained_float: confloat(gt=1.0) + + m = ConstrainedExample(constrained_float=1.1) + print(repr(m)) + #> ConstrainedExample(constrained_float=1.1) + + try: + ConstrainedExample(constrained_float=0.9) + except ValidationError as e: + print(e.errors()) + ''' + [ + { + 'type': 'greater_than', + 'loc': ('constrained_float',), + 'msg': 'Input should be greater than 1', + 'input': 0.9, + 'ctx': {'gt': 1.0}, + 'url': 'https://errors.pydantic.dev/2/v/greater_than', + } + ] + ''' + ``` + """ # noqa: D212 + return Annotated[ + float, + Strict(strict) if strict is not None else None, + annotated_types.Interval(gt=gt, ge=ge, lt=lt, le=le), + annotated_types.MultipleOf(multiple_of) if multiple_of is not None else None, + AllowInfNan(allow_inf_nan) if allow_inf_nan is not None else None, + ] + + +PositiveFloat = Annotated[float, annotated_types.Gt(0)] +"""A float that must be greater than zero. + +```py +from pydantic import BaseModel, PositiveFloat, ValidationError + +class Model(BaseModel): + positive_float: PositiveFloat + +m = Model(positive_float=1.0) +print(repr(m)) +#> Model(positive_float=1.0) + +try: + Model(positive_float=-1.0) +except ValidationError as e: + print(e.errors()) + ''' + [ + { + 'type': 'greater_than', + 'loc': ('positive_float',), + 'msg': 'Input should be greater than 0', + 'input': -1.0, + 'ctx': {'gt': 0.0}, + 'url': 'https://errors.pydantic.dev/2/v/greater_than', + } + ] + ''' +``` +""" +NegativeFloat = Annotated[float, annotated_types.Lt(0)] +"""A float that must be less than zero. + +```py +from pydantic import BaseModel, NegativeFloat, ValidationError + +class Model(BaseModel): + negative_float: NegativeFloat + +m = Model(negative_float=-1.0) +print(repr(m)) +#> Model(negative_float=-1.0) + +try: + Model(negative_float=1.0) +except ValidationError as e: + print(e.errors()) + ''' + [ + { + 'type': 'less_than', + 'loc': ('negative_float',), + 'msg': 'Input should be less than 0', + 'input': 1.0, + 'ctx': {'lt': 0.0}, + 'url': 'https://errors.pydantic.dev/2/v/less_than', + } + ] + ''' +``` +""" +NonPositiveFloat = Annotated[float, annotated_types.Le(0)] +"""A float that must be less than or equal to zero. 
+ +```py +from pydantic import BaseModel, NonPositiveFloat, ValidationError + +class Model(BaseModel): + non_positive_float: NonPositiveFloat + +m = Model(non_positive_float=0.0) +print(repr(m)) +#> Model(non_positive_float=0.0) + +try: + Model(non_positive_float=1.0) +except ValidationError as e: + print(e.errors()) + ''' + [ + { + 'type': 'less_than_equal', + 'loc': ('non_positive_float',), + 'msg': 'Input should be less than or equal to 0', + 'input': 1.0, + 'ctx': {'le': 0.0}, + 'url': 'https://errors.pydantic.dev/2/v/less_than_equal', + } + ] + ''' +``` +""" +NonNegativeFloat = Annotated[float, annotated_types.Ge(0)] +"""A float that must be greater than or equal to zero. + +```py +from pydantic import BaseModel, NonNegativeFloat, ValidationError + +class Model(BaseModel): + non_negative_float: NonNegativeFloat + +m = Model(non_negative_float=0.0) +print(repr(m)) +#> Model(non_negative_float=0.0) + +try: + Model(non_negative_float=-1.0) +except ValidationError as e: + print(e.errors()) + ''' + [ + { + 'type': 'greater_than_equal', + 'loc': ('non_negative_float',), + 'msg': 'Input should be greater than or equal to 0', + 'input': -1.0, + 'ctx': {'ge': 0.0}, + 'url': 'https://errors.pydantic.dev/2/v/greater_than_equal', + } + ] + ''' +``` +""" +StrictFloat = Annotated[float, Strict(True)] +"""A float that must be validated in strict mode. + +```py +from pydantic import BaseModel, StrictFloat, ValidationError + +class StrictFloatModel(BaseModel): + strict_float: StrictFloat + +try: + StrictFloatModel(strict_float='1.0') +except ValidationError as e: + print(e) + ''' + 1 validation error for StrictFloatModel + strict_float + Input should be a valid number [type=float_type, input_value='1.0', input_type=str] + ''' +``` +""" +FiniteFloat = Annotated[float, AllowInfNan(False)] +"""A float that must be finite (not ``-inf``, ``inf``, or ``nan``). + +```py +from pydantic import BaseModel, FiniteFloat + +class Model(BaseModel): + finite: FiniteFloat + +m = Model(finite=1.0) +print(m) +#> finite=1.0 +``` +""" + + +# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ BYTES TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +def conbytes( + *, + min_length: int | None = None, + max_length: int | None = None, + strict: bool | None = None, +) -> type[bytes]: + """A wrapper around `bytes` that allows for additional constraints. + + Args: + min_length: The minimum length of the bytes. + max_length: The maximum length of the bytes. + strict: Whether to validate the bytes in strict mode. + + Returns: + The wrapped bytes type. + """ + return Annotated[ + bytes, + Strict(strict) if strict is not None else None, + annotated_types.Len(min_length or 0, max_length), + ] + + +StrictBytes = Annotated[bytes, Strict()] +"""A bytes that must be validated in strict mode.""" + + +# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ STRING TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +@_dataclasses.dataclass(frozen=True) +class StringConstraints(annotated_types.GroupedMetadata): + """Usage docs: https://docs.pydantic.dev/2.7/concepts/fields/#string-constraints + + Apply constraints to `str` types. + + Attributes: + strip_whitespace: Whether to strip whitespace from the string. + to_upper: Whether to convert the string to uppercase. + to_lower: Whether to convert the string to lowercase. + strict: Whether to validate the string in strict mode. + min_length: The minimum length of the string. + max_length: The maximum length of the string. + pattern: A regex pattern that the string must match. 
+ """ + + strip_whitespace: bool | None = None + to_upper: bool | None = None + to_lower: bool | None = None + strict: bool | None = None + min_length: int | None = None + max_length: int | None = None + pattern: str | Pattern[str] | None = None + + def __iter__(self) -> Iterator[BaseMetadata]: + if self.min_length is not None: + yield MinLen(self.min_length) + if self.max_length is not None: + yield MaxLen(self.max_length) + if self.strict is not None: + yield Strict() + if ( + self.strip_whitespace is not None + or self.pattern is not None + or self.to_lower is not None + or self.to_upper is not None + ): + yield _fields.pydantic_general_metadata( + strip_whitespace=self.strip_whitespace, + to_upper=self.to_upper, + to_lower=self.to_lower, + pattern=self.pattern.pattern if isinstance(self.pattern, Pattern) else self.pattern, + ) + + +def constr( + *, + strip_whitespace: bool | None = None, + to_upper: bool | None = None, + to_lower: bool | None = None, + strict: bool | None = None, + min_length: int | None = None, + max_length: int | None = None, + pattern: str | Pattern[str] | None = None, +) -> type[str]: + """ + !!! warning "Discouraged" + This function is **discouraged** in favor of using + [`Annotated`](https://docs.python.org/3/library/typing.html#typing.Annotated) with + [`StringConstraints`][pydantic.types.StringConstraints] instead. + + This function will be **deprecated** in Pydantic 3.0. + + The reason is that `constr` returns a type, which doesn't play well with static analysis tools. + + === ":x: Don't do this" + ```py + from pydantic import BaseModel, constr + + class Foo(BaseModel): + bar: constr(strip_whitespace=True, to_upper=True, pattern=r'^[A-Z]+$') + ``` + + === ":white_check_mark: Do this" + ```py + from typing_extensions import Annotated + + from pydantic import BaseModel, StringConstraints + + class Foo(BaseModel): + bar: Annotated[str, StringConstraints(strip_whitespace=True, to_upper=True, pattern=r'^[A-Z]+$')] + ``` + + A wrapper around `str` that allows for additional constraints. + + ```py + from pydantic import BaseModel, constr + + class Foo(BaseModel): + bar: constr(strip_whitespace=True, to_upper=True, pattern=r'^[A-Z]+$') + + + foo = Foo(bar=' hello ') + print(foo) + #> bar='HELLO' + ``` + + Args: + strip_whitespace: Whether to remove leading and trailing whitespace. + to_upper: Whether to turn all characters to uppercase. + to_lower: Whether to turn all characters to lowercase. + strict: Whether to validate the string in strict mode. + min_length: The minimum length of the string. + max_length: The maximum length of the string. + pattern: A regex pattern to validate the string against. + + Returns: + The wrapped string type. + """ # noqa: D212 + return Annotated[ + str, + StringConstraints( + strip_whitespace=strip_whitespace, + to_upper=to_upper, + to_lower=to_lower, + strict=strict, + min_length=min_length, + max_length=max_length, + pattern=pattern, + ), + ] + + +StrictStr = Annotated[str, Strict()] +"""A string that must be validated in strict mode.""" + + +# ~~~~~~~~~~~~~~~~~~~~~~~~~~~ COLLECTION TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +HashableItemType = TypeVar('HashableItemType', bound=Hashable) + + +def conset( + item_type: type[HashableItemType], *, min_length: int | None = None, max_length: int | None = None +) -> type[set[HashableItemType]]: + """A wrapper around `typing.Set` that allows for additional constraints. + + Args: + item_type: The type of the items in the set. + min_length: The minimum length of the set. 
+ max_length: The maximum length of the set. + + Returns: + The wrapped set type. + """ + return Annotated[Set[item_type], annotated_types.Len(min_length or 0, max_length)] + + +def confrozenset( + item_type: type[HashableItemType], *, min_length: int | None = None, max_length: int | None = None +) -> type[frozenset[HashableItemType]]: + """A wrapper around `typing.FrozenSet` that allows for additional constraints. + + Args: + item_type: The type of the items in the frozenset. + min_length: The minimum length of the frozenset. + max_length: The maximum length of the frozenset. + + Returns: + The wrapped frozenset type. + """ + return Annotated[FrozenSet[item_type], annotated_types.Len(min_length or 0, max_length)] + + +AnyItemType = TypeVar('AnyItemType') + + +def conlist( + item_type: type[AnyItemType], + *, + min_length: int | None = None, + max_length: int | None = None, + unique_items: bool | None = None, +) -> type[list[AnyItemType]]: + """A wrapper around typing.List that adds validation. + + Args: + item_type: The type of the items in the list. + min_length: The minimum length of the list. Defaults to None. + max_length: The maximum length of the list. Defaults to None. + unique_items: Whether the items in the list must be unique. Defaults to None. + !!! warning Deprecated + The `unique_items` parameter is deprecated, use `Set` instead. + See [this issue](https://github.com/pydantic/pydantic-core/issues/296) for more details. + + Returns: + The wrapped list type. + """ + if unique_items is not None: + raise PydanticUserError( + ( + '`unique_items` is removed, use `Set` instead' + '(this feature is discussed in https://github.com/pydantic/pydantic-core/issues/296)' + ), + code='removed-kwargs', + ) + return Annotated[List[item_type], annotated_types.Len(min_length or 0, max_length)] + + +# ~~~~~~~~~~~~~~~~~~~~~~~~~~ IMPORT STRING TYPE ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +AnyType = TypeVar('AnyType') +if TYPE_CHECKING: + ImportString = Annotated[AnyType, ...] +else: + + class ImportString: + """A type that can be used to import a type from a string. + + `ImportString` expects a string and loads the Python object importable at that dotted path. + Attributes of modules may be separated from the module by `:` or `.`, e.g. if `'math:cos'` was provided, + the resulting field value would be the function`cos`. If a `.` is used and both an attribute and submodule + are present at the same path, the module will be preferred. + + On model instantiation, pointers will be evaluated and imported. There is + some nuance to this behavior, demonstrated in the examples below. 
+ + **Good behavior:** + ```py + from math import cos + + from pydantic import BaseModel, Field, ImportString, ValidationError + + + class ImportThings(BaseModel): + obj: ImportString + + + # A string value will cause an automatic import + my_cos = ImportThings(obj='math.cos') + + # You can use the imported function as you would expect + cos_of_0 = my_cos.obj(0) + assert cos_of_0 == 1 + + + # A string whose value cannot be imported will raise an error + try: + ImportThings(obj='foo.bar') + except ValidationError as e: + print(e) + ''' + 1 validation error for ImportThings + obj + Invalid python path: No module named 'foo.bar' [type=import_error, input_value='foo.bar', input_type=str] + ''' + + + # Actual python objects can be assigned as well + my_cos = ImportThings(obj=cos) + my_cos_2 = ImportThings(obj='math.cos') + my_cos_3 = ImportThings(obj='math:cos') + assert my_cos == my_cos_2 == my_cos_3 + + + # You can set default field value either as Python object: + class ImportThingsDefaultPyObj(BaseModel): + obj: ImportString = math.cos + + + # or as a string value (but only if used with `validate_default=True`) + class ImportThingsDefaultString(BaseModel): + obj: ImportString = Field(default='math.cos', validate_default=True) + + + my_cos_default1 = ImportThingsDefaultPyObj() + my_cos_default2 = ImportThingsDefaultString() + assert my_cos_default1.obj == my_cos_default2.obj == math.cos + + + # note: this will not work! + class ImportThingsMissingValidateDefault(BaseModel): + obj: ImportString = 'math.cos' + + my_cos_default3 = ImportThingsMissingValidateDefault() + assert my_cos_default3.obj == 'math.cos' # just string, not evaluated + ``` + + Serializing an `ImportString` type to json is also possible. + + ```py + from pydantic import BaseModel, ImportString + + + class ImportThings(BaseModel): + obj: ImportString + + + # Create an instance + m = ImportThings(obj='math.cos') + print(m) + #> obj= + print(m.model_dump_json()) + #> {"obj":"math.cos"} + ``` + """ + + @classmethod + def __class_getitem__(cls, item: AnyType) -> AnyType: + return Annotated[item, cls()] + + @classmethod + def __get_pydantic_core_schema__( + cls, source: type[Any], handler: GetCoreSchemaHandler + ) -> core_schema.CoreSchema: + serializer = core_schema.plain_serializer_function_ser_schema(cls._serialize, when_used='json') + if cls is source: + # Treat bare usage of ImportString (`schema is None`) as the same as ImportString[Any] + return core_schema.no_info_plain_validator_function( + function=_validators.import_string, serialization=serializer + ) + else: + return core_schema.no_info_before_validator_function( + function=_validators.import_string, schema=handler(source), serialization=serializer + ) + + @staticmethod + def _serialize(v: Any) -> str: + if isinstance(v, ModuleType): + return v.__name__ + elif hasattr(v, '__module__') and hasattr(v, '__name__'): + return f'{v.__module__}.{v.__name__}' + else: + return v + + def __repr__(self) -> str: + return 'ImportString' + + +# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ DECIMAL TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +def condecimal( + *, + strict: bool | None = None, + gt: int | Decimal | None = None, + ge: int | Decimal | None = None, + lt: int | Decimal | None = None, + le: int | Decimal | None = None, + multiple_of: int | Decimal | None = None, + max_digits: int | None = None, + decimal_places: int | None = None, + allow_inf_nan: bool | None = None, +) -> type[Decimal]: + """ + !!! 
warning "Discouraged" + This function is **discouraged** in favor of using + [`Annotated`](https://docs.python.org/3/library/typing.html#typing.Annotated) with + [`Field`][pydantic.fields.Field] instead. + + This function will be **deprecated** in Pydantic 3.0. + + The reason is that `condecimal` returns a type, which doesn't play well with static analysis tools. + + === ":x: Don't do this" + ```py + from pydantic import BaseModel, condecimal + + class Foo(BaseModel): + bar: condecimal(strict=True, allow_inf_nan=True) + ``` + + === ":white_check_mark: Do this" + ```py + from decimal import Decimal + + from typing_extensions import Annotated + + from pydantic import BaseModel, Field + + class Foo(BaseModel): + bar: Annotated[Decimal, Field(strict=True, allow_inf_nan=True)] + ``` + + A wrapper around Decimal that adds validation. + + Args: + strict: Whether to validate the value in strict mode. Defaults to `None`. + gt: The value must be greater than this. Defaults to `None`. + ge: The value must be greater than or equal to this. Defaults to `None`. + lt: The value must be less than this. Defaults to `None`. + le: The value must be less than or equal to this. Defaults to `None`. + multiple_of: The value must be a multiple of this. Defaults to `None`. + max_digits: The maximum number of digits. Defaults to `None`. + decimal_places: The number of decimal places. Defaults to `None`. + allow_inf_nan: Whether to allow infinity and NaN. Defaults to `None`. + + ```py + from decimal import Decimal + + from pydantic import BaseModel, ValidationError, condecimal + + class ConstrainedExample(BaseModel): + constrained_decimal: condecimal(gt=Decimal('1.0')) + + m = ConstrainedExample(constrained_decimal=Decimal('1.1')) + print(repr(m)) + #> ConstrainedExample(constrained_decimal=Decimal('1.1')) + + try: + ConstrainedExample(constrained_decimal=Decimal('0.9')) + except ValidationError as e: + print(e.errors()) + ''' + [ + { + 'type': 'greater_than', + 'loc': ('constrained_decimal',), + 'msg': 'Input should be greater than 1.0', + 'input': Decimal('0.9'), + 'ctx': {'gt': Decimal('1.0')}, + 'url': 'https://errors.pydantic.dev/2/v/greater_than', + } + ] + ''' + ``` + """ # noqa: D212 + return Annotated[ + Decimal, + Strict(strict) if strict is not None else None, + annotated_types.Interval(gt=gt, ge=ge, lt=lt, le=le), + annotated_types.MultipleOf(multiple_of) if multiple_of is not None else None, + _fields.pydantic_general_metadata(max_digits=max_digits, decimal_places=decimal_places), + AllowInfNan(allow_inf_nan) if allow_inf_nan is not None else None, + ] + + +# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ UUID TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +@_dataclasses.dataclass(**_internal_dataclass.slots_true) +class UuidVersion: + """A field metadata class to indicate a [UUID](https://docs.python.org/3/library/uuid.html) version.""" + + uuid_version: Literal[1, 3, 4, 5] + + def __get_pydantic_json_schema__( + self, core_schema: core_schema.CoreSchema, handler: GetJsonSchemaHandler + ) -> JsonSchemaValue: + field_schema = handler(core_schema) + field_schema.pop('anyOf', None) # remove the bytes/str union + field_schema.update(type='string', format=f'uuid{self.uuid_version}') + return field_schema + + def __get_pydantic_core_schema__(self, source: Any, handler: GetCoreSchemaHandler) -> core_schema.CoreSchema: + if isinstance(self, source): + # used directly as a type + return core_schema.uuid_schema(version=self.uuid_version) + else: + # update existing schema with self.uuid_version + schema = handler(source) + 
_check_annotated_type(schema['type'], 'uuid', self.__class__.__name__) + schema['version'] = self.uuid_version # type: ignore + return schema + + def __hash__(self) -> int: + return hash(type(self.uuid_version)) + + +UUID1 = Annotated[UUID, UuidVersion(1)] +"""A [UUID](https://docs.python.org/3/library/uuid.html) that must be version 1. + +```py +import uuid + +from pydantic import UUID1, BaseModel + +class Model(BaseModel): + uuid1: UUID1 + +Model(uuid1=uuid.uuid1()) +``` +""" +UUID3 = Annotated[UUID, UuidVersion(3)] +"""A [UUID](https://docs.python.org/3/library/uuid.html) that must be version 3. + +```py +import uuid + +from pydantic import UUID3, BaseModel + +class Model(BaseModel): + uuid3: UUID3 + +Model(uuid3=uuid.uuid3(uuid.NAMESPACE_DNS, 'pydantic.org')) +``` +""" +UUID4 = Annotated[UUID, UuidVersion(4)] +"""A [UUID](https://docs.python.org/3/library/uuid.html) that must be version 4. + +```py +import uuid + +from pydantic import UUID4, BaseModel + +class Model(BaseModel): + uuid4: UUID4 + +Model(uuid4=uuid.uuid4()) +``` +""" +UUID5 = Annotated[UUID, UuidVersion(5)] +"""A [UUID](https://docs.python.org/3/library/uuid.html) that must be version 5. + +```py +import uuid + +from pydantic import UUID5, BaseModel + +class Model(BaseModel): + uuid5: UUID5 + +Model(uuid5=uuid.uuid5(uuid.NAMESPACE_DNS, 'pydantic.org')) +``` +""" + + +# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ PATH TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +@_dataclasses.dataclass +class PathType: + path_type: Literal['file', 'dir', 'new'] + + def __get_pydantic_json_schema__( + self, core_schema: core_schema.CoreSchema, handler: GetJsonSchemaHandler + ) -> JsonSchemaValue: + field_schema = handler(core_schema) + format_conversion = {'file': 'file-path', 'dir': 'directory-path'} + field_schema.update(format=format_conversion.get(self.path_type, 'path'), type='string') + return field_schema + + def __get_pydantic_core_schema__(self, source: Any, handler: GetCoreSchemaHandler) -> core_schema.CoreSchema: + function_lookup = { + 'file': cast(core_schema.WithInfoValidatorFunction, self.validate_file), + 'dir': cast(core_schema.WithInfoValidatorFunction, self.validate_directory), + 'new': cast(core_schema.WithInfoValidatorFunction, self.validate_new), + } + + return core_schema.with_info_after_validator_function( + function_lookup[self.path_type], + handler(source), + ) + + @staticmethod + def validate_file(path: Path, _: core_schema.ValidationInfo) -> Path: + if path.is_file(): + return path + else: + raise PydanticCustomError('path_not_file', 'Path does not point to a file') + + @staticmethod + def validate_directory(path: Path, _: core_schema.ValidationInfo) -> Path: + if path.is_dir(): + return path + else: + raise PydanticCustomError('path_not_directory', 'Path does not point to a directory') + + @staticmethod + def validate_new(path: Path, _: core_schema.ValidationInfo) -> Path: + if path.exists(): + raise PydanticCustomError('path_exists', 'Path already exists') + elif not path.parent.exists(): + raise PydanticCustomError('parent_does_not_exist', 'Parent directory does not exist') + else: + return path + + def __hash__(self) -> int: + return hash(type(self.path_type)) + + +FilePath = Annotated[Path, PathType('file')] +"""A path that must point to a file. 
+ +```py +from pathlib import Path + +from pydantic import BaseModel, FilePath, ValidationError + +class Model(BaseModel): + f: FilePath + +path = Path('text.txt') +path.touch() +m = Model(f='text.txt') +print(m.model_dump()) +#> {'f': PosixPath('text.txt')} +path.unlink() + +path = Path('directory') +path.mkdir(exist_ok=True) +try: + Model(f='directory') # directory +except ValidationError as e: + print(e) + ''' + 1 validation error for Model + f + Path does not point to a file [type=path_not_file, input_value='directory', input_type=str] + ''' +path.rmdir() + +try: + Model(f='not-exists-file') +except ValidationError as e: + print(e) + ''' + 1 validation error for Model + f + Path does not point to a file [type=path_not_file, input_value='not-exists-file', input_type=str] + ''' +``` +""" +DirectoryPath = Annotated[Path, PathType('dir')] +"""A path that must point to a directory. + +```py +from pathlib import Path + +from pydantic import BaseModel, DirectoryPath, ValidationError + +class Model(BaseModel): + f: DirectoryPath + +path = Path('directory/') +path.mkdir() +m = Model(f='directory/') +print(m.model_dump()) +#> {'f': PosixPath('directory')} +path.rmdir() + +path = Path('file.txt') +path.touch() +try: + Model(f='file.txt') # file +except ValidationError as e: + print(e) + ''' + 1 validation error for Model + f + Path does not point to a directory [type=path_not_directory, input_value='file.txt', input_type=str] + ''' +path.unlink() + +try: + Model(f='not-exists-directory') +except ValidationError as e: + print(e) + ''' + 1 validation error for Model + f + Path does not point to a directory [type=path_not_directory, input_value='not-exists-directory', input_type=str] + ''' +``` +""" +NewPath = Annotated[Path, PathType('new')] +"""A path for a new file or directory that must not already exist.""" + + +# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ JSON TYPE ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +if TYPE_CHECKING: + # Json[list[str]] will be recognized by type checkers as list[str] + Json = Annotated[AnyType, ...] + +else: + + class Json: + """A special type wrapper which loads JSON before parsing. 
+ + You can use the `Json` data type to make Pydantic first load a raw JSON string before + validating the loaded data into the parametrized type: + + ```py + from typing import Any, List + + from pydantic import BaseModel, Json, ValidationError + + + class AnyJsonModel(BaseModel): + json_obj: Json[Any] + + + class ConstrainedJsonModel(BaseModel): + json_obj: Json[List[int]] + + + print(AnyJsonModel(json_obj='{"b": 1}')) + #> json_obj={'b': 1} + print(ConstrainedJsonModel(json_obj='[1, 2, 3]')) + #> json_obj=[1, 2, 3] + + try: + ConstrainedJsonModel(json_obj=12) + except ValidationError as e: + print(e) + ''' + 1 validation error for ConstrainedJsonModel + json_obj + JSON input should be string, bytes or bytearray [type=json_type, input_value=12, input_type=int] + ''' + + try: + ConstrainedJsonModel(json_obj='[a, b]') + except ValidationError as e: + print(e) + ''' + 1 validation error for ConstrainedJsonModel + json_obj + Invalid JSON: expected value at line 1 column 2 [type=json_invalid, input_value='[a, b]', input_type=str] + ''' + + try: + ConstrainedJsonModel(json_obj='["a", "b"]') + except ValidationError as e: + print(e) + ''' + 2 validation errors for ConstrainedJsonModel + json_obj.0 + Input should be a valid integer, unable to parse string as an integer [type=int_parsing, input_value='a', input_type=str] + json_obj.1 + Input should be a valid integer, unable to parse string as an integer [type=int_parsing, input_value='b', input_type=str] + ''' + ``` + + When you dump the model using `model_dump` or `model_dump_json`, the dumped value will be the result of validation, + not the original JSON string. However, you can use the argument `round_trip=True` to get the original JSON string back: + + ```py + from typing import List + + from pydantic import BaseModel, Json + + + class ConstrainedJsonModel(BaseModel): + json_obj: Json[List[int]] + + + print(ConstrainedJsonModel(json_obj='[1, 2, 3]').model_dump_json()) + #> {"json_obj":[1,2,3]} + print( + ConstrainedJsonModel(json_obj='[1, 2, 3]').model_dump_json(round_trip=True) + ) + #> {"json_obj":"[1,2,3]"} + ``` + """ + + @classmethod + def __class_getitem__(cls, item: AnyType) -> AnyType: + return Annotated[item, cls()] + + @classmethod + def __get_pydantic_core_schema__(cls, source: Any, handler: GetCoreSchemaHandler) -> core_schema.CoreSchema: + if cls is source: + return core_schema.json_schema(None) + else: + return core_schema.json_schema(handler(source)) + + def __repr__(self) -> str: + return 'Json' + + def __hash__(self) -> int: + return hash(type(self)) + + def __eq__(self, other: Any) -> bool: + return type(other) == type(self) + + +# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ SECRET TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +SecretType = TypeVar('SecretType') + + +class _SecretBase(Generic[SecretType]): + def __init__(self, secret_value: SecretType) -> None: + self._secret_value: SecretType = secret_value + + def get_secret_value(self) -> SecretType: + """Get the secret value. + + Returns: + The secret value. 
+ """ + return self._secret_value + + def __eq__(self, other: Any) -> bool: + return isinstance(other, self.__class__) and self.get_secret_value() == other.get_secret_value() + + def __hash__(self) -> int: + return hash(self.get_secret_value()) + + def __str__(self) -> str: + return str(self._display()) + + def __repr__(self) -> str: + return f'{self.__class__.__name__}({self._display()!r})' + + def _display(self) -> str | bytes: + raise NotImplementedError + + +class Secret(_SecretBase[SecretType]): + """A generic base class used for defining a field with sensitive information that you do not want to be visible in logging or tracebacks. + + You may either directly parametrize `Secret` with a type, or subclass from `Secret` with a parametrized type. The benefit of subclassing + is that you can define a custom `_display` method, which will be used for `repr()` and `str()` methods. The examples below demonstrate both + ways of using `Secret` to create a new secret type. + + 1. Directly parametrizing `Secret` with a type: + + ```py + from pydantic import BaseModel, Secret + + SecretBool = Secret[bool] + + class Model(BaseModel): + secret_bool: SecretBool + + m = Model(secret_bool=True) + print(m.model_dump()) + #> {'secret_bool': Secret('**********')} + + print(m.model_dump_json()) + #> {"secret_bool":"**********"} + + print(m.secret_bool.get_secret_value()) + #> True + ``` + + 2. Subclassing from parametrized `Secret`: + + ```py + from datetime import date + + from pydantic import BaseModel, Secret + + class SecretDate(Secret[date]): + def _display(self) -> str: + return '****/**/**' + + class Model(BaseModel): + secret_date: SecretDate + + m = Model(secret_date=date(2022, 1, 1)) + print(m.model_dump()) + #> {'secret_date': SecretDate('****/**/**')} + + print(m.model_dump_json()) + #> {"secret_date":"****/**/**"} + + print(m.secret_date.get_secret_value()) + #> 2022-01-01 + ``` + + The value returned by the `_display` method will be used for `repr()` and `str()`. + """ + + def _display(self) -> str | bytes: + return '**********' if self.get_secret_value() else '' + + @classmethod + def __get_pydantic_core_schema__(cls, source: type[Any], handler: GetCoreSchemaHandler) -> core_schema.CoreSchema: + inner_type = None + # if origin_type is Secret, then cls is a GenericAlias, and we can extract the inner type directly + origin_type = get_origin(source) + if origin_type is not None: + inner_type = get_args(source)[0] + # otherwise, we need to get the inner type from the base class + else: + bases = getattr(cls, '__orig_bases__', getattr(cls, '__bases__', [])) + for base in bases: + if get_origin(base) is Secret: + inner_type = get_args(base)[0] + if bases == [] or inner_type is None: + raise TypeError( + f"Can't get secret type from {cls.__name__}. " + 'Please use Secret[], or subclass from Secret[] instead.' 
+ ) + + inner_schema = handler.generate_schema(inner_type) # type: ignore + + def validate_secret_value(value, handler) -> Secret[SecretType]: + if isinstance(value, Secret): + value = value.get_secret_value() + validated_inner = handler(value) + return cls(validated_inner) + + def serialize(value: Secret[SecretType], info: core_schema.SerializationInfo) -> str | Secret[SecretType]: + if info.mode == 'json': + return str(value) + else: + return value + + return core_schema.json_or_python_schema( + python_schema=core_schema.no_info_wrap_validator_function( + validate_secret_value, + inner_schema, + ), + json_schema=core_schema.no_info_after_validator_function(lambda x: cls(x), inner_schema), + serialization=core_schema.plain_serializer_function_ser_schema( + serialize, + info_arg=True, + when_used='always', + ), + ) + + +def _secret_display(value: SecretType) -> str: # type: ignore + return '**********' if value else '' + + +class _SecretField(_SecretBase[SecretType]): + _inner_schema: ClassVar[CoreSchema] + _error_kind: ClassVar[str] + + @classmethod + def __get_pydantic_core_schema__(cls, source: type[Any], handler: GetCoreSchemaHandler) -> core_schema.CoreSchema: + def serialize( + value: _SecretField[SecretType], info: core_schema.SerializationInfo + ) -> str | _SecretField[SecretType]: + if info.mode == 'json': + # we want the output to always be string without the `b'` prefix for bytes, + # hence we just use `secret_display` + return _secret_display(value.get_secret_value()) + else: + return value + + def get_json_schema(_core_schema: core_schema.CoreSchema, handler: GetJsonSchemaHandler) -> JsonSchemaValue: + json_schema = handler(cls._inner_schema) + _utils.update_not_none( + json_schema, + type='string', + writeOnly=True, + format='password', + ) + return json_schema + + json_schema = core_schema.no_info_after_validator_function( + source, # construct the type + cls._inner_schema, + ) + + def get_secret_schema(strict: bool) -> CoreSchema: + return core_schema.json_or_python_schema( + python_schema=core_schema.union_schema( + [ + core_schema.is_instance_schema(source), + json_schema, + ], + custom_error_type=cls._error_kind, + strict=strict, + ), + json_schema=json_schema, + serialization=core_schema.plain_serializer_function_ser_schema( + serialize, + info_arg=True, + return_schema=core_schema.str_schema(), + when_used='json', + ), + ) + + return core_schema.lax_or_strict_schema( + lax_schema=get_secret_schema(strict=False), + strict_schema=get_secret_schema(strict=True), + metadata={'pydantic_js_functions': [get_json_schema]}, + ) + + +class SecretStr(_SecretField[str]): + """A string used for storing sensitive information that you do not want to be visible in logging or tracebacks. + + When the secret value is nonempty, it is displayed as `'**********'` instead of the underlying value in + calls to `repr()` and `str()`. If the value _is_ empty, it is displayed as `''`. 
+ + ```py + from pydantic import BaseModel, SecretStr + + class User(BaseModel): + username: str + password: SecretStr + + user = User(username='scolvin', password='password1') + + print(user) + #> username='scolvin' password=SecretStr('**********') + print(user.password.get_secret_value()) + #> password1 + print((SecretStr('password'), SecretStr(''))) + #> (SecretStr('**********'), SecretStr('')) + ``` + """ + + _inner_schema: ClassVar[CoreSchema] = core_schema.str_schema() + _error_kind: ClassVar[str] = 'string_type' + + def __len__(self) -> int: + return len(self._secret_value) + + def _display(self) -> str: + return _secret_display(self._secret_value) + + +class SecretBytes(_SecretField[bytes]): + """A bytes used for storing sensitive information that you do not want to be visible in logging or tracebacks. + + It displays `b'**********'` instead of the string value on `repr()` and `str()` calls. + When the secret value is nonempty, it is displayed as `b'**********'` instead of the underlying value in + calls to `repr()` and `str()`. If the value _is_ empty, it is displayed as `b''`. + + ```py + from pydantic import BaseModel, SecretBytes + + class User(BaseModel): + username: str + password: SecretBytes + + user = User(username='scolvin', password=b'password1') + #> username='scolvin' password=SecretBytes(b'**********') + print(user.password.get_secret_value()) + #> b'password1' + print((SecretBytes(b'password'), SecretBytes(b''))) + #> (SecretBytes(b'**********'), SecretBytes(b'')) + ``` + """ + + _inner_schema: ClassVar[CoreSchema] = core_schema.bytes_schema() + _error_kind: ClassVar[str] = 'bytes_type' + + def __len__(self) -> int: + return len(self._secret_value) + + def _display(self) -> bytes: + return _secret_display(self._secret_value).encode() + + +# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ PAYMENT CARD TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +class PaymentCardBrand(str, Enum): + amex = 'American Express' + mastercard = 'Mastercard' + visa = 'Visa' + other = 'other' + + def __str__(self) -> str: + return self.value + + +@deprecated( + 'The `PaymentCardNumber` class is deprecated, use `pydantic_extra_types` instead. ' + 'See https://docs.pydantic.dev/latest/api/pydantic_extra_types_payment/#pydantic_extra_types.payment.PaymentCardNumber.', + category=PydanticDeprecatedSince20, +) +class PaymentCardNumber(str): + """Based on: https://en.wikipedia.org/wiki/Payment_card_number.""" + + strip_whitespace: ClassVar[bool] = True + min_length: ClassVar[int] = 12 + max_length: ClassVar[int] = 19 + bin: str + last4: str + brand: PaymentCardBrand + + def __init__(self, card_number: str): + self.validate_digits(card_number) + + card_number = self.validate_luhn_check_digit(card_number) + + self.bin = card_number[:6] + self.last4 = card_number[-4:] + self.brand = self.validate_brand(card_number) + + @classmethod + def __get_pydantic_core_schema__(cls, source: type[Any], handler: GetCoreSchemaHandler) -> core_schema.CoreSchema: + return core_schema.with_info_after_validator_function( + cls.validate, + core_schema.str_schema( + min_length=cls.min_length, max_length=cls.max_length, strip_whitespace=cls.strip_whitespace + ), + ) + + @classmethod + def validate(cls, input_value: str, /, _: core_schema.ValidationInfo) -> PaymentCardNumber: + """Validate the card number and return a `PaymentCardNumber` instance.""" + return cls(input_value) + + @property + def masked(self) -> str: + """Mask all but the last 4 digits of the card number. + + Returns: + A masked card number string. 
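+
+        A brief illustrative sketch (the card number is a made-up, Luhn-valid test value;
+        note that the class itself is deprecated in favor of `pydantic_extra_types`):
+
+        ```py
+        from pydantic import PaymentCardNumber
+
+        card = PaymentCardNumber('4000000000000002')
+        print(card.masked)
+        #> 400000******0002
+        ```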
+ """ + num_masked = len(self) - 10 # len(bin) + len(last4) == 10 + return f'{self.bin}{"*" * num_masked}{self.last4}' + + @classmethod + def validate_digits(cls, card_number: str) -> None: + """Validate that the card number is all digits.""" + if not card_number.isdigit(): + raise PydanticCustomError('payment_card_number_digits', 'Card number is not all digits') + + @classmethod + def validate_luhn_check_digit(cls, card_number: str) -> str: + """Based on: https://en.wikipedia.org/wiki/Luhn_algorithm.""" + sum_ = int(card_number[-1]) + length = len(card_number) + parity = length % 2 + for i in range(length - 1): + digit = int(card_number[i]) + if i % 2 == parity: + digit *= 2 + if digit > 9: + digit -= 9 + sum_ += digit + valid = sum_ % 10 == 0 + if not valid: + raise PydanticCustomError('payment_card_number_luhn', 'Card number is not luhn valid') + return card_number + + @staticmethod + def validate_brand(card_number: str) -> PaymentCardBrand: + """Validate length based on BIN for major brands: + https://en.wikipedia.org/wiki/Payment_card_number#Issuer_identification_number_(IIN). + """ + if card_number[0] == '4': + brand = PaymentCardBrand.visa + elif 51 <= int(card_number[:2]) <= 55: + brand = PaymentCardBrand.mastercard + elif card_number[:2] in {'34', '37'}: + brand = PaymentCardBrand.amex + else: + brand = PaymentCardBrand.other + + required_length: None | int | str = None + if brand in PaymentCardBrand.mastercard: + required_length = 16 + valid = len(card_number) == required_length + elif brand == PaymentCardBrand.visa: + required_length = '13, 16 or 19' + valid = len(card_number) in {13, 16, 19} + elif brand == PaymentCardBrand.amex: + required_length = 15 + valid = len(card_number) == required_length + else: + valid = True + + if not valid: + raise PydanticCustomError( + 'payment_card_number_brand', + 'Length for a {brand} card must be {required_length}', + {'brand': brand, 'required_length': required_length}, + ) + return brand + + +# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ BYTE SIZE TYPE ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +class ByteSize(int): + """Converts a string representing a number of bytes with units (such as `'1KB'` or `'11.5MiB'`) into an integer. + + You can use the `ByteSize` data type to (case-insensitively) convert a string representation of a number of bytes into + an integer, and also to print out human-readable strings representing a number of bytes. + + In conformance with [IEC 80000-13 Standard](https://en.wikipedia.org/wiki/ISO/IEC_80000) we interpret `'1KB'` to mean 1000 bytes, + and `'1KiB'` to mean 1024 bytes. In general, including a middle `'i'` will cause the unit to be interpreted as a power of 2, + rather than a power of 10 (so, for example, `'1 MB'` is treated as `1_000_000` bytes, whereas `'1 MiB'` is treated as `1_048_576` bytes). + + !!! info + Note that `1b` will be parsed as "1 byte" and not "1 bit". 
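+
+    A small sketch of that distinction (the model below is illustrative):
+
+    ```py
+    from pydantic import BaseModel, ByteSize
+
+    class BitsAndBytes(BaseModel):
+        size: ByteSize
+
+    print(BitsAndBytes(size='1b').size)
+    #> 1
+    print(BitsAndBytes(size='8bit').size)
+    #> 1
+    ```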
+ + ```py + from pydantic import BaseModel, ByteSize + + class MyModel(BaseModel): + size: ByteSize + + print(MyModel(size=52000).size) + #> 52000 + print(MyModel(size='3000 KiB').size) + #> 3072000 + + m = MyModel(size='50 PB') + print(m.size.human_readable()) + #> 44.4PiB + print(m.size.human_readable(decimal=True)) + #> 50.0PB + print(m.size.human_readable(separator=' ')) + #> 44.4 PiB + + print(m.size.to('TiB')) + #> 45474.73508864641 + ``` + """ + + byte_sizes = { + 'b': 1, + 'kb': 10**3, + 'mb': 10**6, + 'gb': 10**9, + 'tb': 10**12, + 'pb': 10**15, + 'eb': 10**18, + 'kib': 2**10, + 'mib': 2**20, + 'gib': 2**30, + 'tib': 2**40, + 'pib': 2**50, + 'eib': 2**60, + 'bit': 1 / 8, + 'kbit': 10**3 / 8, + 'mbit': 10**6 / 8, + 'gbit': 10**9 / 8, + 'tbit': 10**12 / 8, + 'pbit': 10**15 / 8, + 'ebit': 10**18 / 8, + 'kibit': 2**10 / 8, + 'mibit': 2**20 / 8, + 'gibit': 2**30 / 8, + 'tibit': 2**40 / 8, + 'pibit': 2**50 / 8, + 'eibit': 2**60 / 8, + } + byte_sizes.update({k.lower()[0]: v for k, v in byte_sizes.items() if 'i' not in k}) + + byte_string_pattern = r'^\s*(\d*\.?\d+)\s*(\w+)?' + byte_string_re = re.compile(byte_string_pattern, re.IGNORECASE) + + @classmethod + def __get_pydantic_core_schema__(cls, source: type[Any], handler: GetCoreSchemaHandler) -> core_schema.CoreSchema: + return core_schema.with_info_after_validator_function( + function=cls._validate, + schema=core_schema.union_schema( + [ + core_schema.str_schema(pattern=cls.byte_string_pattern), + core_schema.int_schema(ge=0), + ], + custom_error_type='byte_size', + custom_error_message='could not parse value and unit from byte string', + ), + serialization=core_schema.plain_serializer_function_ser_schema( + int, return_schema=core_schema.int_schema(ge=0) + ), + ) + + @classmethod + def _validate(cls, input_value: Any, /, _: core_schema.ValidationInfo) -> ByteSize: + try: + return cls(int(input_value)) + except ValueError: + pass + + str_match = cls.byte_string_re.match(str(input_value)) + if str_match is None: + raise PydanticCustomError('byte_size', 'could not parse value and unit from byte string') + + scalar, unit = str_match.groups() + if unit is None: + unit = 'b' + + try: + unit_mult = cls.byte_sizes[unit.lower()] + except KeyError: + raise PydanticCustomError('byte_size_unit', 'could not interpret byte unit: {unit}', {'unit': unit}) + + return cls(int(float(scalar) * unit_mult)) + + def human_readable(self, decimal: bool = False, separator: str = '') -> str: + """Converts a byte size to a human readable string. + + Args: + decimal: If True, use decimal units (e.g. 1000 bytes per KB). If False, use binary units + (e.g. 1024 bytes per KiB). + separator: A string used to split the value and unit. Defaults to an empty string (''). + + Returns: + A human readable string representation of the byte size. + """ + if decimal: + divisor = 1000 + units = 'B', 'KB', 'MB', 'GB', 'TB', 'PB' + final_unit = 'EB' + else: + divisor = 1024 + units = 'B', 'KiB', 'MiB', 'GiB', 'TiB', 'PiB' + final_unit = 'EiB' + + num = float(self) + for unit in units: + if abs(num) < divisor: + if unit == 'B': + return f'{num:0.0f}{separator}{unit}' + else: + return f'{num:0.1f}{separator}{unit}' + num /= divisor + + return f'{num:0.1f}{separator}{final_unit}' + + def to(self, unit: str) -> float: + """Converts a byte size to another unit, including both byte and bit units. + + Args: + unit: The unit to convert to. 
Must be one of the following: B, KB, MB, GB, TB, PB, EB, + KiB, MiB, GiB, TiB, PiB, EiB (byte units) and + bit, kbit, mbit, gbit, tbit, pbit, ebit, + kibit, mibit, gibit, tibit, pibit, eibit (bit units). + + Returns: + The byte size in the new unit. + """ + try: + unit_div = self.byte_sizes[unit.lower()] + except KeyError: + raise PydanticCustomError('byte_size_unit', 'Could not interpret byte unit: {unit}', {'unit': unit}) + + return self / unit_div + + +# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ DATE TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +def _check_annotated_type(annotated_type: str, expected_type: str, annotation: str) -> None: + if annotated_type != expected_type: + raise PydanticUserError(f"'{annotation}' cannot annotate '{annotated_type}'.", code='invalid_annotated_type') + + +if TYPE_CHECKING: + PastDate = Annotated[date, ...] + FutureDate = Annotated[date, ...] +else: + + class PastDate: + """A date in the past.""" + + @classmethod + def __get_pydantic_core_schema__( + cls, source: type[Any], handler: GetCoreSchemaHandler + ) -> core_schema.CoreSchema: + if cls is source: + # used directly as a type + return core_schema.date_schema(now_op='past') + else: + schema = handler(source) + _check_annotated_type(schema['type'], 'date', cls.__name__) + schema['now_op'] = 'past' + return schema + + def __repr__(self) -> str: + return 'PastDate' + + class FutureDate: + """A date in the future.""" + + @classmethod + def __get_pydantic_core_schema__( + cls, source: type[Any], handler: GetCoreSchemaHandler + ) -> core_schema.CoreSchema: + if cls is source: + # used directly as a type + return core_schema.date_schema(now_op='future') + else: + schema = handler(source) + _check_annotated_type(schema['type'], 'date', cls.__name__) + schema['now_op'] = 'future' + return schema + + def __repr__(self) -> str: + return 'FutureDate' + + +def condate( + *, + strict: bool | None = None, + gt: date | None = None, + ge: date | None = None, + lt: date | None = None, + le: date | None = None, +) -> type[date]: + """A wrapper for date that adds constraints. + + Args: + strict: Whether to validate the date value in strict mode. Defaults to `None`. + gt: The value must be greater than this. Defaults to `None`. + ge: The value must be greater than or equal to this. Defaults to `None`. + lt: The value must be less than this. Defaults to `None`. + le: The value must be less than or equal to this. Defaults to `None`. + + Returns: + A date type with the specified constraints. + """ + return Annotated[ + date, + Strict(strict) if strict is not None else None, + annotated_types.Interval(gt=gt, ge=ge, lt=lt, le=le), + ] + + +# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ DATETIME TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +if TYPE_CHECKING: + AwareDatetime = Annotated[datetime, ...] + NaiveDatetime = Annotated[datetime, ...] + PastDatetime = Annotated[datetime, ...] + FutureDatetime = Annotated[datetime, ...] 
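+    # At runtime these aliases are replaced by the marker classes defined in the `else` branch below.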
+ +else: + + class AwareDatetime: + """A datetime that requires timezone info.""" + + @classmethod + def __get_pydantic_core_schema__( + cls, source: type[Any], handler: GetCoreSchemaHandler + ) -> core_schema.CoreSchema: + if cls is source: + # used directly as a type + return core_schema.datetime_schema(tz_constraint='aware') + else: + schema = handler(source) + _check_annotated_type(schema['type'], 'datetime', cls.__name__) + schema['tz_constraint'] = 'aware' + return schema + + def __repr__(self) -> str: + return 'AwareDatetime' + + class NaiveDatetime: + """A datetime that doesn't require timezone info.""" + + @classmethod + def __get_pydantic_core_schema__( + cls, source: type[Any], handler: GetCoreSchemaHandler + ) -> core_schema.CoreSchema: + if cls is source: + # used directly as a type + return core_schema.datetime_schema(tz_constraint='naive') + else: + schema = handler(source) + _check_annotated_type(schema['type'], 'datetime', cls.__name__) + schema['tz_constraint'] = 'naive' + return schema + + def __repr__(self) -> str: + return 'NaiveDatetime' + + class PastDatetime: + """A datetime that must be in the past.""" + + @classmethod + def __get_pydantic_core_schema__( + cls, source: type[Any], handler: GetCoreSchemaHandler + ) -> core_schema.CoreSchema: + if cls is source: + # used directly as a type + return core_schema.datetime_schema(now_op='past') + else: + schema = handler(source) + _check_annotated_type(schema['type'], 'datetime', cls.__name__) + schema['now_op'] = 'past' + return schema + + def __repr__(self) -> str: + return 'PastDatetime' + + class FutureDatetime: + """A datetime that must be in the future.""" + + @classmethod + def __get_pydantic_core_schema__( + cls, source: type[Any], handler: GetCoreSchemaHandler + ) -> core_schema.CoreSchema: + if cls is source: + # used directly as a type + return core_schema.datetime_schema(now_op='future') + else: + schema = handler(source) + _check_annotated_type(schema['type'], 'datetime', cls.__name__) + schema['now_op'] = 'future' + return schema + + def __repr__(self) -> str: + return 'FutureDatetime' + + +# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Encoded TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +class EncoderProtocol(Protocol): + """Protocol for encoding and decoding data to and from bytes.""" + + @classmethod + def decode(cls, data: bytes) -> bytes: + """Decode the data using the encoder. + + Args: + data: The data to decode. + + Returns: + The decoded data. + """ + ... + + @classmethod + def encode(cls, value: bytes) -> bytes: + """Encode the data using the encoder. + + Args: + value: The data to encode. + + Returns: + The encoded data. + """ + ... + + @classmethod + def get_json_format(cls) -> str: + """Get the JSON format for the encoded data. + + Returns: + The JSON format for the encoded data. + """ + ... + + +class Base64Encoder(EncoderProtocol): + """Standard (non-URL-safe) Base64 encoder.""" + + @classmethod + def decode(cls, data: bytes) -> bytes: + """Decode the data from base64 encoded bytes to original bytes data. + + Args: + data: The data to decode. + + Returns: + The decoded data. + """ + try: + return base64.decodebytes(data) + except ValueError as e: + raise PydanticCustomError('base64_decode', "Base64 decoding error: '{error}'", {'error': str(e)}) + + @classmethod + def encode(cls, value: bytes) -> bytes: + """Encode the data from bytes to a base64 encoded bytes. + + Args: + value: The data to encode. + + Returns: + The encoded data. 
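+
+        A tiny round-trip sketch (imported from `pydantic.types`, where this encoder is defined;
+        note that `base64.encodebytes` appends a trailing newline):
+
+        ```py
+        from pydantic.types import Base64Encoder
+
+        encoded = Base64Encoder.encode(b'hi')
+        print(encoded)
+        #> b'aGk=\n'
+        print(Base64Encoder.decode(encoded))
+        #> b'hi'
+        ```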
+ """ + return base64.encodebytes(value) + + @classmethod + def get_json_format(cls) -> Literal['base64']: + """Get the JSON format for the encoded data. + + Returns: + The JSON format for the encoded data. + """ + return 'base64' + + +class Base64UrlEncoder(EncoderProtocol): + """URL-safe Base64 encoder.""" + + @classmethod + def decode(cls, data: bytes) -> bytes: + """Decode the data from base64 encoded bytes to original bytes data. + + Args: + data: The data to decode. + + Returns: + The decoded data. + """ + try: + return base64.urlsafe_b64decode(data) + except ValueError as e: + raise PydanticCustomError('base64_decode', "Base64 decoding error: '{error}'", {'error': str(e)}) + + @classmethod + def encode(cls, value: bytes) -> bytes: + """Encode the data from bytes to a base64 encoded bytes. + + Args: + value: The data to encode. + + Returns: + The encoded data. + """ + return base64.urlsafe_b64encode(value) + + @classmethod + def get_json_format(cls) -> Literal['base64url']: + """Get the JSON format for the encoded data. + + Returns: + The JSON format for the encoded data. + """ + return 'base64url' + + +@_dataclasses.dataclass(**_internal_dataclass.slots_true) +class EncodedBytes: + """A bytes type that is encoded and decoded using the specified encoder. + + `EncodedBytes` needs an encoder that implements `EncoderProtocol` to operate. + + ```py + from typing_extensions import Annotated + + from pydantic import BaseModel, EncodedBytes, EncoderProtocol, ValidationError + + class MyEncoder(EncoderProtocol): + @classmethod + def decode(cls, data: bytes) -> bytes: + if data == b'**undecodable**': + raise ValueError('Cannot decode data') + return data[13:] + + @classmethod + def encode(cls, value: bytes) -> bytes: + return b'**encoded**: ' + value + + @classmethod + def get_json_format(cls) -> str: + return 'my-encoder' + + MyEncodedBytes = Annotated[bytes, EncodedBytes(encoder=MyEncoder)] + + class Model(BaseModel): + my_encoded_bytes: MyEncodedBytes + + # Initialize the model with encoded data + m = Model(my_encoded_bytes=b'**encoded**: some bytes') + + # Access decoded value + print(m.my_encoded_bytes) + #> b'some bytes' + + # Serialize into the encoded form + print(m.model_dump()) + #> {'my_encoded_bytes': b'**encoded**: some bytes'} + + # Validate encoded data + try: + Model(my_encoded_bytes=b'**undecodable**') + except ValidationError as e: + print(e) + ''' + 1 validation error for Model + my_encoded_bytes + Value error, Cannot decode data [type=value_error, input_value=b'**undecodable**', input_type=bytes] + ''' + ``` + """ + + encoder: type[EncoderProtocol] + + def __get_pydantic_json_schema__( + self, core_schema: core_schema.CoreSchema, handler: GetJsonSchemaHandler + ) -> JsonSchemaValue: + field_schema = handler(core_schema) + field_schema.update(type='string', format=self.encoder.get_json_format()) + return field_schema + + def __get_pydantic_core_schema__(self, source: type[Any], handler: GetCoreSchemaHandler) -> core_schema.CoreSchema: + return core_schema.with_info_after_validator_function( + function=self.decode, + schema=core_schema.bytes_schema(), + serialization=core_schema.plain_serializer_function_ser_schema(function=self.encode), + ) + + def decode(self, data: bytes, _: core_schema.ValidationInfo) -> bytes: + """Decode the data using the specified encoder. + + Args: + data: The data to decode. + + Returns: + The decoded data. + """ + return self.encoder.decode(data) + + def encode(self, value: bytes) -> bytes: + """Encode the data using the specified encoder. 
+ + Args: + value: The data to encode. + + Returns: + The encoded data. + """ + return self.encoder.encode(value) + + def __hash__(self) -> int: + return hash(self.encoder) + + +@_dataclasses.dataclass(**_internal_dataclass.slots_true) +class EncodedStr(EncodedBytes): + """A str type that is encoded and decoded using the specified encoder. + + `EncodedStr` needs an encoder that implements `EncoderProtocol` to operate. + + ```py + from typing_extensions import Annotated + + from pydantic import BaseModel, EncodedStr, EncoderProtocol, ValidationError + + class MyEncoder(EncoderProtocol): + @classmethod + def decode(cls, data: bytes) -> bytes: + if data == b'**undecodable**': + raise ValueError('Cannot decode data') + return data[13:] + + @classmethod + def encode(cls, value: bytes) -> bytes: + return b'**encoded**: ' + value + + @classmethod + def get_json_format(cls) -> str: + return 'my-encoder' + + MyEncodedStr = Annotated[str, EncodedStr(encoder=MyEncoder)] + + class Model(BaseModel): + my_encoded_str: MyEncodedStr + + # Initialize the model with encoded data + m = Model(my_encoded_str='**encoded**: some str') + + # Access decoded value + print(m.my_encoded_str) + #> some str + + # Serialize into the encoded form + print(m.model_dump()) + #> {'my_encoded_str': '**encoded**: some str'} + + # Validate encoded data + try: + Model(my_encoded_str='**undecodable**') + except ValidationError as e: + print(e) + ''' + 1 validation error for Model + my_encoded_str + Value error, Cannot decode data [type=value_error, input_value='**undecodable**', input_type=str] + ''' + ``` + """ + + def __get_pydantic_core_schema__(self, source: type[Any], handler: GetCoreSchemaHandler) -> core_schema.CoreSchema: + return core_schema.with_info_after_validator_function( + function=self.decode_str, + schema=super(EncodedStr, self).__get_pydantic_core_schema__(source=source, handler=handler), # noqa: UP008 + serialization=core_schema.plain_serializer_function_ser_schema(function=self.encode_str), + ) + + def decode_str(self, data: bytes, _: core_schema.ValidationInfo) -> str: + """Decode the data using the specified encoder. + + Args: + data: The data to decode. + + Returns: + The decoded data. + """ + return data.decode() + + def encode_str(self, value: str) -> str: + """Encode the data using the specified encoder. + + Args: + value: The data to encode. + + Returns: + The encoded data. + """ + return super(EncodedStr, self).encode(value=value.encode()).decode() # noqa: UP008 + + def __hash__(self) -> int: + return hash(self.encoder) + + +Base64Bytes = Annotated[bytes, EncodedBytes(encoder=Base64Encoder)] +"""A bytes type that is encoded and decoded using the standard (non-URL-safe) base64 encoder. + +Note: + Under the hood, `Base64Bytes` use standard library `base64.encodebytes` and `base64.decodebytes` functions. + + As a result, attempting to decode url-safe base64 data using the `Base64Bytes` type may fail or produce an incorrect + decoding. 
+ +```py +from pydantic import Base64Bytes, BaseModel, ValidationError + +class Model(BaseModel): + base64_bytes: Base64Bytes + +# Initialize the model with base64 data +m = Model(base64_bytes=b'VGhpcyBpcyB0aGUgd2F5') + +# Access decoded value +print(m.base64_bytes) +#> b'This is the way' + +# Serialize into the base64 form +print(m.model_dump()) +#> {'base64_bytes': b'VGhpcyBpcyB0aGUgd2F5\n'} + +# Validate base64 data +try: + print(Model(base64_bytes=b'undecodable').base64_bytes) +except ValidationError as e: + print(e) + ''' + 1 validation error for Model + base64_bytes + Base64 decoding error: 'Incorrect padding' [type=base64_decode, input_value=b'undecodable', input_type=bytes] + ''' +``` +""" +Base64Str = Annotated[str, EncodedStr(encoder=Base64Encoder)] +"""A str type that is encoded and decoded using the standard (non-URL-safe) base64 encoder. + +Note: + Under the hood, `Base64Bytes` use standard library `base64.encodebytes` and `base64.decodebytes` functions. + + As a result, attempting to decode url-safe base64 data using the `Base64Str` type may fail or produce an incorrect + decoding. + +```py +from pydantic import Base64Str, BaseModel, ValidationError + +class Model(BaseModel): + base64_str: Base64Str + +# Initialize the model with base64 data +m = Model(base64_str='VGhlc2UgYXJlbid0IHRoZSBkcm9pZHMgeW91J3JlIGxvb2tpbmcgZm9y') + +# Access decoded value +print(m.base64_str) +#> These aren't the droids you're looking for + +# Serialize into the base64 form +print(m.model_dump()) +#> {'base64_str': 'VGhlc2UgYXJlbid0IHRoZSBkcm9pZHMgeW91J3JlIGxvb2tpbmcgZm9y\n'} + +# Validate base64 data +try: + print(Model(base64_str='undecodable').base64_str) +except ValidationError as e: + print(e) + ''' + 1 validation error for Model + base64_str + Base64 decoding error: 'Incorrect padding' [type=base64_decode, input_value='undecodable', input_type=str] + ''' +``` +""" +Base64UrlBytes = Annotated[bytes, EncodedBytes(encoder=Base64UrlEncoder)] +"""A bytes type that is encoded and decoded using the URL-safe base64 encoder. + +Note: + Under the hood, `Base64UrlBytes` use standard library `base64.urlsafe_b64encode` and `base64.urlsafe_b64decode` + functions. + + As a result, the `Base64UrlBytes` type can be used to faithfully decode "vanilla" base64 data + (using `'+'` and `'/'`). + +```py +from pydantic import Base64UrlBytes, BaseModel + +class Model(BaseModel): + base64url_bytes: Base64UrlBytes + +# Initialize the model with base64 data +m = Model(base64url_bytes=b'SHc_dHc-TXc==') +print(m) +#> base64url_bytes=b'Hw?tw>Mw' +``` +""" +Base64UrlStr = Annotated[str, EncodedStr(encoder=Base64UrlEncoder)] +"""A str type that is encoded and decoded using the URL-safe base64 encoder. + +Note: + Under the hood, `Base64UrlStr` use standard library `base64.urlsafe_b64encode` and `base64.urlsafe_b64decode` + functions. + + As a result, the `Base64UrlStr` type can be used to faithfully decode "vanilla" base64 data (using `'+'` and `'/'`). + +```py +from pydantic import Base64UrlStr, BaseModel + +class Model(BaseModel): + base64url_str: Base64UrlStr + +# Initialize the model with base64 data +m = Model(base64url_str='SHc_dHc-TXc==') +print(m) +#> base64url_str='Hw?tw>Mw' +``` +""" + + +__getattr__ = getattr_migration(__name__) + + +@_dataclasses.dataclass(**_internal_dataclass.slots_true) +class GetPydanticSchema: + """Usage docs: https://docs.pydantic.dev/2.7/concepts/types/#using-getpydanticschema-to-reduce-boilerplate + + A convenience class for creating an annotation that provides pydantic custom type hooks. 
+ + This class is intended to eliminate the need to create a custom "marker" which defines the + `__get_pydantic_core_schema__` and `__get_pydantic_json_schema__` custom hook methods. + + For example, to have a field treated by type checkers as `int`, but by pydantic as `Any`, you can do: + ```python + from typing import Any + + from typing_extensions import Annotated + + from pydantic import BaseModel, GetPydanticSchema + + HandleAsAny = GetPydanticSchema(lambda _s, h: h(Any)) + + class Model(BaseModel): + x: Annotated[int, HandleAsAny] # pydantic sees `x: Any` + + print(repr(Model(x='abc').x)) + #> 'abc' + ``` + """ + + get_pydantic_core_schema: Callable[[Any, GetCoreSchemaHandler], CoreSchema] | None = None + get_pydantic_json_schema: Callable[[Any, GetJsonSchemaHandler], JsonSchemaValue] | None = None + + # Note: we may want to consider adding a convenience staticmethod `def for_type(type_: Any) -> GetPydanticSchema:` + # which returns `GetPydanticSchema(lambda _s, h: h(type_))` + + if not TYPE_CHECKING: + # We put `__getattr__` in a non-TYPE_CHECKING block because otherwise, mypy allows arbitrary attribute access + + def __getattr__(self, item: str) -> Any: + """Use this rather than defining `__get_pydantic_core_schema__` etc. to reduce the number of nested calls.""" + if item == '__get_pydantic_core_schema__' and self.get_pydantic_core_schema: + return self.get_pydantic_core_schema + elif item == '__get_pydantic_json_schema__' and self.get_pydantic_json_schema: + return self.get_pydantic_json_schema + else: + return object.__getattribute__(self, item) + + __hash__ = object.__hash__ + + +@_dataclasses.dataclass(**_internal_dataclass.slots_true, frozen=True) +class Tag: + """Provides a way to specify the expected tag to use for a case of a (callable) discriminated union. + + Also provides a way to label a union case in error messages. + + When using a callable `Discriminator`, attach a `Tag` to each case in the `Union` to specify the tag that + should be used to identify that case. For example, in the below example, the `Tag` is used to specify that + if `get_discriminator_value` returns `'apple'`, the input should be validated as an `ApplePie`, and if it + returns `'pumpkin'`, the input should be validated as a `PumpkinPie`. + + The primary role of the `Tag` here is to map the return value from the callable `Discriminator` function to + the appropriate member of the `Union` in question. 
+ + ```py + from typing import Any, Union + + from typing_extensions import Annotated, Literal + + from pydantic import BaseModel, Discriminator, Tag + + class Pie(BaseModel): + time_to_cook: int + num_ingredients: int + + class ApplePie(Pie): + fruit: Literal['apple'] = 'apple' + + class PumpkinPie(Pie): + filling: Literal['pumpkin'] = 'pumpkin' + + def get_discriminator_value(v: Any) -> str: + if isinstance(v, dict): + return v.get('fruit', v.get('filling')) + return getattr(v, 'fruit', getattr(v, 'filling', None)) + + class ThanksgivingDinner(BaseModel): + dessert: Annotated[ + Union[ + Annotated[ApplePie, Tag('apple')], + Annotated[PumpkinPie, Tag('pumpkin')], + ], + Discriminator(get_discriminator_value), + ] + + apple_variation = ThanksgivingDinner.model_validate( + {'dessert': {'fruit': 'apple', 'time_to_cook': 60, 'num_ingredients': 8}} + ) + print(repr(apple_variation)) + ''' + ThanksgivingDinner(dessert=ApplePie(time_to_cook=60, num_ingredients=8, fruit='apple')) + ''' + + pumpkin_variation = ThanksgivingDinner.model_validate( + { + 'dessert': { + 'filling': 'pumpkin', + 'time_to_cook': 40, + 'num_ingredients': 6, + } + } + ) + print(repr(pumpkin_variation)) + ''' + ThanksgivingDinner(dessert=PumpkinPie(time_to_cook=40, num_ingredients=6, filling='pumpkin')) + ''' + ``` + + !!! note + You must specify a `Tag` for every case in a `Tag` that is associated with a + callable `Discriminator`. Failing to do so will result in a `PydanticUserError` with code + [`callable-discriminator-no-tag`](../errors/usage_errors.md#callable-discriminator-no-tag). + + See the [Discriminated Unions] concepts docs for more details on how to use `Tag`s. + + [Discriminated Unions]: ../concepts/unions.md#discriminated-unions + """ + + tag: str + + def __get_pydantic_core_schema__(self, source_type: Any, handler: GetCoreSchemaHandler) -> CoreSchema: + schema = handler(source_type) + metadata = schema.setdefault('metadata', {}) + assert isinstance(metadata, dict) + metadata[_core_utils.TAGGED_UNION_TAG_KEY] = self.tag + return schema + + +@_dataclasses.dataclass(**_internal_dataclass.slots_true, frozen=True) +class Discriminator: + """Usage docs: https://docs.pydantic.dev/2.7/concepts/unions/#discriminated-unions-with-callable-discriminator + + Provides a way to use a custom callable as the way to extract the value of a union discriminator. + + This allows you to get validation behavior like you'd get from `Field(discriminator=)`, + but without needing to have a single shared field across all the union choices. This also makes it + possible to handle unions of models and primitive types with discriminated-union-style validation errors. + Finally, this allows you to use a custom callable as the way to identify which member of a union a value + belongs to, while still seeing all the performance benefits of a discriminated union. + + Consider this example, which is much more performant with the use of `Discriminator` and thus a `TaggedUnion` + than it would be as a normal `Union`. 
+ + ```py + from typing import Any, Union + + from typing_extensions import Annotated, Literal + + from pydantic import BaseModel, Discriminator, Tag + + class Pie(BaseModel): + time_to_cook: int + num_ingredients: int + + class ApplePie(Pie): + fruit: Literal['apple'] = 'apple' + + class PumpkinPie(Pie): + filling: Literal['pumpkin'] = 'pumpkin' + + def get_discriminator_value(v: Any) -> str: + if isinstance(v, dict): + return v.get('fruit', v.get('filling')) + return getattr(v, 'fruit', getattr(v, 'filling', None)) + + class ThanksgivingDinner(BaseModel): + dessert: Annotated[ + Union[ + Annotated[ApplePie, Tag('apple')], + Annotated[PumpkinPie, Tag('pumpkin')], + ], + Discriminator(get_discriminator_value), + ] + + apple_variation = ThanksgivingDinner.model_validate( + {'dessert': {'fruit': 'apple', 'time_to_cook': 60, 'num_ingredients': 8}} + ) + print(repr(apple_variation)) + ''' + ThanksgivingDinner(dessert=ApplePie(time_to_cook=60, num_ingredients=8, fruit='apple')) + ''' + + pumpkin_variation = ThanksgivingDinner.model_validate( + { + 'dessert': { + 'filling': 'pumpkin', + 'time_to_cook': 40, + 'num_ingredients': 6, + } + } + ) + print(repr(pumpkin_variation)) + ''' + ThanksgivingDinner(dessert=PumpkinPie(time_to_cook=40, num_ingredients=6, filling='pumpkin')) + ''' + ``` + + See the [Discriminated Unions] concepts docs for more details on how to use `Discriminator`s. + + [Discriminated Unions]: ../concepts/unions.md#discriminated-unions + """ + + discriminator: str | Callable[[Any], Hashable] + """The callable or field name for discriminating the type in a tagged union. + + A `Callable` discriminator must extract the value of the discriminator from the input. + A `str` discriminator must be the name of a field to discriminate against. + """ + custom_error_type: str | None = None + """Type to use in [custom errors](../errors/errors.md#custom-errors) replacing the standard discriminated union + validation errors. + """ + custom_error_message: str | None = None + """Message to use in custom errors.""" + custom_error_context: dict[str, int | str | float] | None = None + """Context to use in custom errors.""" + + def __get_pydantic_core_schema__(self, source_type: Any, handler: GetCoreSchemaHandler) -> CoreSchema: + origin = _typing_extra.get_origin(source_type) + if not origin or not _typing_extra.origin_is_union(origin): + raise TypeError(f'{type(self).__name__} must be used with a Union type, not {source_type}') + + if isinstance(self.discriminator, str): + from pydantic import Field + + return handler(Annotated[source_type, Field(discriminator=self.discriminator)]) + else: + original_schema = handler(source_type) + return self._convert_schema(original_schema) + + def _convert_schema(self, original_schema: core_schema.CoreSchema) -> core_schema.TaggedUnionSchema: + if original_schema['type'] != 'union': + # This likely indicates that the schema was a single-item union that was simplified. + # In this case, we do the same thing we do in + # `pydantic._internal._discriminated_union._ApplyInferredDiscriminator._apply_to_root`, namely, + # package the generated schema back into a single-item union. 
+ original_schema = core_schema.union_schema([original_schema]) + + tagged_union_choices = {} + for i, choice in enumerate(original_schema['choices']): + tag = None + if isinstance(choice, tuple): + choice, tag = choice + metadata = choice.get('metadata') + if metadata is not None: + metadata_tag = metadata.get(_core_utils.TAGGED_UNION_TAG_KEY) + if metadata_tag is not None: + tag = metadata_tag + if tag is None: + raise PydanticUserError( + f'`Tag` not provided for choice {choice} used with `Discriminator`', + code='callable-discriminator-no-tag', + ) + tagged_union_choices[tag] = choice + + # Have to do these verbose checks to ensure falsy values ('' and {}) don't get ignored + custom_error_type = self.custom_error_type + if custom_error_type is None: + custom_error_type = original_schema.get('custom_error_type') + + custom_error_message = self.custom_error_message + if custom_error_message is None: + custom_error_message = original_schema.get('custom_error_message') + + custom_error_context = self.custom_error_context + if custom_error_context is None: + custom_error_context = original_schema.get('custom_error_context') + + custom_error_type = original_schema.get('custom_error_type') if custom_error_type is None else custom_error_type + return core_schema.tagged_union_schema( + tagged_union_choices, + self.discriminator, + custom_error_type=custom_error_type, + custom_error_message=custom_error_message, + custom_error_context=custom_error_context, + strict=original_schema.get('strict'), + ref=original_schema.get('ref'), + metadata=original_schema.get('metadata'), + serialization=original_schema.get('serialization'), + ) + + +_JSON_TYPES = {int, float, str, bool, list, dict, type(None)} + + +def _get_type_name(x: Any) -> str: + type_ = type(x) + if type_ in _JSON_TYPES: + return type_.__name__ + + # Handle proper subclasses; note we don't need to handle None or bool here + if isinstance(x, int): + return 'int' + if isinstance(x, float): + return 'float' + if isinstance(x, str): + return 'str' + if isinstance(x, list): + return 'list' + if isinstance(x, dict): + return 'dict' + + # Fail by returning the type's actual name + return getattr(type_, '__name__', '') + + +class _AllowAnyJson: + @classmethod + def __get_pydantic_core_schema__(cls, source_type: Any, handler: GetCoreSchemaHandler) -> CoreSchema: + python_schema = handler(source_type) + return core_schema.json_or_python_schema(json_schema=core_schema.any_schema(), python_schema=python_schema) + + +if TYPE_CHECKING: + # This seems to only be necessary for mypy + JsonValue: TypeAlias = Union[ + List['JsonValue'], + Dict[str, 'JsonValue'], + str, + bool, + int, + float, + None, + ] + """A `JsonValue` is used to represent a value that can be serialized to JSON. + + It may be one of: + + * `List['JsonValue']` + * `Dict[str, 'JsonValue']` + * `str` + * `bool` + * `int` + * `float` + * `None` + + The following example demonstrates how to use `JsonValue` to validate JSON data, + and what kind of errors to expect when input data is not json serializable. 
+ + ```py + import json + + from pydantic import BaseModel, JsonValue, ValidationError + + class Model(BaseModel): + j: JsonValue + + valid_json_data = {'j': {'a': {'b': {'c': 1, 'd': [2, None]}}}} + invalid_json_data = {'j': {'a': {'b': ...}}} + + print(repr(Model.model_validate(valid_json_data))) + #> Model(j={'a': {'b': {'c': 1, 'd': [2, None]}}}) + print(repr(Model.model_validate_json(json.dumps(valid_json_data)))) + #> Model(j={'a': {'b': {'c': 1, 'd': [2, None]}}}) + + try: + Model.model_validate(invalid_json_data) + except ValidationError as e: + print(e) + ''' + 1 validation error for Model + j.dict.a.dict.b + input was not a valid JSON value [type=invalid-json-value, input_value=Ellipsis, input_type=ellipsis] + ''' + ``` + """ + +else: + JsonValue = TypeAliasType( + 'JsonValue', + Annotated[ + Union[ + Annotated[List['JsonValue'], Tag('list')], + Annotated[Dict[str, 'JsonValue'], Tag('dict')], + Annotated[str, Tag('str')], + Annotated[bool, Tag('bool')], + Annotated[int, Tag('int')], + Annotated[float, Tag('float')], + Annotated[None, Tag('NoneType')], + ], + Discriminator( + _get_type_name, + custom_error_type='invalid-json-value', + custom_error_message='input was not a valid JSON value', + ), + _AllowAnyJson, + ], + ) + + +class _OnErrorOmit: + @classmethod + def __get_pydantic_core_schema__(cls, source_type: Any, handler: GetCoreSchemaHandler) -> CoreSchema: + # there is no actual default value here but we use with_default_schema since it already has the on_error + # behavior implemented and it would be no more efficient to implement it on every other validator + # or as a standalone validator + return core_schema.with_default_schema(schema=handler(source_type), on_error='omit') + + +OnErrorOmit = Annotated[T, _OnErrorOmit] +""" +When used as an item in a list, the key type in a dict, optional values of a TypedDict, etc. +this annotation omits the item from the iteration if there is any error validating it. +That is, instead of a [`ValidationError`][pydantic_core.ValidationError] being propagated up and the entire iterable being discarded +any invalid items are discarded and the valid ones are returned. 
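+
+A minimal usage sketch (illustrative; not part of the upstream docstring), assuming default
+lax coercion: items that fail `int` validation are simply dropped from the list.
+
+```py
+from typing import List
+
+from pydantic import BaseModel, OnErrorOmit
+
+class Model(BaseModel):
+    # each item is validated as `int`; items that fail validation are omitted
+    numbers: List[OnErrorOmit[int]]
+
+print(Model(numbers=[1, '2', 'not a number', None]).numbers)
+#> [1, 2]
+```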
+""" diff --git a/venv/lib/python3.10/site-packages/pydantic/typing.py b/venv/lib/python3.10/site-packages/pydantic/typing.py new file mode 100644 index 0000000000000000000000000000000000000000..f1b32ba22bbae4ba3dbd871c00c1b9807e1fb700 --- /dev/null +++ b/venv/lib/python3.10/site-packages/pydantic/typing.py @@ -0,0 +1,4 @@ +"""`typing` module is a backport module from V1.""" +from ._migration import getattr_migration + +__getattr__ = getattr_migration(__name__) diff --git a/venv/lib/python3.10/site-packages/pydantic/utils.py b/venv/lib/python3.10/site-packages/pydantic/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..1619d1db640c79d8940432f3e355bc29dc83ceba --- /dev/null +++ b/venv/lib/python3.10/site-packages/pydantic/utils.py @@ -0,0 +1,4 @@ +"""The `utils` module is a backport module from V1.""" +from ._migration import getattr_migration + +__getattr__ = getattr_migration(__name__) diff --git a/venv/lib/python3.10/site-packages/pydantic/validate_call_decorator.py b/venv/lib/python3.10/site-packages/pydantic/validate_call_decorator.py new file mode 100644 index 0000000000000000000000000000000000000000..5b82e19d85bfb4c0529da7919f83cbda61f45533 --- /dev/null +++ b/venv/lib/python3.10/site-packages/pydantic/validate_call_decorator.py @@ -0,0 +1,68 @@ +"""Decorator for validating function calls.""" +from __future__ import annotations as _annotations + +import functools +from typing import TYPE_CHECKING, Any, Callable, TypeVar, overload + +from ._internal import _validate_call + +__all__ = ('validate_call',) + +if TYPE_CHECKING: + from .config import ConfigDict + + AnyCallableT = TypeVar('AnyCallableT', bound=Callable[..., Any]) + + +@overload +def validate_call( + *, config: ConfigDict | None = None, validate_return: bool = False +) -> Callable[[AnyCallableT], AnyCallableT]: + ... + + +@overload +def validate_call(func: AnyCallableT, /) -> AnyCallableT: + ... + + +def validate_call( + func: AnyCallableT | None = None, + /, + *, + config: ConfigDict | None = None, + validate_return: bool = False, +) -> AnyCallableT | Callable[[AnyCallableT], AnyCallableT]: + """Usage docs: https://docs.pydantic.dev/2.7/concepts/validation_decorator/ + + Returns a decorated wrapper around the function that validates the arguments and, optionally, the return value. + + Usage may be either as a plain decorator `@validate_call` or with arguments `@validate_call(...)`. + + Args: + func: The function to be decorated. + config: The configuration dictionary. + validate_return: Whether to validate the return value. + + Returns: + The decorated function. 
+ """ + + def validate(function: AnyCallableT) -> AnyCallableT: + if isinstance(function, (classmethod, staticmethod)): + name = type(function).__name__ + raise TypeError(f'The `@{name}` decorator should be applied after `@validate_call` (put `@{name}` on top)') + validate_call_wrapper = _validate_call.ValidateCallWrapper(function, config, validate_return) + + @functools.wraps(function) + def wrapper_function(*args, **kwargs): + return validate_call_wrapper(*args, **kwargs) + + wrapper_function.raw_function = function # type: ignore + + return wrapper_function # type: ignore + + if func: + return validate(func) + else: + return validate diff --git a/venv/lib/python3.10/site-packages/pydantic/validators.py b/venv/lib/python3.10/site-packages/pydantic/validators.py new file mode 100644 index 0000000000000000000000000000000000000000..55b0339e9fa69e48e58d2f77395a7cc2a8711d8b --- /dev/null +++ b/venv/lib/python3.10/site-packages/pydantic/validators.py @@ -0,0 +1,4 @@ +"""The `validators` module is a backport module from V1.""" +from ._migration import getattr_migration + +__getattr__ = getattr_migration(__name__) diff --git a/venv/lib/python3.10/site-packages/pydantic/version.py b/venv/lib/python3.10/site-packages/pydantic/version.py new file mode 100644 index 0000000000000000000000000000000000000000..b7b87a212a0cc81b7e3d986a753fa4d0890b37e6 --- /dev/null +++ b/venv/lib/python3.10/site-packages/pydantic/version.py @@ -0,0 +1,77 @@ +"""The `version` module holds the version information for Pydantic.""" +from __future__ import annotations as _annotations + +__all__ = 'VERSION', 'version_info' + +VERSION = '2.7.1' +"""The version of Pydantic.""" + + +def version_short() -> str: + """Return the `major.minor` part of Pydantic version. + + It returns '2.1' if Pydantic version is '2.1.1'. + """ + return '.'.join(VERSION.split('.')[:2]) + + +def version_info() -> str: + """Return complete version information for Pydantic and its dependencies.""" + import importlib.metadata as importlib_metadata + import os + import platform + import sys + from pathlib import Path + + import pydantic_core._pydantic_core as pdc + + from ._internal import _git as git + + # get data about packages that are closely related to pydantic, use pydantic or often conflict with pydantic + package_names = { + 'email-validator', + 'fastapi', + 'mypy', + 'pydantic-extra-types', + 'pydantic-settings', + 'pyright', + 'typing_extensions', + } + related_packages = [] + + for dist in importlib_metadata.distributions(): + name = dist.metadata['Name'] + if name in package_names: + related_packages.append(f'{name}-{dist.version}') + + pydantic_dir = os.path.abspath(os.path.dirname(os.path.dirname(__file__))) + most_recent_commit = ( + git.git_revision(pydantic_dir) if git.is_git_repo(pydantic_dir) and git.have_git() else 'unknown' + ) + + info = { + 'pydantic version': VERSION, + 'pydantic-core version': pdc.__version__, + 'pydantic-core build': getattr(pdc, 'build_info', None) or pdc.build_profile, + 'install path': Path(__file__).resolve().parent, + 'python version': sys.version, + 'platform': platform.platform(), + 'related packages': ' '.join(related_packages), + 'commit': most_recent_commit, + } + return '\n'.join('{:>30} {}'.format(k + ':', str(v).replace('\n', ' ')) for k, v in info.items()) + + +def parse_mypy_version(version: str) -> tuple[int, ...]: + """Parse mypy string version to tuple of ints. 
+
+    It parses a normal version like `0.930` as well as extra info followed by a `+` sign
+    like `0.940+dev.04cac4b5d911c4f9529e6ce86a27b44f28846f5d.dirty`.
+
+    Args:
+        version: The mypy version string.
+
+    Returns:
+        A tuple of ints, e.g. (0, 930).
+    """
+    return tuple(map(int, version.partition('+')[0].split('.')))
diff --git a/venv/lib/python3.10/site-packages/pydantic/warnings.py b/venv/lib/python3.10/site-packages/pydantic/warnings.py
new file mode 100644
index 0000000000000000000000000000000000000000..aedd4fba9ddd7583749cb613c095ba64fb182529
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/pydantic/warnings.py
@@ -0,0 +1,58 @@
+"""Pydantic-specific warnings."""
+from __future__ import annotations as _annotations
+
+from .version import version_short
+
+__all__ = 'PydanticDeprecatedSince20', 'PydanticDeprecationWarning'
+
+
+class PydanticDeprecationWarning(DeprecationWarning):
+    """A Pydantic specific deprecation warning.
+
+    This warning is raised when using deprecated functionality in Pydantic. It provides information on when the
+    deprecation was introduced and the expected version in which the corresponding functionality will be removed.
+
+    Attributes:
+        message: Description of the warning.
+        since: Pydantic version in which the deprecation was introduced.
+        expected_removal: Pydantic version in which the corresponding functionality is expected to be removed.
+    """
+
+    message: str
+    since: tuple[int, int]
+    expected_removal: tuple[int, int]
+
+    def __init__(
+        self, message: str, *args: object, since: tuple[int, int], expected_removal: tuple[int, int] | None = None
+    ) -> None:
+        super().__init__(message, *args)
+        self.message = message.rstrip('.')
+        self.since = since
+        self.expected_removal = expected_removal if expected_removal is not None else (since[0] + 1, 0)
+
+    def __str__(self) -> str:
+        message = (
+            f'{self.message}. Deprecated in Pydantic V{self.since[0]}.{self.since[1]}'
+            f' to be removed in V{self.expected_removal[0]}.{self.expected_removal[1]}.'
+        )
+        if self.since == (2, 0):
+            message += f' See Pydantic V2 Migration Guide at https://errors.pydantic.dev/{version_short()}/migration/'
+        return message
+
+
+class PydanticDeprecatedSince20(PydanticDeprecationWarning):
+    """A specific `PydanticDeprecationWarning` subclass defining functionality deprecated since Pydantic 2.0."""
+
+    def __init__(self, message: str, *args: object) -> None:
+        super().__init__(message, *args, since=(2, 0), expected_removal=(3, 0))
+
+
+class PydanticDeprecatedSince26(PydanticDeprecationWarning):
+    """A specific `PydanticDeprecationWarning` subclass defining functionality deprecated since Pydantic 2.6."""
+
+    def __init__(self, message: str, *args: object) -> None:
+        super().__init__(message, *args, since=(2, 6), expected_removal=(3, 0))
+
+
+class GenericBeforeBaseModelWarning(Warning):
+    pass
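+
+
+# Illustrative sketch (not part of the upstream module): how a `PydanticDeprecationWarning`
+# renders, given the `__str__` implementation above.
+#
+#     from pydantic.warnings import PydanticDeprecationWarning
+#
+#     w = PydanticDeprecationWarning('`foo` is deprecated', since=(2, 1))
+#     print(str(w))
+#     #> `foo` is deprecated. Deprecated in Pydantic V2.1 to be removed in V3.0.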