diff --git a/.gitattributes b/.gitattributes
index d28b703a99d727635424e7b1cd9d354e9460b876..4cb8dd7709c32c343d4ee9e272f3317a7708f2fa 100644
--- a/.gitattributes
+++ b/.gitattributes
@@ -461,3 +461,5 @@ pi0_nomagic_precise_packing/finetune_precise_packing/25000/params/ocdbt.process_
 pi0_nomagic_precise_packing/finetune_precise_packing/25000/train_state/ocdbt.process_0/d/0a1e03395829d5e74804a190f25b836b filter=lfs diff=lfs merge=lfs -text
 pi0_nomagic_precise_packing/finetune_precise_packing/25000/train_state/ocdbt.process_0/d/922969e2302a82121fe4d88f61f6c61c filter=lfs diff=lfs merge=lfs -text
 pi0_nomagic_precise_packing/finetune_precise_packing/25000/train_state/ocdbt.process_0/d/3f11c461efb1a7f5e1620c48d0ae6f5d filter=lfs diff=lfs merge=lfs -text
+collecting_film--resume/20000/params/ocdbt.process_0/d/585a567506dba284f858a9c92dbf5442 filter=lfs diff=lfs merge=lfs -text
+collecting_film--resume/20000/params/ocdbt.process_0/d/adfd15b2e7551b00f53f9032e0961fc8 filter=lfs diff=lfs merge=lfs -text
diff --git a/collecting_film--resume/20000/_CHECKPOINT_METADATA b/collecting_film--resume/20000/_CHECKPOINT_METADATA
new file mode 100644
index 0000000000000000000000000000000000000000..b7eecf9236c2fdce06a84c04ea384dd7918d4b56
--- /dev/null
+++ b/collecting_film--resume/20000/_CHECKPOINT_METADATA
@@ -0,0 +1 @@
+{"item_handlers": {"assets": "openpi.training.checkpoints.CallbackHandler", "params": "orbax.checkpoint._src.handlers.pytree_checkpoint_handler.PyTreeCheckpointHandler", "train_state": "orbax.checkpoint._src.handlers.pytree_checkpoint_handler.PyTreeCheckpointHandler"}, "metrics": {}, "performance_metrics": {}, "init_timestamp_nsecs": 1747589894222227834, "commit_timestamp_nsecs": 1747590066360623678, "custom": {}}
\ No newline at end of file
diff --git a/collecting_film--resume/20000/assets/ur10e/norm_stats.json b/collecting_film--resume/20000/assets/ur10e/norm_stats.json
new file mode 100644
index 0000000000000000000000000000000000000000..02c0078b3fc873fa372fbc1e3df3778fb7d93dc7
--- /dev/null
+++ b/collecting_film--resume/20000/assets/ur10e/norm_stats.json
@@ -0,0 +1,280 @@
+{
+  "norm_stats": {
+    "state": {
+      "mean": [
+        -0.02567109651863575,
+        -0.6717175245285034,
+        0.04765819013118744,
+        -0.30536580085754395,
+        1.4770880937576294,
+        0.22507140040397644,
+        0.445521742105484,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0
+      ],
+      "std": [
+        0.26767438650131226,
+        0.09560061991214752,
+        0.15486207604408264,
+        2.018665313720703,
+        0.08953272551298141,
+        1.7133640050888062,
+        0.49702325463294983,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0
+      ],
+      "q01": [
+        -0.7150094509124756,
+        -0.8856564164161682,
+        -0.17373406887054443,
+        -3.1038565635681152,
+        1.0793952941894531,
+        -3.082521677017212,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0
+      ],
+      "q99": [
+        0.38523364067077637,
+        -0.4436938762664795,
+        0.3646169602870941,
+        3.1001057624816895,
+        1.5628662109375,
+        3.0800065994262695,
+        0.9997999668121338,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0
+      ]
+    },
+    "actions": {
+      "mean": [
+        -0.0003907651698682457,
+        0.00006541451875818893,
+        0.00043602054938673973,
+        0.00009901820885716006,
+        0.0002067217428702861,
+        -0.0003359736583661288,
+        0.44410789012908936,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0
+      ],
+      "std": [
+        0.006131487898528576,
+        0.0029658444691449404,
+        0.006969461217522621,
+        0.004112967289984226,
+        0.0045164297334849834,
+        0.008203143253922462,
+        0.49686625599861145,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0
+      ],
+      "q01": [
+        -0.018025483936071396,
+        -0.008224858902394772,
+        -0.014416997320950031,
+        -0.010709216818213463,
+        -0.012488648295402527,
+        -0.024349097162485123,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0
+      ],
+      "q99": [
+        0.016839243471622467,
+        0.00923294760286808,
+        0.0199727900326252,
+        0.011877017095685005,
+        0.012633688747882843,
+        0.02208421379327774,
+        0.9997999668121338,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0,
+        0.0
+      ]
+    }
+  }
+}
\ No newline at end of file
diff --git a/collecting_film--resume/20000/params/_METADATA b/collecting_film--resume/20000/params/_METADATA
new file mode 100644
index 0000000000000000000000000000000000000000..fe69e5c7c966b232fdce8d2d95aee040592861f3
--- /dev/null
+++ b/collecting_film--resume/20000/params/_METADATA
@@ -0,0 +1 @@
+{"tree_metadata": {"('params', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoder_norm_filmed', 'bias', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoder_norm_filmed", "key_type": 2}, {"key": "bias", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoder_norm_filmed', 'scale', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoder_norm_filmed", "key_type": 2}, {"key": "scale", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoderblock_filmed', 'FiLMLayer_0', 'scale_projector', 'bias', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoderblock_filmed", "key_type": 2}, {"key": "FiLMLayer_0", "key_type": 2}, {"key": "scale_projector", "key_type": 2}, {"key": "bias", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoderblock_filmed', 'FiLMLayer_0', 'scale_projector', 'kernel', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key":
"encoderblock_filmed", "key_type": 2}, {"key": "FiLMLayer_0", "key_type": 2}, {"key": "scale_projector", "key_type": 2}, {"key": "kernel", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoderblock_filmed', 'FiLMLayer_0', 'shift_projector', 'bias', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoderblock_filmed", "key_type": 2}, {"key": "FiLMLayer_0", "key_type": 2}, {"key": "shift_projector", "key_type": 2}, {"key": "bias", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoderblock_filmed', 'FiLMLayer_0', 'shift_projector', 'kernel', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoderblock_filmed", "key_type": 2}, {"key": "FiLMLayer_0", "key_type": 2}, {"key": "shift_projector", "key_type": 2}, {"key": "kernel", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoderblock_filmed', 'LayerNorm_0', 'bias', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoderblock_filmed", "key_type": 2}, {"key": "LayerNorm_0", "key_type": 2}, {"key": "bias", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoderblock_filmed', 'LayerNorm_0', 'scale', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoderblock_filmed", "key_type": 2}, {"key": "LayerNorm_0", "key_type": 2}, {"key": "scale", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoderblock_filmed', 'LayerNorm_1', 'bias', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoderblock_filmed", "key_type": 2}, {"key": "LayerNorm_1", "key_type": 2}, {"key": "bias", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoderblock_filmed', 'LayerNorm_1', 'scale', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoderblock_filmed", "key_type": 2}, {"key": "LayerNorm_1", "key_type": 2}, {"key": "scale", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoderblock_filmed', 'MlpBlock_0', 'Dense_0', 'bias', 'value')": 
{"key_metadata": [{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoderblock_filmed", "key_type": 2}, {"key": "MlpBlock_0", "key_type": 2}, {"key": "Dense_0", "key_type": 2}, {"key": "bias", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoderblock_filmed', 'MlpBlock_0', 'Dense_0', 'kernel', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoderblock_filmed", "key_type": 2}, {"key": "MlpBlock_0", "key_type": 2}, {"key": "Dense_0", "key_type": 2}, {"key": "kernel", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoderblock_filmed', 'MlpBlock_0', 'Dense_1', 'bias', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoderblock_filmed", "key_type": 2}, {"key": "MlpBlock_0", "key_type": 2}, {"key": "Dense_1", "key_type": 2}, {"key": "bias", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoderblock_filmed', 'MlpBlock_0', 'Dense_1', 'kernel', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoderblock_filmed", "key_type": 2}, {"key": "MlpBlock_0", "key_type": 2}, {"key": "Dense_1", "key_type": 2}, {"key": "kernel", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoderblock_filmed', 'MultiHeadDotProductAttention_0', 'key', 'bias', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoderblock_filmed", "key_type": 2}, {"key": "MultiHeadDotProductAttention_0", "key_type": 2}, {"key": "key", "key_type": 2}, {"key": "bias", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoderblock_filmed', 'MultiHeadDotProductAttention_0', 'key', 'kernel', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoderblock_filmed", "key_type": 2}, {"key": "MultiHeadDotProductAttention_0", "key_type": 2}, {"key": "key", "key_type": 2}, {"key": "kernel", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoderblock_filmed', 'MultiHeadDotProductAttention_0', 'out', 'bias', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": 
"encoderblock_filmed", "key_type": 2}, {"key": "MultiHeadDotProductAttention_0", "key_type": 2}, {"key": "out", "key_type": 2}, {"key": "bias", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoderblock_filmed', 'MultiHeadDotProductAttention_0', 'out', 'kernel', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoderblock_filmed", "key_type": 2}, {"key": "MultiHeadDotProductAttention_0", "key_type": 2}, {"key": "out", "key_type": 2}, {"key": "kernel", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoderblock_filmed', 'MultiHeadDotProductAttention_0', 'query', 'bias', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoderblock_filmed", "key_type": 2}, {"key": "MultiHeadDotProductAttention_0", "key_type": 2}, {"key": "query", "key_type": 2}, {"key": "bias", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoderblock_filmed', 'MultiHeadDotProductAttention_0', 'query', 'kernel', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoderblock_filmed", "key_type": 2}, {"key": "MultiHeadDotProductAttention_0", "key_type": 2}, {"key": "query", "key_type": 2}, {"key": "kernel", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoderblock_filmed', 'MultiHeadDotProductAttention_0', 'value', 'bias', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoderblock_filmed", "key_type": 2}, {"key": "MultiHeadDotProductAttention_0", "key_type": 2}, {"key": "value", "key_type": 2}, {"key": "bias", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoderblock_filmed', 'MultiHeadDotProductAttention_0', 'value', 'kernel', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoderblock_filmed", "key_type": 2}, {"key": "MultiHeadDotProductAttention_0", "key_type": 2}, {"key": "value", "key_type": 2}, {"key": "kernel", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'img', 'embedding', 'bias', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "embedding", "key_type": 2}, {"key": "bias", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": 
false}}, "('params', 'PaliGemma', 'img', 'embedding', 'kernel', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "embedding", "key_type": 2}, {"key": "kernel", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'img', 'head', 'bias', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "head", "key_type": 2}, {"key": "bias", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'img', 'head', 'kernel', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "head", "key_type": 2}, {"key": "kernel", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'img', 'pos_embedding', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "pos_embedding", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'llm', 'embedder', 'input_embedding', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "llm", "key_type": 2}, {"key": "embedder", "key_type": 2}, {"key": "input_embedding", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'llm', 'final_norm', 'scale', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "llm", "key_type": 2}, {"key": "final_norm", "key_type": 2}, {"key": "scale", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'llm', 'final_norm_1', 'scale', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "llm", "key_type": 2}, {"key": "final_norm_1", "key_type": 2}, {"key": "scale", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'llm', 'layers', 'attn', 'attn_vec_einsum', 'w', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "llm", "key_type": 2}, {"key": "layers", "key_type": 2}, {"key": "attn", "key_type": 2}, {"key": "attn_vec_einsum", "key_type": 2}, {"key": "w", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'llm', 'layers', 'attn', 'attn_vec_einsum_1', 'w', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "llm", "key_type": 2}, {"key": "layers", "key_type": 2}, {"key": "attn", "key_type": 2}, {"key": "attn_vec_einsum_1", "key_type": 2}, {"key": "w", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'llm', 'layers', 'attn', 'kv_einsum', 'w', 'value')": {"key_metadata": [{"key": "params", 
"key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "llm", "key_type": 2}, {"key": "layers", "key_type": 2}, {"key": "attn", "key_type": 2}, {"key": "kv_einsum", "key_type": 2}, {"key": "w", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'llm', 'layers', 'attn', 'kv_einsum_1', 'w', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "llm", "key_type": 2}, {"key": "layers", "key_type": 2}, {"key": "attn", "key_type": 2}, {"key": "kv_einsum_1", "key_type": 2}, {"key": "w", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'llm', 'layers', 'attn', 'q_einsum', 'w', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "llm", "key_type": 2}, {"key": "layers", "key_type": 2}, {"key": "attn", "key_type": 2}, {"key": "q_einsum", "key_type": 2}, {"key": "w", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'llm', 'layers', 'attn', 'q_einsum_1', 'w', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "llm", "key_type": 2}, {"key": "layers", "key_type": 2}, {"key": "attn", "key_type": 2}, {"key": "q_einsum_1", "key_type": 2}, {"key": "w", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'llm', 'layers', 'mlp', 'gating_einsum', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "llm", "key_type": 2}, {"key": "layers", "key_type": 2}, {"key": "mlp", "key_type": 2}, {"key": "gating_einsum", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'llm', 'layers', 'mlp', 'linear', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "llm", "key_type": 2}, {"key": "layers", "key_type": 2}, {"key": "mlp", "key_type": 2}, {"key": "linear", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'llm', 'layers', 'mlp_1', 'gating_einsum', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "llm", "key_type": 2}, {"key": "layers", "key_type": 2}, {"key": "mlp_1", "key_type": 2}, {"key": "gating_einsum", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'llm', 'layers', 'mlp_1', 'linear', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "llm", "key_type": 2}, {"key": "layers", "key_type": 2}, {"key": "mlp_1", "key_type": 2}, {"key": "linear", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'llm', 'layers', 'pre_attention_norm', 'scale', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "llm", "key_type": 2}, {"key": "layers", "key_type": 2}, {"key": "pre_attention_norm", 
"key_type": 2}, {"key": "scale", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'llm', 'layers', 'pre_attention_norm_1', 'scale', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "llm", "key_type": 2}, {"key": "layers", "key_type": 2}, {"key": "pre_attention_norm_1", "key_type": 2}, {"key": "scale", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'llm', 'layers', 'pre_ffw_norm', 'scale', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "llm", "key_type": 2}, {"key": "layers", "key_type": 2}, {"key": "pre_ffw_norm", "key_type": 2}, {"key": "scale", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'llm', 'layers', 'pre_ffw_norm_1', 'scale', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "llm", "key_type": 2}, {"key": "layers", "key_type": 2}, {"key": "pre_ffw_norm_1", "key_type": 2}, {"key": "scale", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'action_in_proj', 'bias', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "action_in_proj", "key_type": 2}, {"key": "bias", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'action_in_proj', 'kernel', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "action_in_proj", "key_type": 2}, {"key": "kernel", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'action_out_proj', 'bias', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "action_out_proj", "key_type": 2}, {"key": "bias", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'action_out_proj', 'kernel', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "action_out_proj", "key_type": 2}, {"key": "kernel", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'action_time_mlp_in', 'bias', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "action_time_mlp_in", "key_type": 2}, {"key": "bias", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'action_time_mlp_in', 'kernel', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "action_time_mlp_in", "key_type": 2}, {"key": "kernel", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'action_time_mlp_out', 'bias', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "action_time_mlp_out", "key_type": 2}, {"key": "bias", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'action_time_mlp_out', 'kernel', 'value')": {"key_metadata": [{"key": "params", "key_type": 
2}, {"key": "action_time_mlp_out", "key_type": 2}, {"key": "kernel", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'state_proj', 'bias', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "state_proj", "key_type": 2}, {"key": "bias", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'state_proj', 'kernel', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "state_proj", "key_type": 2}, {"key": "kernel", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}}, "use_zarr3": false, "store_array_data_equal_to_fill_value": true, "custom_metadata": null} \ No newline at end of file diff --git a/collecting_film--resume/20000/params/_sharding b/collecting_film--resume/20000/params/_sharding new file mode 100644 index 0000000000000000000000000000000000000000..df09c5d72a5b9fd469ad2dce4f83b7aa2e09fd66 --- /dev/null +++ b/collecting_film--resume/20000/params/_sharding @@ -0,0 +1 @@ +{"cGFyYW1zLlBhbGlHZW1tYS5pbWcuVHJhbnNmb3JtZXJfRmlMTWVkLmVuY29kZXJfbm9ybV9maWxtZWQuYmlhcy52YWx1ZQ==":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5pbWcuVHJhbnNmb3JtZXJfRmlMTWVkLmVuY29kZXJfbm9ybV9maWxtZWQuc2NhbGUudmFsdWU=":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5pbWcuVHJhbnNmb3JtZXJfRmlMTWVkLmVuY29kZXJibG9ja19maWxtZWQuRmlMTUxheWVyXzAuc2NhbGVfcHJvamVjdG9yLmJpYXMudmFsdWU=":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5pbWcuVHJhbnNmb3JtZXJfRmlMTWVkLmVuY29kZXJibG9ja19maWxtZWQuRmlMTUxheWVyXzAuc2NhbGVfcHJvamVjdG9yLmtlcm5lbC52YWx1ZQ==":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5pbWcuVHJhbnNmb3JtZXJfRmlMTWVkLmVuY29kZXJibG9ja19maWxtZWQuRmlMTUxheWVyXzAuc2hpZnRfcHJvamVjdG9yLmJpYXMudmFsdWU=":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5pbWcuVHJhbnNmb3JtZXJfRmlMTWVkLmVuY29kZXJibG9ja19maWxtZWQuRmlMTUxheWVyXzAuc2hpZnRfcHJvamVjdG9yLmtlcm5lbC52YWx1ZQ==":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5pbWcuVHJhbnNmb3JtZXJfRmlMTWVkLmVuY29kZXJibG9ja19maWxtZWQuTGF5ZXJOb3JtXzAuYmlhcy52YWx1ZQ==":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5pbWcuVHJhbnNmb3JtZXJfRmlMTWVkLmVuY29kZXJibG9ja19maWxtZWQuTGF5ZXJOb3JtXzAuc2NhbGUudmFsdWU=":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 
0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5pbWcuVHJhbnNmb3JtZXJfRmlMTWVkLmVuY29kZXJibG9ja19maWxtZWQuTGF5ZXJOb3JtXzEuYmlhcy52YWx1ZQ==":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5pbWcuVHJhbnNmb3JtZXJfRmlMTWVkLmVuY29kZXJibG9ja19maWxtZWQuTGF5ZXJOb3JtXzEuc2NhbGUudmFsdWU=":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5pbWcuVHJhbnNmb3JtZXJfRmlMTWVkLmVuY29kZXJibG9ja19maWxtZWQuTWxwQmxvY2tfMC5EZW5zZV8wLmJpYXMudmFsdWU=":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5pbWcuVHJhbnNmb3JtZXJfRmlMTWVkLmVuY29kZXJibG9ja19maWxtZWQuTWxwQmxvY2tfMC5EZW5zZV8wLmtlcm5lbC52YWx1ZQ==":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5pbWcuVHJhbnNmb3JtZXJfRmlMTWVkLmVuY29kZXJibG9ja19maWxtZWQuTWxwQmxvY2tfMC5EZW5zZV8xLmJpYXMudmFsdWU=":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5pbWcuVHJhbnNmb3JtZXJfRmlMTWVkLmVuY29kZXJibG9ja19maWxtZWQuTWxwQmxvY2tfMC5EZW5zZV8xLmtlcm5lbC52YWx1ZQ==":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5pbWcuVHJhbnNmb3JtZXJfRmlMTWVkLmVuY29kZXJibG9ja19maWxtZWQuTXVsdGlIZWFkRG90UHJvZHVjdEF0dGVudGlvbl8wLm91dC5iaWFzLnZhbHVl":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5pbWcuVHJhbnNmb3JtZXJfRmlMTWVkLmVuY29kZXJibG9ja19maWxtZWQuTXVsdGlIZWFkRG90UHJvZHVjdEF0dGVudGlvbl8wLm91dC5rZXJuZWwudmFsdWU=":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5pbWcuVHJhbnNmb3JtZXJfRmlMTWVkLmVuY29kZXJibG9ja19maWxtZWQuTXVsdGlIZWFkRG90UHJvZHVjdEF0dGVudGlvbl8wLmtleS5iaWFzLnZhbHVl":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5pbWcuVHJhbnNmb3JtZXJfRmlMTWVkLmVuY29kZXJibG9ja19maWxtZWQuTXVsdGlIZWFkRG90UHJvZHVjdEF0dGVudGlvbl8wLmtleS5rZXJuZWwudmFsdWU=":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5pbWcuVHJhbnNmb3JtZXJfRmlMTWVkLmVuY29kZXJibG9ja19maWxtZWQuTXVsdGlIZWFkRG90UHJvZHVjdEF0dGVudGlvbl8wLnF1ZXJ5LmJpYXMudmFsdWU=":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5pbWcuVHJhbnNmb3JtZXJfRmlMTWVkLmVuY29kZXJibG9ja19maWxtZWQuTXVsdGlIZWFkRG90UHJvZHVjdEF0dGVudGlvbl8wLnF1ZXJ5Lmtlcm5lbC52YWx1ZQ==":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": 
[\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5pbWcuVHJhbnNmb3JtZXJfRmlMTWVkLmVuY29kZXJibG9ja19maWxtZWQuTXVsdGlIZWFkRG90UHJvZHVjdEF0dGVudGlvbl8wLnZhbHVlLmJpYXMudmFsdWU=":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5pbWcuVHJhbnNmb3JtZXJfRmlMTWVkLmVuY29kZXJibG9ja19maWxtZWQuTXVsdGlIZWFkRG90UHJvZHVjdEF0dGVudGlvbl8wLnZhbHVlLmtlcm5lbC52YWx1ZQ==":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5pbWcuZW1iZWRkaW5nLmJpYXMudmFsdWU=":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5pbWcuZW1iZWRkaW5nLmtlcm5lbC52YWx1ZQ==":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5pbWcuaGVhZC5iaWFzLnZhbHVl":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5pbWcuaGVhZC5rZXJuZWwudmFsdWU=":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5pbWcucG9zX2VtYmVkZGluZy52YWx1ZQ==":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5sbG0uZW1iZWRkZXIuaW5wdXRfZW1iZWRkaW5nLnZhbHVl":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5sbG0uZmluYWxfbm9ybS5zY2FsZS52YWx1ZQ==":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5sbG0uZmluYWxfbm9ybV8xLnNjYWxlLnZhbHVl":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5sbG0ubGF5ZXJzLm1scC5nYXRpbmdfZWluc3VtLnZhbHVl":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5sbG0ubGF5ZXJzLm1scC5saW5lYXIudmFsdWU=":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5sbG0ubGF5ZXJzLm1scF8xLmdhdGluZ19laW5zdW0udmFsdWU=":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5sbG0ubGF5ZXJzLm1scF8xLmxpbmVhci52YWx1ZQ==":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 
0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5sbG0ubGF5ZXJzLmF0dG4uYXR0bl92ZWNfZWluc3VtLncudmFsdWU=":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5sbG0ubGF5ZXJzLmF0dG4uYXR0bl92ZWNfZWluc3VtXzEudy52YWx1ZQ==":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5sbG0ubGF5ZXJzLmF0dG4ua3ZfZWluc3VtLncudmFsdWU=":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5sbG0ubGF5ZXJzLmF0dG4ua3ZfZWluc3VtXzEudy52YWx1ZQ==":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5sbG0ubGF5ZXJzLmF0dG4ucV9laW5zdW0udy52YWx1ZQ==":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5sbG0ubGF5ZXJzLmF0dG4ucV9laW5zdW1fMS53LnZhbHVl":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5sbG0ubGF5ZXJzLnByZV9hdHRlbnRpb25fbm9ybS5zY2FsZS52YWx1ZQ==":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5sbG0ubGF5ZXJzLnByZV9hdHRlbnRpb25fbm9ybV8xLnNjYWxlLnZhbHVl":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5sbG0ubGF5ZXJzLnByZV9mZndfbm9ybS5zY2FsZS52YWx1ZQ==":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5sbG0ubGF5ZXJzLnByZV9mZndfbm9ybV8xLnNjYWxlLnZhbHVl":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLmFjdGlvbl90aW1lX21scF9pbi5iaWFzLnZhbHVl":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLmFjdGlvbl90aW1lX21scF9pbi5rZXJuZWwudmFsdWU=":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLmFjdGlvbl90aW1lX21scF9vdXQuYmlhcy52YWx1ZQ==":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLmFjdGlvbl90aW1lX21scF9vdXQua2VybmVsLnZhbHVl":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLmFjdGlvbl9pbl9wcm9qLmJpYXMudmFsdWU=":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": 
[[{\"id\": 0}]]}}","cGFyYW1zLmFjdGlvbl9pbl9wcm9qLmtlcm5lbC52YWx1ZQ==":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLmFjdGlvbl9vdXRfcHJvai5iaWFzLnZhbHVl":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLmFjdGlvbl9vdXRfcHJvai5rZXJuZWwudmFsdWU=":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLnN0YXRlX3Byb2ouYmlhcy52YWx1ZQ==":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLnN0YXRlX3Byb2oua2VybmVsLnZhbHVl":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}"} \ No newline at end of file diff --git a/collecting_film--resume/20000/params/d/26b2241f1725f13db15b1d86f36cfbc9 b/collecting_film--resume/20000/params/d/26b2241f1725f13db15b1d86f36cfbc9 new file mode 100644 index 0000000000000000000000000000000000000000..7a4c10715b8c25c54bbbd918aa64453faa478dc2 Binary files /dev/null and b/collecting_film--resume/20000/params/d/26b2241f1725f13db15b1d86f36cfbc9 differ diff --git a/collecting_film--resume/20000/params/manifest.ocdbt b/collecting_film--resume/20000/params/manifest.ocdbt new file mode 100644 index 0000000000000000000000000000000000000000..56fd9513ccc4ddf9d278bceafaacfc46b8c413c7 Binary files /dev/null and b/collecting_film--resume/20000/params/manifest.ocdbt differ diff --git a/collecting_film--resume/20000/params/ocdbt.process_0/d/080d60551f16dec04ecd4021afb0a9a0 b/collecting_film--resume/20000/params/ocdbt.process_0/d/080d60551f16dec04ecd4021afb0a9a0 new file mode 100644 index 0000000000000000000000000000000000000000..6ceec31e38d24f8e38874c1a0b28549c23c5b00a Binary files /dev/null and b/collecting_film--resume/20000/params/ocdbt.process_0/d/080d60551f16dec04ecd4021afb0a9a0 differ diff --git a/collecting_film--resume/20000/params/ocdbt.process_0/d/585a567506dba284f858a9c92dbf5442 b/collecting_film--resume/20000/params/ocdbt.process_0/d/585a567506dba284f858a9c92dbf5442 new file mode 100644 index 0000000000000000000000000000000000000000..0a1653126359bab5d5d6f47cf5da5a630d83542c --- /dev/null +++ b/collecting_film--resume/20000/params/ocdbt.process_0/d/585a567506dba284f858a9c92dbf5442 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0b221e842eded19d34d59fc6e81f4afa8a6d2cac768775117671a6f8ad609183 +size 3467441 diff --git a/collecting_film--resume/20000/params/ocdbt.process_0/d/738abacd4e735c198044bc7e3ba60916 b/collecting_film--resume/20000/params/ocdbt.process_0/d/738abacd4e735c198044bc7e3ba60916 new file mode 100644 index 0000000000000000000000000000000000000000..d2639bd847309223cb78c50afa5ed353d1ac5bf5 Binary files /dev/null and b/collecting_film--resume/20000/params/ocdbt.process_0/d/738abacd4e735c198044bc7e3ba60916 differ diff --git a/collecting_film--resume/20000/params/ocdbt.process_0/d/8d5cec5ac91af777f342a9594c12e1ad b/collecting_film--resume/20000/params/ocdbt.process_0/d/8d5cec5ac91af777f342a9594c12e1ad new file mode 100644 index 0000000000000000000000000000000000000000..46fd1fd952c111e2925c1e448b44c9583b7cd7d6 Binary files 
/dev/null and b/collecting_film--resume/20000/params/ocdbt.process_0/d/8d5cec5ac91af777f342a9594c12e1ad differ diff --git a/collecting_film--resume/20000/params/ocdbt.process_0/d/94766476608a20d0c2dcc4c65d0d2689 b/collecting_film--resume/20000/params/ocdbt.process_0/d/94766476608a20d0c2dcc4c65d0d2689 new file mode 100644 index 0000000000000000000000000000000000000000..ae25a69ef0fe8080cf4b30c0f6628176318799d0 Binary files /dev/null and b/collecting_film--resume/20000/params/ocdbt.process_0/d/94766476608a20d0c2dcc4c65d0d2689 differ diff --git a/collecting_film--resume/20000/params/ocdbt.process_0/d/adfd15b2e7551b00f53f9032e0961fc8 b/collecting_film--resume/20000/params/ocdbt.process_0/d/adfd15b2e7551b00f53f9032e0961fc8 new file mode 100644 index 0000000000000000000000000000000000000000..d0209b4da1e411e039e4d1c8b42bcaddadb277e9 --- /dev/null +++ b/collecting_film--resume/20000/params/ocdbt.process_0/d/adfd15b2e7551b00f53f9032e0961fc8 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:03ec10a115270e803e295905907ff8c426fe294d10fb604826e31f5473a6be14 +size 5043016 diff --git a/collecting_film--resume/20000/params/ocdbt.process_0/manifest.ocdbt b/collecting_film--resume/20000/params/ocdbt.process_0/manifest.ocdbt new file mode 100644 index 0000000000000000000000000000000000000000..8fd8c9f365c597a0dae8710fe53ce97745870c46 Binary files /dev/null and b/collecting_film--resume/20000/params/ocdbt.process_0/manifest.ocdbt differ diff --git a/collecting_film--resume/20000/train_state/_METADATA b/collecting_film--resume/20000/train_state/_METADATA new file mode 100644 index 0000000000000000000000000000000000000000..d1d3c831624581920fd85ea680242b487848eea4 --- /dev/null +++ b/collecting_film--resume/20000/train_state/_METADATA @@ -0,0 +1 @@ +{"tree_metadata": {"('step',)": {"key_metadata": [{"key": "step", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoder_norm_filmed', 'bias', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoder_norm_filmed", "key_type": 2}, {"key": "bias", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoder_norm_filmed', 'scale', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoder_norm_filmed", "key_type": 2}, {"key": "scale", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoderblock_filmed', 'FiLMLayer_0', 'scale_projector', 'bias', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoderblock_filmed", "key_type": 2}, {"key": "FiLMLayer_0", "key_type": 2}, {"key": "scale_projector", "key_type": 2}, {"key": "bias", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoderblock_filmed', 'FiLMLayer_0', 'scale_projector', 'kernel', 'value')": {"key_metadata": 
[{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoderblock_filmed", "key_type": 2}, {"key": "FiLMLayer_0", "key_type": 2}, {"key": "scale_projector", "key_type": 2}, {"key": "kernel", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoderblock_filmed', 'FiLMLayer_0', 'shift_projector', 'bias', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoderblock_filmed", "key_type": 2}, {"key": "FiLMLayer_0", "key_type": 2}, {"key": "shift_projector", "key_type": 2}, {"key": "bias", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoderblock_filmed', 'FiLMLayer_0', 'shift_projector', 'kernel', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoderblock_filmed", "key_type": 2}, {"key": "FiLMLayer_0", "key_type": 2}, {"key": "shift_projector", "key_type": 2}, {"key": "kernel", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoderblock_filmed', 'LayerNorm_0', 'bias', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoderblock_filmed", "key_type": 2}, {"key": "LayerNorm_0", "key_type": 2}, {"key": "bias", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoderblock_filmed', 'LayerNorm_0', 'scale', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoderblock_filmed", "key_type": 2}, {"key": "LayerNorm_0", "key_type": 2}, {"key": "scale", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoderblock_filmed', 'LayerNorm_1', 'bias', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoderblock_filmed", "key_type": 2}, {"key": "LayerNorm_1", "key_type": 2}, {"key": "bias", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoderblock_filmed', 'LayerNorm_1', 'scale', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoderblock_filmed", "key_type": 2}, {"key": "LayerNorm_1", "key_type": 2}, {"key": "scale", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", 
"skip_deserialize": false}}, "('params', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoderblock_filmed', 'MlpBlock_0', 'Dense_0', 'bias', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoderblock_filmed", "key_type": 2}, {"key": "MlpBlock_0", "key_type": 2}, {"key": "Dense_0", "key_type": 2}, {"key": "bias", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoderblock_filmed', 'MlpBlock_0', 'Dense_0', 'kernel', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoderblock_filmed", "key_type": 2}, {"key": "MlpBlock_0", "key_type": 2}, {"key": "Dense_0", "key_type": 2}, {"key": "kernel", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoderblock_filmed', 'MlpBlock_0', 'Dense_1', 'bias', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoderblock_filmed", "key_type": 2}, {"key": "MlpBlock_0", "key_type": 2}, {"key": "Dense_1", "key_type": 2}, {"key": "bias", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoderblock_filmed', 'MlpBlock_0', 'Dense_1', 'kernel', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoderblock_filmed", "key_type": 2}, {"key": "MlpBlock_0", "key_type": 2}, {"key": "Dense_1", "key_type": 2}, {"key": "kernel", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoderblock_filmed', 'MultiHeadDotProductAttention_0', 'key', 'bias', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoderblock_filmed", "key_type": 2}, {"key": "MultiHeadDotProductAttention_0", "key_type": 2}, {"key": "key", "key_type": 2}, {"key": "bias", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoderblock_filmed', 'MultiHeadDotProductAttention_0', 'key', 'kernel', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoderblock_filmed", "key_type": 2}, {"key": "MultiHeadDotProductAttention_0", "key_type": 2}, {"key": "key", "key_type": 2}, {"key": "kernel", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoderblock_filmed', 'MultiHeadDotProductAttention_0', 'out', 'bias', 'value')": {"key_metadata": 
[{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoderblock_filmed", "key_type": 2}, {"key": "MultiHeadDotProductAttention_0", "key_type": 2}, {"key": "out", "key_type": 2}, {"key": "bias", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoderblock_filmed', 'MultiHeadDotProductAttention_0', 'out', 'kernel', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoderblock_filmed", "key_type": 2}, {"key": "MultiHeadDotProductAttention_0", "key_type": 2}, {"key": "out", "key_type": 2}, {"key": "kernel", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoderblock_filmed', 'MultiHeadDotProductAttention_0', 'query', 'bias', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoderblock_filmed", "key_type": 2}, {"key": "MultiHeadDotProductAttention_0", "key_type": 2}, {"key": "query", "key_type": 2}, {"key": "bias", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoderblock_filmed', 'MultiHeadDotProductAttention_0', 'query', 'kernel', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoderblock_filmed", "key_type": 2}, {"key": "MultiHeadDotProductAttention_0", "key_type": 2}, {"key": "query", "key_type": 2}, {"key": "kernel", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoderblock_filmed', 'MultiHeadDotProductAttention_0', 'value', 'bias', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoderblock_filmed", "key_type": 2}, {"key": "MultiHeadDotProductAttention_0", "key_type": 2}, {"key": "value", "key_type": 2}, {"key": "bias", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoderblock_filmed', 'MultiHeadDotProductAttention_0', 'value', 'kernel', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoderblock_filmed", "key_type": 2}, {"key": "MultiHeadDotProductAttention_0", "key_type": 2}, {"key": "value", "key_type": 2}, {"key": "kernel", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'img', 'embedding', 'bias', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": 
"embedding", "key_type": 2}, {"key": "bias", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'img', 'embedding', 'kernel', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "embedding", "key_type": 2}, {"key": "kernel", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'img', 'head', 'bias', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "head", "key_type": 2}, {"key": "bias", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'img', 'head', 'kernel', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "head", "key_type": 2}, {"key": "kernel", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'img', 'pos_embedding', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "pos_embedding", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'llm', 'embedder', 'input_embedding', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "llm", "key_type": 2}, {"key": "embedder", "key_type": 2}, {"key": "input_embedding", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'llm', 'final_norm', 'scale', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "llm", "key_type": 2}, {"key": "final_norm", "key_type": 2}, {"key": "scale", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'llm', 'final_norm_1', 'scale', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "llm", "key_type": 2}, {"key": "final_norm_1", "key_type": 2}, {"key": "scale", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'llm', 'layers', 'attn', 'attn_vec_einsum', 'w', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "llm", "key_type": 2}, {"key": "layers", "key_type": 2}, {"key": "attn", "key_type": 2}, {"key": "attn_vec_einsum", "key_type": 2}, {"key": "w", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'llm', 'layers', 'attn', 'attn_vec_einsum_1', 'w', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "llm", "key_type": 2}, {"key": "layers", "key_type": 2}, {"key": "attn", "key_type": 2}, {"key": "attn_vec_einsum_1", "key_type": 2}, {"key": "w", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": 
{"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'llm', 'layers', 'attn', 'kv_einsum', 'w', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "llm", "key_type": 2}, {"key": "layers", "key_type": 2}, {"key": "attn", "key_type": 2}, {"key": "kv_einsum", "key_type": 2}, {"key": "w", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'llm', 'layers', 'attn', 'kv_einsum_1', 'w', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "llm", "key_type": 2}, {"key": "layers", "key_type": 2}, {"key": "attn", "key_type": 2}, {"key": "kv_einsum_1", "key_type": 2}, {"key": "w", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'llm', 'layers', 'attn', 'q_einsum', 'w', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "llm", "key_type": 2}, {"key": "layers", "key_type": 2}, {"key": "attn", "key_type": 2}, {"key": "q_einsum", "key_type": 2}, {"key": "w", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'llm', 'layers', 'attn', 'q_einsum_1', 'w', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "llm", "key_type": 2}, {"key": "layers", "key_type": 2}, {"key": "attn", "key_type": 2}, {"key": "q_einsum_1", "key_type": 2}, {"key": "w", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'llm', 'layers', 'mlp', 'gating_einsum', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "llm", "key_type": 2}, {"key": "layers", "key_type": 2}, {"key": "mlp", "key_type": 2}, {"key": "gating_einsum", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'llm', 'layers', 'mlp', 'linear', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "llm", "key_type": 2}, {"key": "layers", "key_type": 2}, {"key": "mlp", "key_type": 2}, {"key": "linear", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'llm', 'layers', 'mlp_1', 'gating_einsum', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "llm", "key_type": 2}, {"key": "layers", "key_type": 2}, {"key": "mlp_1", "key_type": 2}, {"key": "gating_einsum", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'llm', 'layers', 'mlp_1', 'linear', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "llm", "key_type": 2}, {"key": "layers", "key_type": 2}, {"key": "mlp_1", "key_type": 2}, {"key": "linear", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'llm', 'layers', 'pre_attention_norm', 'scale', 'value')": {"key_metadata": 
[{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "llm", "key_type": 2}, {"key": "layers", "key_type": 2}, {"key": "pre_attention_norm", "key_type": 2}, {"key": "scale", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'llm', 'layers', 'pre_attention_norm_1', 'scale', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "llm", "key_type": 2}, {"key": "layers", "key_type": 2}, {"key": "pre_attention_norm_1", "key_type": 2}, {"key": "scale", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'llm', 'layers', 'pre_ffw_norm', 'scale', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "llm", "key_type": 2}, {"key": "layers", "key_type": 2}, {"key": "pre_ffw_norm", "key_type": 2}, {"key": "scale", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'PaliGemma', 'llm', 'layers', 'pre_ffw_norm_1', 'scale', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "llm", "key_type": 2}, {"key": "layers", "key_type": 2}, {"key": "pre_ffw_norm_1", "key_type": 2}, {"key": "scale", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'action_in_proj', 'bias', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "action_in_proj", "key_type": 2}, {"key": "bias", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'action_in_proj', 'kernel', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "action_in_proj", "key_type": 2}, {"key": "kernel", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'action_out_proj', 'bias', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "action_out_proj", "key_type": 2}, {"key": "bias", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'action_out_proj', 'kernel', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "action_out_proj", "key_type": 2}, {"key": "kernel", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'action_time_mlp_in', 'bias', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "action_time_mlp_in", "key_type": 2}, {"key": "bias", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'action_time_mlp_in', 'kernel', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "action_time_mlp_in", "key_type": 2}, {"key": "kernel", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'action_time_mlp_out', 'bias', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "action_time_mlp_out", "key_type": 2}, {"key": "bias", "key_type": 2}, {"key": "value", "key_type": 2}], 
"value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'action_time_mlp_out', 'kernel', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "action_time_mlp_out", "key_type": 2}, {"key": "kernel", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'state_proj', 'bias', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "state_proj", "key_type": 2}, {"key": "bias", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('params', 'state_proj', 'kernel', 'value')": {"key_metadata": [{"key": "params", "key_type": 2}, {"key": "state_proj", "key_type": 2}, {"key": "kernel", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('model_def',)": {"key_metadata": [{"key": "model_def", "key_type": 2}], "value_metadata": {"value_type": "None", "skip_deserialize": true}}, "('opt_state', '0')": {"key_metadata": [{"key": "opt_state", "key_type": 2}, {"key": "0", "key_type": 1}], "value_metadata": {"value_type": "None", "skip_deserialize": true}}, "('opt_state', '1', '0', 'count')": {"key_metadata": [{"key": "opt_state", "key_type": 2}, {"key": "1", "key_type": 1}, {"key": "0", "key_type": 1}, {"key": "count", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('opt_state', '1', '0', 'mu', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoder_norm_filmed', 'bias', 'value')": {"key_metadata": [{"key": "opt_state", "key_type": 2}, {"key": "1", "key_type": 1}, {"key": "0", "key_type": 1}, {"key": "mu", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoder_norm_filmed", "key_type": 2}, {"key": "bias", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('opt_state', '1', '0', 'mu', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoder_norm_filmed', 'scale', 'value')": {"key_metadata": [{"key": "opt_state", "key_type": 2}, {"key": "1", "key_type": 1}, {"key": "0", "key_type": 1}, {"key": "mu", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoder_norm_filmed", "key_type": 2}, {"key": "scale", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('opt_state', '1', '0', 'mu', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoderblock_filmed', 'FiLMLayer_0', 'scale_projector', 'bias', 'value')": {"key_metadata": [{"key": "opt_state", "key_type": 2}, {"key": "1", "key_type": 1}, {"key": "0", "key_type": 1}, {"key": "mu", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoderblock_filmed", "key_type": 2}, {"key": "FiLMLayer_0", "key_type": 2}, {"key": "scale_projector", "key_type": 2}, {"key": "bias", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('opt_state', '1', '0', 'mu', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoderblock_filmed', 'FiLMLayer_0', 'scale_projector', 'kernel', 'value')": {"key_metadata": [{"key": "opt_state", "key_type": 2}, {"key": "1", "key_type": 1}, {"key": "0", 
"key_type": 1}, {"key": "mu", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoderblock_filmed", "key_type": 2}, {"key": "FiLMLayer_0", "key_type": 2}, {"key": "scale_projector", "key_type": 2}, {"key": "kernel", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('opt_state', '1', '0', 'mu', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoderblock_filmed', 'FiLMLayer_0', 'shift_projector', 'bias', 'value')": {"key_metadata": [{"key": "opt_state", "key_type": 2}, {"key": "1", "key_type": 1}, {"key": "0", "key_type": 1}, {"key": "mu", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoderblock_filmed", "key_type": 2}, {"key": "FiLMLayer_0", "key_type": 2}, {"key": "shift_projector", "key_type": 2}, {"key": "bias", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('opt_state', '1', '0', 'mu', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoderblock_filmed', 'FiLMLayer_0', 'shift_projector', 'kernel', 'value')": {"key_metadata": [{"key": "opt_state", "key_type": 2}, {"key": "1", "key_type": 1}, {"key": "0", "key_type": 1}, {"key": "mu", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoderblock_filmed", "key_type": 2}, {"key": "FiLMLayer_0", "key_type": 2}, {"key": "shift_projector", "key_type": 2}, {"key": "kernel", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('opt_state', '1', '0', 'mu', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoderblock_filmed', 'LayerNorm_0', 'bias', 'value')": {"key_metadata": [{"key": "opt_state", "key_type": 2}, {"key": "1", "key_type": 1}, {"key": "0", "key_type": 1}, {"key": "mu", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoderblock_filmed", "key_type": 2}, {"key": "LayerNorm_0", "key_type": 2}, {"key": "bias", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('opt_state', '1', '0', 'mu', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoderblock_filmed', 'LayerNorm_0', 'scale', 'value')": {"key_metadata": [{"key": "opt_state", "key_type": 2}, {"key": "1", "key_type": 1}, {"key": "0", "key_type": 1}, {"key": "mu", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoderblock_filmed", "key_type": 2}, {"key": "LayerNorm_0", "key_type": 2}, {"key": "scale", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('opt_state', '1', '0', 'mu', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoderblock_filmed', 'LayerNorm_1', 'bias', 'value')": {"key_metadata": [{"key": "opt_state", "key_type": 2}, {"key": "1", "key_type": 1}, {"key": "0", "key_type": 1}, {"key": "mu", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoderblock_filmed", "key_type": 2}, {"key": "LayerNorm_1", "key_type": 2}, {"key": "bias", "key_type": 2}, {"key": "value", "key_type": 
2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('opt_state', '1', '0', 'mu', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoderblock_filmed', 'LayerNorm_1', 'scale', 'value')": {"key_metadata": [{"key": "opt_state", "key_type": 2}, {"key": "1", "key_type": 1}, {"key": "0", "key_type": 1}, {"key": "mu", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoderblock_filmed", "key_type": 2}, {"key": "LayerNorm_1", "key_type": 2}, {"key": "scale", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('opt_state', '1', '0', 'mu', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoderblock_filmed', 'MlpBlock_0', 'Dense_0', 'bias', 'value')": {"key_metadata": [{"key": "opt_state", "key_type": 2}, {"key": "1", "key_type": 1}, {"key": "0", "key_type": 1}, {"key": "mu", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoderblock_filmed", "key_type": 2}, {"key": "MlpBlock_0", "key_type": 2}, {"key": "Dense_0", "key_type": 2}, {"key": "bias", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('opt_state', '1', '0', 'mu', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoderblock_filmed', 'MlpBlock_0', 'Dense_0', 'kernel', 'value')": {"key_metadata": [{"key": "opt_state", "key_type": 2}, {"key": "1", "key_type": 1}, {"key": "0", "key_type": 1}, {"key": "mu", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoderblock_filmed", "key_type": 2}, {"key": "MlpBlock_0", "key_type": 2}, {"key": "Dense_0", "key_type": 2}, {"key": "kernel", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('opt_state', '1', '0', 'mu', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoderblock_filmed', 'MlpBlock_0', 'Dense_1', 'bias', 'value')": {"key_metadata": [{"key": "opt_state", "key_type": 2}, {"key": "1", "key_type": 1}, {"key": "0", "key_type": 1}, {"key": "mu", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoderblock_filmed", "key_type": 2}, {"key": "MlpBlock_0", "key_type": 2}, {"key": "Dense_1", "key_type": 2}, {"key": "bias", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('opt_state', '1', '0', 'mu', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoderblock_filmed', 'MlpBlock_0', 'Dense_1', 'kernel', 'value')": {"key_metadata": [{"key": "opt_state", "key_type": 2}, {"key": "1", "key_type": 1}, {"key": "0", "key_type": 1}, {"key": "mu", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoderblock_filmed", "key_type": 2}, {"key": "MlpBlock_0", "key_type": 2}, {"key": "Dense_1", "key_type": 2}, {"key": "kernel", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('opt_state', '1', '0', 'mu', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoderblock_filmed', 'MultiHeadDotProductAttention_0', 'key', 'bias', 'value')": {"key_metadata": [{"key": "opt_state", "key_type": 2}, {"key": 
"1", "key_type": 1}, {"key": "0", "key_type": 1}, {"key": "mu", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoderblock_filmed", "key_type": 2}, {"key": "MultiHeadDotProductAttention_0", "key_type": 2}, {"key": "key", "key_type": 2}, {"key": "bias", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('opt_state', '1', '0', 'mu', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoderblock_filmed', 'MultiHeadDotProductAttention_0', 'key', 'kernel', 'value')": {"key_metadata": [{"key": "opt_state", "key_type": 2}, {"key": "1", "key_type": 1}, {"key": "0", "key_type": 1}, {"key": "mu", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoderblock_filmed", "key_type": 2}, {"key": "MultiHeadDotProductAttention_0", "key_type": 2}, {"key": "key", "key_type": 2}, {"key": "kernel", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('opt_state', '1', '0', 'mu', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoderblock_filmed', 'MultiHeadDotProductAttention_0', 'out', 'bias', 'value')": {"key_metadata": [{"key": "opt_state", "key_type": 2}, {"key": "1", "key_type": 1}, {"key": "0", "key_type": 1}, {"key": "mu", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoderblock_filmed", "key_type": 2}, {"key": "MultiHeadDotProductAttention_0", "key_type": 2}, {"key": "out", "key_type": 2}, {"key": "bias", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('opt_state', '1', '0', 'mu', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoderblock_filmed', 'MultiHeadDotProductAttention_0', 'out', 'kernel', 'value')": {"key_metadata": [{"key": "opt_state", "key_type": 2}, {"key": "1", "key_type": 1}, {"key": "0", "key_type": 1}, {"key": "mu", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoderblock_filmed", "key_type": 2}, {"key": "MultiHeadDotProductAttention_0", "key_type": 2}, {"key": "out", "key_type": 2}, {"key": "kernel", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('opt_state', '1', '0', 'mu', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoderblock_filmed', 'MultiHeadDotProductAttention_0', 'query', 'bias', 'value')": {"key_metadata": [{"key": "opt_state", "key_type": 2}, {"key": "1", "key_type": 1}, {"key": "0", "key_type": 1}, {"key": "mu", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoderblock_filmed", "key_type": 2}, {"key": "MultiHeadDotProductAttention_0", "key_type": 2}, {"key": "query", "key_type": 2}, {"key": "bias", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('opt_state', '1', '0', 'mu', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoderblock_filmed', 'MultiHeadDotProductAttention_0', 'query', 'kernel', 'value')": {"key_metadata": [{"key": "opt_state", "key_type": 2}, {"key": "1", "key_type": 1}, {"key": "0", "key_type": 1}, {"key": "mu", "key_type": 2}, {"key": 
"PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoderblock_filmed", "key_type": 2}, {"key": "MultiHeadDotProductAttention_0", "key_type": 2}, {"key": "query", "key_type": 2}, {"key": "kernel", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('opt_state', '1', '0', 'mu', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoderblock_filmed', 'MultiHeadDotProductAttention_0', 'value', 'bias', 'value')": {"key_metadata": [{"key": "opt_state", "key_type": 2}, {"key": "1", "key_type": 1}, {"key": "0", "key_type": 1}, {"key": "mu", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoderblock_filmed", "key_type": 2}, {"key": "MultiHeadDotProductAttention_0", "key_type": 2}, {"key": "value", "key_type": 2}, {"key": "bias", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('opt_state', '1', '0', 'mu', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoderblock_filmed', 'MultiHeadDotProductAttention_0', 'value', 'kernel', 'value')": {"key_metadata": [{"key": "opt_state", "key_type": 2}, {"key": "1", "key_type": 1}, {"key": "0", "key_type": 1}, {"key": "mu", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoderblock_filmed", "key_type": 2}, {"key": "MultiHeadDotProductAttention_0", "key_type": 2}, {"key": "value", "key_type": 2}, {"key": "kernel", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('opt_state', '1', '0', 'mu', 'PaliGemma', 'img', 'embedding', 'bias', 'value')": {"key_metadata": [{"key": "opt_state", "key_type": 2}, {"key": "1", "key_type": 1}, {"key": "0", "key_type": 1}, {"key": "mu", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "embedding", "key_type": 2}, {"key": "bias", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('opt_state', '1', '0', 'mu', 'PaliGemma', 'img', 'embedding', 'kernel', 'value')": {"key_metadata": [{"key": "opt_state", "key_type": 2}, {"key": "1", "key_type": 1}, {"key": "0", "key_type": 1}, {"key": "mu", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "embedding", "key_type": 2}, {"key": "kernel", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('opt_state', '1', '0', 'mu', 'PaliGemma', 'img', 'head', 'bias', 'value')": {"key_metadata": [{"key": "opt_state", "key_type": 2}, {"key": "1", "key_type": 1}, {"key": "0", "key_type": 1}, {"key": "mu", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "head", "key_type": 2}, {"key": "bias", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('opt_state', '1', '0', 'mu', 'PaliGemma', 'img', 'head', 'kernel', 'value')": {"key_metadata": [{"key": "opt_state", "key_type": 2}, {"key": "1", "key_type": 1}, {"key": "0", "key_type": 1}, {"key": "mu", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "head", "key_type": 2}, {"key": "kernel", "key_type": 2}, {"key": 
"value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('opt_state', '1', '0', 'mu', 'PaliGemma', 'img', 'pos_embedding', 'value')": {"key_metadata": [{"key": "opt_state", "key_type": 2}, {"key": "1", "key_type": 1}, {"key": "0", "key_type": 1}, {"key": "mu", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "pos_embedding", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('opt_state', '1', '0', 'mu', 'action_in_proj', 'bias', 'value')": {"key_metadata": [{"key": "opt_state", "key_type": 2}, {"key": "1", "key_type": 1}, {"key": "0", "key_type": 1}, {"key": "mu", "key_type": 2}, {"key": "action_in_proj", "key_type": 2}, {"key": "bias", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('opt_state', '1', '0', 'mu', 'action_in_proj', 'kernel', 'value')": {"key_metadata": [{"key": "opt_state", "key_type": 2}, {"key": "1", "key_type": 1}, {"key": "0", "key_type": 1}, {"key": "mu", "key_type": 2}, {"key": "action_in_proj", "key_type": 2}, {"key": "kernel", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('opt_state', '1', '0', 'mu', 'action_out_proj', 'bias', 'value')": {"key_metadata": [{"key": "opt_state", "key_type": 2}, {"key": "1", "key_type": 1}, {"key": "0", "key_type": 1}, {"key": "mu", "key_type": 2}, {"key": "action_out_proj", "key_type": 2}, {"key": "bias", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('opt_state', '1', '0', 'mu', 'action_out_proj', 'kernel', 'value')": {"key_metadata": [{"key": "opt_state", "key_type": 2}, {"key": "1", "key_type": 1}, {"key": "0", "key_type": 1}, {"key": "mu", "key_type": 2}, {"key": "action_out_proj", "key_type": 2}, {"key": "kernel", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('opt_state', '1', '0', 'mu', 'action_time_mlp_in', 'bias', 'value')": {"key_metadata": [{"key": "opt_state", "key_type": 2}, {"key": "1", "key_type": 1}, {"key": "0", "key_type": 1}, {"key": "mu", "key_type": 2}, {"key": "action_time_mlp_in", "key_type": 2}, {"key": "bias", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('opt_state', '1', '0', 'mu', 'action_time_mlp_in', 'kernel', 'value')": {"key_metadata": [{"key": "opt_state", "key_type": 2}, {"key": "1", "key_type": 1}, {"key": "0", "key_type": 1}, {"key": "mu", "key_type": 2}, {"key": "action_time_mlp_in", "key_type": 2}, {"key": "kernel", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('opt_state', '1', '0', 'mu', 'action_time_mlp_out', 'bias', 'value')": {"key_metadata": [{"key": "opt_state", "key_type": 2}, {"key": "1", "key_type": 1}, {"key": "0", "key_type": 1}, {"key": "mu", "key_type": 2}, {"key": "action_time_mlp_out", "key_type": 2}, {"key": "bias", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('opt_state', '1', '0', 'mu', 'action_time_mlp_out', 'kernel', 'value')": {"key_metadata": [{"key": "opt_state", "key_type": 2}, {"key": "1", "key_type": 1}, {"key": "0", "key_type": 1}, {"key": "mu", "key_type": 
2}, {"key": "action_time_mlp_out", "key_type": 2}, {"key": "kernel", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('opt_state', '1', '0', 'mu', 'state_proj', 'bias', 'value')": {"key_metadata": [{"key": "opt_state", "key_type": 2}, {"key": "1", "key_type": 1}, {"key": "0", "key_type": 1}, {"key": "mu", "key_type": 2}, {"key": "state_proj", "key_type": 2}, {"key": "bias", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('opt_state', '1', '0', 'mu', 'state_proj', 'kernel', 'value')": {"key_metadata": [{"key": "opt_state", "key_type": 2}, {"key": "1", "key_type": 1}, {"key": "0", "key_type": 1}, {"key": "mu", "key_type": 2}, {"key": "state_proj", "key_type": 2}, {"key": "kernel", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('opt_state', '1', '0', 'nu', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoder_norm_filmed', 'bias', 'value')": {"key_metadata": [{"key": "opt_state", "key_type": 2}, {"key": "1", "key_type": 1}, {"key": "0", "key_type": 1}, {"key": "nu", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoder_norm_filmed", "key_type": 2}, {"key": "bias", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('opt_state', '1', '0', 'nu', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoder_norm_filmed', 'scale', 'value')": {"key_metadata": [{"key": "opt_state", "key_type": 2}, {"key": "1", "key_type": 1}, {"key": "0", "key_type": 1}, {"key": "nu", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoder_norm_filmed", "key_type": 2}, {"key": "scale", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('opt_state', '1', '0', 'nu', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoderblock_filmed', 'FiLMLayer_0', 'scale_projector', 'bias', 'value')": {"key_metadata": [{"key": "opt_state", "key_type": 2}, {"key": "1", "key_type": 1}, {"key": "0", "key_type": 1}, {"key": "nu", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoderblock_filmed", "key_type": 2}, {"key": "FiLMLayer_0", "key_type": 2}, {"key": "scale_projector", "key_type": 2}, {"key": "bias", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('opt_state', '1', '0', 'nu', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoderblock_filmed', 'FiLMLayer_0', 'scale_projector', 'kernel', 'value')": {"key_metadata": [{"key": "opt_state", "key_type": 2}, {"key": "1", "key_type": 1}, {"key": "0", "key_type": 1}, {"key": "nu", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoderblock_filmed", "key_type": 2}, {"key": "FiLMLayer_0", "key_type": 2}, {"key": "scale_projector", "key_type": 2}, {"key": "kernel", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('opt_state', '1', '0', 'nu', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoderblock_filmed', 
'FiLMLayer_0', 'shift_projector', 'bias', 'value')": {"key_metadata": [{"key": "opt_state", "key_type": 2}, {"key": "1", "key_type": 1}, {"key": "0", "key_type": 1}, {"key": "nu", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoderblock_filmed", "key_type": 2}, {"key": "FiLMLayer_0", "key_type": 2}, {"key": "shift_projector", "key_type": 2}, {"key": "bias", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('opt_state', '1', '0', 'nu', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoderblock_filmed', 'FiLMLayer_0', 'shift_projector', 'kernel', 'value')": {"key_metadata": [{"key": "opt_state", "key_type": 2}, {"key": "1", "key_type": 1}, {"key": "0", "key_type": 1}, {"key": "nu", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoderblock_filmed", "key_type": 2}, {"key": "FiLMLayer_0", "key_type": 2}, {"key": "shift_projector", "key_type": 2}, {"key": "kernel", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('opt_state', '1', '0', 'nu', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoderblock_filmed', 'LayerNorm_0', 'bias', 'value')": {"key_metadata": [{"key": "opt_state", "key_type": 2}, {"key": "1", "key_type": 1}, {"key": "0", "key_type": 1}, {"key": "nu", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoderblock_filmed", "key_type": 2}, {"key": "LayerNorm_0", "key_type": 2}, {"key": "bias", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('opt_state', '1', '0', 'nu', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoderblock_filmed', 'LayerNorm_0', 'scale', 'value')": {"key_metadata": [{"key": "opt_state", "key_type": 2}, {"key": "1", "key_type": 1}, {"key": "0", "key_type": 1}, {"key": "nu", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoderblock_filmed", "key_type": 2}, {"key": "LayerNorm_0", "key_type": 2}, {"key": "scale", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('opt_state', '1', '0', 'nu', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoderblock_filmed', 'LayerNorm_1', 'bias', 'value')": {"key_metadata": [{"key": "opt_state", "key_type": 2}, {"key": "1", "key_type": 1}, {"key": "0", "key_type": 1}, {"key": "nu", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoderblock_filmed", "key_type": 2}, {"key": "LayerNorm_1", "key_type": 2}, {"key": "bias", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('opt_state', '1', '0', 'nu', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoderblock_filmed', 'LayerNorm_1', 'scale', 'value')": {"key_metadata": [{"key": "opt_state", "key_type": 2}, {"key": "1", "key_type": 1}, {"key": "0", "key_type": 1}, {"key": "nu", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoderblock_filmed", "key_type": 2}, {"key": 
"LayerNorm_1", "key_type": 2}, {"key": "scale", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('opt_state', '1', '0', 'nu', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoderblock_filmed', 'MlpBlock_0', 'Dense_0', 'bias', 'value')": {"key_metadata": [{"key": "opt_state", "key_type": 2}, {"key": "1", "key_type": 1}, {"key": "0", "key_type": 1}, {"key": "nu", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoderblock_filmed", "key_type": 2}, {"key": "MlpBlock_0", "key_type": 2}, {"key": "Dense_0", "key_type": 2}, {"key": "bias", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('opt_state', '1', '0', 'nu', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoderblock_filmed', 'MlpBlock_0', 'Dense_0', 'kernel', 'value')": {"key_metadata": [{"key": "opt_state", "key_type": 2}, {"key": "1", "key_type": 1}, {"key": "0", "key_type": 1}, {"key": "nu", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoderblock_filmed", "key_type": 2}, {"key": "MlpBlock_0", "key_type": 2}, {"key": "Dense_0", "key_type": 2}, {"key": "kernel", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('opt_state', '1', '0', 'nu', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoderblock_filmed', 'MlpBlock_0', 'Dense_1', 'bias', 'value')": {"key_metadata": [{"key": "opt_state", "key_type": 2}, {"key": "1", "key_type": 1}, {"key": "0", "key_type": 1}, {"key": "nu", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoderblock_filmed", "key_type": 2}, {"key": "MlpBlock_0", "key_type": 2}, {"key": "Dense_1", "key_type": 2}, {"key": "bias", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('opt_state', '1', '0', 'nu', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoderblock_filmed', 'MlpBlock_0', 'Dense_1', 'kernel', 'value')": {"key_metadata": [{"key": "opt_state", "key_type": 2}, {"key": "1", "key_type": 1}, {"key": "0", "key_type": 1}, {"key": "nu", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoderblock_filmed", "key_type": 2}, {"key": "MlpBlock_0", "key_type": 2}, {"key": "Dense_1", "key_type": 2}, {"key": "kernel", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('opt_state', '1', '0', 'nu', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoderblock_filmed', 'MultiHeadDotProductAttention_0', 'key', 'bias', 'value')": {"key_metadata": [{"key": "opt_state", "key_type": 2}, {"key": "1", "key_type": 1}, {"key": "0", "key_type": 1}, {"key": "nu", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoderblock_filmed", "key_type": 2}, {"key": "MultiHeadDotProductAttention_0", "key_type": 2}, {"key": "key", "key_type": 2}, {"key": "bias", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('opt_state', '1', '0', 'nu', 'PaliGemma', 'img', 
'Transformer_FiLMed', 'encoderblock_filmed', 'MultiHeadDotProductAttention_0', 'key', 'kernel', 'value')": {"key_metadata": [{"key": "opt_state", "key_type": 2}, {"key": "1", "key_type": 1}, {"key": "0", "key_type": 1}, {"key": "nu", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoderblock_filmed", "key_type": 2}, {"key": "MultiHeadDotProductAttention_0", "key_type": 2}, {"key": "key", "key_type": 2}, {"key": "kernel", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('opt_state', '1', '0', 'nu', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoderblock_filmed', 'MultiHeadDotProductAttention_0', 'out', 'bias', 'value')": {"key_metadata": [{"key": "opt_state", "key_type": 2}, {"key": "1", "key_type": 1}, {"key": "0", "key_type": 1}, {"key": "nu", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoderblock_filmed", "key_type": 2}, {"key": "MultiHeadDotProductAttention_0", "key_type": 2}, {"key": "out", "key_type": 2}, {"key": "bias", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('opt_state', '1', '0', 'nu', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoderblock_filmed', 'MultiHeadDotProductAttention_0', 'out', 'kernel', 'value')": {"key_metadata": [{"key": "opt_state", "key_type": 2}, {"key": "1", "key_type": 1}, {"key": "0", "key_type": 1}, {"key": "nu", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoderblock_filmed", "key_type": 2}, {"key": "MultiHeadDotProductAttention_0", "key_type": 2}, {"key": "out", "key_type": 2}, {"key": "kernel", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('opt_state', '1', '0', 'nu', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoderblock_filmed', 'MultiHeadDotProductAttention_0', 'query', 'bias', 'value')": {"key_metadata": [{"key": "opt_state", "key_type": 2}, {"key": "1", "key_type": 1}, {"key": "0", "key_type": 1}, {"key": "nu", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoderblock_filmed", "key_type": 2}, {"key": "MultiHeadDotProductAttention_0", "key_type": 2}, {"key": "query", "key_type": 2}, {"key": "bias", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('opt_state', '1', '0', 'nu', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoderblock_filmed', 'MultiHeadDotProductAttention_0', 'query', 'kernel', 'value')": {"key_metadata": [{"key": "opt_state", "key_type": 2}, {"key": "1", "key_type": 1}, {"key": "0", "key_type": 1}, {"key": "nu", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoderblock_filmed", "key_type": 2}, {"key": "MultiHeadDotProductAttention_0", "key_type": 2}, {"key": "query", "key_type": 2}, {"key": "kernel", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('opt_state', '1', '0', 'nu', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoderblock_filmed', 'MultiHeadDotProductAttention_0', 
'value', 'bias', 'value')": {"key_metadata": [{"key": "opt_state", "key_type": 2}, {"key": "1", "key_type": 1}, {"key": "0", "key_type": 1}, {"key": "nu", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoderblock_filmed", "key_type": 2}, {"key": "MultiHeadDotProductAttention_0", "key_type": 2}, {"key": "value", "key_type": 2}, {"key": "bias", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('opt_state', '1', '0', 'nu', 'PaliGemma', 'img', 'Transformer_FiLMed', 'encoderblock_filmed', 'MultiHeadDotProductAttention_0', 'value', 'kernel', 'value')": {"key_metadata": [{"key": "opt_state", "key_type": 2}, {"key": "1", "key_type": 1}, {"key": "0", "key_type": 1}, {"key": "nu", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "Transformer_FiLMed", "key_type": 2}, {"key": "encoderblock_filmed", "key_type": 2}, {"key": "MultiHeadDotProductAttention_0", "key_type": 2}, {"key": "value", "key_type": 2}, {"key": "kernel", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('opt_state', '1', '0', 'nu', 'PaliGemma', 'img', 'embedding', 'bias', 'value')": {"key_metadata": [{"key": "opt_state", "key_type": 2}, {"key": "1", "key_type": 1}, {"key": "0", "key_type": 1}, {"key": "nu", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "embedding", "key_type": 2}, {"key": "bias", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('opt_state', '1', '0', 'nu', 'PaliGemma', 'img', 'embedding', 'kernel', 'value')": {"key_metadata": [{"key": "opt_state", "key_type": 2}, {"key": "1", "key_type": 1}, {"key": "0", "key_type": 1}, {"key": "nu", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "embedding", "key_type": 2}, {"key": "kernel", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('opt_state', '1', '0', 'nu', 'PaliGemma', 'img', 'head', 'bias', 'value')": {"key_metadata": [{"key": "opt_state", "key_type": 2}, {"key": "1", "key_type": 1}, {"key": "0", "key_type": 1}, {"key": "nu", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "head", "key_type": 2}, {"key": "bias", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('opt_state', '1', '0', 'nu', 'PaliGemma', 'img', 'head', 'kernel', 'value')": {"key_metadata": [{"key": "opt_state", "key_type": 2}, {"key": "1", "key_type": 1}, {"key": "0", "key_type": 1}, {"key": "nu", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "head", "key_type": 2}, {"key": "kernel", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('opt_state', '1', '0', 'nu', 'PaliGemma', 'img', 'pos_embedding', 'value')": {"key_metadata": [{"key": "opt_state", "key_type": 2}, {"key": "1", "key_type": 1}, {"key": "0", "key_type": 1}, {"key": "nu", "key_type": 2}, {"key": "PaliGemma", "key_type": 2}, {"key": "img", "key_type": 2}, {"key": "pos_embedding", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", 
"skip_deserialize": false}}, "('opt_state', '1', '0', 'nu', 'action_in_proj', 'bias', 'value')": {"key_metadata": [{"key": "opt_state", "key_type": 2}, {"key": "1", "key_type": 1}, {"key": "0", "key_type": 1}, {"key": "nu", "key_type": 2}, {"key": "action_in_proj", "key_type": 2}, {"key": "bias", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('opt_state', '1', '0', 'nu', 'action_in_proj', 'kernel', 'value')": {"key_metadata": [{"key": "opt_state", "key_type": 2}, {"key": "1", "key_type": 1}, {"key": "0", "key_type": 1}, {"key": "nu", "key_type": 2}, {"key": "action_in_proj", "key_type": 2}, {"key": "kernel", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('opt_state', '1', '0', 'nu', 'action_out_proj', 'bias', 'value')": {"key_metadata": [{"key": "opt_state", "key_type": 2}, {"key": "1", "key_type": 1}, {"key": "0", "key_type": 1}, {"key": "nu", "key_type": 2}, {"key": "action_out_proj", "key_type": 2}, {"key": "bias", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('opt_state', '1', '0', 'nu', 'action_out_proj', 'kernel', 'value')": {"key_metadata": [{"key": "opt_state", "key_type": 2}, {"key": "1", "key_type": 1}, {"key": "0", "key_type": 1}, {"key": "nu", "key_type": 2}, {"key": "action_out_proj", "key_type": 2}, {"key": "kernel", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('opt_state', '1', '0', 'nu', 'action_time_mlp_in', 'bias', 'value')": {"key_metadata": [{"key": "opt_state", "key_type": 2}, {"key": "1", "key_type": 1}, {"key": "0", "key_type": 1}, {"key": "nu", "key_type": 2}, {"key": "action_time_mlp_in", "key_type": 2}, {"key": "bias", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('opt_state', '1', '0', 'nu', 'action_time_mlp_in', 'kernel', 'value')": {"key_metadata": [{"key": "opt_state", "key_type": 2}, {"key": "1", "key_type": 1}, {"key": "0", "key_type": 1}, {"key": "nu", "key_type": 2}, {"key": "action_time_mlp_in", "key_type": 2}, {"key": "kernel", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('opt_state', '1', '0', 'nu', 'action_time_mlp_out', 'bias', 'value')": {"key_metadata": [{"key": "opt_state", "key_type": 2}, {"key": "1", "key_type": 1}, {"key": "0", "key_type": 1}, {"key": "nu", "key_type": 2}, {"key": "action_time_mlp_out", "key_type": 2}, {"key": "bias", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('opt_state', '1', '0', 'nu', 'action_time_mlp_out', 'kernel', 'value')": {"key_metadata": [{"key": "opt_state", "key_type": 2}, {"key": "1", "key_type": 1}, {"key": "0", "key_type": 1}, {"key": "nu", "key_type": 2}, {"key": "action_time_mlp_out", "key_type": 2}, {"key": "kernel", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('opt_state', '1', '0', 'nu', 'state_proj', 'bias', 'value')": {"key_metadata": [{"key": "opt_state", "key_type": 2}, {"key": "1", "key_type": 1}, {"key": "0", "key_type": 1}, {"key": "nu", "key_type": 2}, {"key": "state_proj", "key_type": 2}, {"key": "bias", "key_type": 2}, {"key": "value", "key_type": 2}], 
"value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('opt_state', '1', '0', 'nu', 'state_proj', 'kernel', 'value')": {"key_metadata": [{"key": "opt_state", "key_type": 2}, {"key": "1", "key_type": 1}, {"key": "0", "key_type": 1}, {"key": "nu", "key_type": 2}, {"key": "state_proj", "key_type": 2}, {"key": "kernel", "key_type": 2}, {"key": "value", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('opt_state', '1', '1')": {"key_metadata": [{"key": "opt_state", "key_type": 2}, {"key": "1", "key_type": 1}, {"key": "1", "key_type": 1}], "value_metadata": {"value_type": "None", "skip_deserialize": true}}, "('opt_state', '1', '2', 'count')": {"key_metadata": [{"key": "opt_state", "key_type": 2}, {"key": "1", "key_type": 1}, {"key": "2", "key_type": 1}, {"key": "count", "key_type": 2}], "value_metadata": {"value_type": "jax.Array", "skip_deserialize": false}}, "('ema_params',)": {"key_metadata": [{"key": "ema_params", "key_type": 2}], "value_metadata": {"value_type": "None", "skip_deserialize": true}}}, "use_zarr3": false, "store_array_data_equal_to_fill_value": true, "custom_metadata": null} \ No newline at end of file diff --git a/collecting_film--resume/20000/train_state/_sharding b/collecting_film--resume/20000/train_state/_sharding new file mode 100644 index 0000000000000000000000000000000000000000..c294283e16df0cff4247caa76bdfd73e3c9fc1fc --- /dev/null +++ b/collecting_film--resume/20000/train_state/_sharding @@ -0,0 +1 @@ +{"b3B0X3N0YXRlLjEuMC5jb3VudA==":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","b3B0X3N0YXRlLjEuMC5tdS5QYWxpR2VtbWEuaW1nLlRyYW5zZm9ybWVyX0ZpTE1lZC5lbmNvZGVyX25vcm1fZmlsbWVkLmJpYXMudmFsdWU=":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","b3B0X3N0YXRlLjEuMC5tdS5QYWxpR2VtbWEuaW1nLlRyYW5zZm9ybWVyX0ZpTE1lZC5lbmNvZGVyX25vcm1fZmlsbWVkLnNjYWxlLnZhbHVl":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","b3B0X3N0YXRlLjEuMC5tdS5QYWxpR2VtbWEuaW1nLlRyYW5zZm9ybWVyX0ZpTE1lZC5lbmNvZGVyYmxvY2tfZmlsbWVkLk11bHRpSGVhZERvdFByb2R1Y3RBdHRlbnRpb25fMC52YWx1ZS5iaWFzLnZhbHVl":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","b3B0X3N0YXRlLjEuMC5tdS5QYWxpR2VtbWEuaW1nLlRyYW5zZm9ybWVyX0ZpTE1lZC5lbmNvZGVyYmxvY2tfZmlsbWVkLk11bHRpSGVhZERvdFByb2R1Y3RBdHRlbnRpb25fMC52YWx1ZS5rZXJuZWwudmFsdWU=":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","b3B0X3N0YXRlLjEuMC5tdS5QYWxpR2VtbWEuaW1nLlRyYW5zZm9ybWVyX0ZpTE1lZC5lbmNvZGVyYmxvY2tfZmlsbWVkLk11bHRpSGVhZERvdFByb2R1Y3RBdHRlbnRpb25fMC5rZXkuYmlhcy52YWx1ZQ==":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","b3B0X3N0YXRlLjEuMC5tdS5QYWxpR2VtbWEuaW1nLlRyYW5zZm9ybWVyX0ZpTE1lZC5lbmNvZGVyYmxvY2tfZmlsbWVkLk11bHRpSGVhZERvdFByb2R1Y3RBdHRlbnRpb25fMC5rZXkua2VybmVsLnZhbHVl":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], 
\"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","b3B0X3N0YXRlLjEuMC5tdS5QYWxpR2VtbWEuaW1nLlRyYW5zZm9ybWVyX0ZpTE1lZC5lbmNvZGVyYmxvY2tfZmlsbWVkLk11bHRpSGVhZERvdFByb2R1Y3RBdHRlbnRpb25fMC5vdXQuYmlhcy52YWx1ZQ==":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","b3B0X3N0YXRlLjEuMC5tdS5QYWxpR2VtbWEuaW1nLlRyYW5zZm9ybWVyX0ZpTE1lZC5lbmNvZGVyYmxvY2tfZmlsbWVkLk11bHRpSGVhZERvdFByb2R1Y3RBdHRlbnRpb25fMC5vdXQua2VybmVsLnZhbHVl":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","b3B0X3N0YXRlLjEuMC5tdS5QYWxpR2VtbWEuaW1nLlRyYW5zZm9ybWVyX0ZpTE1lZC5lbmNvZGVyYmxvY2tfZmlsbWVkLk11bHRpSGVhZERvdFByb2R1Y3RBdHRlbnRpb25fMC5xdWVyeS5iaWFzLnZhbHVl":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","b3B0X3N0YXRlLjEuMC5tdS5QYWxpR2VtbWEuaW1nLlRyYW5zZm9ybWVyX0ZpTE1lZC5lbmNvZGVyYmxvY2tfZmlsbWVkLk11bHRpSGVhZERvdFByb2R1Y3RBdHRlbnRpb25fMC5xdWVyeS5rZXJuZWwudmFsdWU=":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","b3B0X3N0YXRlLjEuMC5tdS5QYWxpR2VtbWEuaW1nLlRyYW5zZm9ybWVyX0ZpTE1lZC5lbmNvZGVyYmxvY2tfZmlsbWVkLk1scEJsb2NrXzAuRGVuc2VfMC5iaWFzLnZhbHVl":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","b3B0X3N0YXRlLjEuMC5tdS5QYWxpR2VtbWEuaW1nLlRyYW5zZm9ybWVyX0ZpTE1lZC5lbmNvZGVyYmxvY2tfZmlsbWVkLk1scEJsb2NrXzAuRGVuc2VfMC5rZXJuZWwudmFsdWU=":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","b3B0X3N0YXRlLjEuMC5tdS5QYWxpR2VtbWEuaW1nLlRyYW5zZm9ybWVyX0ZpTE1lZC5lbmNvZGVyYmxvY2tfZmlsbWVkLk1scEJsb2NrXzAuRGVuc2VfMS5iaWFzLnZhbHVl":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","b3B0X3N0YXRlLjEuMC5tdS5QYWxpR2VtbWEuaW1nLlRyYW5zZm9ybWVyX0ZpTE1lZC5lbmNvZGVyYmxvY2tfZmlsbWVkLk1scEJsb2NrXzAuRGVuc2VfMS5rZXJuZWwudmFsdWU=":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","b3B0X3N0YXRlLjEuMC5tdS5QYWxpR2VtbWEuaW1nLlRyYW5zZm9ybWVyX0ZpTE1lZC5lbmNvZGVyYmxvY2tfZmlsbWVkLkZpTE1MYXllcl8wLnNjYWxlX3Byb2plY3Rvci5iaWFzLnZhbHVl":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","b3B0X3N0YXRlLjEuMC5tdS5QYWxpR2VtbWEuaW1nLlRyYW5zZm9ybWVyX0ZpTE1lZC5lbmNvZGVyYmxvY2tfZmlsbWVkLkZpTE1MYXllcl8wLnNjYWxlX3Byb2plY3Rvci5rZXJuZWwudmFsdWU=":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","b3B0X3N0YXRlLjEuMC5tdS5QYWxpR2VtbWEuaW1nLlRyYW5zZm9ybWVyX0ZpTE1lZC5lbmNvZGVyYmxvY2tfZmlsbWVkLkZpTE1MYXllcl8wLnNoaWZ0X3Byb2plY3Rvci5iaWFzLnZhbHVl":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 
0}]]}}","b3B0X3N0YXRlLjEuMC5tdS5QYWxpR2VtbWEuaW1nLlRyYW5zZm9ybWVyX0ZpTE1lZC5lbmNvZGVyYmxvY2tfZmlsbWVkLkZpTE1MYXllcl8wLnNoaWZ0X3Byb2plY3Rvci5rZXJuZWwudmFsdWU=":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","b3B0X3N0YXRlLjEuMC5tdS5QYWxpR2VtbWEuaW1nLlRyYW5zZm9ybWVyX0ZpTE1lZC5lbmNvZGVyYmxvY2tfZmlsbWVkLkxheWVyTm9ybV8wLmJpYXMudmFsdWU=":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","b3B0X3N0YXRlLjEuMC5tdS5QYWxpR2VtbWEuaW1nLlRyYW5zZm9ybWVyX0ZpTE1lZC5lbmNvZGVyYmxvY2tfZmlsbWVkLkxheWVyTm9ybV8wLnNjYWxlLnZhbHVl":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","b3B0X3N0YXRlLjEuMC5tdS5QYWxpR2VtbWEuaW1nLlRyYW5zZm9ybWVyX0ZpTE1lZC5lbmNvZGVyYmxvY2tfZmlsbWVkLkxheWVyTm9ybV8xLmJpYXMudmFsdWU=":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","b3B0X3N0YXRlLjEuMC5tdS5QYWxpR2VtbWEuaW1nLlRyYW5zZm9ybWVyX0ZpTE1lZC5lbmNvZGVyYmxvY2tfZmlsbWVkLkxheWVyTm9ybV8xLnNjYWxlLnZhbHVl":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","b3B0X3N0YXRlLjEuMC5tdS5QYWxpR2VtbWEuaW1nLmVtYmVkZGluZy5iaWFzLnZhbHVl":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","b3B0X3N0YXRlLjEuMC5tdS5QYWxpR2VtbWEuaW1nLmVtYmVkZGluZy5rZXJuZWwudmFsdWU=":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","b3B0X3N0YXRlLjEuMC5tdS5QYWxpR2VtbWEuaW1nLmhlYWQuYmlhcy52YWx1ZQ==":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","b3B0X3N0YXRlLjEuMC5tdS5QYWxpR2VtbWEuaW1nLmhlYWQua2VybmVsLnZhbHVl":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","b3B0X3N0YXRlLjEuMC5tdS5QYWxpR2VtbWEuaW1nLnBvc19lbWJlZGRpbmcudmFsdWU=":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","b3B0X3N0YXRlLjEuMC5tdS5hY3Rpb25faW5fcHJvai5iaWFzLnZhbHVl":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","b3B0X3N0YXRlLjEuMC5tdS5hY3Rpb25faW5fcHJvai5rZXJuZWwudmFsdWU=":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","b3B0X3N0YXRlLjEuMC5tdS5hY3Rpb25fb3V0X3Byb2ouYmlhcy52YWx1ZQ==":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","b3B0X3N0YXRlLjEuMC5tdS5hY3Rpb25fb3V0X3Byb2oua2VybmVsLnZhbHVl":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", 
\"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","b3B0X3N0YXRlLjEuMC5tdS5hY3Rpb25fdGltZV9tbHBfaW4uYmlhcy52YWx1ZQ==":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","b3B0X3N0YXRlLjEuMC5tdS5hY3Rpb25fdGltZV9tbHBfaW4ua2VybmVsLnZhbHVl":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","b3B0X3N0YXRlLjEuMC5tdS5hY3Rpb25fdGltZV9tbHBfb3V0LmJpYXMudmFsdWU=":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","b3B0X3N0YXRlLjEuMC5tdS5hY3Rpb25fdGltZV9tbHBfb3V0Lmtlcm5lbC52YWx1ZQ==":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","b3B0X3N0YXRlLjEuMC5tdS5zdGF0ZV9wcm9qLmJpYXMudmFsdWU=":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","b3B0X3N0YXRlLjEuMC5tdS5zdGF0ZV9wcm9qLmtlcm5lbC52YWx1ZQ==":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","b3B0X3N0YXRlLjEuMC5udS5QYWxpR2VtbWEuaW1nLlRyYW5zZm9ybWVyX0ZpTE1lZC5lbmNvZGVyX25vcm1fZmlsbWVkLmJpYXMudmFsdWU=":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","b3B0X3N0YXRlLjEuMC5udS5QYWxpR2VtbWEuaW1nLlRyYW5zZm9ybWVyX0ZpTE1lZC5lbmNvZGVyX25vcm1fZmlsbWVkLnNjYWxlLnZhbHVl":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","b3B0X3N0YXRlLjEuMC5udS5QYWxpR2VtbWEuaW1nLlRyYW5zZm9ybWVyX0ZpTE1lZC5lbmNvZGVyYmxvY2tfZmlsbWVkLk11bHRpSGVhZERvdFByb2R1Y3RBdHRlbnRpb25fMC52YWx1ZS5iaWFzLnZhbHVl":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","b3B0X3N0YXRlLjEuMC5udS5QYWxpR2VtbWEuaW1nLlRyYW5zZm9ybWVyX0ZpTE1lZC5lbmNvZGVyYmxvY2tfZmlsbWVkLk11bHRpSGVhZERvdFByb2R1Y3RBdHRlbnRpb25fMC52YWx1ZS5rZXJuZWwudmFsdWU=":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","b3B0X3N0YXRlLjEuMC5udS5QYWxpR2VtbWEuaW1nLlRyYW5zZm9ybWVyX0ZpTE1lZC5lbmNvZGVyYmxvY2tfZmlsbWVkLk11bHRpSGVhZERvdFByb2R1Y3RBdHRlbnRpb25fMC5rZXkuYmlhcy52YWx1ZQ==":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","b3B0X3N0YXRlLjEuMC5udS5QYWxpR2VtbWEuaW1nLlRyYW5zZm9ybWVyX0ZpTE1lZC5lbmNvZGVyYmxvY2tfZmlsbWVkLk11bHRpSGVhZERvdFByb2R1Y3RBdHRlbnRpb25fMC5rZXkua2VybmVsLnZhbHVl":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","b3B0X3N0YXRlLjEuMC5udS5QYWxpR2VtbWEuaW1nLlRyYW5zZm9ybWVyX0ZpTE1lZC5lbmNvZGVyYmxvY2tfZmlsbWVkLk11bHRpSGVhZERvdFByb2R1Y3RBdHRlbnRpb25fMC5vdXQuYmlhcy52YWx1ZQ==":"{\"sharding_type\": 
\"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","b3B0X3N0YXRlLjEuMC5udS5QYWxpR2VtbWEuaW1nLlRyYW5zZm9ybWVyX0ZpTE1lZC5lbmNvZGVyYmxvY2tfZmlsbWVkLk11bHRpSGVhZERvdFByb2R1Y3RBdHRlbnRpb25fMC5vdXQua2VybmVsLnZhbHVl":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","b3B0X3N0YXRlLjEuMC5udS5QYWxpR2VtbWEuaW1nLlRyYW5zZm9ybWVyX0ZpTE1lZC5lbmNvZGVyYmxvY2tfZmlsbWVkLk11bHRpSGVhZERvdFByb2R1Y3RBdHRlbnRpb25fMC5xdWVyeS5iaWFzLnZhbHVl":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","b3B0X3N0YXRlLjEuMC5udS5QYWxpR2VtbWEuaW1nLlRyYW5zZm9ybWVyX0ZpTE1lZC5lbmNvZGVyYmxvY2tfZmlsbWVkLk11bHRpSGVhZERvdFByb2R1Y3RBdHRlbnRpb25fMC5xdWVyeS5rZXJuZWwudmFsdWU=":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","b3B0X3N0YXRlLjEuMC5udS5QYWxpR2VtbWEuaW1nLlRyYW5zZm9ybWVyX0ZpTE1lZC5lbmNvZGVyYmxvY2tfZmlsbWVkLk1scEJsb2NrXzAuRGVuc2VfMC5iaWFzLnZhbHVl":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","b3B0X3N0YXRlLjEuMC5udS5QYWxpR2VtbWEuaW1nLlRyYW5zZm9ybWVyX0ZpTE1lZC5lbmNvZGVyYmxvY2tfZmlsbWVkLk1scEJsb2NrXzAuRGVuc2VfMC5rZXJuZWwudmFsdWU=":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","b3B0X3N0YXRlLjEuMC5udS5QYWxpR2VtbWEuaW1nLlRyYW5zZm9ybWVyX0ZpTE1lZC5lbmNvZGVyYmxvY2tfZmlsbWVkLk1scEJsb2NrXzAuRGVuc2VfMS5iaWFzLnZhbHVl":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","b3B0X3N0YXRlLjEuMC5udS5QYWxpR2VtbWEuaW1nLlRyYW5zZm9ybWVyX0ZpTE1lZC5lbmNvZGVyYmxvY2tfZmlsbWVkLk1scEJsb2NrXzAuRGVuc2VfMS5rZXJuZWwudmFsdWU=":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","b3B0X3N0YXRlLjEuMC5udS5QYWxpR2VtbWEuaW1nLlRyYW5zZm9ybWVyX0ZpTE1lZC5lbmNvZGVyYmxvY2tfZmlsbWVkLkZpTE1MYXllcl8wLnNjYWxlX3Byb2plY3Rvci5iaWFzLnZhbHVl":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","b3B0X3N0YXRlLjEuMC5udS5QYWxpR2VtbWEuaW1nLlRyYW5zZm9ybWVyX0ZpTE1lZC5lbmNvZGVyYmxvY2tfZmlsbWVkLkZpTE1MYXllcl8wLnNjYWxlX3Byb2plY3Rvci5rZXJuZWwudmFsdWU=":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","b3B0X3N0YXRlLjEuMC5udS5QYWxpR2VtbWEuaW1nLlRyYW5zZm9ybWVyX0ZpTE1lZC5lbmNvZGVyYmxvY2tfZmlsbWVkLkZpTE1MYXllcl8wLnNoaWZ0X3Byb2plY3Rvci5iaWFzLnZhbHVl":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","b3B0X3N0YXRlLjEuMC5udS5QYWxpR2VtbWEuaW1nLlRyYW5zZm9ybWVyX0ZpTE1lZC5lbmNvZGVyYmxvY2tfZmlsbWVkLkZpTE1MYXllcl8wLnNoaWZ0X3Byb2plY3Rvci5rZXJuZWwudmFsdWU=":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", 
\"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","b3B0X3N0YXRlLjEuMC5udS5QYWxpR2VtbWEuaW1nLlRyYW5zZm9ybWVyX0ZpTE1lZC5lbmNvZGVyYmxvY2tfZmlsbWVkLkxheWVyTm9ybV8wLmJpYXMudmFsdWU=":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","b3B0X3N0YXRlLjEuMC5udS5QYWxpR2VtbWEuaW1nLlRyYW5zZm9ybWVyX0ZpTE1lZC5lbmNvZGVyYmxvY2tfZmlsbWVkLkxheWVyTm9ybV8wLnNjYWxlLnZhbHVl":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","b3B0X3N0YXRlLjEuMC5udS5QYWxpR2VtbWEuaW1nLlRyYW5zZm9ybWVyX0ZpTE1lZC5lbmNvZGVyYmxvY2tfZmlsbWVkLkxheWVyTm9ybV8xLmJpYXMudmFsdWU=":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","b3B0X3N0YXRlLjEuMC5udS5QYWxpR2VtbWEuaW1nLlRyYW5zZm9ybWVyX0ZpTE1lZC5lbmNvZGVyYmxvY2tfZmlsbWVkLkxheWVyTm9ybV8xLnNjYWxlLnZhbHVl":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","b3B0X3N0YXRlLjEuMC5udS5QYWxpR2VtbWEuaW1nLmVtYmVkZGluZy5iaWFzLnZhbHVl":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","b3B0X3N0YXRlLjEuMC5udS5QYWxpR2VtbWEuaW1nLmVtYmVkZGluZy5rZXJuZWwudmFsdWU=":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","b3B0X3N0YXRlLjEuMC5udS5QYWxpR2VtbWEuaW1nLmhlYWQuYmlhcy52YWx1ZQ==":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","b3B0X3N0YXRlLjEuMC5udS5QYWxpR2VtbWEuaW1nLmhlYWQua2VybmVsLnZhbHVl":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","b3B0X3N0YXRlLjEuMC5udS5QYWxpR2VtbWEuaW1nLnBvc19lbWJlZGRpbmcudmFsdWU=":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","b3B0X3N0YXRlLjEuMC5udS5hY3Rpb25faW5fcHJvai5iaWFzLnZhbHVl":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","b3B0X3N0YXRlLjEuMC5udS5hY3Rpb25faW5fcHJvai5rZXJuZWwudmFsdWU=":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","b3B0X3N0YXRlLjEuMC5udS5hY3Rpb25fb3V0X3Byb2ouYmlhcy52YWx1ZQ==":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","b3B0X3N0YXRlLjEuMC5udS5hY3Rpb25fb3V0X3Byb2oua2VybmVsLnZhbHVl":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","b3B0X3N0YXRlLjEuMC5udS5hY3Rpb25fdGltZV9tbHBfaW4uYmlhcy52YWx1ZQ==":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], 
\"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","b3B0X3N0YXRlLjEuMC5udS5hY3Rpb25fdGltZV9tbHBfaW4ua2VybmVsLnZhbHVl":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","b3B0X3N0YXRlLjEuMC5udS5hY3Rpb25fdGltZV9tbHBfb3V0LmJpYXMudmFsdWU=":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","b3B0X3N0YXRlLjEuMC5udS5hY3Rpb25fdGltZV9tbHBfb3V0Lmtlcm5lbC52YWx1ZQ==":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","b3B0X3N0YXRlLjEuMC5udS5zdGF0ZV9wcm9qLmJpYXMudmFsdWU=":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","b3B0X3N0YXRlLjEuMC5udS5zdGF0ZV9wcm9qLmtlcm5lbC52YWx1ZQ==":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","b3B0X3N0YXRlLjEuMi5jb3VudA==":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","c3RlcA==":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5pbWcuVHJhbnNmb3JtZXJfRmlMTWVkLmVuY29kZXJfbm9ybV9maWxtZWQuYmlhcy52YWx1ZQ==":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5pbWcuVHJhbnNmb3JtZXJfRmlMTWVkLmVuY29kZXJfbm9ybV9maWxtZWQuc2NhbGUudmFsdWU=":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5pbWcuVHJhbnNmb3JtZXJfRmlMTWVkLmVuY29kZXJibG9ja19maWxtZWQuRmlMTUxheWVyXzAuc2NhbGVfcHJvamVjdG9yLmJpYXMudmFsdWU=":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5pbWcuVHJhbnNmb3JtZXJfRmlMTWVkLmVuY29kZXJibG9ja19maWxtZWQuRmlMTUxheWVyXzAuc2NhbGVfcHJvamVjdG9yLmtlcm5lbC52YWx1ZQ==":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5pbWcuVHJhbnNmb3JtZXJfRmlMTWVkLmVuY29kZXJibG9ja19maWxtZWQuRmlMTUxheWVyXzAuc2hpZnRfcHJvamVjdG9yLmJpYXMudmFsdWU=":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5pbWcuVHJhbnNmb3JtZXJfRmlMTWVkLmVuY29kZXJibG9ja19maWxtZWQuRmlMTUxheWVyXzAuc2hpZnRfcHJvamVjdG9yLmtlcm5lbC52YWx1ZQ==":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5pbWcuVHJhbnNmb3JtZXJfRmlMTWVkLmVuY29kZXJibG9ja19maWxtZWQuTGF5ZXJOb3JtXzAuYmlhcy52YWx1ZQ==":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], 
\"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5pbWcuVHJhbnNmb3JtZXJfRmlMTWVkLmVuY29kZXJibG9ja19maWxtZWQuTGF5ZXJOb3JtXzAuc2NhbGUudmFsdWU=":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5pbWcuVHJhbnNmb3JtZXJfRmlMTWVkLmVuY29kZXJibG9ja19maWxtZWQuTGF5ZXJOb3JtXzEuYmlhcy52YWx1ZQ==":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5pbWcuVHJhbnNmb3JtZXJfRmlMTWVkLmVuY29kZXJibG9ja19maWxtZWQuTGF5ZXJOb3JtXzEuc2NhbGUudmFsdWU=":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5pbWcuVHJhbnNmb3JtZXJfRmlMTWVkLmVuY29kZXJibG9ja19maWxtZWQuTWxwQmxvY2tfMC5EZW5zZV8wLmJpYXMudmFsdWU=":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5pbWcuVHJhbnNmb3JtZXJfRmlMTWVkLmVuY29kZXJibG9ja19maWxtZWQuTWxwQmxvY2tfMC5EZW5zZV8wLmtlcm5lbC52YWx1ZQ==":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5pbWcuVHJhbnNmb3JtZXJfRmlMTWVkLmVuY29kZXJibG9ja19maWxtZWQuTWxwQmxvY2tfMC5EZW5zZV8xLmJpYXMudmFsdWU=":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5pbWcuVHJhbnNmb3JtZXJfRmlMTWVkLmVuY29kZXJibG9ja19maWxtZWQuTWxwQmxvY2tfMC5EZW5zZV8xLmtlcm5lbC52YWx1ZQ==":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5pbWcuVHJhbnNmb3JtZXJfRmlMTWVkLmVuY29kZXJibG9ja19maWxtZWQuTXVsdGlIZWFkRG90UHJvZHVjdEF0dGVudGlvbl8wLm91dC5iaWFzLnZhbHVl":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5pbWcuVHJhbnNmb3JtZXJfRmlMTWVkLmVuY29kZXJibG9ja19maWxtZWQuTXVsdGlIZWFkRG90UHJvZHVjdEF0dGVudGlvbl8wLm91dC5rZXJuZWwudmFsdWU=":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5pbWcuVHJhbnNmb3JtZXJfRmlMTWVkLmVuY29kZXJibG9ja19maWxtZWQuTXVsdGlIZWFkRG90UHJvZHVjdEF0dGVudGlvbl8wLmtleS5iaWFzLnZhbHVl":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5pbWcuVHJhbnNmb3JtZXJfRmlMTWVkLmVuY29kZXJibG9ja19maWxtZWQuTXVsdGlIZWFkRG90UHJvZHVjdEF0dGVudGlvbl8wLmtleS5rZXJuZWwudmFsdWU=":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 
0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5pbWcuVHJhbnNmb3JtZXJfRmlMTWVkLmVuY29kZXJibG9ja19maWxtZWQuTXVsdGlIZWFkRG90UHJvZHVjdEF0dGVudGlvbl8wLnF1ZXJ5LmJpYXMudmFsdWU=":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5pbWcuVHJhbnNmb3JtZXJfRmlMTWVkLmVuY29kZXJibG9ja19maWxtZWQuTXVsdGlIZWFkRG90UHJvZHVjdEF0dGVudGlvbl8wLnF1ZXJ5Lmtlcm5lbC52YWx1ZQ==":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5pbWcuVHJhbnNmb3JtZXJfRmlMTWVkLmVuY29kZXJibG9ja19maWxtZWQuTXVsdGlIZWFkRG90UHJvZHVjdEF0dGVudGlvbl8wLnZhbHVlLmJpYXMudmFsdWU=":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5pbWcuVHJhbnNmb3JtZXJfRmlMTWVkLmVuY29kZXJibG9ja19maWxtZWQuTXVsdGlIZWFkRG90UHJvZHVjdEF0dGVudGlvbl8wLnZhbHVlLmtlcm5lbC52YWx1ZQ==":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5pbWcuZW1iZWRkaW5nLmJpYXMudmFsdWU=":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5pbWcuZW1iZWRkaW5nLmtlcm5lbC52YWx1ZQ==":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5pbWcuaGVhZC5iaWFzLnZhbHVl":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5pbWcuaGVhZC5rZXJuZWwudmFsdWU=":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5pbWcucG9zX2VtYmVkZGluZy52YWx1ZQ==":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5sbG0uZW1iZWRkZXIuaW5wdXRfZW1iZWRkaW5nLnZhbHVl":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5sbG0uZmluYWxfbm9ybS5zY2FsZS52YWx1ZQ==":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5sbG0uZmluYWxfbm9ybV8xLnNjYWxlLnZhbHVl":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5sbG0ubGF5ZXJzLm1scC5nYXRpbmdfZWluc3VtLnZhbHVl":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5sbG0ubGF5ZXJzLm1scC5saW5lYXIudmFsdWU=":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], 
\"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5sbG0ubGF5ZXJzLm1scF8xLmdhdGluZ19laW5zdW0udmFsdWU=":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5sbG0ubGF5ZXJzLm1scF8xLmxpbmVhci52YWx1ZQ==":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5sbG0ubGF5ZXJzLmF0dG4uYXR0bl92ZWNfZWluc3VtLncudmFsdWU=":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5sbG0ubGF5ZXJzLmF0dG4uYXR0bl92ZWNfZWluc3VtXzEudy52YWx1ZQ==":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5sbG0ubGF5ZXJzLmF0dG4ua3ZfZWluc3VtLncudmFsdWU=":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5sbG0ubGF5ZXJzLmF0dG4ua3ZfZWluc3VtXzEudy52YWx1ZQ==":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5sbG0ubGF5ZXJzLmF0dG4ucV9laW5zdW0udy52YWx1ZQ==":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5sbG0ubGF5ZXJzLmF0dG4ucV9laW5zdW1fMS53LnZhbHVl":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5sbG0ubGF5ZXJzLnByZV9hdHRlbnRpb25fbm9ybS5zY2FsZS52YWx1ZQ==":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5sbG0ubGF5ZXJzLnByZV9hdHRlbnRpb25fbm9ybV8xLnNjYWxlLnZhbHVl":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5sbG0ubGF5ZXJzLnByZV9mZndfbm9ybS5zY2FsZS52YWx1ZQ==":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLlBhbGlHZW1tYS5sbG0ubGF5ZXJzLnByZV9mZndfbm9ybV8xLnNjYWxlLnZhbHVl":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLmFjdGlvbl90aW1lX21scF9pbi5iaWFzLnZhbHVl":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLmFjdGlvbl90aW1lX21scF9pbi5rZXJuZWwudmFsdWU=":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLmFjdGlvbl90aW1lX21scF9vdXQuYmlhcy52YWx1ZQ==":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], 
\"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLmFjdGlvbl90aW1lX21scF9vdXQua2VybmVsLnZhbHVl":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLmFjdGlvbl9pbl9wcm9qLmJpYXMudmFsdWU=":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLmFjdGlvbl9pbl9wcm9qLmtlcm5lbC52YWx1ZQ==":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLmFjdGlvbl9vdXRfcHJvai5iaWFzLnZhbHVl":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLmFjdGlvbl9vdXRfcHJvai5rZXJuZWwudmFsdWU=":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLnN0YXRlX3Byb2ouYmlhcy52YWx1ZQ==":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}","cGFyYW1zLnN0YXRlX3Byb2oua2VybmVsLnZhbHVl":"{\"sharding_type\": \"NamedSharding\", \"shape\": [1, 1], \"axis_names\": [\"batch\", \"fsdp\"], \"partition_spec\": [], \"device_mesh\": {\"mesh\": [[{\"id\": 0}]]}}"} \ No newline at end of file diff --git a/collecting_film--resume/20000/train_state/d/54ef83affd5fc42c16fcc33a825ff22f b/collecting_film--resume/20000/train_state/d/54ef83affd5fc42c16fcc33a825ff22f new file mode 100644 index 0000000000000000000000000000000000000000..5acf73a08e13e8a4ee34170ae8f5eeb9ba533644 Binary files /dev/null and b/collecting_film--resume/20000/train_state/d/54ef83affd5fc42c16fcc33a825ff22f differ diff --git a/collecting_film--resume/20000/train_state/manifest.ocdbt b/collecting_film--resume/20000/train_state/manifest.ocdbt new file mode 100644 index 0000000000000000000000000000000000000000..e04998a4613a090988e2722ec6941607184ba7f6 Binary files /dev/null and b/collecting_film--resume/20000/train_state/manifest.ocdbt differ diff --git a/collecting_film--resume/20000/train_state/ocdbt.process_0/d/00b1b8af63fea049790fb4268ae2fc15 b/collecting_film--resume/20000/train_state/ocdbt.process_0/d/00b1b8af63fea049790fb4268ae2fc15 new file mode 100644 index 0000000000000000000000000000000000000000..f12a62e270f465771ebcc6408fdfdaf1d62a94d8 Binary files /dev/null and b/collecting_film--resume/20000/train_state/ocdbt.process_0/d/00b1b8af63fea049790fb4268ae2fc15 differ diff --git a/collecting_film--resume/20000/train_state/ocdbt.process_0/d/3e80fc8ae29d95915857d39a8e188891 b/collecting_film--resume/20000/train_state/ocdbt.process_0/d/3e80fc8ae29d95915857d39a8e188891 new file mode 100644 index 0000000000000000000000000000000000000000..3f223dcf6478152d3332b2b35606baffcfc670a9 Binary files /dev/null and b/collecting_film--resume/20000/train_state/ocdbt.process_0/d/3e80fc8ae29d95915857d39a8e188891 differ diff --git a/collecting_film--resume/20000/train_state/ocdbt.process_0/d/5a7a109ca28ebb6ef1e076cf81d98f49 b/collecting_film--resume/20000/train_state/ocdbt.process_0/d/5a7a109ca28ebb6ef1e076cf81d98f49 new file mode 100644 index 
0000000000000000000000000000000000000000..32aeb3b90d60b0a7055cd0d594f6225851c8357d Binary files /dev/null and b/collecting_film--resume/20000/train_state/ocdbt.process_0/d/5a7a109ca28ebb6ef1e076cf81d98f49 differ diff --git a/collecting_film--resume/20000/train_state/ocdbt.process_0/d/7d0de4f93e4fa084a7e450b093053a58 b/collecting_film--resume/20000/train_state/ocdbt.process_0/d/7d0de4f93e4fa084a7e450b093053a58 new file mode 100644 index 0000000000000000000000000000000000000000..8d1e9b4776ee4402997b7aa81a3e09dd4e7488ca Binary files /dev/null and b/collecting_film--resume/20000/train_state/ocdbt.process_0/d/7d0de4f93e4fa084a7e450b093053a58 differ diff --git a/collecting_film--resume/20000/train_state/ocdbt.process_0/manifest.ocdbt b/collecting_film--resume/20000/train_state/ocdbt.process_0/manifest.ocdbt new file mode 100644 index 0000000000000000000000000000000000000000..d87b1003d4e1c8e189ab4cb7fe93af6ac31996e8 Binary files /dev/null and b/collecting_film--resume/20000/train_state/ocdbt.process_0/manifest.ocdbt differ diff --git a/collecting_film--resume/wandb_id.txt b/collecting_film--resume/wandb_id.txt new file mode 100644 index 0000000000000000000000000000000000000000..fe79096152cee67dc75a213ca5ca832321385efe --- /dev/null +++ b/collecting_film--resume/wandb_id.txt @@ -0,0 +1 @@ +rbq7gp64 \ No newline at end of file diff --git a/temp_venv/.gitignore b/temp_venv/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..f59ec20aabf5842d237244ece8c81ab184faeac1 --- /dev/null +++ b/temp_venv/.gitignore @@ -0,0 +1 @@ +* \ No newline at end of file diff --git a/temp_venv/.lock b/temp_venv/.lock new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/temp_venv/CACHEDIR.TAG b/temp_venv/CACHEDIR.TAG new file mode 100644 index 0000000000000000000000000000000000000000..bc1ecb967a482524e7736038de0df6e08f9ee452 --- /dev/null +++ b/temp_venv/CACHEDIR.TAG @@ -0,0 +1 @@ +Signature: 8a477f597d28d172789f06886806bc55 \ No newline at end of file diff --git a/temp_venv/bin/activate b/temp_venv/bin/activate new file mode 100644 index 0000000000000000000000000000000000000000..0f7b85d037deb80b14d55740d760b83c0e30b208 --- /dev/null +++ b/temp_venv/bin/activate @@ -0,0 +1,116 @@ +# Copyright (c) 2020-202x The virtualenv developers +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be +# included in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ +# This file must be used with "source bin/activate" *from bash* +# you cannot run it directly + +# Get script path (only used if environment is relocatable). +if [ -n "${BASH_VERSION:+x}" ] ; then + SCRIPT_PATH="${BASH_SOURCE[0]}" + if [ "$SCRIPT_PATH" = "$0" ]; then + # Only bash has a reasonably robust check for source'dness. + echo "You must source this script: \$ source $0" >&2 + exit 33 + fi +elif [ -n "${ZSH_VERSION:+x}" ] ; then + SCRIPT_PATH="${(%):-%x}" +elif [ -n "${KSH_VERSION:+x}" ] ; then + SCRIPT_PATH="${.sh.file}" +fi + +deactivate () { + unset -f pydoc >/dev/null 2>&1 || true + + # reset old environment variables + # ! [ -z ${VAR+_} ] returns true if VAR is declared at all + if ! [ -z "${_OLD_VIRTUAL_PATH:+_}" ] ; then + PATH="$_OLD_VIRTUAL_PATH" + export PATH + unset _OLD_VIRTUAL_PATH + fi + if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then + PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME" + export PYTHONHOME + unset _OLD_VIRTUAL_PYTHONHOME + fi + + # The hash command must be called to get it to forget past + # commands. Without forgetting past commands the $PATH changes + # we made may not be respected + hash -r 2>/dev/null + + if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then + PS1="$_OLD_VIRTUAL_PS1" + export PS1 + unset _OLD_VIRTUAL_PS1 + fi + + unset VIRTUAL_ENV + unset VIRTUAL_ENV_PROMPT + if [ ! "${1-}" = "nondestructive" ] ; then + # Self destruct! + unset -f deactivate + fi +} + +# unset irrelevant variables +deactivate nondestructive + +VIRTUAL_ENV='/Users/elan/pi0_nomagic_collecting_film/temp_venv' +if ([ "$OSTYPE" = "cygwin" ] || [ "$OSTYPE" = "msys" ]) && $(command -v cygpath &> /dev/null) ; then + VIRTUAL_ENV=$(cygpath -u "$VIRTUAL_ENV") +fi +export VIRTUAL_ENV + +_OLD_VIRTUAL_PATH="$PATH" +PATH="$VIRTUAL_ENV/bin:$PATH" +export PATH + +if [ "x" != x ] ; then + VIRTUAL_ENV_PROMPT="() " +else + VIRTUAL_ENV_PROMPT="($(basename "$VIRTUAL_ENV")) " +fi +export VIRTUAL_ENV_PROMPT + +# unset PYTHONHOME if set +if ! [ -z "${PYTHONHOME+_}" ] ; then + _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME" + unset PYTHONHOME +fi + +if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then + _OLD_VIRTUAL_PS1="${PS1-}" + PS1="${VIRTUAL_ENV_PROMPT}${PS1-}" + export PS1 +fi + +# Make sure to unalias pydoc if it's already there +alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true + +pydoc () { + python -m pydoc "$@" +} + +# The hash command must be called to get it to forget past +# commands. Without forgetting past commands the $PATH changes +# we made may not be respected +hash -r 2>/dev/null diff --git a/temp_venv/bin/activate.bat b/temp_venv/bin/activate.bat new file mode 100644 index 0000000000000000000000000000000000000000..97ee593259b3c0e675d512665dc4891f715fdb78 --- /dev/null +++ b/temp_venv/bin/activate.bat @@ -0,0 +1,71 @@ +@REM Copyright (c) 2020-202x The virtualenv developers +@REM +@REM Permission is hereby granted, free of charge, to any person obtaining +@REM a copy of this software and associated documentation files (the +@REM "Software"), to deal in the Software without restriction, including +@REM without limitation the rights to use, copy, modify, merge, publish, +@REM distribute, sublicense, and/or sell copies of the Software, and to +@REM permit persons to whom the Software is furnished to do so, subject to +@REM the following conditions: +@REM +@REM The above copyright notice and this permission notice shall be +@REM included in all copies or substantial portions of the Software. 
+@REM +@REM THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +@REM EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +@REM MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +@REM NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +@REM LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +@REM OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +@REM WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +@REM This file is UTF-8 encoded, so we need to update the current code page while executing it +@for /f "tokens=2 delims=:." %%a in ('"%SystemRoot%\System32\chcp.com"') do @set _OLD_CODEPAGE=%%a +@if defined _OLD_CODEPAGE ( + @"%SystemRoot%\System32\chcp.com" 65001 > nul +) + +@for %%i in ("/Users/elan/pi0_nomagic_collecting_film/temp_venv") do @set "VIRTUAL_ENV=%%~fi" + +@set "VIRTUAL_ENV_PROMPT=" +@if NOT DEFINED VIRTUAL_ENV_PROMPT ( + @for %%d in ("%VIRTUAL_ENV%") do @set "VIRTUAL_ENV_PROMPT=%%~nxd" +) + +@if defined _OLD_VIRTUAL_PROMPT ( + @set "PROMPT=%_OLD_VIRTUAL_PROMPT%" +) else ( + @if not defined PROMPT ( + @set "PROMPT=$P$G" + ) + @if not defined VIRTUAL_ENV_DISABLE_PROMPT ( + @set "_OLD_VIRTUAL_PROMPT=%PROMPT%" + ) +) +@if not defined VIRTUAL_ENV_DISABLE_PROMPT ( + @set "PROMPT=(%VIRTUAL_ENV_PROMPT%) %PROMPT%" +) + +@REM Don't use () to avoid problems with them in %PATH% +@if defined _OLD_VIRTUAL_PYTHONHOME @goto ENDIFVHOME + @set "_OLD_VIRTUAL_PYTHONHOME=%PYTHONHOME%" +:ENDIFVHOME + +@set PYTHONHOME= + +@REM if defined _OLD_VIRTUAL_PATH ( +@if not defined _OLD_VIRTUAL_PATH @goto ENDIFVPATH1 + @set "PATH=%_OLD_VIRTUAL_PATH%" +:ENDIFVPATH1 +@REM ) else ( +@if defined _OLD_VIRTUAL_PATH @goto ENDIFVPATH2 + @set "_OLD_VIRTUAL_PATH=%PATH%" +:ENDIFVPATH2 + +@set "PATH=%VIRTUAL_ENV%\bin;%PATH%" + +:END +@if defined _OLD_CODEPAGE ( + @"%SystemRoot%\System32\chcp.com" %_OLD_CODEPAGE% > nul + @set _OLD_CODEPAGE= +) diff --git a/temp_venv/bin/activate.csh b/temp_venv/bin/activate.csh new file mode 100644 index 0000000000000000000000000000000000000000..4249fe06d703835b03292e12fabccaf59373883a --- /dev/null +++ b/temp_venv/bin/activate.csh @@ -0,0 +1,76 @@ +# Copyright (c) 2020-202x The virtualenv developers +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be +# included in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +# This file must be used with "source bin/activate.csh" *from csh*. +# You cannot run it directly. +# Created by Davide Di Blasi . 
+ +set newline='\ +' + +alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH:q" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT:q" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; unsetenv VIRTUAL_ENV_PROMPT; test "\!:*" != "nondestructive" && unalias deactivate && unalias pydoc' + +# Unset irrelevant variables. +deactivate nondestructive + +setenv VIRTUAL_ENV '/Users/elan/pi0_nomagic_collecting_film/temp_venv' + +set _OLD_VIRTUAL_PATH="$PATH:q" +setenv PATH "$VIRTUAL_ENV:q/bin:$PATH:q" + + + +if ('' != "") then + setenv VIRTUAL_ENV_PROMPT '' +else + setenv VIRTUAL_ENV_PROMPT "$VIRTUAL_ENV:t:q" +endif + +if ( $?VIRTUAL_ENV_DISABLE_PROMPT ) then + if ( $VIRTUAL_ENV_DISABLE_PROMPT == "" ) then + set do_prompt = "1" + else + set do_prompt = "0" + endif +else + set do_prompt = "1" +endif + +if ( $do_prompt == "1" ) then + # Could be in a non-interactive environment, + # in which case, $prompt is undefined and we wouldn't + # care about the prompt anyway. + if ( $?prompt ) then + set _OLD_VIRTUAL_PROMPT="$prompt:q" + if ( "$prompt:q" =~ *"$newline:q"* ) then + : + else + set prompt = '('"$VIRTUAL_ENV_PROMPT:q"') '"$prompt:q" + endif + endif +endif + +unset env_name +unset do_prompt + +alias pydoc python -m pydoc + +rehash diff --git a/temp_venv/bin/activate.fish b/temp_venv/bin/activate.fish new file mode 100644 index 0000000000000000000000000000000000000000..3ee268092ce822cdaa492ad96b9de27b871f33d8 --- /dev/null +++ b/temp_venv/bin/activate.fish @@ -0,0 +1,124 @@ +# Copyright (c) 2020-202x The virtualenv developers +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be +# included in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +# This file must be used using `source bin/activate.fish` *within a running fish ( http://fishshell.com ) session*. +# Do not run it directly. + +function _bashify_path -d "Converts a fish path to something bash can recognize" + set fishy_path $argv + set bashy_path $fishy_path[1] + for path_part in $fishy_path[2..-1] + set bashy_path "$bashy_path:$path_part" + end + echo $bashy_path +end + +function _fishify_path -d "Converts a bash path to something fish can recognize" + echo $argv | tr ':' '\n' +end + +function deactivate -d 'Exit virtualenv mode and return to the normal environment.' 
+ # reset old environment variables + if test -n "$_OLD_VIRTUAL_PATH" + # https://github.com/fish-shell/fish-shell/issues/436 altered PATH handling + if test (echo $FISH_VERSION | head -c 1) -lt 3 + set -gx PATH (_fishify_path "$_OLD_VIRTUAL_PATH") + else + set -gx PATH $_OLD_VIRTUAL_PATH + end + set -e _OLD_VIRTUAL_PATH + end + + if test -n "$_OLD_VIRTUAL_PYTHONHOME" + set -gx PYTHONHOME "$_OLD_VIRTUAL_PYTHONHOME" + set -e _OLD_VIRTUAL_PYTHONHOME + end + + if test -n "$_OLD_FISH_PROMPT_OVERRIDE" + and functions -q _old_fish_prompt + # Set an empty local `$fish_function_path` to allow the removal of `fish_prompt` using `functions -e`. + set -l fish_function_path + + # Erase virtualenv's `fish_prompt` and restore the original. + functions -e fish_prompt + functions -c _old_fish_prompt fish_prompt + functions -e _old_fish_prompt + set -e _OLD_FISH_PROMPT_OVERRIDE + end + + set -e VIRTUAL_ENV + set -e VIRTUAL_ENV_PROMPT + + if test "$argv[1]" != 'nondestructive' + # Self-destruct! + functions -e pydoc + functions -e deactivate + functions -e _bashify_path + functions -e _fishify_path + end +end + +# Unset irrelevant variables. +deactivate nondestructive + +set -gx VIRTUAL_ENV '/Users/elan/pi0_nomagic_collecting_film/temp_venv' + +# https://github.com/fish-shell/fish-shell/issues/436 altered PATH handling +if test (echo $FISH_VERSION | head -c 1) -lt 3 + set -gx _OLD_VIRTUAL_PATH (_bashify_path $PATH) +else + set -gx _OLD_VIRTUAL_PATH $PATH +end +set -gx PATH "$VIRTUAL_ENV"'/bin' $PATH + +# Prompt override provided? +# If not, just use the environment name. +if test -n '' + set -gx VIRTUAL_ENV_PROMPT '' +else + set -gx VIRTUAL_ENV_PROMPT (basename "$VIRTUAL_ENV") +end + +# Unset `$PYTHONHOME` if set. +if set -q PYTHONHOME + set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME + set -e PYTHONHOME +end + +function pydoc + python -m pydoc $argv +end + +if test -z "$VIRTUAL_ENV_DISABLE_PROMPT" + # Copy the current `fish_prompt` function as `_old_fish_prompt`. + functions -c fish_prompt _old_fish_prompt + + function fish_prompt + # Run the user's prompt first; it might depend on (pipe)status. + set -l prompt (_old_fish_prompt) + + printf '(%s) ' $VIRTUAL_ENV_PROMPT + + string join -- \n $prompt # handle multi-line prompts + end + + set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV" +end diff --git a/temp_venv/bin/activate.nu b/temp_venv/bin/activate.nu new file mode 100644 index 0000000000000000000000000000000000000000..e89c3ffeaa153f2497f8b34941aa346092d464de --- /dev/null +++ b/temp_venv/bin/activate.nu @@ -0,0 +1,117 @@ +# Copyright (c) 2020-202x The virtualenv developers +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be +# included in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +# virtualenv activation module +# Activate with `overlay use activate.nu` +# Deactivate with `deactivate`, as usual +# +# To customize the overlay name, you can call `overlay use activate.nu as foo`, +# but then simply `deactivate` won't work because it is just an alias to hide +# the "activate" overlay. You'd need to call `overlay hide foo` manually. + +export-env { + def is-string [x] { + ($x | describe) == 'string' + } + + def has-env [...names] { + $names | each {|n| + $n in $env + } | all {|i| $i == true} + } + + # Emulates a `test -z`, but better as it handles e.g 'false' + def is-env-true [name: string] { + if (has-env $name) { + # Try to parse 'true', '0', '1', and fail if not convertible + let parsed = (do -i { $env | get $name | into bool }) + if ($parsed | describe) == 'bool' { + $parsed + } else { + not ($env | get -i $name | is-empty) + } + } else { + false + } + } + + let virtual_env = '/Users/elan/pi0_nomagic_collecting_film/temp_venv' + let bin = 'bin' + + let is_windows = ($nu.os-info.family) == 'windows' + let path_name = (if (has-env 'Path') { + 'Path' + } else { + 'PATH' + } + ) + + let venv_path = ([$virtual_env $bin] | path join) + let new_path = ($env | get $path_name | prepend $venv_path) + + # If there is no default prompt, then use the env name instead + let virtual_env_prompt = (if ('' | is-empty) { + ($virtual_env | path basename) + } else { + '' + }) + + let new_env = { + $path_name : $new_path + VIRTUAL_ENV : $virtual_env + VIRTUAL_ENV_PROMPT : $virtual_env_prompt + } + + let new_env = (if (is-env-true 'VIRTUAL_ENV_DISABLE_PROMPT') { + $new_env + } else { + # Creating the new prompt for the session + let virtual_prefix = $'(char lparen)($virtual_env_prompt)(char rparen) ' + + # Back up the old prompt builder + let old_prompt_command = (if (has-env 'PROMPT_COMMAND') { + $env.PROMPT_COMMAND + } else { + '' + }) + + let new_prompt = (if (has-env 'PROMPT_COMMAND') { + if 'closure' in ($old_prompt_command | describe) { + {|| $'($virtual_prefix)(do $old_prompt_command)' } + } else { + {|| $'($virtual_prefix)($old_prompt_command)' } + } + } else { + {|| $'($virtual_prefix)' } + }) + + $new_env | merge { + PROMPT_COMMAND : $new_prompt + VIRTUAL_PREFIX : $virtual_prefix + } + }) + + # Environment variables that will be loaded as the virtual env + load-env $new_env +} + +export alias pydoc = python -m pydoc +export alias deactivate = overlay hide activate diff --git a/temp_venv/bin/activate.ps1 b/temp_venv/bin/activate.ps1 new file mode 100644 index 0000000000000000000000000000000000000000..2d2bc9ab7b80c9dd7f9a6278fee99a389174bbf9 --- /dev/null +++ b/temp_venv/bin/activate.ps1 @@ -0,0 +1,82 @@ +# Copyright (c) 2020-202x The virtualenv developers +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be +# included in all copies or substantial 
portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +$script:THIS_PATH = $myinvocation.mycommand.path +$script:BASE_DIR = Split-Path (Resolve-Path "$THIS_PATH/..") -Parent + +function global:deactivate([switch] $NonDestructive) { + if (Test-Path variable:_OLD_VIRTUAL_PATH) { + $env:PATH = $variable:_OLD_VIRTUAL_PATH + Remove-Variable "_OLD_VIRTUAL_PATH" -Scope global + } + + if (Test-Path function:_old_virtual_prompt) { + $function:prompt = $function:_old_virtual_prompt + Remove-Item function:\_old_virtual_prompt + } + + if ($env:VIRTUAL_ENV) { + Remove-Item env:VIRTUAL_ENV -ErrorAction SilentlyContinue + } + + if ($env:VIRTUAL_ENV_PROMPT) { + Remove-Item env:VIRTUAL_ENV_PROMPT -ErrorAction SilentlyContinue + } + + if (!$NonDestructive) { + # Self destruct! + Remove-Item function:deactivate + Remove-Item function:pydoc + } +} + +function global:pydoc { + python -m pydoc $args +} + +# unset irrelevant variables +deactivate -nondestructive + +$VIRTUAL_ENV = $BASE_DIR +$env:VIRTUAL_ENV = $VIRTUAL_ENV + +if ("" -ne "") { + $env:VIRTUAL_ENV_PROMPT = "" +} +else { + $env:VIRTUAL_ENV_PROMPT = $( Split-Path $env:VIRTUAL_ENV -Leaf ) +} + +New-Variable -Scope global -Name _OLD_VIRTUAL_PATH -Value $env:PATH + +$env:PATH = "$env:VIRTUAL_ENV/bin:" + $env:PATH +if (!$env:VIRTUAL_ENV_DISABLE_PROMPT) { + function global:_old_virtual_prompt { + "" + } + $function:_old_virtual_prompt = $function:prompt + + function global:prompt { + # Add the custom prefix to the existing prompt + $previous_prompt_value = & $function:_old_virtual_prompt + ("(" + $env:VIRTUAL_ENV_PROMPT + ") " + $previous_prompt_value) + } +} diff --git a/temp_venv/bin/activate_this.py b/temp_venv/bin/activate_this.py new file mode 100644 index 0000000000000000000000000000000000000000..ac341929eb2341732fa3324ed0a31796a90fa13c --- /dev/null +++ b/temp_venv/bin/activate_this.py @@ -0,0 +1,59 @@ +# Copyright (c) 2020-202x The virtualenv developers +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be +# included in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ +""" +Activate virtualenv for current interpreter: + +import runpy +runpy.run_path(this_file) + +This can be used when you must use an existing Python interpreter, not the virtualenv bin/python. +""" # noqa: D415 + +from __future__ import annotations + +import os +import site +import sys + +try: + abs_file = os.path.abspath(__file__) +except NameError as exc: + msg = "You must use import runpy; runpy.run_path(this_file)" + raise AssertionError(msg) from exc + +bin_dir = os.path.dirname(abs_file) +base = bin_dir[: -len("bin") - 1] # strip away the bin part from the __file__, plus the path separator + +# prepend bin to PATH (this file is inside the bin directory) +os.environ["PATH"] = os.pathsep.join([bin_dir, *os.environ.get("PATH", "").split(os.pathsep)]) +os.environ["VIRTUAL_ENV"] = base # virtual env is right above bin directory +os.environ["VIRTUAL_ENV_PROMPT"] = "" or os.path.basename(base) # noqa: SIM222 + +# add the virtual environments libraries to the host python import mechanism +prev_length = len(sys.path) +for lib in "../lib/python3.13/site-packages".split(os.pathsep): + path = os.path.realpath(os.path.join(bin_dir, lib)) + site.addsitedir(path) +sys.path[:] = sys.path[prev_length:] + sys.path[0:prev_length] + +sys.real_prefix = sys.prefix +sys.prefix = base diff --git a/temp_venv/bin/deactivate.bat b/temp_venv/bin/deactivate.bat new file mode 100644 index 0000000000000000000000000000000000000000..07041bc45129994cbb4b338f6fb61aaf502571fb --- /dev/null +++ b/temp_venv/bin/deactivate.bat @@ -0,0 +1,39 @@ +@REM Copyright (c) 2020-202x The virtualenv developers +@REM +@REM Permission is hereby granted, free of charge, to any person obtaining +@REM a copy of this software and associated documentation files (the +@REM "Software"), to deal in the Software without restriction, including +@REM without limitation the rights to use, copy, modify, merge, publish, +@REM distribute, sublicense, and/or sell copies of the Software, and to +@REM permit persons to whom the Software is furnished to do so, subject to +@REM the following conditions: +@REM +@REM The above copyright notice and this permission notice shall be +@REM included in all copies or substantial portions of the Software. +@REM +@REM THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +@REM EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +@REM MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +@REM NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +@REM LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +@REM OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +@REM WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ +@set VIRTUAL_ENV= +@set VIRTUAL_ENV_PROMPT= + +@REM Don't use () to avoid problems with them in %PATH% +@if not defined _OLD_VIRTUAL_PROMPT @goto ENDIFVPROMPT + @set "PROMPT=%_OLD_VIRTUAL_PROMPT%" + @set _OLD_VIRTUAL_PROMPT= +:ENDIFVPROMPT + +@if not defined _OLD_VIRTUAL_PYTHONHOME @goto ENDIFVHOME + @set "PYTHONHOME=%_OLD_VIRTUAL_PYTHONHOME%" + @set _OLD_VIRTUAL_PYTHONHOME= +:ENDIFVHOME + +@if not defined _OLD_VIRTUAL_PATH @goto ENDIFVPATH + @set "PATH=%_OLD_VIRTUAL_PATH%" + @set _OLD_VIRTUAL_PATH= +:ENDIFVPATH \ No newline at end of file diff --git a/temp_venv/bin/huggingface-cli b/temp_venv/bin/huggingface-cli new file mode 100644 index 0000000000000000000000000000000000000000..3f6d030a6fbf29c6b0cbbbb0a591d64de3b64f07 --- /dev/null +++ b/temp_venv/bin/huggingface-cli @@ -0,0 +1,10 @@ +#!/Users/elan/pi0_nomagic_collecting_film/temp_venv/bin/python3 +# -*- coding: utf-8 -*- +import sys +from huggingface_hub.commands.huggingface_cli import main +if __name__ == "__main__": + if sys.argv[0].endswith("-script.pyw"): + sys.argv[0] = sys.argv[0][:-11] + elif sys.argv[0].endswith(".exe"): + sys.argv[0] = sys.argv[0][:-4] + sys.exit(main()) diff --git a/temp_venv/bin/ipython b/temp_venv/bin/ipython new file mode 100644 index 0000000000000000000000000000000000000000..8eec4f41a907b0ece10cc9d79a6080d173abf06f --- /dev/null +++ b/temp_venv/bin/ipython @@ -0,0 +1,10 @@ +#!/Users/elan/pi0_nomagic_collecting_film/temp_venv/bin/python3 +# -*- coding: utf-8 -*- +import sys +from IPython import start_ipython +if __name__ == "__main__": + if sys.argv[0].endswith("-script.pyw"): + sys.argv[0] = sys.argv[0][:-11] + elif sys.argv[0].endswith(".exe"): + sys.argv[0] = sys.argv[0][:-4] + sys.exit(start_ipython()) diff --git a/temp_venv/bin/ipython3 b/temp_venv/bin/ipython3 new file mode 100644 index 0000000000000000000000000000000000000000..8eec4f41a907b0ece10cc9d79a6080d173abf06f --- /dev/null +++ b/temp_venv/bin/ipython3 @@ -0,0 +1,10 @@ +#!/Users/elan/pi0_nomagic_collecting_film/temp_venv/bin/python3 +# -*- coding: utf-8 -*- +import sys +from IPython import start_ipython +if __name__ == "__main__": + if sys.argv[0].endswith("-script.pyw"): + sys.argv[0] = sys.argv[0][:-11] + elif sys.argv[0].endswith(".exe"): + sys.argv[0] = sys.argv[0][:-4] + sys.exit(start_ipython()) diff --git a/temp_venv/bin/normalizer b/temp_venv/bin/normalizer new file mode 100644 index 0000000000000000000000000000000000000000..30dc219534014151cc4ec1c822db8bbdc51dcf46 --- /dev/null +++ b/temp_venv/bin/normalizer @@ -0,0 +1,10 @@ +#!/Users/elan/pi0_nomagic_collecting_film/temp_venv/bin/python3 +# -*- coding: utf-8 -*- +import sys +from charset_normalizer import cli +if __name__ == "__main__": + if sys.argv[0].endswith("-script.pyw"): + sys.argv[0] = sys.argv[0][:-11] + elif sys.argv[0].endswith(".exe"): + sys.argv[0] = sys.argv[0][:-4] + sys.exit(cli.cli_detect()) diff --git a/temp_venv/bin/pydoc.bat b/temp_venv/bin/pydoc.bat new file mode 100644 index 0000000000000000000000000000000000000000..daa20590b181e9f5a1f4f642f6c4eaf471cff00f --- /dev/null +++ b/temp_venv/bin/pydoc.bat @@ -0,0 +1,22 @@ +@REM Copyright (c) 2020-202x The virtualenv developers +@REM +@REM Permission is hereby granted, free of charge, to any person obtaining +@REM a copy of this software and associated documentation files (the +@REM "Software"), to deal in the Software without restriction, including +@REM without limitation the rights to use, copy, modify, merge, publish, +@REM distribute, sublicense, and/or sell copies of the Software, and to +@REM permit persons 
to whom the Software is furnished to do so, subject to +@REM the following conditions: +@REM +@REM The above copyright notice and this permission notice shall be +@REM included in all copies or substantial portions of the Software. +@REM +@REM THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +@REM EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +@REM MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +@REM NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +@REM LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +@REM OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +@REM WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +python.exe -m pydoc %* \ No newline at end of file diff --git a/temp_venv/bin/pygmentize b/temp_venv/bin/pygmentize new file mode 100644 index 0000000000000000000000000000000000000000..7e4330e751f99adfed6e27490848ab7b81a1cb8e --- /dev/null +++ b/temp_venv/bin/pygmentize @@ -0,0 +1,10 @@ +#!/Users/elan/pi0_nomagic_collecting_film/temp_venv/bin/python3 +# -*- coding: utf-8 -*- +import sys +from pygments.cmdline import main +if __name__ == "__main__": + if sys.argv[0].endswith("-script.pyw"): + sys.argv[0] = sys.argv[0][:-11] + elif sys.argv[0].endswith(".exe"): + sys.argv[0] = sys.argv[0][:-4] + sys.exit(main()) diff --git a/temp_venv/bin/python b/temp_venv/bin/python new file mode 100644 index 0000000000000000000000000000000000000000..47ede0bf1009fbf3cc55c57ac26e383665c7d7c3 Binary files /dev/null and b/temp_venv/bin/python differ diff --git a/temp_venv/bin/python3 b/temp_venv/bin/python3 new file mode 100644 index 0000000000000000000000000000000000000000..47ede0bf1009fbf3cc55c57ac26e383665c7d7c3 Binary files /dev/null and b/temp_venv/bin/python3 differ diff --git a/temp_venv/bin/python3.13 b/temp_venv/bin/python3.13 new file mode 100644 index 0000000000000000000000000000000000000000..47ede0bf1009fbf3cc55c57ac26e383665c7d7c3 Binary files /dev/null and b/temp_venv/bin/python3.13 differ diff --git a/temp_venv/bin/tqdm b/temp_venv/bin/tqdm new file mode 100644 index 0000000000000000000000000000000000000000..8f3064ed6d8362fa9765c7802d5605e3b5c9dd3a --- /dev/null +++ b/temp_venv/bin/tqdm @@ -0,0 +1,10 @@ +#!/Users/elan/pi0_nomagic_collecting_film/temp_venv/bin/python3 +# -*- coding: utf-8 -*- +import sys +from tqdm.cli import main +if __name__ == "__main__": + if sys.argv[0].endswith("-script.pyw"): + sys.argv[0] = sys.argv[0][:-11] + elif sys.argv[0].endswith(".exe"): + sys.argv[0] = sys.argv[0][:-4] + sys.exit(main()) diff --git a/temp_venv/lib/python3.13/site-packages/_virtualenv.py b/temp_venv/lib/python3.13/site-packages/_virtualenv.py new file mode 100644 index 0000000000000000000000000000000000000000..6c1f22640d567f04c9880b90566436ee5ba14400 --- /dev/null +++ b/temp_venv/lib/python3.13/site-packages/_virtualenv.py @@ -0,0 +1,101 @@ +"""Patches that are applied at runtime to the virtual environment.""" + +import os +import sys + +VIRTUALENV_PATCH_FILE = os.path.join(__file__) + + +def patch_dist(dist): + """ + Distutils allows user to configure some arguments via a configuration file: + https://docs.python.org/3.11/install/index.html#distutils-configuration-files. + + Some of this arguments though don't make sense in context of the virtual environment files, let's fix them up. 
+ """ # noqa: D205 + # we cannot allow some install config as that would get packages installed outside of the virtual environment + old_parse_config_files = dist.Distribution.parse_config_files + + def parse_config_files(self, *args, **kwargs): + result = old_parse_config_files(self, *args, **kwargs) + install = self.get_option_dict("install") + + if "prefix" in install: # the prefix governs where to install the libraries + install["prefix"] = VIRTUALENV_PATCH_FILE, os.path.abspath(sys.prefix) + for base in ("purelib", "platlib", "headers", "scripts", "data"): + key = f"install_{base}" + if key in install: # do not allow global configs to hijack venv paths + install.pop(key, None) + return result + + dist.Distribution.parse_config_files = parse_config_files + + +# Import hook that patches some modules to ignore configuration values that break package installation in case +# of virtual environments. +_DISTUTILS_PATCH = "distutils.dist", "setuptools.dist" +# https://docs.python.org/3/library/importlib.html#setting-up-an-importer + + +class _Finder: + """A meta path finder that allows patching the imported distutils modules.""" + + fullname = None + + # lock[0] is threading.Lock(), but initialized lazily to avoid importing threading very early at startup, + # because there are gevent-based applications that need to be first to import threading by themselves. + # See https://github.com/pypa/virtualenv/issues/1895 for details. + lock = [] # noqa: RUF012 + + def find_spec(self, fullname, path, target=None): # noqa: ARG002 + if fullname in _DISTUTILS_PATCH and self.fullname is None: + # initialize lock[0] lazily + if len(self.lock) == 0: + import threading + + lock = threading.Lock() + # there is possibility that two threads T1 and T2 are simultaneously running into find_spec, + # observing .lock as empty, and further going into hereby initialization. However due to the GIL, + # list.append() operation is atomic and this way only one of the threads will "win" to put the lock + # - that every thread will use - into .lock[0]. 
+ # https://docs.python.org/3/faq/library.html#what-kinds-of-global-value-mutation-are-thread-safe + self.lock.append(lock) + + from functools import partial + from importlib.util import find_spec + + with self.lock[0]: + self.fullname = fullname + try: + spec = find_spec(fullname, path) + if spec is not None: + # https://www.python.org/dev/peps/pep-0451/#how-loading-will-work + is_new_api = hasattr(spec.loader, "exec_module") + func_name = "exec_module" if is_new_api else "load_module" + old = getattr(spec.loader, func_name) + func = self.exec_module if is_new_api else self.load_module + if old is not func: + try: # noqa: SIM105 + setattr(spec.loader, func_name, partial(func, old)) + except AttributeError: + pass # C-Extension loaders are r/o such as zipimporter with <3.7 + return spec + finally: + self.fullname = None + return None + + @staticmethod + def exec_module(old, module): + old(module) + if module.__name__ in _DISTUTILS_PATCH: + patch_dist(module) + + @staticmethod + def load_module(old, name): + module = old(name) + if module.__name__ in _DISTUTILS_PATCH: + patch_dist(module) + return module + + +sys.meta_path.insert(0, _Finder()) diff --git a/temp_venv/lib/python3.13/site-packages/decorator.py b/temp_venv/lib/python3.13/site-packages/decorator.py new file mode 100644 index 0000000000000000000000000000000000000000..40a39f2f1b6d07036d98260318108fdaf173de44 --- /dev/null +++ b/temp_venv/lib/python3.13/site-packages/decorator.py @@ -0,0 +1,459 @@ +# ######################### LICENSE ############################ # + +# Copyright (c) 2005-2025, Michele Simionato +# All rights reserved. + +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: + +# Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# Redistributions in bytecode form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. + +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS +# OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR +# TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE +# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH +# DAMAGE. + +""" +Decorator module, see +https://github.com/micheles/decorator/blob/master/docs/documentation.md +for the documentation. 
+""" +import re +import sys +import inspect +import operator +import itertools +import functools +from contextlib import _GeneratorContextManager +from inspect import getfullargspec, iscoroutinefunction, isgeneratorfunction + +__version__ = '5.2.1' + +DEF = re.compile(r'\s*def\s*([_\w][_\w\d]*)\s*\(') +POS = inspect.Parameter.POSITIONAL_OR_KEYWORD +EMPTY = inspect.Parameter.empty + + +# this is not used anymore in the core, but kept for backward compatibility +class FunctionMaker(object): + """ + An object with the ability to create functions with a given signature. + It has attributes name, doc, module, signature, defaults, dict and + methods update and make. + """ + + # Atomic get-and-increment provided by the GIL + _compile_count = itertools.count() + + # make pylint happy + args = varargs = varkw = defaults = kwonlyargs = kwonlydefaults = () + + def __init__(self, func=None, name=None, signature=None, + defaults=None, doc=None, module=None, funcdict=None): + self.shortsignature = signature + if func: + # func can be a class or a callable, but not an instance method + self.name = func.__name__ + if self.name == '': # small hack for lambda functions + self.name = '_lambda_' + self.doc = func.__doc__ + self.module = func.__module__ + if inspect.isroutine(func) or isinstance(func, functools.partial): + argspec = getfullargspec(func) + self.annotations = getattr(func, '__annotations__', {}) + for a in ('args', 'varargs', 'varkw', 'defaults', 'kwonlyargs', + 'kwonlydefaults'): + setattr(self, a, getattr(argspec, a)) + for i, arg in enumerate(self.args): + setattr(self, 'arg%d' % i, arg) + allargs = list(self.args) + allshortargs = list(self.args) + if self.varargs: + allargs.append('*' + self.varargs) + allshortargs.append('*' + self.varargs) + elif self.kwonlyargs: + allargs.append('*') # single star syntax + for a in self.kwonlyargs: + allargs.append('%s=None' % a) + allshortargs.append('%s=%s' % (a, a)) + if self.varkw: + allargs.append('**' + self.varkw) + allshortargs.append('**' + self.varkw) + self.signature = ', '.join(allargs) + self.shortsignature = ', '.join(allshortargs) + self.dict = func.__dict__.copy() + # func=None happens when decorating a caller + if name: + self.name = name + if signature is not None: + self.signature = signature + if defaults: + self.defaults = defaults + if doc: + self.doc = doc + if module: + self.module = module + if funcdict: + self.dict = funcdict + # check existence required attributes + assert hasattr(self, 'name') + if not hasattr(self, 'signature'): + raise TypeError('You are decorating a non function: %s' % func) + + def update(self, func, **kw): + """ + Update the signature of func with the data in self + """ + func.__name__ = self.name + func.__doc__ = getattr(self, 'doc', None) + func.__dict__ = getattr(self, 'dict', {}) + func.__defaults__ = self.defaults + func.__kwdefaults__ = self.kwonlydefaults or None + func.__annotations__ = getattr(self, 'annotations', None) + try: + frame = sys._getframe(3) + except AttributeError: # for IronPython and similar implementations + callermodule = '?' 
+        else:
+            callermodule = frame.f_globals.get('__name__', '?')
+        func.__module__ = getattr(self, 'module', callermodule)
+        func.__dict__.update(kw)
+
+    def make(self, src_templ, evaldict=None, addsource=False, **attrs):
+        """
+        Make a new function from a given template and update the signature
+        """
+        src = src_templ % vars(self)  # expand name and signature
+        evaldict = evaldict or {}
+        mo = DEF.search(src)
+        if mo is None:
+            raise SyntaxError('not a valid function template\n%s' % src)
+        name = mo.group(1)  # extract the function name
+        names = set([name] + [arg.strip(' *') for arg in
+                              self.shortsignature.split(',')])
+        for n in names:
+            if n in ('_func_', '_call_'):
+                raise NameError('%s is overridden in\n%s' % (n, src))
+
+        if not src.endswith('\n'):  # add a newline for old Pythons
+            src += '\n'
+
+        # Ensure each generated function has a unique filename for profilers
+        # (such as cProfile) that depend on the tuple of (<filename>,
+        # <definition line>, <function name>) being unique.
+        filename = '<decorator-gen-%d>' % next(self._compile_count)
+        try:
+            code = compile(src, filename, 'single')
+            exec(code, evaldict)
+        except Exception:
+            print('Error in generated code:', file=sys.stderr)
+            print(src, file=sys.stderr)
+            raise
+        func = evaldict[name]
+        if addsource:
+            attrs['__source__'] = src
+        self.update(func, **attrs)
+        return func
+
+    @classmethod
+    def create(cls, obj, body, evaldict, defaults=None,
+               doc=None, module=None, addsource=True, **attrs):
+        """
+        Create a function from the strings name, signature and body.
+        evaldict is the evaluation dictionary. If addsource is true an
+        attribute __source__ is added to the result. The attributes attrs
+        are added, if any.
+        """
+        if isinstance(obj, str):  # "name(signature)"
+            name, rest = obj.strip().split('(', 1)
+            signature = rest[:-1]  # strip a right parens
+            func = None
+        else:  # a function
+            name = None
+            signature = None
+            func = obj
+        self = cls(func, name, signature, defaults, doc, module)
+        ibody = '\n'.join('    ' + line for line in body.splitlines())
+        caller = evaldict.get('_call_')  # when called from `decorate`
+        if caller and iscoroutinefunction(caller):
+            body = ('async def %(name)s(%(signature)s):\n' + ibody).replace(
+                'return', 'return await')
+        else:
+            body = 'def %(name)s(%(signature)s):\n' + ibody
+        return self.make(body, evaldict, addsource, **attrs)
+
+
+def fix(args, kwargs, sig):
+    """
+    Fix args and kwargs to be consistent with the signature
+    """
+    ba = sig.bind(*args, **kwargs)
+    ba.apply_defaults()  # needed for test_dan_schult
+    return ba.args, ba.kwargs
+
+
+def decorate(func, caller, extras=(), kwsyntax=False):
+    """
+    Decorates a function/generator/coroutine using a caller.
+    If kwsyntax is True calling the decorated functions with keyword
+    syntax will pass the named arguments inside the ``kw`` dictionary,
+    even if such arguments are positional, similarly to what functools.wraps
+    does. By default kwsyntax is False and the arguments are untouched.
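+
+    A minimal usage sketch (illustrative, not from the upstream docs)::
+
+        >>> def log_call(func, *args, **kw):
+        ...     print('calling', func.__name__)
+        ...     return func(*args, **kw)
+        >>> def add(x, y):
+        ...     return x + y
+        >>> add = decorate(add, log_call)
+        >>> add(1, 2)
+        calling add
+        3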
+ """ + sig = inspect.signature(func) + if isinstance(func, functools.partial): + func = functools.update_wrapper(func, func.func) + if iscoroutinefunction(caller): + async def fun(*args, **kw): + if not kwsyntax: + args, kw = fix(args, kw, sig) + return await caller(func, *(extras + args), **kw) + elif isgeneratorfunction(caller): + def fun(*args, **kw): + if not kwsyntax: + args, kw = fix(args, kw, sig) + for res in caller(func, *(extras + args), **kw): + yield res + else: + def fun(*args, **kw): + if not kwsyntax: + args, kw = fix(args, kw, sig) + return caller(func, *(extras + args), **kw) + + fun.__name__ = func.__name__ + fun.__doc__ = func.__doc__ + fun.__wrapped__ = func + fun.__signature__ = sig + fun.__qualname__ = func.__qualname__ + # builtin functions like defaultdict.__setitem__ lack many attributes + try: + fun.__defaults__ = func.__defaults__ + except AttributeError: + pass + try: + fun.__kwdefaults__ = func.__kwdefaults__ + except AttributeError: + pass + try: + fun.__annotations__ = func.__annotations__ + except AttributeError: + pass + try: + fun.__module__ = func.__module__ + except AttributeError: + pass + try: + fun.__name__ = func.__name__ + except AttributeError: # happens with old versions of numpy.vectorize + func.__name__ == 'noname' + try: + fun.__dict__.update(func.__dict__) + except AttributeError: + pass + return fun + + +def decoratorx(caller): + """ + A version of "decorator" implemented via "exec" and not via the + Signature object. Use this if you are want to preserve the `.__code__` + object properties (https://github.com/micheles/decorator/issues/129). + """ + def dec(func): + return FunctionMaker.create( + func, + "return _call_(_func_, %(shortsignature)s)", + dict(_call_=caller, _func_=func), + __wrapped__=func, __qualname__=func.__qualname__) + return dec + + +def decorator(caller, _func=None, kwsyntax=False): + """ + decorator(caller) converts a caller function into a decorator + """ + if _func is not None: # return a decorated function + # this is obsolete behavior; you should use decorate instead + return decorate(_func, caller, (), kwsyntax) + # else return a decorator function + sig = inspect.signature(caller) + dec_params = [p for p in sig.parameters.values() if p.kind is POS] + + def dec(func=None, *args, **kw): + na = len(args) + 1 + extras = args + tuple(kw.get(p.name, p.default) + for p in dec_params[na:] + if p.default is not EMPTY) + if func is None: + return lambda func: decorate(func, caller, extras, kwsyntax) + else: + return decorate(func, caller, extras, kwsyntax) + dec.__signature__ = sig.replace(parameters=dec_params) + dec.__name__ = caller.__name__ + dec.__doc__ = caller.__doc__ + dec.__wrapped__ = caller + dec.__qualname__ = caller.__qualname__ + dec.__kwdefaults__ = getattr(caller, '__kwdefaults__', None) + dec.__dict__.update(caller.__dict__) + return dec + + +# ####################### contextmanager ####################### # + + +class ContextManager(_GeneratorContextManager): + def __init__(self, g, *a, **k): + _GeneratorContextManager.__init__(self, g, a, k) + + def __call__(self, func): + def caller(f, *a, **k): + with self.__class__(self.func, *self.args, **self.kwds): + return f(*a, **k) + return decorate(func, caller) + + +_contextmanager = decorator(ContextManager) + + +def contextmanager(func): + # Enable Pylint config: contextmanager-decorators=decorator.contextmanager + return _contextmanager(func) + + +# ############################ dispatch_on ############################ # + +def append(a, vancestors): + """ + 
Append ``a`` to the list of the virtual ancestors, unless it is already + included. + """ + add = True + for j, va in enumerate(vancestors): + if issubclass(va, a): + add = False + break + if issubclass(a, va): + vancestors[j] = a + add = False + if add: + vancestors.append(a) + + +# inspired from simplegeneric by P.J. Eby and functools.singledispatch +def dispatch_on(*dispatch_args): + """ + Factory of decorators turning a function into a generic function + dispatching on the given arguments. + """ + assert dispatch_args, 'No dispatch args passed' + dispatch_str = '(%s,)' % ', '.join(dispatch_args) + + def check(arguments, wrong=operator.ne, msg=''): + """Make sure one passes the expected number of arguments""" + if wrong(len(arguments), len(dispatch_args)): + raise TypeError('Expected %d arguments, got %d%s' % + (len(dispatch_args), len(arguments), msg)) + + def gen_func_dec(func): + """Decorator turning a function into a generic function""" + + # first check the dispatch arguments + argset = set(getfullargspec(func).args) + if not set(dispatch_args) <= argset: + raise NameError('Unknown dispatch arguments %s' % dispatch_str) + + typemap = {} + + def vancestors(*types): + """ + Get a list of sets of virtual ancestors for the given types + """ + check(types) + ras = [[] for _ in range(len(dispatch_args))] + for types_ in typemap: + for t, type_, ra in zip(types, types_, ras): + if issubclass(t, type_) and type_ not in t.mro(): + append(type_, ra) + return [set(ra) for ra in ras] + + def ancestors(*types): + """ + Get a list of virtual MROs, one for each type + """ + check(types) + lists = [] + for t, vas in zip(types, vancestors(*types)): + n_vas = len(vas) + if n_vas > 1: + raise RuntimeError( + 'Ambiguous dispatch for %s: %s' % (t, vas)) + elif n_vas == 1: + va, = vas + mro = type('t', (t, va), {}).mro()[1:] + else: + mro = t.mro() + lists.append(mro[:-1]) # discard t and object + return lists + + def register(*types): + """ + Decorator to register an implementation for the given types + """ + check(types) + + def dec(f): + check(getfullargspec(f).args, operator.lt, ' in ' + f.__name__) + typemap[types] = f + return f + return dec + + def dispatch_info(*types): + """ + An utility to introspect the dispatch algorithm + """ + check(types) + lst = [] + for ancs in itertools.product(*ancestors(*types)): + lst.append(tuple(a.__name__ for a in ancs)) + return lst + + def _dispatch(dispatch_args, *args, **kw): + types = tuple(type(arg) for arg in dispatch_args) + try: # fast path + f = typemap[types] + except KeyError: + pass + else: + return f(*args, **kw) + combinations = itertools.product(*ancestors(*types)) + next(combinations) # the first one has been already tried + for types_ in combinations: + f = typemap.get(types_) + if f is not None: + return f(*args, **kw) + + # else call the default implementation + return func(*args, **kw) + + return FunctionMaker.create( + func, 'return _f_(%s, %%(shortsignature)s)' % dispatch_str, + dict(_f_=_dispatch), register=register, default=func, + typemap=typemap, vancestors=vancestors, ancestors=ancestors, + dispatch_info=dispatch_info, __wrapped__=func) + + gen_func_dec.__name__ = 'dispatch_on' + dispatch_str + return gen_func_dec diff --git a/temp_venv/lib/python3.13/site-packages/idna-3.10.dist-info/LICENSE.md b/temp_venv/lib/python3.13/site-packages/idna-3.10.dist-info/LICENSE.md new file mode 100644 index 0000000000000000000000000000000000000000..19b6b45242c16a1025465309eec2ca5009319de3 --- /dev/null +++ 
b/temp_venv/lib/python3.13/site-packages/idna-3.10.dist-info/LICENSE.md @@ -0,0 +1,31 @@ +BSD 3-Clause License + +Copyright (c) 2013-2024, Kim Davies and contributors. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED +TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/temp_venv/lib/python3.13/site-packages/idna-3.10.dist-info/RECORD b/temp_venv/lib/python3.13/site-packages/idna-3.10.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..d8c77ddddae5d1523bf26ff599fc9e0e9ee6780e --- /dev/null +++ b/temp_venv/lib/python3.13/site-packages/idna-3.10.dist-info/RECORD @@ -0,0 +1,15 @@ +idna-3.10.dist-info/INSTALLER,sha256=5hhM4Q4mYTT9z6QB6PGpUAW81PGNFrYrdXMj4oM_6ak,2 +idna-3.10.dist-info/LICENSE.md,sha256=pZ8LDvNjWHQQmkRhykT_enDVBpboFHZ7-vch1Mmw2w8,1541 +idna-3.10.dist-info/METADATA,sha256=URR5ZyDfQ1PCEGhkYoojqfi2Ra0tau2--lhwG4XSfjI,10158 +idna-3.10.dist-info/RECORD,, +idna-3.10.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +idna-3.10.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81 +idna/__init__.py,sha256=MPqNDLZbXqGaNdXxAFhiqFPKEQXju2jNQhCey6-5eJM,868 +idna/codec.py,sha256=PEew3ItwzjW4hymbasnty2N2OXvNcgHB-JjrBuxHPYY,3422 +idna/compat.py,sha256=RzLy6QQCdl9784aFhb2EX9EKGCJjg0P3PilGdeXXcx8,316 +idna/core.py,sha256=YJYyAMnwiQEPjVC4-Fqu_p4CJ6yKKuDGmppBNQNQpFs,13239 +idna/idnadata.py,sha256=W30GcIGvtOWYwAjZj4ZjuouUutC6ffgNuyjJy7fZ-lo,78306 +idna/intranges.py,sha256=amUtkdhYcQG8Zr-CoMM_kVRacxkivC1WgxN1b63KKdU,1898 +idna/package_data.py,sha256=q59S3OXsc5VI8j6vSD0sGBMyk6zZ4vWFREE88yCJYKs,21 +idna/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +idna/uts46data.py,sha256=rt90K9J40gUSwppDPCrhjgi5AA6pWM65dEGRSf6rIhM,239289 diff --git a/temp_venv/lib/python3.13/site-packages/ipython_pygments_lexers.py b/temp_venv/lib/python3.13/site-packages/ipython_pygments_lexers.py new file mode 100644 index 0000000000000000000000000000000000000000..c655e233654b8ddf8d9a18638451a1ad6937e1d8 --- /dev/null +++ b/temp_venv/lib/python3.13/site-packages/ipython_pygments_lexers.py @@ -0,0 +1,582 @@ +# -*- coding: utf-8 -*- +""" +Defines a variety of Pygments lexers for 
highlighting IPython code. + +This includes: + + IPythonLexer, IPython3Lexer + Lexers for pure IPython (python + magic/shell commands) + + IPythonPartialTracebackLexer, IPythonTracebackLexer + Supports 2.x and 3.x via keyword `python3`. The partial traceback + lexer reads everything but the Python code appearing in a traceback. + The full lexer combines the partial lexer with an IPython lexer. + + IPythonConsoleLexer + A lexer for IPython console sessions, with support for tracebacks. + + IPyLexer + A friendly lexer which examines the first line of text and from it, + decides whether to use an IPython lexer or an IPython console lexer. + This is probably the only lexer that needs to be explicitly added + to Pygments. + +""" +# ----------------------------------------------------------------------------- +# Copyright (c) 2013, the IPython Development Team. +# +# Distributed under the terms of the Modified BSD License. +# +# The full license is in the file COPYING.txt, distributed with this software. +# ----------------------------------------------------------------------------- + +__version__ = "1.1.1" + +# Standard library +import re + +# Third party +from pygments.lexers import ( + BashLexer, + HtmlLexer, + JavascriptLexer, + RubyLexer, + PerlLexer, + Python2Lexer, + Python3Lexer, + TexLexer, +) +from pygments.lexer import ( + Lexer, + DelegatingLexer, + RegexLexer, + do_insertions, + bygroups, + using, +) +from pygments.token import ( + Generic, + Keyword, + Literal, + Name, + Operator, + Other, + Text, + Error, +) + + +line_re = re.compile(".*?\n") + +__all__ = [ + "IPython3Lexer", + "IPythonLexer", + "IPythonPartialTracebackLexer", + "IPythonTracebackLexer", + "IPythonConsoleLexer", + "IPyLexer", +] + + +ipython_tokens = [ + ( + r"(?s)(\s*)(%%capture)([^\n]*\n)(.*)", + bygroups(Text, Operator, Text, using(Python3Lexer)), + ), + ( + r"(?s)(\s*)(%%debug)([^\n]*\n)(.*)", + bygroups(Text, Operator, Text, using(Python3Lexer)), + ), + ( + r"(?is)(\s*)(%%html)([^\n]*\n)(.*)", + bygroups(Text, Operator, Text, using(HtmlLexer)), + ), + ( + r"(?s)(\s*)(%%javascript)([^\n]*\n)(.*)", + bygroups(Text, Operator, Text, using(JavascriptLexer)), + ), + ( + r"(?s)(\s*)(%%js)([^\n]*\n)(.*)", + bygroups(Text, Operator, Text, using(JavascriptLexer)), + ), + ( + r"(?s)(\s*)(%%latex)([^\n]*\n)(.*)", + bygroups(Text, Operator, Text, using(TexLexer)), + ), + ( + r"(?s)(\s*)(%%perl)([^\n]*\n)(.*)", + bygroups(Text, Operator, Text, using(PerlLexer)), + ), + ( + r"(?s)(\s*)(%%prun)([^\n]*\n)(.*)", + bygroups(Text, Operator, Text, using(Python3Lexer)), + ), + ( + r"(?s)(\s*)(%%pypy)([^\n]*\n)(.*)", + bygroups(Text, Operator, Text, using(Python3Lexer)), + ), + ( + r"(?s)(\s*)(%%python2)([^\n]*\n)(.*)", + bygroups(Text, Operator, Text, using(Python2Lexer)), + ), + ( + r"(?s)(\s*)(%%python3)([^\n]*\n)(.*)", + bygroups(Text, Operator, Text, using(Python3Lexer)), + ), + ( + r"(?s)(\s*)(%%python)([^\n]*\n)(.*)", + bygroups(Text, Operator, Text, using(Python3Lexer)), + ), + ( + r"(?s)(\s*)(%%ruby)([^\n]*\n)(.*)", + bygroups(Text, Operator, Text, using(RubyLexer)), + ), + ( + r"(?s)(\s*)(%%timeit)([^\n]*\n)(.*)", + bygroups(Text, Operator, Text, using(Python3Lexer)), + ), + ( + r"(?s)(\s*)(%%time)([^\n]*\n)(.*)", + bygroups(Text, Operator, Text, using(Python3Lexer)), + ), + ( + r"(?s)(\s*)(%%writefile)([^\n]*\n)(.*)", + bygroups(Text, Operator, Text, using(Python3Lexer)), + ), + ( + r"(?s)(\s*)(%%file)([^\n]*\n)(.*)", + bygroups(Text, Operator, Text, using(Python3Lexer)), + ), + (r"(?s)(\s*)(%%)(\w+)(.*)", bygroups(Text, 
Operator, Keyword, Text)), + ( + r"(?s)(^\s*)(%%!)([^\n]*\n)(.*)", + bygroups(Text, Operator, Text, using(BashLexer)), + ), + (r"(%%?)(\w+)(\?\??)$", bygroups(Operator, Keyword, Operator)), + (r"\b(\?\??)(\s*)$", bygroups(Operator, Text)), + (r"(%)(sx|sc|system)(.*)(\n)", bygroups(Operator, Keyword, using(BashLexer), Text)), + (r"(%)(\w+)(.*\n)", bygroups(Operator, Keyword, Text)), + (r"^(!!)(.+)(\n)", bygroups(Operator, using(BashLexer), Text)), + (r"(!)(?!=)(.+)(\n)", bygroups(Operator, using(BashLexer), Text)), + (r"^(\s*)(\?\??)(\s*%{0,2}[\w\.\*]*)", bygroups(Text, Operator, Text)), + (r"(\s*%{0,2}[\w\.\*]*)(\?\??)(\s*)$", bygroups(Text, Operator, Text)), +] + + +class IPython3Lexer(Python3Lexer): + """IPython code lexer (based on Python 3)""" + + name = "IPython" + aliases = ["ipython", "ipython3"] + + tokens = Python3Lexer.tokens.copy() + tokens["root"] = ipython_tokens + tokens["root"] + + +IPythonLexer = IPython3Lexer + + +class IPythonPartialTracebackLexer(RegexLexer): + """ + Partial lexer for IPython tracebacks. + + Handles all the non-python output. + + """ + + name = "IPython Partial Traceback" + + tokens = { + "root": [ + # Tracebacks for syntax errors have a different style. + # For both types of tracebacks, we mark the first line with + # Generic.Traceback. For syntax errors, we mark the filename + # as we mark the filenames for non-syntax tracebacks. + # + # These two regexps define how IPythonConsoleLexer finds a + # traceback. + # + ## Non-syntax traceback + (r"^(\^C)?(-+\n)", bygroups(Error, Generic.Traceback)), + ## Syntax traceback + ( + r"^( File)(.*)(, line )(\d+\n)", + bygroups( + Generic.Traceback, + Name.Namespace, + Generic.Traceback, + Literal.Number.Integer, + ), + ), + # (Exception Identifier)(Whitespace)(Traceback Message) + ( + r"(?u)(^[^\d\W]\w*)(\s*)(Traceback.*?\n)", + bygroups(Name.Exception, Generic.Whitespace, Text), + ), + # (Module/Filename)(Text)(Callee)(Function Signature) + # Better options for callee and function signature? + ( + r"(.*)( in )(.*)(\(.*\)\n)", + bygroups(Name.Namespace, Text, Name.Entity, Name.Tag), + ), + # Regular line: (Whitespace)(Line Number)(Python Code) + ( + r"(\s*?)(\d+)(.*?\n)", + bygroups(Generic.Whitespace, Literal.Number.Integer, Other), + ), + # Emphasized line: (Arrow)(Line Number)(Python Code) + # Using Exception token so arrow color matches the Exception. + ( + r"(-*>?\s?)(\d+)(.*?\n)", + bygroups(Name.Exception, Literal.Number.Integer, Other), + ), + # (Exception Identifier)(Message) + (r"(?u)(^[^\d\W]\w*)(:.*?\n)", bygroups(Name.Exception, Text)), + # Tag everything else as Other, will be handled later. + (r".*\n", Other), + ], + } + + +class IPythonTracebackLexer(DelegatingLexer): + """ + IPython traceback lexer. + + For doctests, the tracebacks can be snipped as much as desired with the + exception to the lines that designate a traceback. For non-syntax error + tracebacks, this is the line of hyphens. For syntax error tracebacks, + this is the line which lists the File and line number. + + """ + + # The lexer inherits from DelegatingLexer. The "root" lexer is an + # appropriate IPython lexer, which depends on the value of the boolean + # `python3`. First, we parse with the partial IPython traceback lexer. + # Then, any code marked with the "Other" token is delegated to the root + # lexer. 
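+    # For example (hypothetical input): in the traceback line
+    #     ----> 2 1/0
+    # the partial lexer claims the arrow and the line number itself and tags
+    # "1/0" as Other, which the delegating machinery re-lexes as Python.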
+ # + name = "IPython Traceback" + aliases = ["ipythontb", "ipython3tb"] + + def __init__(self, **options): + """ + A subclass of `DelegatingLexer` which delegates to the appropriate to either IPyLexer, + IPythonPartialTracebackLexer. + """ + # note we need a __init__ doc, as otherwise it inherits the doc from the super class + # which will fail the documentation build as it references section of the pygments docs that + # do not exists when building IPython's docs. + DelegatingLexer.__init__( + self, IPython3Lexer, IPythonPartialTracebackLexer, **options + ) + + +class IPythonConsoleLexer(Lexer): + """ + An IPython console lexer for IPython code-blocks and doctests, such as: + + .. code-block:: rst + + .. code-block:: ipythonconsole + + In [1]: a = 'foo' + + In [2]: a + Out[2]: 'foo' + + In [3]: print(a) + foo + + + Support is also provided for IPython exceptions: + + .. code-block:: rst + + .. code-block:: ipythonconsole + + In [1]: raise Exception + Traceback (most recent call last): + ... + Exception + + """ + + name = "IPython console session" + aliases = ["ipythonconsole", "ipython3console"] + mimetypes = ["text/x-ipython-console"] + + # The regexps used to determine what is input and what is output. + # The default prompts for IPython are: + # + # in = 'In [#]: ' + # continuation = ' .D.: ' + # template = 'Out[#]: ' + # + # Where '#' is the 'prompt number' or 'execution count' and 'D' + # D is a number of dots matching the width of the execution count + # + in1_regex = r"In \[[0-9]+\]: " + in2_regex = r" \.\.+\.: " + out_regex = r"Out\[[0-9]+\]: " + + #: The regex to determine when a traceback starts. + ipytb_start = re.compile(r"^(\^C)?(-+\n)|^( File)(.*)(, line )(\d+\n)") + + def __init__(self, **options): + """Initialize the IPython console lexer. + + Parameters + ---------- + in1_regex : RegexObject + The compiled regular expression used to detect the start + of inputs. Although the IPython configuration setting may have a + trailing whitespace, do not include it in the regex. If `None`, + then the default input prompt is assumed. + in2_regex : RegexObject + The compiled regular expression used to detect the continuation + of inputs. Although the IPython configuration setting may have a + trailing whitespace, do not include it in the regex. If `None`, + then the default input prompt is assumed. + out_regex : RegexObject + The compiled regular expression used to detect outputs. If `None`, + then the default output prompt is assumed. + + """ + in1_regex = options.get("in1_regex", self.in1_regex) + in2_regex = options.get("in2_regex", self.in2_regex) + out_regex = options.get("out_regex", self.out_regex) + + # So that we can work with input and output prompts which have been + # rstrip'd (possibly by editors) we also need rstrip'd variants. If + # we do not do this, then such prompts will be tagged as 'output'. + # The reason can't just use the rstrip'd variants instead is because + # we want any whitespace associated with the prompt to be inserted + # with the token. This allows formatted code to be modified so as hide + # the appearance of prompts, with the whitespace included. One example + # use of this is in copybutton.js from the standard lib Python docs. + in1_regex_rstrip = in1_regex.rstrip() + "\n" + in2_regex_rstrip = in2_regex.rstrip() + "\n" + out_regex_rstrip = out_regex.rstrip() + "\n" + + # Compile and save them all. 
+ attrs = [ + "in1_regex", + "in2_regex", + "out_regex", + "in1_regex_rstrip", + "in2_regex_rstrip", + "out_regex_rstrip", + ] + for attr in attrs: + self.__setattr__(attr, re.compile(locals()[attr])) + + Lexer.__init__(self, **options) + + self.pylexer = IPython3Lexer(**options) + self.tblexer = IPythonTracebackLexer(**options) + + self.reset() + + def reset(self): + self.mode = "output" + self.index = 0 + self.buffer = "" + self.insertions = [] + + def buffered_tokens(self): + """ + Generator of unprocessed tokens after doing insertions and before + changing to a new state. + + """ + if self.mode == "output": + tokens = [(0, Generic.Output, self.buffer)] + elif self.mode == "input": + tokens = self.pylexer.get_tokens_unprocessed(self.buffer) + else: # traceback + tokens = self.tblexer.get_tokens_unprocessed(self.buffer) + + for i, t, v in do_insertions(self.insertions, tokens): + # All token indexes are relative to the buffer. + yield self.index + i, t, v + + # Clear it all + self.index += len(self.buffer) + self.buffer = "" + self.insertions = [] + + def get_mci(self, line): + """ + Parses the line and returns a 3-tuple: (mode, code, insertion). + + `mode` is the next mode (or state) of the lexer, and is always equal + to 'input', 'output', or 'tb'. + + `code` is a portion of the line that should be added to the buffer + corresponding to the next mode and eventually lexed by another lexer. + For example, `code` could be Python code if `mode` were 'input'. + + `insertion` is a 3-tuple (index, token, text) representing an + unprocessed "token" that will be inserted into the stream of tokens + that are created from the buffer once we change modes. This is usually + the input or output prompt. + + In general, the next mode depends on current mode and on the contents + of `line`. + + """ + # To reduce the number of regex match checks, we have multiple + # 'if' blocks instead of 'if-elif' blocks. + + # Check for possible end of input + in2_match = self.in2_regex.match(line) + in2_match_rstrip = self.in2_regex_rstrip.match(line) + if ( + in2_match and in2_match.group().rstrip() == line.rstrip() + ) or in2_match_rstrip: + end_input = True + else: + end_input = False + if end_input and self.mode != "tb": + # Only look for an end of input when not in tb mode. + # An ellipsis could appear within the traceback. + mode = "output" + code = "" + insertion = (0, Generic.Prompt, line) + return mode, code, insertion + + # Check for output prompt + out_match = self.out_regex.match(line) + out_match_rstrip = self.out_regex_rstrip.match(line) + if out_match or out_match_rstrip: + mode = "output" + if out_match: + idx = out_match.end() + else: + idx = out_match_rstrip.end() + code = line[idx:] + # Use the 'heading' token for output. We cannot use Generic.Error + # since it would conflict with exceptions. + insertion = (0, Generic.Heading, line[:idx]) + return mode, code, insertion + + # Check for input or continuation prompt (non stripped version) + in1_match = self.in1_regex.match(line) + if in1_match or (in2_match and self.mode != "tb"): + # New input or when not in tb, continued input. + # We do not check for continued input when in tb since it is + # allowable to replace a long stack with an ellipsis. 
+ mode = "input" + if in1_match: + idx = in1_match.end() + else: # in2_match + idx = in2_match.end() + code = line[idx:] + insertion = (0, Generic.Prompt, line[:idx]) + return mode, code, insertion + + # Check for input or continuation prompt (stripped version) + in1_match_rstrip = self.in1_regex_rstrip.match(line) + if in1_match_rstrip or (in2_match_rstrip and self.mode != "tb"): + # New input or when not in tb, continued input. + # We do not check for continued input when in tb since it is + # allowable to replace a long stack with an ellipsis. + mode = "input" + if in1_match_rstrip: + idx = in1_match_rstrip.end() + else: # in2_match + idx = in2_match_rstrip.end() + code = line[idx:] + insertion = (0, Generic.Prompt, line[:idx]) + return mode, code, insertion + + # Check for traceback + if self.ipytb_start.match(line): + mode = "tb" + code = line + insertion = None + return mode, code, insertion + + # All other stuff... + if self.mode in ("input", "output"): + # We assume all other text is output. Multiline input that + # does not use the continuation marker cannot be detected. + # For example, the 3 in the following is clearly output: + # + # In [1]: print(3) + # 3 + # + # But the following second line is part of the input: + # + # In [2]: while True: + # print(True) + # + # In both cases, the 2nd line will be 'output'. + # + mode = "output" + else: + mode = "tb" + + code = line + insertion = None + + return mode, code, insertion + + def get_tokens_unprocessed(self, text): + self.reset() + for match in line_re.finditer(text): + line = match.group() + mode, code, insertion = self.get_mci(line) + + if mode != self.mode: + # Yield buffered tokens before transitioning to new mode. + for token in self.buffered_tokens(): + yield token + self.mode = mode + + if insertion: + self.insertions.append((len(self.buffer), [insertion])) + self.buffer += code + + for token in self.buffered_tokens(): + yield token + + +class IPyLexer(Lexer): + r""" + Primary lexer for all IPython-like code. + + This is a simple helper lexer. If the first line of the text begins with + "In \[[0-9]+\]:", then the entire text is parsed with an IPython console + lexer. If not, then the entire text is parsed with an IPython lexer. + + The goal is to reduce the number of lexers that are registered + with Pygments. + + """ + + name = "IPy session" + aliases = ["ipy", "ipy3"] + + def __init__(self, **options): + """ + Create a new IPyLexer instance which dispatch to either an + IPythonCOnsoleLexer (if In prompts are present) or and IPythonLexer (if + In prompts are not present). + """ + # init docstring is necessary for docs not to fail to build do to parent + # docs referenceing a section in pygments docs. + Lexer.__init__(self, **options) + + self.IPythonLexer = IPythonLexer(**options) + self.IPythonConsoleLexer = IPythonConsoleLexer(**options) + + def get_tokens_unprocessed(self, text): + # Search for the input prompt anywhere...this allows code blocks to + # begin with comments as well. 
+        if re.match(r".*(In \[[0-9]+\]:)", text.strip(), re.DOTALL):
+            lex = self.IPythonConsoleLexer
+        else:
+            lex = self.IPythonLexer
+        for token in lex.get_tokens_unprocessed(text):
+            yield token
diff --git a/temp_venv/lib/python3.13/site-packages/packaging/__init__.py b/temp_venv/lib/python3.13/site-packages/packaging/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..d45c22cfd88e2b88976b46d4171a502b602658ec
--- /dev/null
+++ b/temp_venv/lib/python3.13/site-packages/packaging/__init__.py
@@ -0,0 +1,15 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+__title__ = "packaging"
+__summary__ = "Core utilities for Python packages"
+__uri__ = "https://github.com/pypa/packaging"
+
+__version__ = "25.0"
+
+__author__ = "Donald Stufft and individual contributors"
+__email__ = "donald@stufft.io"
+
+__license__ = "BSD-2-Clause or Apache-2.0"
+__copyright__ = f"2014 {__author__}"
diff --git a/temp_venv/lib/python3.13/site-packages/packaging/_elffile.py b/temp_venv/lib/python3.13/site-packages/packaging/_elffile.py
new file mode 100644
index 0000000000000000000000000000000000000000..7a5afc33b0a2401cf509bb6a8d1143d5230324d9
--- /dev/null
+++ b/temp_venv/lib/python3.13/site-packages/packaging/_elffile.py
@@ -0,0 +1,109 @@
+"""
+ELF file parser.
+
+This provides a class ``ELFFile`` that parses an ELF executable in a similar
+interface to ``ZipFile``. Only the read interface is implemented.
+
+Based on: https://gist.github.com/lyssdod/f51579ae8d93c8657a5564aefc2ffbca
+ELF header: https://refspecs.linuxfoundation.org/elf/gabi4+/ch4.eheader.html
+"""
+
+from __future__ import annotations
+
+import enum
+import os
+import struct
+from typing import IO
+
+
+class ELFInvalid(ValueError):
+    pass
+
+
+class EIClass(enum.IntEnum):
+    C32 = 1
+    C64 = 2
+
+
+class EIData(enum.IntEnum):
+    Lsb = 1
+    Msb = 2
+
+
+class EMachine(enum.IntEnum):
+    I386 = 3
+    S390 = 22
+    Arm = 40
+    X8664 = 62
+    AArc64 = 183
+
+
+class ELFFile:
+    """
+    Representation of an ELF executable.
+    """
+
+    def __init__(self, f: IO[bytes]) -> None:
+        self._f = f
+
+        try:
+            ident = self._read("16B")
+        except struct.error as e:
+            raise ELFInvalid("unable to parse identification") from e
+        magic = bytes(ident[:4])
+        if magic != b"\x7fELF":
+            raise ELFInvalid(f"invalid magic: {magic!r}")
+
+        self.capacity = ident[4]  # Format for program header (bitness).
+        self.encoding = ident[5]  # Data structure encoding (endianness).
+
+        try:
+            # e_fmt: Format for program header.
+            # p_fmt: Format for section header.
+            # p_idx: Indexes to find p_type, p_offset, and p_filesz.
+            e_fmt, self._p_fmt, self._p_idx = {
+                (1, 1): ("<HHIIIIIHHH", "<IIIIIIII", (0, 1, 4)),  # 32-bit LSB.
+                (1, 2): (">HHIIIIIHHH", ">IIIIIIII", (0, 1, 4)),  # 32-bit MSB.
+                (2, 1): ("<HHIQQQIHHH", "<IIQQQQQQ", (0, 2, 5)),  # 64-bit LSB.
+                (2, 2): (">HHIQQQIHHH", ">IIQQQQQQ", (0, 2, 5)),  # 64-bit MSB.
+            }[(self.capacity, self.encoding)]
+        except KeyError as e:
+            raise ELFInvalid(
+                f"unrecognized capacity ({self.capacity}) or encoding ({self.encoding})"
+            ) from e
+
+        try:
+            (
+                _,
+                self.machine,  # Architecture type.
+                _,
+                _,
+                self._e_phoff,  # Offset of program header.
+                _,
+                self.flags,  # Processor-specific flags.
+                _,
+                self._e_phentsize,  # Size of section.
+                self._e_phnum,  # Number of sections.
+ ) = self._read(e_fmt) + except struct.error as e: + raise ELFInvalid("unable to parse machine and section information") from e + + def _read(self, fmt: str) -> tuple[int, ...]: + return struct.unpack(fmt, self._f.read(struct.calcsize(fmt))) + + @property + def interpreter(self) -> str | None: + """ + The path recorded in the ``PT_INTERP`` section header. + """ + for index in range(self._e_phnum): + self._f.seek(self._e_phoff + self._e_phentsize * index) + try: + data = self._read(self._p_fmt) + except struct.error: + continue + if data[self._p_idx[0]] != 3: # Not PT_INTERP. + continue + self._f.seek(data[self._p_idx[1]]) + return os.fsdecode(self._f.read(data[self._p_idx[2]])).strip("\0") + return None diff --git a/temp_venv/lib/python3.13/site-packages/packaging/_manylinux.py b/temp_venv/lib/python3.13/site-packages/packaging/_manylinux.py new file mode 100644 index 0000000000000000000000000000000000000000..95f55762e86f08af76aaa95f5cd147d4517c6d66 --- /dev/null +++ b/temp_venv/lib/python3.13/site-packages/packaging/_manylinux.py @@ -0,0 +1,262 @@ +from __future__ import annotations + +import collections +import contextlib +import functools +import os +import re +import sys +import warnings +from typing import Generator, Iterator, NamedTuple, Sequence + +from ._elffile import EIClass, EIData, ELFFile, EMachine + +EF_ARM_ABIMASK = 0xFF000000 +EF_ARM_ABI_VER5 = 0x05000000 +EF_ARM_ABI_FLOAT_HARD = 0x00000400 + + +# `os.PathLike` not a generic type until Python 3.9, so sticking with `str` +# as the type for `path` until then. +@contextlib.contextmanager +def _parse_elf(path: str) -> Generator[ELFFile | None, None, None]: + try: + with open(path, "rb") as f: + yield ELFFile(f) + except (OSError, TypeError, ValueError): + yield None + + +def _is_linux_armhf(executable: str) -> bool: + # hard-float ABI can be detected from the ELF header of the running + # process + # https://static.docs.arm.com/ihi0044/g/aaelf32.pdf + with _parse_elf(executable) as f: + return ( + f is not None + and f.capacity == EIClass.C32 + and f.encoding == EIData.Lsb + and f.machine == EMachine.Arm + and f.flags & EF_ARM_ABIMASK == EF_ARM_ABI_VER5 + and f.flags & EF_ARM_ABI_FLOAT_HARD == EF_ARM_ABI_FLOAT_HARD + ) + + +def _is_linux_i686(executable: str) -> bool: + with _parse_elf(executable) as f: + return ( + f is not None + and f.capacity == EIClass.C32 + and f.encoding == EIData.Lsb + and f.machine == EMachine.I386 + ) + + +def _have_compatible_abi(executable: str, archs: Sequence[str]) -> bool: + if "armv7l" in archs: + return _is_linux_armhf(executable) + if "i686" in archs: + return _is_linux_i686(executable) + allowed_archs = { + "x86_64", + "aarch64", + "ppc64", + "ppc64le", + "s390x", + "loongarch64", + "riscv64", + } + return any(arch in allowed_archs for arch in archs) + + +# If glibc ever changes its major version, we need to know what the last +# minor version was, so we can build the complete list of all versions. +# For now, guess what the highest minor version might be, assume it will +# be 50 for testing. Once this actually happens, update the dictionary +# with the actual value. +_LAST_GLIBC_MINOR: dict[int, int] = collections.defaultdict(lambda: 50) + + +class _GLibCVersion(NamedTuple): + major: int + minor: int + + +def _glibc_version_string_confstr() -> str | None: + """ + Primary implementation of glibc_version_string using os.confstr. + """ + # os.confstr is quite a bit faster than ctypes.DLL. It's also less likely + # to be broken or missing. 
This strategy is used in the standard library
+    # platform module.
+    # https://github.com/python/cpython/blob/fcf1d003bf4f0100c/Lib/platform.py#L175-L183
+    try:
+        # Should be a string like "glibc 2.17".
+        version_string: str | None = os.confstr("CS_GNU_LIBC_VERSION")
+        assert version_string is not None
+        _, version = version_string.rsplit()
+    except (AssertionError, AttributeError, OSError, ValueError):
+        # os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)...
+        return None
+    return version
+
+
+def _glibc_version_string_ctypes() -> str | None:
+    """
+    Fallback implementation of glibc_version_string using ctypes.
+    """
+    try:
+        import ctypes
+    except ImportError:
+        return None
+
+    # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen
+    # manpage says, "If filename is NULL, then the returned handle is for the
+    # main program". This way we can let the linker do the work to figure out
+    # which libc our process is actually using.
+    #
+    # We must also handle the special case where the executable is not a
+    # dynamically linked executable. This can occur when using musl libc,
+    # for example. In this situation, dlopen() will error, leading to an
+    # OSError. Interestingly, at least in the case of musl, there is no
+    # errno set on the OSError. The single string argument used to construct
+    # OSError comes from libc itself and is therefore not portable to
+    # hard code here. In any case, failure to call dlopen() means we
+    # can proceed, so we bail on our attempt.
+    try:
+        process_namespace = ctypes.CDLL(None)
+    except OSError:
+        return None
+
+    try:
+        gnu_get_libc_version = process_namespace.gnu_get_libc_version
+    except AttributeError:
+        # Symbol doesn't exist -> therefore, we are not linked to
+        # glibc.
+        return None
+
+    # Call gnu_get_libc_version, which returns a string like "2.5"
+    gnu_get_libc_version.restype = ctypes.c_char_p
+    version_str: str = gnu_get_libc_version()
+    # py2 / py3 compatibility:
+    if not isinstance(version_str, str):
+        version_str = version_str.decode("ascii")
+
+    return version_str
+
+
+def _glibc_version_string() -> str | None:
+    """Returns glibc version string, or None if not using glibc."""
+    return _glibc_version_string_confstr() or _glibc_version_string_ctypes()
+
+
+def _parse_glibc_version(version_str: str) -> tuple[int, int]:
+    """Parse glibc version.
+
+    We use a regexp instead of str.split because we want to discard any
+    random junk that might come after the minor version -- this might happen
+    in patched/forked versions of glibc (e.g. Linaro's version of glibc
+    uses version strings like "2.20-2014.11"). See gh-3588.
+    """
+    m = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str)
+    if not m:
+        warnings.warn(
+            f"Expected glibc version with 2 components major.minor, got: {version_str}",
+            RuntimeWarning,
+            stacklevel=2,
+        )
+        return -1, -1
+    return int(m.group("major")), int(m.group("minor"))
+
+
+@functools.lru_cache
+def _get_glibc_version() -> tuple[int, int]:
+    version_str = _glibc_version_string()
+    if version_str is None:
+        return (-1, -1)
+    return _parse_glibc_version(version_str)
+
+
+# From PEP 513, PEP 600
+def _is_compatible(arch: str, version: _GLibCVersion) -> bool:
+    sys_glibc = _get_glibc_version()
+    if sys_glibc < version:
+        return False
+    # Check for presence of _manylinux module.
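+    # Per PEP 600, a distro may ship a `_manylinux` module to override these
+    # heuristics; e.g. a (hypothetical) module defining
+    #     def manylinux_compatible(tag_major, tag_minor, tag_arch): return False
+    # opts the platform out of every manylinux tag.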
+ try: + import _manylinux + except ImportError: + return True + if hasattr(_manylinux, "manylinux_compatible"): + result = _manylinux.manylinux_compatible(version[0], version[1], arch) + if result is not None: + return bool(result) + return True + if version == _GLibCVersion(2, 5): + if hasattr(_manylinux, "manylinux1_compatible"): + return bool(_manylinux.manylinux1_compatible) + if version == _GLibCVersion(2, 12): + if hasattr(_manylinux, "manylinux2010_compatible"): + return bool(_manylinux.manylinux2010_compatible) + if version == _GLibCVersion(2, 17): + if hasattr(_manylinux, "manylinux2014_compatible"): + return bool(_manylinux.manylinux2014_compatible) + return True + + +_LEGACY_MANYLINUX_MAP = { + # CentOS 7 w/ glibc 2.17 (PEP 599) + (2, 17): "manylinux2014", + # CentOS 6 w/ glibc 2.12 (PEP 571) + (2, 12): "manylinux2010", + # CentOS 5 w/ glibc 2.5 (PEP 513) + (2, 5): "manylinux1", +} + + +def platform_tags(archs: Sequence[str]) -> Iterator[str]: + """Generate manylinux tags compatible to the current platform. + + :param archs: Sequence of compatible architectures. + The first one shall be the closest to the actual architecture and be the part of + platform tag after the ``linux_`` prefix, e.g. ``x86_64``. + The ``linux_`` prefix is assumed as a prerequisite for the current platform to + be manylinux-compatible. + + :returns: An iterator of compatible manylinux tags. + """ + if not _have_compatible_abi(sys.executable, archs): + return + # Oldest glibc to be supported regardless of architecture is (2, 17). + too_old_glibc2 = _GLibCVersion(2, 16) + if set(archs) & {"x86_64", "i686"}: + # On x86/i686 also oldest glibc to be supported is (2, 5). + too_old_glibc2 = _GLibCVersion(2, 4) + current_glibc = _GLibCVersion(*_get_glibc_version()) + glibc_max_list = [current_glibc] + # We can assume compatibility across glibc major versions. + # https://sourceware.org/bugzilla/show_bug.cgi?id=24636 + # + # Build a list of maximum glibc versions so that we can + # output the canonical list of all glibc from current_glibc + # down to too_old_glibc2, including all intermediary versions. + for glibc_major in range(current_glibc.major - 1, 1, -1): + glibc_minor = _LAST_GLIBC_MINOR[glibc_major] + glibc_max_list.append(_GLibCVersion(glibc_major, glibc_minor)) + for arch in archs: + for glibc_max in glibc_max_list: + if glibc_max.major == too_old_glibc2.major: + min_minor = too_old_glibc2.minor + else: + # For other glibc major versions oldest supported is (x, 0). + min_minor = -1 + for glibc_minor in range(glibc_max.minor, min_minor, -1): + glibc_version = _GLibCVersion(glibc_max.major, glibc_minor) + tag = "manylinux_{}_{}".format(*glibc_version) + if _is_compatible(arch, glibc_version): + yield f"{tag}_{arch}" + # Handle the legacy manylinux1, manylinux2010, manylinux2014 tags. + if glibc_version in _LEGACY_MANYLINUX_MAP: + legacy_tag = _LEGACY_MANYLINUX_MAP[glibc_version] + if _is_compatible(arch, glibc_version): + yield f"{legacy_tag}_{arch}" diff --git a/temp_venv/lib/python3.13/site-packages/packaging/_musllinux.py b/temp_venv/lib/python3.13/site-packages/packaging/_musllinux.py new file mode 100644 index 0000000000000000000000000000000000000000..d2bf30b56319ba862c5c9a1a39a87c6d1cb68718 --- /dev/null +++ b/temp_venv/lib/python3.13/site-packages/packaging/_musllinux.py @@ -0,0 +1,85 @@ +"""PEP 656 support. + +This module implements logic to detect if the currently running Python is +linked against musl, and what musl version is used. 
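+
+For example (illustrative; actual values depend on the interpreter's musl)::
+
+    >>> list(platform_tags(["x86_64"]))  # doctest: +SKIP
+    ['musllinux_1_2_x86_64', 'musllinux_1_1_x86_64', 'musllinux_1_0_x86_64']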
+""" + +from __future__ import annotations + +import functools +import re +import subprocess +import sys +from typing import Iterator, NamedTuple, Sequence + +from ._elffile import ELFFile + + +class _MuslVersion(NamedTuple): + major: int + minor: int + + +def _parse_musl_version(output: str) -> _MuslVersion | None: + lines = [n for n in (n.strip() for n in output.splitlines()) if n] + if len(lines) < 2 or lines[0][:4] != "musl": + return None + m = re.match(r"Version (\d+)\.(\d+)", lines[1]) + if not m: + return None + return _MuslVersion(major=int(m.group(1)), minor=int(m.group(2))) + + +@functools.lru_cache +def _get_musl_version(executable: str) -> _MuslVersion | None: + """Detect currently-running musl runtime version. + + This is done by checking the specified executable's dynamic linking + information, and invoking the loader to parse its output for a version + string. If the loader is musl, the output would be something like:: + + musl libc (x86_64) + Version 1.2.2 + Dynamic Program Loader + """ + try: + with open(executable, "rb") as f: + ld = ELFFile(f).interpreter + except (OSError, TypeError, ValueError): + return None + if ld is None or "musl" not in ld: + return None + proc = subprocess.run([ld], stderr=subprocess.PIPE, text=True) + return _parse_musl_version(proc.stderr) + + +def platform_tags(archs: Sequence[str]) -> Iterator[str]: + """Generate musllinux tags compatible to the current platform. + + :param archs: Sequence of compatible architectures. + The first one shall be the closest to the actual architecture and be the part of + platform tag after the ``linux_`` prefix, e.g. ``x86_64``. + The ``linux_`` prefix is assumed as a prerequisite for the current platform to + be musllinux-compatible. + + :returns: An iterator of compatible musllinux tags. + """ + sys_musl = _get_musl_version(sys.executable) + if sys_musl is None: # Python not dynamically linked against musl. + return + for arch in archs: + for minor in range(sys_musl.minor, -1, -1): + yield f"musllinux_{sys_musl.major}_{minor}_{arch}" + + +if __name__ == "__main__": # pragma: no cover + import sysconfig + + plat = sysconfig.get_platform() + assert plat.startswith("linux-"), "not linux" + + print("plat:", plat) + print("musl:", _get_musl_version(sys.executable)) + print("tags:", end=" ") + for t in platform_tags(re.sub(r"[.-]", "_", plat.split("-", 1)[-1])): + print(t, end="\n ") diff --git a/temp_venv/lib/python3.13/site-packages/packaging/_parser.py b/temp_venv/lib/python3.13/site-packages/packaging/_parser.py new file mode 100644 index 0000000000000000000000000000000000000000..0007c0aa64aeae24c7e0ca90790a25ba51dbf7d6 --- /dev/null +++ b/temp_venv/lib/python3.13/site-packages/packaging/_parser.py @@ -0,0 +1,353 @@ +"""Handwritten parser of dependency specifiers. + +The docstring for each __parse_* function contains EBNF-inspired grammar representing +the implementation. 
+""" + +from __future__ import annotations + +import ast +from typing import NamedTuple, Sequence, Tuple, Union + +from ._tokenizer import DEFAULT_RULES, Tokenizer + + +class Node: + def __init__(self, value: str) -> None: + self.value = value + + def __str__(self) -> str: + return self.value + + def __repr__(self) -> str: + return f"<{self.__class__.__name__}('{self}')>" + + def serialize(self) -> str: + raise NotImplementedError + + +class Variable(Node): + def serialize(self) -> str: + return str(self) + + +class Value(Node): + def serialize(self) -> str: + return f'"{self}"' + + +class Op(Node): + def serialize(self) -> str: + return str(self) + + +MarkerVar = Union[Variable, Value] +MarkerItem = Tuple[MarkerVar, Op, MarkerVar] +MarkerAtom = Union[MarkerItem, Sequence["MarkerAtom"]] +MarkerList = Sequence[Union["MarkerList", MarkerAtom, str]] + + +class ParsedRequirement(NamedTuple): + name: str + url: str + extras: list[str] + specifier: str + marker: MarkerList | None + + +# -------------------------------------------------------------------------------------- +# Recursive descent parser for dependency specifier +# -------------------------------------------------------------------------------------- +def parse_requirement(source: str) -> ParsedRequirement: + return _parse_requirement(Tokenizer(source, rules=DEFAULT_RULES)) + + +def _parse_requirement(tokenizer: Tokenizer) -> ParsedRequirement: + """ + requirement = WS? IDENTIFIER WS? extras WS? requirement_details + """ + tokenizer.consume("WS") + + name_token = tokenizer.expect( + "IDENTIFIER", expected="package name at the start of dependency specifier" + ) + name = name_token.text + tokenizer.consume("WS") + + extras = _parse_extras(tokenizer) + tokenizer.consume("WS") + + url, specifier, marker = _parse_requirement_details(tokenizer) + tokenizer.expect("END", expected="end of dependency specifier") + + return ParsedRequirement(name, url, extras, specifier, marker) + + +def _parse_requirement_details( + tokenizer: Tokenizer, +) -> tuple[str, str, MarkerList | None]: + """ + requirement_details = AT URL (WS requirement_marker?)? + | specifier WS? (requirement_marker)? + """ + + specifier = "" + url = "" + marker = None + + if tokenizer.check("AT"): + tokenizer.read() + tokenizer.consume("WS") + + url_start = tokenizer.position + url = tokenizer.expect("URL", expected="URL after @").text + if tokenizer.check("END", peek=True): + return (url, specifier, marker) + + tokenizer.expect("WS", expected="whitespace after URL") + + # The input might end after whitespace. + if tokenizer.check("END", peek=True): + return (url, specifier, marker) + + marker = _parse_requirement_marker( + tokenizer, span_start=url_start, after="URL and whitespace" + ) + else: + specifier_start = tokenizer.position + specifier = _parse_specifier(tokenizer) + tokenizer.consume("WS") + + if tokenizer.check("END", peek=True): + return (url, specifier, marker) + + marker = _parse_requirement_marker( + tokenizer, + span_start=specifier_start, + after=( + "version specifier" + if specifier + else "name and no valid version specifier" + ), + ) + + return (url, specifier, marker) + + +def _parse_requirement_marker( + tokenizer: Tokenizer, *, span_start: int, after: str +) -> MarkerList: + """ + requirement_marker = SEMICOLON marker WS? 
+ """ + + if not tokenizer.check("SEMICOLON"): + tokenizer.raise_syntax_error( + f"Expected end or semicolon (after {after})", + span_start=span_start, + ) + tokenizer.read() + + marker = _parse_marker(tokenizer) + tokenizer.consume("WS") + + return marker + + +def _parse_extras(tokenizer: Tokenizer) -> list[str]: + """ + extras = (LEFT_BRACKET wsp* extras_list? wsp* RIGHT_BRACKET)? + """ + if not tokenizer.check("LEFT_BRACKET", peek=True): + return [] + + with tokenizer.enclosing_tokens( + "LEFT_BRACKET", + "RIGHT_BRACKET", + around="extras", + ): + tokenizer.consume("WS") + extras = _parse_extras_list(tokenizer) + tokenizer.consume("WS") + + return extras + + +def _parse_extras_list(tokenizer: Tokenizer) -> list[str]: + """ + extras_list = identifier (wsp* ',' wsp* identifier)* + """ + extras: list[str] = [] + + if not tokenizer.check("IDENTIFIER"): + return extras + + extras.append(tokenizer.read().text) + + while True: + tokenizer.consume("WS") + if tokenizer.check("IDENTIFIER", peek=True): + tokenizer.raise_syntax_error("Expected comma between extra names") + elif not tokenizer.check("COMMA"): + break + + tokenizer.read() + tokenizer.consume("WS") + + extra_token = tokenizer.expect("IDENTIFIER", expected="extra name after comma") + extras.append(extra_token.text) + + return extras + + +def _parse_specifier(tokenizer: Tokenizer) -> str: + """ + specifier = LEFT_PARENTHESIS WS? version_many WS? RIGHT_PARENTHESIS + | WS? version_many WS? + """ + with tokenizer.enclosing_tokens( + "LEFT_PARENTHESIS", + "RIGHT_PARENTHESIS", + around="version specifier", + ): + tokenizer.consume("WS") + parsed_specifiers = _parse_version_many(tokenizer) + tokenizer.consume("WS") + + return parsed_specifiers + + +def _parse_version_many(tokenizer: Tokenizer) -> str: + """ + version_many = (SPECIFIER (WS? COMMA WS? SPECIFIER)*)? + """ + parsed_specifiers = "" + while tokenizer.check("SPECIFIER"): + span_start = tokenizer.position + parsed_specifiers += tokenizer.read().text + if tokenizer.check("VERSION_PREFIX_TRAIL", peek=True): + tokenizer.raise_syntax_error( + ".* suffix can only be used with `==` or `!=` operators", + span_start=span_start, + span_end=tokenizer.position + 1, + ) + if tokenizer.check("VERSION_LOCAL_LABEL_TRAIL", peek=True): + tokenizer.raise_syntax_error( + "Local version label can only be used with `==` or `!=` operators", + span_start=span_start, + span_end=tokenizer.position, + ) + tokenizer.consume("WS") + if not tokenizer.check("COMMA"): + break + parsed_specifiers += tokenizer.read().text + tokenizer.consume("WS") + + return parsed_specifiers + + +# -------------------------------------------------------------------------------------- +# Recursive descent parser for marker expression +# -------------------------------------------------------------------------------------- +def parse_marker(source: str) -> MarkerList: + return _parse_full_marker(Tokenizer(source, rules=DEFAULT_RULES)) + + +def _parse_full_marker(tokenizer: Tokenizer) -> MarkerList: + retval = _parse_marker(tokenizer) + tokenizer.expect("END", expected="end of marker expression") + return retval + + +def _parse_marker(tokenizer: Tokenizer) -> MarkerList: + """ + marker = marker_atom (BOOLOP marker_atom)+ + """ + expression = [_parse_marker_atom(tokenizer)] + while tokenizer.check("BOOLOP"): + token = tokenizer.read() + expr_right = _parse_marker_atom(tokenizer) + expression.extend((token.text, expr_right)) + return expression + + +def _parse_marker_atom(tokenizer: Tokenizer) -> MarkerAtom: + """ + marker_atom = WS? 
LEFT_PARENTHESIS WS? marker WS? RIGHT_PARENTHESIS WS? + | WS? marker_item WS? + """ + + tokenizer.consume("WS") + if tokenizer.check("LEFT_PARENTHESIS", peek=True): + with tokenizer.enclosing_tokens( + "LEFT_PARENTHESIS", + "RIGHT_PARENTHESIS", + around="marker expression", + ): + tokenizer.consume("WS") + marker: MarkerAtom = _parse_marker(tokenizer) + tokenizer.consume("WS") + else: + marker = _parse_marker_item(tokenizer) + tokenizer.consume("WS") + return marker + + +def _parse_marker_item(tokenizer: Tokenizer) -> MarkerItem: + """ + marker_item = WS? marker_var WS? marker_op WS? marker_var WS? + """ + tokenizer.consume("WS") + marker_var_left = _parse_marker_var(tokenizer) + tokenizer.consume("WS") + marker_op = _parse_marker_op(tokenizer) + tokenizer.consume("WS") + marker_var_right = _parse_marker_var(tokenizer) + tokenizer.consume("WS") + return (marker_var_left, marker_op, marker_var_right) + + +def _parse_marker_var(tokenizer: Tokenizer) -> MarkerVar: + """ + marker_var = VARIABLE | QUOTED_STRING + """ + if tokenizer.check("VARIABLE"): + return process_env_var(tokenizer.read().text.replace(".", "_")) + elif tokenizer.check("QUOTED_STRING"): + return process_python_str(tokenizer.read().text) + else: + tokenizer.raise_syntax_error( + message="Expected a marker variable or quoted string" + ) + + +def process_env_var(env_var: str) -> Variable: + if env_var in ("platform_python_implementation", "python_implementation"): + return Variable("platform_python_implementation") + else: + return Variable(env_var) + + +def process_python_str(python_str: str) -> Value: + value = ast.literal_eval(python_str) + return Value(str(value)) + + +def _parse_marker_op(tokenizer: Tokenizer) -> Op: + """ + marker_op = IN | NOT IN | OP + """ + if tokenizer.check("IN"): + tokenizer.read() + return Op("in") + elif tokenizer.check("NOT"): + tokenizer.read() + tokenizer.expect("WS", expected="whitespace after 'not'") + tokenizer.expect("IN", expected="'in' after 'not'") + return Op("not in") + elif tokenizer.check("OP"): + return Op(tokenizer.read().text) + else: + return tokenizer.raise_syntax_error( + "Expected marker operator, one of <=, <, !=, ==, >=, >, ~=, ===, in, not in" + ) diff --git a/temp_venv/lib/python3.13/site-packages/packaging/_structures.py b/temp_venv/lib/python3.13/site-packages/packaging/_structures.py new file mode 100644 index 0000000000000000000000000000000000000000..90a6465f9682c886363eea5327dac64bf623a6ff --- /dev/null +++ b/temp_venv/lib/python3.13/site-packages/packaging/_structures.py @@ -0,0 +1,61 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
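For context on `_parser.py` above: `parse_requirement` turns a PEP 508 dependency specifier into the `ParsedRequirement` named tuple that `packaging.requirements.Requirement` (later in this diff) wraps. A minimal sketch, assuming the vendored `packaging` package is importable; the requirement string is illustrative:

    from packaging._parser import parse_requirement

    parsed = parse_requirement('requests[security]>=2.8.1; python_version < "3.8"')
    print(parsed.name)                # requests
    print(parsed.extras)              # ['security']
    print(parsed.specifier)           # >=2.8.1
    print(parsed.marker is not None)  # True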
+ + +class InfinityType: + def __repr__(self) -> str: + return "Infinity" + + def __hash__(self) -> int: + return hash(repr(self)) + + def __lt__(self, other: object) -> bool: + return False + + def __le__(self, other: object) -> bool: + return False + + def __eq__(self, other: object) -> bool: + return isinstance(other, self.__class__) + + def __gt__(self, other: object) -> bool: + return True + + def __ge__(self, other: object) -> bool: + return True + + def __neg__(self: object) -> "NegativeInfinityType": + return NegativeInfinity + + +Infinity = InfinityType() + + +class NegativeInfinityType: + def __repr__(self) -> str: + return "-Infinity" + + def __hash__(self) -> int: + return hash(repr(self)) + + def __lt__(self, other: object) -> bool: + return True + + def __le__(self, other: object) -> bool: + return True + + def __eq__(self, other: object) -> bool: + return isinstance(other, self.__class__) + + def __gt__(self, other: object) -> bool: + return False + + def __ge__(self, other: object) -> bool: + return False + + def __neg__(self: object) -> InfinityType: + return Infinity + + +NegativeInfinity = NegativeInfinityType() diff --git a/temp_venv/lib/python3.13/site-packages/packaging/_tokenizer.py b/temp_venv/lib/python3.13/site-packages/packaging/_tokenizer.py new file mode 100644 index 0000000000000000000000000000000000000000..d28a9b6cf5daca9fa3ce5a8c8ae583a57ee84f12 --- /dev/null +++ b/temp_venv/lib/python3.13/site-packages/packaging/_tokenizer.py @@ -0,0 +1,195 @@ +from __future__ import annotations + +import contextlib +import re +from dataclasses import dataclass +from typing import Iterator, NoReturn + +from .specifiers import Specifier + + +@dataclass +class Token: + name: str + text: str + position: int + + +class ParserSyntaxError(Exception): + """The provided source text could not be parsed correctly.""" + + def __init__( + self, + message: str, + *, + source: str, + span: tuple[int, int], + ) -> None: + self.span = span + self.message = message + self.source = source + + super().__init__() + + def __str__(self) -> str: + marker = " " * self.span[0] + "~" * (self.span[1] - self.span[0]) + "^" + return "\n ".join([self.message, self.source, marker]) + + +DEFAULT_RULES: dict[str, str | re.Pattern[str]] = { + "LEFT_PARENTHESIS": r"\(", + "RIGHT_PARENTHESIS": r"\)", + "LEFT_BRACKET": r"\[", + "RIGHT_BRACKET": r"\]", + "SEMICOLON": r";", + "COMMA": r",", + "QUOTED_STRING": re.compile( + r""" + ( + ('[^']*') + | + ("[^"]*") + ) + """, + re.VERBOSE, + ), + "OP": r"(===|==|~=|!=|<=|>=|<|>)", + "BOOLOP": r"\b(or|and)\b", + "IN": r"\bin\b", + "NOT": r"\bnot\b", + "VARIABLE": re.compile( + r""" + \b( + python_version + |python_full_version + |os[._]name + |sys[._]platform + |platform_(release|system) + |platform[._](version|machine|python_implementation) + |python_implementation + |implementation_(name|version) + |extras? + |dependency_groups + )\b + """, + re.VERBOSE, + ), + "SPECIFIER": re.compile( + Specifier._operator_regex_str + Specifier._version_regex_str, + re.VERBOSE | re.IGNORECASE, + ), + "AT": r"\@", + "URL": r"[^ \t]+", + "IDENTIFIER": r"\b[a-zA-Z0-9][a-zA-Z0-9._-]*\b", + "VERSION_PREFIX_TRAIL": r"\.\*", + "VERSION_LOCAL_LABEL_TRAIL": r"\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*", + "WS": r"[ \t]+", + "END": r"$", +} + + +class Tokenizer: + """Context-sensitive token parsing. + + Provides methods to examine the input stream to check whether the next token + matches. 
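+
+    Checking is lazy and context-sensitive: check()/expect() try one named
+    rule at the current position rather than pre-lexing the whole input,
+    which is what lets greedy rules such as URL coexist with the rest of
+    the grammar.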
+ """ + + def __init__( + self, + source: str, + *, + rules: dict[str, str | re.Pattern[str]], + ) -> None: + self.source = source + self.rules: dict[str, re.Pattern[str]] = { + name: re.compile(pattern) for name, pattern in rules.items() + } + self.next_token: Token | None = None + self.position = 0 + + def consume(self, name: str) -> None: + """Move beyond provided token name, if at current position.""" + if self.check(name): + self.read() + + def check(self, name: str, *, peek: bool = False) -> bool: + """Check whether the next token has the provided name. + + By default, if the check succeeds, the token *must* be read before + another check. If `peek` is set to `True`, the token is not loaded and + would need to be checked again. + """ + assert self.next_token is None, ( + f"Cannot check for {name!r}, already have {self.next_token!r}" + ) + assert name in self.rules, f"Unknown token name: {name!r}" + + expression = self.rules[name] + + match = expression.match(self.source, self.position) + if match is None: + return False + if not peek: + self.next_token = Token(name, match[0], self.position) + return True + + def expect(self, name: str, *, expected: str) -> Token: + """Expect a certain token name next, failing with a syntax error otherwise. + + The token is *not* read. + """ + if not self.check(name): + raise self.raise_syntax_error(f"Expected {expected}") + return self.read() + + def read(self) -> Token: + """Consume the next token and return it.""" + token = self.next_token + assert token is not None + + self.position += len(token.text) + self.next_token = None + + return token + + def raise_syntax_error( + self, + message: str, + *, + span_start: int | None = None, + span_end: int | None = None, + ) -> NoReturn: + """Raise ParserSyntaxError at the given position.""" + span = ( + self.position if span_start is None else span_start, + self.position if span_end is None else span_end, + ) + raise ParserSyntaxError( + message, + source=self.source, + span=span, + ) + + @contextlib.contextmanager + def enclosing_tokens( + self, open_token: str, close_token: str, *, around: str + ) -> Iterator[None]: + if self.check(open_token): + open_position = self.position + self.read() + else: + open_position = None + + yield + + if open_position is None: + return + + if not self.check(close_token): + self.raise_syntax_error( + f"Expected matching {close_token} for {open_token}, after {around}", + span_start=open_position, + ) + + self.read() diff --git a/temp_venv/lib/python3.13/site-packages/packaging/markers.py b/temp_venv/lib/python3.13/site-packages/packaging/markers.py new file mode 100644 index 0000000000000000000000000000000000000000..e7cea57297a23809548fcd0d344740796bae945b --- /dev/null +++ b/temp_venv/lib/python3.13/site-packages/packaging/markers.py @@ -0,0 +1,362 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
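A quick sanity sketch for the `Tokenizer` defined in `_tokenizer.py` just above, again assuming the vendored `packaging` package is importable:

    from packaging._tokenizer import DEFAULT_RULES, Tokenizer

    t = Tokenizer("foo>=1.0", rules=DEFAULT_RULES)
    print(t.expect("IDENTIFIER", expected="package name").text)      # foo
    print(t.expect("SPECIFIER", expected="version specifier").text)  # >=1.0
    t.expect("END", expected="end of input")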
+ +from __future__ import annotations + +import operator +import os +import platform +import sys +from typing import AbstractSet, Any, Callable, Literal, TypedDict, Union, cast + +from ._parser import MarkerAtom, MarkerList, Op, Value, Variable +from ._parser import parse_marker as _parse_marker +from ._tokenizer import ParserSyntaxError +from .specifiers import InvalidSpecifier, Specifier +from .utils import canonicalize_name + +__all__ = [ + "EvaluateContext", + "InvalidMarker", + "Marker", + "UndefinedComparison", + "UndefinedEnvironmentName", + "default_environment", +] + +Operator = Callable[[str, Union[str, AbstractSet[str]]], bool] +EvaluateContext = Literal["metadata", "lock_file", "requirement"] +MARKERS_ALLOWING_SET = {"extras", "dependency_groups"} + + +class InvalidMarker(ValueError): + """ + An invalid marker was found, users should refer to PEP 508. + """ + + +class UndefinedComparison(ValueError): + """ + An invalid operation was attempted on a value that doesn't support it. + """ + + +class UndefinedEnvironmentName(ValueError): + """ + A name was attempted to be used that does not exist inside of the + environment. + """ + + +class Environment(TypedDict): + implementation_name: str + """The implementation's identifier, e.g. ``'cpython'``.""" + + implementation_version: str + """ + The implementation's version, e.g. ``'3.13.0a2'`` for CPython 3.13.0a2, or + ``'7.3.13'`` for PyPy3.10 v7.3.13. + """ + + os_name: str + """ + The value of :py:data:`os.name`. The name of the operating system dependent module + imported, e.g. ``'posix'``. + """ + + platform_machine: str + """ + Returns the machine type, e.g. ``'i386'``. + + An empty string if the value cannot be determined. + """ + + platform_release: str + """ + The system's release, e.g. ``'2.2.0'`` or ``'NT'``. + + An empty string if the value cannot be determined. + """ + + platform_system: str + """ + The system/OS name, e.g. ``'Linux'``, ``'Windows'`` or ``'Java'``. + + An empty string if the value cannot be determined. + """ + + platform_version: str + """ + The system's release version, e.g. ``'#3 on degas'``. + + An empty string if the value cannot be determined. + """ + + python_full_version: str + """ + The Python version as string ``'major.minor.patchlevel'``. + + Note that unlike the Python :py:data:`sys.version`, this value will always include + the patchlevel (it defaults to 0). + """ + + platform_python_implementation: str + """ + A string identifying the Python implementation, e.g. ``'CPython'``. + """ + + python_version: str + """The Python version as string ``'major.minor'``.""" + + sys_platform: str + """ + This string contains a platform identifier that can be used to append + platform-specific components to :py:data:`sys.path`, for instance. + + For Unix systems, except on Linux and AIX, this is the lowercased OS name as + returned by ``uname -s`` with the first part of the version as returned by + ``uname -r`` appended, e.g. ``'sunos5'`` or ``'freebsd8'``, at the time when Python + was built. + """ + + +def _normalize_extra_values(results: Any) -> Any: + """ + Normalize extra values. 
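+
+    Comparisons against the ``extra`` variable get their value canonicalized
+    (PEP 503/685), so ``extra == "Foo_Bar"`` and ``extra == "foo-bar"`` are
+    equivalent.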
+    """
+    if isinstance(results[0], tuple):
+        lhs, op, rhs = results[0]
+        if isinstance(lhs, Variable) and lhs.value == "extra":
+            normalized_extra = canonicalize_name(rhs.value)
+            rhs = Value(normalized_extra)
+        elif isinstance(rhs, Variable) and rhs.value == "extra":
+            normalized_extra = canonicalize_name(lhs.value)
+            lhs = Value(normalized_extra)
+        results[0] = lhs, op, rhs
+    return results
+
+
+def _format_marker(
+    marker: list[str] | MarkerAtom | str, first: bool | None = True
+) -> str:
+    assert isinstance(marker, (list, tuple, str))
+
+    # Sometimes we have a structure like [[...]] which is a single item list
+    # where the single item is itself its own list. In that case we want to
+    # skip the rest of this function so that we don't get extraneous () on the
+    # outside.
+    if (
+        isinstance(marker, list)
+        and len(marker) == 1
+        and isinstance(marker[0], (list, tuple))
+    ):
+        return _format_marker(marker[0])
+
+    if isinstance(marker, list):
+        inner = (_format_marker(m, first=False) for m in marker)
+        if first:
+            return " ".join(inner)
+        else:
+            return "(" + " ".join(inner) + ")"
+    elif isinstance(marker, tuple):
+        return " ".join([m.serialize() for m in marker])
+    else:
+        return marker
+
+
+_operators: dict[str, Operator] = {
+    "in": lambda lhs, rhs: lhs in rhs,
+    "not in": lambda lhs, rhs: lhs not in rhs,
+    "<": operator.lt,
+    "<=": operator.le,
+    "==": operator.eq,
+    "!=": operator.ne,
+    ">=": operator.ge,
+    ">": operator.gt,
+}
+
+
+def _eval_op(lhs: str, op: Op, rhs: str | AbstractSet[str]) -> bool:
+    if isinstance(rhs, str):
+        try:
+            spec = Specifier("".join([op.serialize(), rhs]))
+        except InvalidSpecifier:
+            pass
+        else:
+            return spec.contains(lhs, prereleases=True)
+
+    oper: Operator | None = _operators.get(op.serialize())
+    if oper is None:
+        raise UndefinedComparison(f"Undefined {op!r} on {lhs!r} and {rhs!r}.")
+
+    return oper(lhs, rhs)
+
+
+def _normalize(
+    lhs: str, rhs: str | AbstractSet[str], key: str
+) -> tuple[str, str | AbstractSet[str]]:
+    # PEP 685 – Comparison of extra names for optional distribution dependencies
+    # https://peps.python.org/pep-0685/
+    # > When comparing extra names, tools MUST normalize the names being
+    # > compared using the semantics outlined in PEP 503 for names
+    if key == "extra":
+        assert isinstance(rhs, str), "extra value must be a string"
+        return (canonicalize_name(lhs), canonicalize_name(rhs))
+    if key in MARKERS_ALLOWING_SET:
+        if isinstance(rhs, str):  # pragma: no cover
+            return (canonicalize_name(lhs), canonicalize_name(rhs))
+        else:
+            return (canonicalize_name(lhs), {canonicalize_name(v) for v in rhs})
+
+    # other environment markers don't have such standards
+    return lhs, rhs
+
+
+def _evaluate_markers(
+    markers: MarkerList, environment: dict[str, str | AbstractSet[str]]
+) -> bool:
+    groups: list[list[bool]] = [[]]
+
+    for marker in markers:
+        assert isinstance(marker, (list, tuple, str))
+
+        if isinstance(marker, list):
+            groups[-1].append(_evaluate_markers(marker, environment))
+        elif isinstance(marker, tuple):
+            lhs, op, rhs = marker
+
+            if isinstance(lhs, Variable):
+                environment_key = lhs.value
+                lhs_value = environment[environment_key]
+                rhs_value = rhs.value
+            else:
+                lhs_value = lhs.value
+                environment_key = rhs.value
+                rhs_value = environment[environment_key]
+            assert isinstance(lhs_value, str), "lhs must be a string"
+            lhs_value, rhs_value = _normalize(lhs_value, rhs_value, key=environment_key)
+            groups[-1].append(_eval_op(lhs_value, op, rhs_value))
+        else:
+            assert marker in ["and", "or"]
+            if marker == "or":
groups.append([])
+
+    return any(all(item) for item in groups)
+
+
+def format_full_version(info: sys._version_info) -> str:
+    version = f"{info.major}.{info.minor}.{info.micro}"
+    kind = info.releaselevel
+    if kind != "final":
+        version += kind[0] + str(info.serial)
+    return version
+
+
+def default_environment() -> Environment:
+    iver = format_full_version(sys.implementation.version)
+    implementation_name = sys.implementation.name
+    return {
+        "implementation_name": implementation_name,
+        "implementation_version": iver,
+        "os_name": os.name,
+        "platform_machine": platform.machine(),
+        "platform_release": platform.release(),
+        "platform_system": platform.system(),
+        "platform_version": platform.version(),
+        "python_full_version": platform.python_version(),
+        "platform_python_implementation": platform.python_implementation(),
+        "python_version": ".".join(platform.python_version_tuple()[:2]),
+        "sys_platform": sys.platform,
+    }
+
+
+class Marker:
+    def __init__(self, marker: str) -> None:
+        # Note: We create a Marker object without calling this constructor in
+        #       packaging.requirements.Requirement. If any additional logic is
+        #       added here, make sure to mirror/adapt Requirement.
+        try:
+            self._markers = _normalize_extra_values(_parse_marker(marker))
+            # The attribute `_markers` can be described in terms of a recursive type:
+            # MarkerList = List[Union[Tuple[Node, ...], str, MarkerList]]
+            #
+            # For example, the following expression:
+            # python_version > "3.6" or (python_version == "3.6" and os_name == "unix")
+            #
+            # is parsed into:
+            # [
+            #     (<Variable('python_version')>, <Op('>')>, <Value('3.6')>),
+            #     'and',
+            #     [
+            #         (<Variable('python_version')>, <Op('==')>, <Value('3.6')>),
+            #         'or',
+            #         (<Variable('os_name')>, <Op('==')>, <Value('unix')>)
+            #     ]
+            # ]
+        except ParserSyntaxError as e:
+            raise InvalidMarker(str(e)) from e
+
+    def __str__(self) -> str:
+        return _format_marker(self._markers)
+
+    def __repr__(self) -> str:
+        return f"<Marker('{self}')>"
+
+    def __hash__(self) -> int:
+        return hash((self.__class__.__name__, str(self)))
+
+    def __eq__(self, other: Any) -> bool:
+        if not isinstance(other, Marker):
+            return NotImplemented
+
+        return str(self) == str(other)
+
+    def evaluate(
+        self,
+        environment: dict[str, str] | None = None,
+        context: EvaluateContext = "metadata",
+    ) -> bool:
+        """Evaluate a marker.
+
+        Return the boolean from evaluating the given marker against the
+        environment. environment is an optional argument to override all or
+        part of the determined environment. The *context* parameter specifies what
+        context the markers are being evaluated for, which influences what markers
+        are considered valid. Acceptable values are "metadata" (for core metadata;
+        default), "lock_file", and "requirement" (i.e. all other situations).
+
+        The environment is determined from the current Python process.
+        """
+        current_environment = cast(
+            "dict[str, str | AbstractSet[str]]", default_environment()
+        )
+        if context == "lock_file":
+            current_environment.update(
+                extras=frozenset(), dependency_groups=frozenset()
+            )
+        elif context == "metadata":
+            current_environment["extra"] = ""
+        if environment is not None:
+            current_environment.update(environment)
+            # The API used to allow setting extra to None. We need to handle this
+            # case for backwards compatibility.
+        if "extra" in current_environment and current_environment["extra"] is None:
+            current_environment["extra"] = ""
+
+        return _evaluate_markers(
+            self._markers, _repair_python_full_version(current_environment)
+        )
+
+
+def _repair_python_full_version(
+    env: dict[str, str | AbstractSet[str]],
+) -> dict[str, str | AbstractSet[str]]:
+    """
+    Work around platform.python_version() returning something that is not PEP 440
+    compliant for non-tagged Python builds.
+    """
+    python_full_version = cast(str, env["python_full_version"])
+    if python_full_version.endswith("+"):
+        env["python_full_version"] = f"{python_full_version}local"
+    return env
diff --git a/temp_venv/lib/python3.13/site-packages/packaging/metadata.py b/temp_venv/lib/python3.13/site-packages/packaging/metadata.py
new file mode 100644
index 0000000000000000000000000000000000000000..3bd8602d36c53374f1f10a0716d28fdc64e2d3ac
--- /dev/null
+++ b/temp_venv/lib/python3.13/site-packages/packaging/metadata.py
@@ -0,0 +1,862 @@
+from __future__ import annotations
+
+import email.feedparser
+import email.header
+import email.message
+import email.parser
+import email.policy
+import pathlib
+import sys
+import typing
+from typing import (
+    Any,
+    Callable,
+    Generic,
+    Literal,
+    TypedDict,
+    cast,
+)
+
+from . import licenses, requirements, specifiers, utils
+from . import version as version_module
+from .licenses import NormalizedLicenseExpression
+
+T = typing.TypeVar("T")
+
+
+if sys.version_info >= (3, 11):  # pragma: no cover
+    ExceptionGroup = ExceptionGroup
+else:  # pragma: no cover
+
+    class ExceptionGroup(Exception):
+        """A minimal implementation of :external:exc:`ExceptionGroup` from Python 3.11.
+
+        If :external:exc:`ExceptionGroup` is already defined by Python itself,
+        that version is used instead.
+        """
+
+        message: str
+        exceptions: list[Exception]
+
+        def __init__(self, message: str, exceptions: list[Exception]) -> None:
+            self.message = message
+            self.exceptions = exceptions
+
+        def __repr__(self) -> str:
+            return f"{self.__class__.__name__}({self.message!r}, {self.exceptions!r})"
+
+
+class InvalidMetadata(ValueError):
+    """A metadata field contains invalid data."""
+
+    field: str
+    """The name of the field that contains invalid data."""
+
+    def __init__(self, field: str, message: str) -> None:
+        self.field = field
+        super().__init__(message)
+
+
+# The RawMetadata class attempts to make as few assumptions about the underlying
+# serialization formats as possible. The idea is that as long as a serialization
+# format offers some very basic primitives in *some* way, then we can support
+# serializing to and from that format.
+class RawMetadata(TypedDict, total=False):
+    """A dictionary of raw core metadata.
+
+    Each field in core metadata maps to a key of this dictionary (when data is
+    provided). The key is lower-case and underscores are used instead of dashes
+    compared to the equivalent core metadata field. Any core metadata field that
+    can be specified multiple times or can hold multiple values in a single
+    field have a key with a plural name. See :class:`Metadata` whose attributes
+    match the keys of this dictionary.
+
+    Core metadata fields that can be specified multiple times are stored as a
+    list or dict depending on which is appropriate for the field. Any fields
+    which hold multiple values in a single field are stored as a list.
+
+    """
+
+    # Metadata 1.0 - PEP 241
+    metadata_version: str
+    name: str
+    version: str
+    platforms: list[str]
+    summary: str
+    description: str
+    keywords: list[str]
+    home_page: str
+    author: str
+    author_email: str
+    license: str
+
+    # Metadata 1.1 - PEP 314
+    supported_platforms: list[str]
+    download_url: str
+    classifiers: list[str]
+    requires: list[str]
+    provides: list[str]
+    obsoletes: list[str]
+
+    # Metadata 1.2 - PEP 345
+    maintainer: str
+    maintainer_email: str
+    requires_dist: list[str]
+    provides_dist: list[str]
+    obsoletes_dist: list[str]
+    requires_python: str
+    requires_external: list[str]
+    project_urls: dict[str, str]
+
+    # Metadata 2.0
+    # PEP 426 attempted to completely revamp the metadata format
+    # but got stuck without ever being able to build consensus on
+    # it and ultimately ended up withdrawn.
+    #
+    # However, a number of tools had started emitting METADATA with
+    # `2.0` Metadata-Version, so for historical reasons, this version
+    # was skipped.
+
+    # Metadata 2.1 - PEP 566
+    description_content_type: str
+    provides_extra: list[str]
+
+    # Metadata 2.2 - PEP 643
+    dynamic: list[str]
+
+    # Metadata 2.3 - PEP 685
+    # No new fields were added in PEP 685, just some edge cases were
+    # tightened up to provide better interoperability.
+
+    # Metadata 2.4 - PEP 639
+    license_expression: str
+    license_files: list[str]
+
+
+_STRING_FIELDS = {
+    "author",
+    "author_email",
+    "description",
+    "description_content_type",
+    "download_url",
+    "home_page",
+    "license",
+    "license_expression",
+    "maintainer",
+    "maintainer_email",
+    "metadata_version",
+    "name",
+    "requires_python",
+    "summary",
+    "version",
+}
+
+_LIST_FIELDS = {
+    "classifiers",
+    "dynamic",
+    "license_files",
+    "obsoletes",
+    "obsoletes_dist",
+    "platforms",
+    "provides",
+    "provides_dist",
+    "provides_extra",
+    "requires",
+    "requires_dist",
+    "requires_external",
+    "supported_platforms",
+}
+
+_DICT_FIELDS = {
+    "project_urls",
+}
+
+
+def _parse_keywords(data: str) -> list[str]:
+    """Split a string of comma-separated keywords into a list of keywords."""
+    return [k.strip() for k in data.split(",")]
+
+
+def _parse_project_urls(data: list[str]) -> dict[str, str]:
+    """Parse a list of label/URL string pairings separated by a comma."""
+    urls = {}
+    for pair in data:
+        # Our logic is slightly tricky here as we want to try and do
+        # *something* reasonable with malformed data.
+        #
+        # The main thing that we have to worry about, is data that does
+        # not have a ',' at all to split the label from the Value. There
+        # isn't a singular right answer here, and we will fail validation
+        # later on (if the caller is validating) so it doesn't *really*
+        # matter, but since the missing value has to be an empty str
+        # and our return value is dict[str, str], if we let the key
+        # be the missing value, then they'd have multiple '' values that
+        # overwrite each other in an accumulating dict.
+        #
+        # The other potential issue is that it's possible to have the
+        # same label multiple times in the metadata, with no solid "right"
+        # answer with what to do in that case. As such, we'll do the only
+        # thing we can, which is treat the field as unparseable and add it
+        # to our list of unparsed fields.
+        parts = [p.strip() for p in pair.split(",", 1)]
+        parts.extend([""] * (max(0, 2 - len(parts))))  # Ensure 2 items
+
+        # TODO: The spec doesn't say anything about if the keys should be
+        #       considered case sensitive or not...
logically they should + # be case-preserving and case-insensitive, but doing that + # would open up more cases where we might have duplicate + # entries. + label, url = parts + if label in urls: + # The label already exists in our set of urls, so this field + # is unparseable, and we can just add the whole thing to our + # unparseable data and stop processing it. + raise KeyError("duplicate labels in project urls") + urls[label] = url + + return urls + + +def _get_payload(msg: email.message.Message, source: bytes | str) -> str: + """Get the body of the message.""" + # If our source is a str, then our caller has managed encodings for us, + # and we don't need to deal with it. + if isinstance(source, str): + payload = msg.get_payload() + assert isinstance(payload, str) + return payload + # If our source is a bytes, then we're managing the encoding and we need + # to deal with it. + else: + bpayload = msg.get_payload(decode=True) + assert isinstance(bpayload, bytes) + try: + return bpayload.decode("utf8", "strict") + except UnicodeDecodeError as exc: + raise ValueError("payload in an invalid encoding") from exc + + +# The various parse_FORMAT functions here are intended to be as lenient as +# possible in their parsing, while still returning a correctly typed +# RawMetadata. +# +# To aid in this, we also generally want to do as little touching of the +# data as possible, except where there are possibly some historic holdovers +# that make valid data awkward to work with. +# +# While this is a lower level, intermediate format than our ``Metadata`` +# class, some light touch ups can make a massive difference in usability. + +# Map METADATA fields to RawMetadata. +_EMAIL_TO_RAW_MAPPING = { + "author": "author", + "author-email": "author_email", + "classifier": "classifiers", + "description": "description", + "description-content-type": "description_content_type", + "download-url": "download_url", + "dynamic": "dynamic", + "home-page": "home_page", + "keywords": "keywords", + "license": "license", + "license-expression": "license_expression", + "license-file": "license_files", + "maintainer": "maintainer", + "maintainer-email": "maintainer_email", + "metadata-version": "metadata_version", + "name": "name", + "obsoletes": "obsoletes", + "obsoletes-dist": "obsoletes_dist", + "platform": "platforms", + "project-url": "project_urls", + "provides": "provides", + "provides-dist": "provides_dist", + "provides-extra": "provides_extra", + "requires": "requires", + "requires-dist": "requires_dist", + "requires-external": "requires_external", + "requires-python": "requires_python", + "summary": "summary", + "supported-platform": "supported_platforms", + "version": "version", +} +_RAW_TO_EMAIL_MAPPING = {raw: email for email, raw in _EMAIL_TO_RAW_MAPPING.items()} + + +def parse_email(data: bytes | str) -> tuple[RawMetadata, dict[str, list[str]]]: + """Parse a distribution's metadata stored as email headers (e.g. from ``METADATA``). + + This function returns a two-item tuple of dicts. The first dict is of + recognized fields from the core metadata specification. Fields that can be + parsed and translated into Python's built-in types are converted + appropriately. All other fields are left as-is. Fields that are allowed to + appear multiple times are stored as lists. + + The second dict contains all other fields from the metadata. This includes + any unrecognized fields. It also includes any fields which are expected to + be parsed into a built-in type but were not formatted appropriately. 
Finally,
+    any fields that are expected to appear only once but are repeated are
+    included in this dict.
+
+    """
+    raw: dict[str, str | list[str] | dict[str, str]] = {}
+    unparsed: dict[str, list[str]] = {}
+
+    if isinstance(data, str):
+        parsed = email.parser.Parser(policy=email.policy.compat32).parsestr(data)
+    else:
+        parsed = email.parser.BytesParser(policy=email.policy.compat32).parsebytes(data)
+
+    # We have to wrap parsed.keys() in a set, because in the case of multiple
+    # values for a key (a list), the key will appear multiple times in the
+    # list of keys, but we're avoiding that by using get_all().
+    for name in frozenset(parsed.keys()):
+        # Header names in RFC are case insensitive, so we'll normalize to all
+        # lower case to make comparisons easier.
+        name = name.lower()
+
+        # We use get_all() here, even for fields that aren't multiple use,
+        # because otherwise someone could have e.g. two Name fields, and we
+        # would just silently ignore it rather than doing something about it.
+        headers = parsed.get_all(name) or []
+
+        # The way the email module works when parsing bytes is that it
+        # unconditionally decodes the bytes as ascii using the surrogateescape
+        # handler. When you pull that data back out (such as with get_all() ),
+        # it looks to see if the str has any surrogate escapes, and if it does
+        # it wraps it in a Header object instead of returning the string.
+        #
+        # As such, we'll look for those Header objects, and fix up the encoding.
+        value = []
+        # Flag if we have run into any issues processing the headers, thus
+        # signalling that the data belongs in 'unparsed'.
+        valid_encoding = True
+        for h in headers:
+            # It's unclear if this can return more types than just a Header or
+            # a str, so we'll just assert here to make sure.
+            assert isinstance(h, (email.header.Header, str))
+
+            # If it's a header object, we need to do our little dance to get
+            # the real data out of it. In cases where there is invalid data
+            # we're going to end up with mojibake, but there's no obvious, good
+            # way around that without reimplementing parts of the Header object
+            # ourselves.
+            #
+            # That should be fine since, if mojibake happens, this key is
+            # going into the unparsed dict anyways.
+            if isinstance(h, email.header.Header):
+                # The Header object stores its data as chunks, and each chunk
+                # can be independently encoded, so we'll need to check each
+                # of them.
+                chunks: list[tuple[bytes, str | None]] = []
+                for bin, encoding in email.header.decode_header(h):
+                    try:
+                        bin.decode("utf8", "strict")
+                    except UnicodeDecodeError:
+                        # Enable mojibake.
+                        encoding = "latin1"
+                        valid_encoding = False
+                    else:
+                        encoding = "utf8"
+                    chunks.append((bin, encoding))
+
+                # Turn our chunks back into a Header object, then let that
+                # Header object do the right thing to turn them into a
+                # string for us.
+                value.append(str(email.header.make_header(chunks)))
+            # This is already a string, so just add it.
+            else:
+                value.append(h)
+
+        # We've processed all of our values to get them into a list of str,
+        # but we may have mojibake data, in which case this is an unparsed
+        # field.
+        if not valid_encoding:
+            unparsed[name] = value
+            continue
+
+        raw_name = _EMAIL_TO_RAW_MAPPING.get(name)
+        if raw_name is None:
+            # This is a bit of a weird situation, we've encountered a key that
+            # we don't know what it means, so we don't know whether it's meant
+            # to be a list or not.
+            #
+            # Since we can't really tell one way or another, we'll just leave it
+            # as a list, even though it may be a single item list, because that's
+            # what makes the most sense for email headers.
+            unparsed[name] = value
+            continue
+
+        # If this is one of our string fields, then we'll check to see if our
+        # value is a list of a single item. If it is then we'll assume that
+        # it was emitted as a single string, and unwrap the str from inside
+        # the list.
+        #
+        # If it's any other kind of data, then we haven't the faintest clue
+        # what we should parse it as, and we have to just add it to our list
+        # of unparsed stuff.
+        if raw_name in _STRING_FIELDS and len(value) == 1:
+            raw[raw_name] = value[0]
+        # If this is one of our list of string fields, then we can just assign
+        # the value, since email *only* has strings, and our get_all() call
+        # above ensures that this is a list.
+        elif raw_name in _LIST_FIELDS:
+            raw[raw_name] = value
+        # Special Case: Keywords
+        # The keywords field is implemented in the metadata spec as a str,
+        # but it conceptually is a list of strings, and is serialized using
+        # ", ".join(keywords), so we'll do some light data massaging to turn
+        # this into what it logically is.
+        elif raw_name == "keywords" and len(value) == 1:
+            raw[raw_name] = _parse_keywords(value[0])
+        # Special Case: Project-URL
+        # The project urls is implemented in the metadata spec as a list of
+        # specially-formatted strings that represent a key and a value, which
+        # is fundamentally a mapping, however the email format doesn't support
+        # mappings in a sane way, so it was crammed into a list of strings
+        # instead.
+        #
+        # We will do a little light data massaging to turn this into a map as
+        # it logically should be.
+        elif raw_name == "project_urls":
+            try:
+                raw[raw_name] = _parse_project_urls(value)
+            except KeyError:
+                unparsed[name] = value
+        # Nothing that we've done has managed to parse this, so we'll just
+        # throw it in our unparseable data and move on.
+        else:
+            unparsed[name] = value
+
+    # We need to support getting the Description from the message payload in
+    # addition to getting it from the headers. This does mean, though, there
+    # is the possibility of it being set both ways, in which case we put both
+    # in 'unparsed' since we don't know which is right.
+    try:
+        payload = _get_payload(parsed, data)
+    except ValueError:
+        unparsed.setdefault("description", []).append(
+            parsed.get_payload(decode=isinstance(data, bytes))  # type: ignore[call-overload]
+        )
+    else:
+        if payload:
+            # Check to see if we've already got a description, if so then both
+            # it, and this body move to unparseable.
+            if "description" in raw:
+                description_header = cast(str, raw.pop("description"))
+                unparsed.setdefault("description", []).extend(
+                    [description_header, payload]
+                )
+            elif "description" in unparsed:
+                unparsed["description"].append(payload)
+            else:
+                raw["description"] = payload
+
+    # We need to cast our `raw` to a metadata, because a TypedDict only supports
+    # literal key names, but we're computing our key names on purpose; the way
+    # this function is implemented means our `TypedDict` can only have valid key
+    # names.
+    return cast(RawMetadata, raw), unparsed
+
+
+_NOT_FOUND = object()
+
+
+# Keep the two values in sync.
+_VALID_METADATA_VERSIONS = ["1.0", "1.1", "1.2", "2.1", "2.2", "2.3", "2.4"]
+_MetadataVersion = Literal["1.0", "1.1", "1.2", "2.1", "2.2", "2.3", "2.4"]
+
+_REQUIRED_ATTRS = frozenset(["metadata_version", "name", "version"])
+
+
+class _Validator(Generic[T]):
+    """Validate a metadata field.
+
+    All _process_*() methods correspond to a core metadata field. The method is
+    called with the field's raw value. If the raw value is valid it is returned
+    in its "enriched" form (e.g. ``version.Version`` for the ``Version`` field).
+    If the raw value is invalid, :exc:`InvalidMetadata` is raised (with a cause
+    as appropriate).
+    """
+
+    name: str
+    raw_name: str
+    added: _MetadataVersion
+
+    def __init__(
+        self,
+        *,
+        added: _MetadataVersion = "1.0",
+    ) -> None:
+        self.added = added
+
+    def __set_name__(self, _owner: Metadata, name: str) -> None:
+        self.name = name
+        self.raw_name = _RAW_TO_EMAIL_MAPPING[name]
+
+    def __get__(self, instance: Metadata, _owner: type[Metadata]) -> T:
+        # With Python 3.8, the caching can be replaced with functools.cached_property().
+        # No need to check the cache as attribute lookup will resolve into the
+        # instance's __dict__ before __get__ is called.
+        cache = instance.__dict__
+        value = instance._raw.get(self.name)
+
+        # To make the _process_* methods easier, we'll check if the value is None
+        # and if this field is NOT a required attribute, and if both of those
+        # things are true, we'll skip the converter. This will mean that the
+        # converters never have to deal with the None union.
+        if self.name in _REQUIRED_ATTRS or value is not None:
+            try:
+                converter: Callable[[Any], T] = getattr(self, f"_process_{self.name}")
+            except AttributeError:
+                pass
+            else:
+                value = converter(value)
+
+        cache[self.name] = value
+        try:
+            del instance._raw[self.name]  # type: ignore[misc]
+        except KeyError:
+            pass
+
+        return cast(T, value)
+
+    def _invalid_metadata(
+        self, msg: str, cause: Exception | None = None
+    ) -> InvalidMetadata:
+        exc = InvalidMetadata(
+            self.raw_name, msg.format_map({"field": repr(self.raw_name)})
+        )
+        exc.__cause__ = cause
+        return exc
+
+    def _process_metadata_version(self, value: str) -> _MetadataVersion:
+        # Implicitly makes Metadata-Version required.
+        if value not in _VALID_METADATA_VERSIONS:
+            raise self._invalid_metadata(f"{value!r} is not a valid metadata version")
+        return cast(_MetadataVersion, value)
+
+    def _process_name(self, value: str) -> str:
+        if not value:
+            raise self._invalid_metadata("{field} is a required field")
+        # Validate the name as a side-effect.
+ try: + utils.canonicalize_name(value, validate=True) + except utils.InvalidName as exc: + raise self._invalid_metadata( + f"{value!r} is invalid for {{field}}", cause=exc + ) from exc + else: + return value + + def _process_version(self, value: str) -> version_module.Version: + if not value: + raise self._invalid_metadata("{field} is a required field") + try: + return version_module.parse(value) + except version_module.InvalidVersion as exc: + raise self._invalid_metadata( + f"{value!r} is invalid for {{field}}", cause=exc + ) from exc + + def _process_summary(self, value: str) -> str: + """Check the field contains no newlines.""" + if "\n" in value: + raise self._invalid_metadata("{field} must be a single line") + return value + + def _process_description_content_type(self, value: str) -> str: + content_types = {"text/plain", "text/x-rst", "text/markdown"} + message = email.message.EmailMessage() + message["content-type"] = value + + content_type, parameters = ( + # Defaults to `text/plain` if parsing failed. + message.get_content_type().lower(), + message["content-type"].params, + ) + # Check if content-type is valid or defaulted to `text/plain` and thus was + # not parseable. + if content_type not in content_types or content_type not in value.lower(): + raise self._invalid_metadata( + f"{{field}} must be one of {list(content_types)}, not {value!r}" + ) + + charset = parameters.get("charset", "UTF-8") + if charset != "UTF-8": + raise self._invalid_metadata( + f"{{field}} can only specify the UTF-8 charset, not {list(charset)}" + ) + + markdown_variants = {"GFM", "CommonMark"} + variant = parameters.get("variant", "GFM") # Use an acceptable default. + if content_type == "text/markdown" and variant not in markdown_variants: + raise self._invalid_metadata( + f"valid Markdown variants for {{field}} are {list(markdown_variants)}, " + f"not {variant!r}", + ) + return value + + def _process_dynamic(self, value: list[str]) -> list[str]: + for dynamic_field in map(str.lower, value): + if dynamic_field in {"name", "version", "metadata-version"}: + raise self._invalid_metadata( + f"{dynamic_field!r} is not allowed as a dynamic field" + ) + elif dynamic_field not in _EMAIL_TO_RAW_MAPPING: + raise self._invalid_metadata( + f"{dynamic_field!r} is not a valid dynamic field" + ) + return list(map(str.lower, value)) + + def _process_provides_extra( + self, + value: list[str], + ) -> list[utils.NormalizedName]: + normalized_names = [] + try: + for name in value: + normalized_names.append(utils.canonicalize_name(name, validate=True)) + except utils.InvalidName as exc: + raise self._invalid_metadata( + f"{name!r} is invalid for {{field}}", cause=exc + ) from exc + else: + return normalized_names + + def _process_requires_python(self, value: str) -> specifiers.SpecifierSet: + try: + return specifiers.SpecifierSet(value) + except specifiers.InvalidSpecifier as exc: + raise self._invalid_metadata( + f"{value!r} is invalid for {{field}}", cause=exc + ) from exc + + def _process_requires_dist( + self, + value: list[str], + ) -> list[requirements.Requirement]: + reqs = [] + try: + for req in value: + reqs.append(requirements.Requirement(req)) + except requirements.InvalidRequirement as exc: + raise self._invalid_metadata( + f"{req!r} is invalid for {{field}}", cause=exc + ) from exc + else: + return reqs + + def _process_license_expression( + self, value: str + ) -> NormalizedLicenseExpression | None: + try: + return licenses.canonicalize_license_expression(value) + except ValueError as exc: + raise 
self._invalid_metadata( + f"{value!r} is invalid for {{field}}", cause=exc + ) from exc + + def _process_license_files(self, value: list[str]) -> list[str]: + paths = [] + for path in value: + if ".." in path: + raise self._invalid_metadata( + f"{path!r} is invalid for {{field}}, " + "parent directory indicators are not allowed" + ) + if "*" in path: + raise self._invalid_metadata( + f"{path!r} is invalid for {{field}}, paths must be resolved" + ) + if ( + pathlib.PurePosixPath(path).is_absolute() + or pathlib.PureWindowsPath(path).is_absolute() + ): + raise self._invalid_metadata( + f"{path!r} is invalid for {{field}}, paths must be relative" + ) + if pathlib.PureWindowsPath(path).as_posix() != path: + raise self._invalid_metadata( + f"{path!r} is invalid for {{field}}, paths must use '/' delimiter" + ) + paths.append(path) + return paths + + +class Metadata: + """Representation of distribution metadata. + + Compared to :class:`RawMetadata`, this class provides objects representing + metadata fields instead of only using built-in types. Any invalid metadata + will cause :exc:`InvalidMetadata` to be raised (with a + :py:attr:`~BaseException.__cause__` attribute as appropriate). + """ + + _raw: RawMetadata + + @classmethod + def from_raw(cls, data: RawMetadata, *, validate: bool = True) -> Metadata: + """Create an instance from :class:`RawMetadata`. + + If *validate* is true, all metadata will be validated. All exceptions + related to validation will be gathered and raised as an :class:`ExceptionGroup`. + """ + ins = cls() + ins._raw = data.copy() # Mutations occur due to caching enriched values. + + if validate: + exceptions: list[Exception] = [] + try: + metadata_version = ins.metadata_version + metadata_age = _VALID_METADATA_VERSIONS.index(metadata_version) + except InvalidMetadata as metadata_version_exc: + exceptions.append(metadata_version_exc) + metadata_version = None + + # Make sure to check for the fields that are present, the required + # fields (so their absence can be reported). + fields_to_check = frozenset(ins._raw) | _REQUIRED_ATTRS + # Remove fields that have already been checked. + fields_to_check -= {"metadata_version"} + + for key in fields_to_check: + try: + if metadata_version: + # Can't use getattr() as that triggers descriptor protocol which + # will fail due to no value for the instance argument. + try: + field_metadata_version = cls.__dict__[key].added + except KeyError: + exc = InvalidMetadata(key, f"unrecognized field: {key!r}") + exceptions.append(exc) + continue + field_age = _VALID_METADATA_VERSIONS.index( + field_metadata_version + ) + if field_age > metadata_age: + field = _RAW_TO_EMAIL_MAPPING[key] + exc = InvalidMetadata( + field, + f"{field} introduced in metadata version " + f"{field_metadata_version}, not {metadata_version}", + ) + exceptions.append(exc) + continue + getattr(ins, key) + except InvalidMetadata as exc: + exceptions.append(exc) + + if exceptions: + raise ExceptionGroup("invalid metadata", exceptions) + + return ins + + @classmethod + def from_email(cls, data: bytes | str, *, validate: bool = True) -> Metadata: + """Parse metadata from email headers. + + If *validate* is true, the metadata will be validated. All exceptions + related to validation will be gathered and raised as an :class:`ExceptionGroup`. 
+ """ + raw, unparsed = parse_email(data) + + if validate: + exceptions: list[Exception] = [] + for unparsed_key in unparsed: + if unparsed_key in _EMAIL_TO_RAW_MAPPING: + message = f"{unparsed_key!r} has invalid data" + else: + message = f"unrecognized field: {unparsed_key!r}" + exceptions.append(InvalidMetadata(unparsed_key, message)) + + if exceptions: + raise ExceptionGroup("unparsed", exceptions) + + try: + return cls.from_raw(raw, validate=validate) + except ExceptionGroup as exc_group: + raise ExceptionGroup( + "invalid or unparsed metadata", exc_group.exceptions + ) from None + + metadata_version: _Validator[_MetadataVersion] = _Validator() + """:external:ref:`core-metadata-metadata-version` + (required; validated to be a valid metadata version)""" + # `name` is not normalized/typed to NormalizedName so as to provide access to + # the original/raw name. + name: _Validator[str] = _Validator() + """:external:ref:`core-metadata-name` + (required; validated using :func:`~packaging.utils.canonicalize_name` and its + *validate* parameter)""" + version: _Validator[version_module.Version] = _Validator() + """:external:ref:`core-metadata-version` (required)""" + dynamic: _Validator[list[str] | None] = _Validator( + added="2.2", + ) + """:external:ref:`core-metadata-dynamic` + (validated against core metadata field names and lowercased)""" + platforms: _Validator[list[str] | None] = _Validator() + """:external:ref:`core-metadata-platform`""" + supported_platforms: _Validator[list[str] | None] = _Validator(added="1.1") + """:external:ref:`core-metadata-supported-platform`""" + summary: _Validator[str | None] = _Validator() + """:external:ref:`core-metadata-summary` (validated to contain no newlines)""" + description: _Validator[str | None] = _Validator() # TODO 2.1: can be in body + """:external:ref:`core-metadata-description`""" + description_content_type: _Validator[str | None] = _Validator(added="2.1") + """:external:ref:`core-metadata-description-content-type` (validated)""" + keywords: _Validator[list[str] | None] = _Validator() + """:external:ref:`core-metadata-keywords`""" + home_page: _Validator[str | None] = _Validator() + """:external:ref:`core-metadata-home-page`""" + download_url: _Validator[str | None] = _Validator(added="1.1") + """:external:ref:`core-metadata-download-url`""" + author: _Validator[str | None] = _Validator() + """:external:ref:`core-metadata-author`""" + author_email: _Validator[str | None] = _Validator() + """:external:ref:`core-metadata-author-email`""" + maintainer: _Validator[str | None] = _Validator(added="1.2") + """:external:ref:`core-metadata-maintainer`""" + maintainer_email: _Validator[str | None] = _Validator(added="1.2") + """:external:ref:`core-metadata-maintainer-email`""" + license: _Validator[str | None] = _Validator() + """:external:ref:`core-metadata-license`""" + license_expression: _Validator[NormalizedLicenseExpression | None] = _Validator( + added="2.4" + ) + """:external:ref:`core-metadata-license-expression`""" + license_files: _Validator[list[str] | None] = _Validator(added="2.4") + """:external:ref:`core-metadata-license-file`""" + classifiers: _Validator[list[str] | None] = _Validator(added="1.1") + """:external:ref:`core-metadata-classifier`""" + requires_dist: _Validator[list[requirements.Requirement] | None] = _Validator( + added="1.2" + ) + """:external:ref:`core-metadata-requires-dist`""" + requires_python: _Validator[specifiers.SpecifierSet | None] = _Validator( + added="1.2" + ) + 
""":external:ref:`core-metadata-requires-python`""" + # Because `Requires-External` allows for non-PEP 440 version specifiers, we + # don't do any processing on the values. + requires_external: _Validator[list[str] | None] = _Validator(added="1.2") + """:external:ref:`core-metadata-requires-external`""" + project_urls: _Validator[dict[str, str] | None] = _Validator(added="1.2") + """:external:ref:`core-metadata-project-url`""" + # PEP 685 lets us raise an error if an extra doesn't pass `Name` validation + # regardless of metadata version. + provides_extra: _Validator[list[utils.NormalizedName] | None] = _Validator( + added="2.1", + ) + """:external:ref:`core-metadata-provides-extra`""" + provides_dist: _Validator[list[str] | None] = _Validator(added="1.2") + """:external:ref:`core-metadata-provides-dist`""" + obsoletes_dist: _Validator[list[str] | None] = _Validator(added="1.2") + """:external:ref:`core-metadata-obsoletes-dist`""" + requires: _Validator[list[str] | None] = _Validator(added="1.1") + """``Requires`` (deprecated)""" + provides: _Validator[list[str] | None] = _Validator(added="1.1") + """``Provides`` (deprecated)""" + obsoletes: _Validator[list[str] | None] = _Validator(added="1.1") + """``Obsoletes`` (deprecated)""" diff --git a/temp_venv/lib/python3.13/site-packages/packaging/py.typed b/temp_venv/lib/python3.13/site-packages/packaging/py.typed new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/temp_venv/lib/python3.13/site-packages/packaging/requirements.py b/temp_venv/lib/python3.13/site-packages/packaging/requirements.py new file mode 100644 index 0000000000000000000000000000000000000000..4e068c9567def3564f238a76fe7ab46b569f33e5 --- /dev/null +++ b/temp_venv/lib/python3.13/site-packages/packaging/requirements.py @@ -0,0 +1,91 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. +from __future__ import annotations + +from typing import Any, Iterator + +from ._parser import parse_requirement as _parse_requirement +from ._tokenizer import ParserSyntaxError +from .markers import Marker, _normalize_extra_values +from .specifiers import SpecifierSet +from .utils import canonicalize_name + + +class InvalidRequirement(ValueError): + """ + An invalid requirement was found, users should refer to PEP 508. + """ + + +class Requirement: + """Parse a requirement. + + Parse a given requirement string into its parts, such as name, specifier, + URL, and extras. Raises InvalidRequirement on a badly-formed requirement + string. + """ + + # TODO: Can we test whether something is contained within a requirement? + # If so how do we do that? Do we need to test against the _name_ of + # the thing as well as the version? What about the markers? + # TODO: Can we normalize the name and extra name? 
+ + def __init__(self, requirement_string: str) -> None: + try: + parsed = _parse_requirement(requirement_string) + except ParserSyntaxError as e: + raise InvalidRequirement(str(e)) from e + + self.name: str = parsed.name + self.url: str | None = parsed.url or None + self.extras: set[str] = set(parsed.extras or []) + self.specifier: SpecifierSet = SpecifierSet(parsed.specifier) + self.marker: Marker | None = None + if parsed.marker is not None: + self.marker = Marker.__new__(Marker) + self.marker._markers = _normalize_extra_values(parsed.marker) + + def _iter_parts(self, name: str) -> Iterator[str]: + yield name + + if self.extras: + formatted_extras = ",".join(sorted(self.extras)) + yield f"[{formatted_extras}]" + + if self.specifier: + yield str(self.specifier) + + if self.url: + yield f"@ {self.url}" + if self.marker: + yield " " + + if self.marker: + yield f"; {self.marker}" + + def __str__(self) -> str: + return "".join(self._iter_parts(self.name)) + + def __repr__(self) -> str: + return f"<Requirement('{self}')>" + + def __hash__(self) -> int: + return hash( + ( + self.__class__.__name__, + *self._iter_parts(canonicalize_name(self.name)), + ) + ) + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, Requirement): + return NotImplemented + + return ( + canonicalize_name(self.name) == canonicalize_name(other.name) + and self.extras == other.extras + and self.specifier == other.specifier + and self.url == other.url + and self.marker == other.marker + ) diff --git a/temp_venv/lib/python3.13/site-packages/packaging/specifiers.py b/temp_venv/lib/python3.13/site-packages/packaging/specifiers.py new file mode 100644 index 0000000000000000000000000000000000000000..c8448043006f1c60ee92df69da47986d10229b1d --- /dev/null +++ b/temp_venv/lib/python3.13/site-packages/packaging/specifiers.py @@ -0,0 +1,1019 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. +""" +.. testsetup:: + + from packaging.specifiers import Specifier, SpecifierSet, InvalidSpecifier + from packaging.version import Version +""" + +from __future__ import annotations + +import abc +import itertools +import re +from typing import Callable, Iterable, Iterator, TypeVar, Union + +from .utils import canonicalize_version +from .version import Version + +UnparsedVersion = Union[Version, str] +UnparsedVersionVar = TypeVar("UnparsedVersionVar", bound=UnparsedVersion) +CallableOperator = Callable[[Version, str], bool] + + +def _coerce_version(version: UnparsedVersion) -> Version: + if not isinstance(version, Version): + version = Version(version) + return version + + +class InvalidSpecifier(ValueError): + """ + Raised when attempting to create a :class:`Specifier` with a specifier + string that is invalid. + + >>> Specifier("lolwat") + Traceback (most recent call last): + ... + packaging.specifiers.InvalidSpecifier: Invalid specifier: 'lolwat' + """ + + +class BaseSpecifier(metaclass=abc.ABCMeta): + @abc.abstractmethod + def __str__(self) -> str: + """ + Returns the str representation of this Specifier-like object. This + should be representative of the Specifier itself. + """ + + @abc.abstractmethod + def __hash__(self) -> int: + """ + Returns a hash value for this Specifier-like object. + """ + + @abc.abstractmethod + def __eq__(self, other: object) -> bool: + """ + Returns a boolean representing whether or not the two Specifier-like + objects are equal. + + :param other: The other object to check against.
+ """ + + @property + @abc.abstractmethod + def prereleases(self) -> bool | None: + """Whether or not pre-releases as a whole are allowed. + + This can be set to either ``True`` or ``False`` to explicitly enable or disable + prereleases or it can be set to ``None`` (the default) to use default semantics. + """ + + @prereleases.setter + def prereleases(self, value: bool) -> None: + """Setter for :attr:`prereleases`. + + :param value: The value to set. + """ + + @abc.abstractmethod + def contains(self, item: str, prereleases: bool | None = None) -> bool: + """ + Determines if the given item is contained within this specifier. + """ + + @abc.abstractmethod + def filter( + self, iterable: Iterable[UnparsedVersionVar], prereleases: bool | None = None + ) -> Iterator[UnparsedVersionVar]: + """ + Takes an iterable of items and filters them so that only items which + are contained within this specifier are allowed in it. + """ + + +class Specifier(BaseSpecifier): + """This class abstracts handling of version specifiers. + + .. tip:: + + It is generally not required to instantiate this manually. You should instead + prefer to work with :class:`SpecifierSet` instead, which can parse + comma-separated version specifiers (which is what package metadata contains). + """ + + _operator_regex_str = r""" + (?P(~=|==|!=|<=|>=|<|>|===)) + """ + _version_regex_str = r""" + (?P + (?: + # The identity operators allow for an escape hatch that will + # do an exact string match of the version you wish to install. + # This will not be parsed by PEP 440 and we cannot determine + # any semantic meaning from it. This operator is discouraged + # but included entirely as an escape hatch. + (?<====) # Only match for the identity operator + \s* + [^\s;)]* # The arbitrary version can be just about anything, + # we match everything except for whitespace, a + # semi-colon for marker support, and a closing paren + # since versions can be enclosed in them. + ) + | + (?: + # The (non)equality operators allow for wild card and local + # versions to be specified so we have to define these two + # operators separately to enable that. + (?<===|!=) # Only match for equals and not equals + + \s* + v? + (?:[0-9]+!)? # epoch + [0-9]+(?:\.[0-9]+)* # release + + # You cannot use a wild card and a pre-release, post-release, a dev or + # local version together so group them with a | and make them optional. + (?: + \.\* # Wild card syntax of .* + | + (?: # pre release + [-_\.]? + (alpha|beta|preview|pre|a|b|c|rc) + [-_\.]? + [0-9]* + )? + (?: # post release + (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*) + )? + (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release + (?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local + )? + ) + | + (?: + # The compatible operator requires at least two digits in the + # release segment. + (?<=~=) # Only match for the compatible operator + + \s* + v? + (?:[0-9]+!)? # epoch + [0-9]+(?:\.[0-9]+)+ # release (We have a + instead of a *) + (?: # pre release + [-_\.]? + (alpha|beta|preview|pre|a|b|c|rc) + [-_\.]? + [0-9]* + )? + (?: # post release + (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*) + )? + (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release + ) + | + (?: + # All other operators only allow a sub set of what the + # (non)equality operators do. Specifically they do not allow + # local versions to be specified nor do they allow the prefix + # matching wild cards. 
+ (?=": "greater_than_equal", + "<": "less_than", + ">": "greater_than", + "===": "arbitrary", + } + + def __init__(self, spec: str = "", prereleases: bool | None = None) -> None: + """Initialize a Specifier instance. + + :param spec: + The string representation of a specifier which will be parsed and + normalized before use. + :param prereleases: + This tells the specifier if it should accept prerelease versions if + applicable or not. The default of ``None`` will autodetect it from the + given specifiers. + :raises InvalidSpecifier: + If the given specifier is invalid (i.e. bad syntax). + """ + match = self._regex.search(spec) + if not match: + raise InvalidSpecifier(f"Invalid specifier: {spec!r}") + + self._spec: tuple[str, str] = ( + match.group("operator").strip(), + match.group("version").strip(), + ) + + # Store whether or not this Specifier should accept prereleases + self._prereleases = prereleases + + # https://github.com/python/mypy/pull/13475#pullrequestreview-1079784515 + @property # type: ignore[override] + def prereleases(self) -> bool: + # If there is an explicit prereleases set for this, then we'll just + # blindly use that. + if self._prereleases is not None: + return self._prereleases + + # Look at all of our specifiers and determine if they are inclusive + # operators, and if they are if they are including an explicit + # prerelease. + operator, version = self._spec + if operator in ["==", ">=", "<=", "~=", "===", ">", "<"]: + # The == specifier can include a trailing .*, if it does we + # want to remove before parsing. + if operator == "==" and version.endswith(".*"): + version = version[:-2] + + # Parse the version, and if it is a pre-release than this + # specifier allows pre-releases. + if Version(version).is_prerelease: + return True + + return False + + @prereleases.setter + def prereleases(self, value: bool) -> None: + self._prereleases = value + + @property + def operator(self) -> str: + """The operator of this specifier. + + >>> Specifier("==1.2.3").operator + '==' + """ + return self._spec[0] + + @property + def version(self) -> str: + """The version of this specifier. + + >>> Specifier("==1.2.3").version + '1.2.3' + """ + return self._spec[1] + + def __repr__(self) -> str: + """A representation of the Specifier that shows all internal state. + + >>> Specifier('>=1.0.0') + =1.0.0')> + >>> Specifier('>=1.0.0', prereleases=False) + =1.0.0', prereleases=False)> + >>> Specifier('>=1.0.0', prereleases=True) + =1.0.0', prereleases=True)> + """ + pre = ( + f", prereleases={self.prereleases!r}" + if self._prereleases is not None + else "" + ) + + return f"<{self.__class__.__name__}({str(self)!r}{pre})>" + + def __str__(self) -> str: + """A string representation of the Specifier that can be round-tripped. + + >>> str(Specifier('>=1.0.0')) + '>=1.0.0' + >>> str(Specifier('>=1.0.0', prereleases=False)) + '>=1.0.0' + """ + return "{}{}".format(*self._spec) + + @property + def _canonical_spec(self) -> tuple[str, str]: + canonical_version = canonicalize_version( + self._spec[1], + strip_trailing_zero=(self._spec[0] != "~="), + ) + return self._spec[0], canonical_version + + def __hash__(self) -> int: + return hash(self._canonical_spec) + + def __eq__(self, other: object) -> bool: + """Whether or not the two Specifier-like objects are equal. + + :param other: The other object to check against. + + The value of :attr:`prereleases` is ignored. + + >>> Specifier("==1.2.3") == Specifier("== 1.2.3.0") + True + >>> (Specifier("==1.2.3", prereleases=False) == + ... 
Specifier("==1.2.3", prereleases=True)) + True + >>> Specifier("==1.2.3") == "==1.2.3" + True + >>> Specifier("==1.2.3") == Specifier("==1.2.4") + False + >>> Specifier("==1.2.3") == Specifier("~=1.2.3") + False + """ + if isinstance(other, str): + try: + other = self.__class__(str(other)) + except InvalidSpecifier: + return NotImplemented + elif not isinstance(other, self.__class__): + return NotImplemented + + return self._canonical_spec == other._canonical_spec + + def _get_operator(self, op: str) -> CallableOperator: + operator_callable: CallableOperator = getattr( + self, f"_compare_{self._operators[op]}" + ) + return operator_callable + + def _compare_compatible(self, prospective: Version, spec: str) -> bool: + # Compatible releases have an equivalent combination of >= and ==. That + # is that ~=2.2 is equivalent to >=2.2,==2.*. This allows us to + # implement this in terms of the other specifiers instead of + # implementing it ourselves. The only thing we need to do is construct + # the other specifiers. + + # We want everything but the last item in the version, but we want to + # ignore suffix segments. + prefix = _version_join( + list(itertools.takewhile(_is_not_suffix, _version_split(spec)))[:-1] + ) + + # Add the prefix notation to the end of our string + prefix += ".*" + + return self._get_operator(">=")(prospective, spec) and self._get_operator("==")( + prospective, prefix + ) + + def _compare_equal(self, prospective: Version, spec: str) -> bool: + # We need special logic to handle prefix matching + if spec.endswith(".*"): + # In the case of prefix matching we want to ignore local segment. + normalized_prospective = canonicalize_version( + prospective.public, strip_trailing_zero=False + ) + # Get the normalized version string ignoring the trailing .* + normalized_spec = canonicalize_version(spec[:-2], strip_trailing_zero=False) + # Split the spec out by bangs and dots, and pretend that there is + # an implicit dot in between a release segment and a pre-release segment. + split_spec = _version_split(normalized_spec) + + # Split the prospective version out by bangs and dots, and pretend + # that there is an implicit dot in between a release segment and + # a pre-release segment. + split_prospective = _version_split(normalized_prospective) + + # 0-pad the prospective version before shortening it to get the correct + # shortened version. + padded_prospective, _ = _pad_version(split_prospective, split_spec) + + # Shorten the prospective version to be the same length as the spec + # so that we can determine if the specifier is a prefix of the + # prospective version or not. + shortened_prospective = padded_prospective[: len(split_spec)] + + return shortened_prospective == split_spec + else: + # Convert our spec string into a Version + spec_version = Version(spec) + + # If the specifier does not have a local segment, then we want to + # act as if the prospective version also does not have a local + # segment. + if not spec_version.local: + prospective = Version(prospective.public) + + return prospective == spec_version + + def _compare_not_equal(self, prospective: Version, spec: str) -> bool: + return not self._compare_equal(prospective, spec) + + def _compare_less_than_equal(self, prospective: Version, spec: str) -> bool: + # NB: Local version identifiers are NOT permitted in the version + # specifier, so local version labels can be universally removed from + # the prospective version. 
+ return Version(prospective.public) <= Version(spec) + + def _compare_greater_than_equal(self, prospective: Version, spec: str) -> bool: + # NB: Local version identifiers are NOT permitted in the version + # specifier, so local version labels can be universally removed from + # the prospective version. + return Version(prospective.public) >= Version(spec) + + def _compare_less_than(self, prospective: Version, spec_str: str) -> bool: + # Convert our spec to a Version instance, since we'll want to work with + # it as a version. + spec = Version(spec_str) + + # Check to see if the prospective version is less than the spec + # version. If it's not we can short circuit and just return False now + # instead of doing extra unneeded work. + if not prospective < spec: + return False + + # This special case is here so that, unless the specifier itself + # includes a pre-release version, we do not accept pre-release + # versions for the version mentioned in the specifier (e.g. <3.1 should + # not match 3.1.dev0, but should match 3.0.dev0). + if not spec.is_prerelease and prospective.is_prerelease: + if Version(prospective.base_version) == Version(spec.base_version): + return False + + # If we've gotten to here, it means that the prospective version is both + # less than the spec version *and* it's not a pre-release of the same + # version in the spec. + return True + + def _compare_greater_than(self, prospective: Version, spec_str: str) -> bool: + # Convert our spec to a Version instance, since we'll want to work with + # it as a version. + spec = Version(spec_str) + + # Check to see if the prospective version is greater than the spec + # version. If it's not we can short circuit and just return False now + # instead of doing extra unneeded work. + if not prospective > spec: + return False + + # This special case is here so that, unless the specifier itself + # includes a post-release version, we do not accept + # post-release versions for the version mentioned in the specifier + # (e.g. >3.1 should not match 3.0.post0, but should match 3.2.post0). + if not spec.is_postrelease and prospective.is_postrelease: + if Version(prospective.base_version) == Version(spec.base_version): + return False + + # Ensure that we do not allow a local version of the version mentioned + # in the specifier, which is technically greater than, to match. + if prospective.local is not None: + if Version(prospective.base_version) == Version(spec.base_version): + return False + + # If we've gotten to here, it means that the prospective version is both + # greater than the spec version *and* it's not a post-release or local + # version of the same version in the spec. + return True + + def _compare_arbitrary(self, prospective: Version, spec: str) -> bool: + return str(prospective).lower() == str(spec).lower() + + def __contains__(self, item: str | Version) -> bool: + """Return whether or not the item is contained in this specifier. + + :param item: The item to check for. + + This is used for the ``in`` operator and behaves the same as + :meth:`contains` with no ``prereleases`` argument passed. + + >>> "1.2.3" in Specifier(">=1.2.3") + True + >>> Version("1.2.3") in Specifier(">=1.2.3") + True + >>> "1.0.0" in Specifier(">=1.2.3") + False + >>> "1.3.0a1" in Specifier(">=1.2.3") + False + >>> "1.3.0a1" in Specifier(">=1.2.3", prereleases=True) + True + """ + return self.contains(item) + + def contains(self, item: UnparsedVersion, prereleases: bool | None = None) -> bool: + """Return whether or not the item is contained in this specifier.
+ + :param item: + The item to check for, which can be a version string or a + :class:`Version` instance. + :param prereleases: + Whether or not to match prereleases with this Specifier. If set to + ``None`` (the default), it uses :attr:`prereleases` to determine + whether or not prereleases are allowed. + + >>> Specifier(">=1.2.3").contains("1.2.3") + True + >>> Specifier(">=1.2.3").contains(Version("1.2.3")) + True + >>> Specifier(">=1.2.3").contains("1.0.0") + False + >>> Specifier(">=1.2.3").contains("1.3.0a1") + False + >>> Specifier(">=1.2.3", prereleases=True).contains("1.3.0a1") + True + >>> Specifier(">=1.2.3").contains("1.3.0a1", prereleases=True) + True + """ + + # Determine if prereleases are to be allowed or not. + if prereleases is None: + prereleases = self.prereleases + + # Normalize item to a Version; this allows us to have a shortcut for + # "2.0" in Specifier(">=2") + normalized_item = _coerce_version(item) + + # Determine if we should be supporting prereleases in this specifier + # or not; if we do not support prereleases then we can short-circuit + # the logic if this version is a prerelease. + if normalized_item.is_prerelease and not prereleases: + return False + + # Actually do the comparison to determine if this item is contained + # within this Specifier or not. + operator_callable: CallableOperator = self._get_operator(self.operator) + return operator_callable(normalized_item, self.version) + + def filter( + self, iterable: Iterable[UnparsedVersionVar], prereleases: bool | None = None + ) -> Iterator[UnparsedVersionVar]: + """Filter items in the given iterable that match the specifier. + + :param iterable: + An iterable that can contain version strings and :class:`Version` instances. + The items in the iterable will be filtered according to the specifier. + :param prereleases: + Whether or not to allow prereleases in the returned iterator. If set to + ``None`` (the default), it will intelligently decide whether to allow + prereleases or not (based on the :attr:`prereleases` attribute, and + whether the only versions matching are prereleases). + + This method is smarter than just ``filter(Specifier().contains, [...])`` + because it implements the rule from :pep:`440` that a prerelease item + SHOULD be accepted if no other versions match the given specifier. + + >>> list(Specifier(">=1.2.3").filter(["1.2", "1.3", "1.5a1"])) + ['1.3'] + >>> list(Specifier(">=1.2.3").filter(["1.2", "1.2.3", "1.3", Version("1.4")])) + ['1.2.3', '1.3', <Version('1.4')>] + >>> list(Specifier(">=1.2.3").filter(["1.2", "1.5a1"])) + ['1.5a1'] + >>> list(Specifier(">=1.2.3").filter(["1.3", "1.5a1"], prereleases=True)) + ['1.3', '1.5a1'] + >>> list(Specifier(">=1.2.3", prereleases=True).filter(["1.3", "1.5a1"])) + ['1.3', '1.5a1'] + """ + + yielded = False + found_prereleases = [] + + kw = {"prereleases": prereleases if prereleases is not None else True} + + # Attempt to iterate over all the values in the iterable and if any of + # them match, yield them. + for version in iterable: + parsed_version = _coerce_version(version) + + if self.contains(parsed_version, **kw): + # If our version is a prerelease, and we were not set to allow + # prereleases, then we'll store it for later in case nothing + # else matches this specifier. + if parsed_version.is_prerelease and not ( + prereleases or self.prereleases + ): + found_prereleases.append(version) + # Either this is not a prerelease, or we should have been + # accepting prereleases from the beginning.
+ else: + yielded = True + yield version + + # Now that we've iterated over everything, determine if we've yielded + # any values, and if we have not and we have any prereleases stored up + # then we will go ahead and yield the prereleases. + if not yielded and found_prereleases: + for version in found_prereleases: + yield version + + +_prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$") + + +def _version_split(version: str) -> list[str]: + """Split version into components. + + The split components are intended for version comparison. The logic does + not attempt to retain the original version string, so joining the + components back with :func:`_version_join` may not produce the original + version string. + """ + result: list[str] = [] + + epoch, _, rest = version.rpartition("!") + result.append(epoch or "0") + + for item in rest.split("."): + match = _prefix_regex.search(item) + if match: + result.extend(match.groups()) + else: + result.append(item) + return result + + +def _version_join(components: list[str]) -> str: + """Join split version components into a version string. + + This function assumes the input came from :func:`_version_split`, where the + first component must be the epoch (either empty or numeric), and all other + components numeric. + """ + epoch, *rest = components + return f"{epoch}!{'.'.join(rest)}" + + +def _is_not_suffix(segment: str) -> bool: + return not any( + segment.startswith(prefix) for prefix in ("dev", "a", "b", "rc", "post") + ) + + +def _pad_version(left: list[str], right: list[str]) -> tuple[list[str], list[str]]: + left_split, right_split = [], [] + + # Get the release segment of our versions + left_split.append(list(itertools.takewhile(lambda x: x.isdigit(), left))) + right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right))) + + # Get the rest of our versions + left_split.append(left[len(left_split[0]) :]) + right_split.append(right[len(right_split[0]) :]) + + # Insert our padding + left_split.insert(1, ["0"] * max(0, len(right_split[0]) - len(left_split[0]))) + right_split.insert(1, ["0"] * max(0, len(left_split[0]) - len(right_split[0]))) + + return ( + list(itertools.chain.from_iterable(left_split)), + list(itertools.chain.from_iterable(right_split)), + ) + + +class SpecifierSet(BaseSpecifier): + """This class abstracts handling of a set of version specifiers. + + It can be passed a single specifier (``>=3.0``), a comma-separated list of + specifiers (``>=3.0,!=3.1``), or no specifier at all. + """ + + def __init__( + self, + specifiers: str | Iterable[Specifier] = "", + prereleases: bool | None = None, + ) -> None: + """Initialize a SpecifierSet instance. + + :param specifiers: + The string representation of a specifier or a comma-separated list of + specifiers which will be parsed and normalized before use. + May also be an iterable of ``Specifier`` instances, which will be used + as is. + :param prereleases: + This tells the SpecifierSet if it should accept prerelease versions if + applicable or not. The default of ``None`` will autodetect it from the + given specifiers. + + :raises InvalidSpecifier: + If the given ``specifiers`` are not parseable, this exception will be + raised. + """ + + if isinstance(specifiers, str): + # Split on `,` to break each individual specifier into its own item, and + # strip each item to remove leading/trailing whitespace.
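+ # (Editorial note: empty segments, e.g. from a trailing comma, are + # dropped by the "if s.strip()" guard below.)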
+ split_specifiers = [s.strip() for s in specifiers.split(",") if s.strip()] + + # Make each individual specifier a Specifier and save in a frozen set + # for later. + self._specs = frozenset(map(Specifier, split_specifiers)) + else: + # Save the supplied specifiers in a frozen set. + self._specs = frozenset(specifiers) + + # Store our prereleases value so we can use it later to determine if + # we accept prereleases or not. + self._prereleases = prereleases + + @property + def prereleases(self) -> bool | None: + # If we have been given an explicit prerelease modifier, then we'll + # pass that through here. + if self._prereleases is not None: + return self._prereleases + + # If we don't have any specifiers, and we don't have a forced value, + # then we'll just return None since we don't know if this should have + # pre-releases or not. + if not self._specs: + return None + + # Otherwise we'll see if any of the given specifiers accept + # prereleases; if any of them do, we'll return True, otherwise False. + return any(s.prereleases for s in self._specs) + + @prereleases.setter + def prereleases(self, value: bool) -> None: + self._prereleases = value + + def __repr__(self) -> str: + """A representation of the specifier set that shows all internal state. + + Note that the ordering of the individual specifiers within the set may not + match the input string. + + >>> SpecifierSet('>=1.0.0,!=2.0.0') + <SpecifierSet('!=2.0.0,>=1.0.0')> + >>> SpecifierSet('>=1.0.0,!=2.0.0', prereleases=False) + <SpecifierSet('!=2.0.0,>=1.0.0', prereleases=False)> + >>> SpecifierSet('>=1.0.0,!=2.0.0', prereleases=True) + <SpecifierSet('!=2.0.0,>=1.0.0', prereleases=True)> + """ + pre = ( + f", prereleases={self.prereleases!r}" + if self._prereleases is not None + else "" + ) + + return f"<SpecifierSet({str(self)!r}{pre})>" + + def __str__(self) -> str: + """A string representation of the specifier set that can be round-tripped. + + Note that the ordering of the individual specifiers within the set may not + match the input string. + + >>> str(SpecifierSet(">=1.0.0,!=1.0.1")) + '!=1.0.1,>=1.0.0' + >>> str(SpecifierSet(">=1.0.0,!=1.0.1", prereleases=False)) + '!=1.0.1,>=1.0.0' + """ + return ",".join(sorted(str(s) for s in self._specs)) + + def __hash__(self) -> int: + return hash(self._specs) + + def __and__(self, other: SpecifierSet | str) -> SpecifierSet: + """Return a SpecifierSet which is a combination of the two sets. + + :param other: The other object to combine with. + + >>> SpecifierSet(">=1.0.0,!=1.0.1") & '<=2.0.0,!=2.0.1' + <SpecifierSet('!=1.0.1,!=2.0.1,<=2.0.0,>=1.0.0')> + >>> SpecifierSet(">=1.0.0,!=1.0.1") & SpecifierSet('<=2.0.0,!=2.0.1') + <SpecifierSet('!=1.0.1,!=2.0.1,<=2.0.0,>=1.0.0')> + """ + if isinstance(other, str): + other = SpecifierSet(other) + elif not isinstance(other, SpecifierSet): + return NotImplemented + + specifier = SpecifierSet() + specifier._specs = frozenset(self._specs | other._specs) + + if self._prereleases is None and other._prereleases is not None: + specifier._prereleases = other._prereleases + elif self._prereleases is not None and other._prereleases is None: + specifier._prereleases = self._prereleases + elif self._prereleases == other._prereleases: + specifier._prereleases = self._prereleases + else: + raise ValueError( + "Cannot combine SpecifierSets with True and False prerelease overrides." + ) + + return specifier + + def __eq__(self, other: object) -> bool: + """Whether or not the two SpecifierSet-like objects are equal. + + :param other: The other object to check against. + + The value of :attr:`prereleases` is ignored.
+ + >>> SpecifierSet(">=1.0.0,!=1.0.1") == SpecifierSet(">=1.0.0,!=1.0.1") + True + >>> (SpecifierSet(">=1.0.0,!=1.0.1", prereleases=False) == + ... SpecifierSet(">=1.0.0,!=1.0.1", prereleases=True)) + True + >>> SpecifierSet(">=1.0.0,!=1.0.1") == ">=1.0.0,!=1.0.1" + True + >>> SpecifierSet(">=1.0.0,!=1.0.1") == SpecifierSet(">=1.0.0") + False + >>> SpecifierSet(">=1.0.0,!=1.0.1") == SpecifierSet(">=1.0.0,!=1.0.2") + False + """ + if isinstance(other, (str, Specifier)): + other = SpecifierSet(str(other)) + elif not isinstance(other, SpecifierSet): + return NotImplemented + + return self._specs == other._specs + + def __len__(self) -> int: + """Returns the number of specifiers in this specifier set.""" + return len(self._specs) + + def __iter__(self) -> Iterator[Specifier]: + """ + Returns an iterator over all the underlying :class:`Specifier` instances + in this specifier set. + + >>> sorted(SpecifierSet(">=1.0.0,!=1.0.1"), key=str) + [, =1.0.0')>] + """ + return iter(self._specs) + + def __contains__(self, item: UnparsedVersion) -> bool: + """Return whether or not the item is contained in this specifier. + + :param item: The item to check for. + + This is used for the ``in`` operator and behaves the same as + :meth:`contains` with no ``prereleases`` argument passed. + + >>> "1.2.3" in SpecifierSet(">=1.0.0,!=1.0.1") + True + >>> Version("1.2.3") in SpecifierSet(">=1.0.0,!=1.0.1") + True + >>> "1.0.1" in SpecifierSet(">=1.0.0,!=1.0.1") + False + >>> "1.3.0a1" in SpecifierSet(">=1.0.0,!=1.0.1") + False + >>> "1.3.0a1" in SpecifierSet(">=1.0.0,!=1.0.1", prereleases=True) + True + """ + return self.contains(item) + + def contains( + self, + item: UnparsedVersion, + prereleases: bool | None = None, + installed: bool | None = None, + ) -> bool: + """Return whether or not the item is contained in this SpecifierSet. + + :param item: + The item to check for, which can be a version string or a + :class:`Version` instance. + :param prereleases: + Whether or not to match prereleases with this SpecifierSet. If set to + ``None`` (the default), it uses :attr:`prereleases` to determine + whether or not prereleases are allowed. + + >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.2.3") + True + >>> SpecifierSet(">=1.0.0,!=1.0.1").contains(Version("1.2.3")) + True + >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.0.1") + False + >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.3.0a1") + False + >>> SpecifierSet(">=1.0.0,!=1.0.1", prereleases=True).contains("1.3.0a1") + True + >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.3.0a1", prereleases=True) + True + """ + # Ensure that our item is a Version instance. + if not isinstance(item, Version): + item = Version(item) + + # Determine if we're forcing a prerelease or not, if we're not forcing + # one for this particular filter call, then we'll use whatever the + # SpecifierSet thinks for whether or not we should support prereleases. + if prereleases is None: + prereleases = self.prereleases + + # We can determine if we're going to allow pre-releases by looking to + # see if any of the underlying items supports them. If none of them do + # and this item is a pre-release then we do not allow it and we can + # short circuit that here. 
+ # Note: This means that 1.0.dev1 would not be contained in something + # like >=1.0.devabc, however it would be in >=1.0.devabc,>0.0.dev0 + if not prereleases and item.is_prerelease: + return False + + if installed and item.is_prerelease: + item = Version(item.base_version) + + # We simply dispatch to the underlying specs here to make sure that the + # given version is contained within all of them. + # Note: This use of all() here means that an empty set of specifiers + # will always return True; this is an explicit design decision. + return all(s.contains(item, prereleases=prereleases) for s in self._specs) + + def filter( + self, iterable: Iterable[UnparsedVersionVar], prereleases: bool | None = None + ) -> Iterator[UnparsedVersionVar]: + """Filter items in the given iterable that match the specifiers in this set. + + :param iterable: + An iterable that can contain version strings and :class:`Version` instances. + The items in the iterable will be filtered according to the specifier. + :param prereleases: + Whether or not to allow prereleases in the returned iterator. If set to + ``None`` (the default), it will intelligently decide whether to allow + prereleases or not (based on the :attr:`prereleases` attribute, and + whether the only versions matching are prereleases). + + This method is smarter than just ``filter(SpecifierSet(...).contains, [...])`` + because it implements the rule from :pep:`440` that a prerelease item + SHOULD be accepted if no other versions match the given specifier. + + >>> list(SpecifierSet(">=1.2.3").filter(["1.2", "1.3", "1.5a1"])) + ['1.3'] + >>> list(SpecifierSet(">=1.2.3").filter(["1.2", "1.3", Version("1.4")])) + ['1.3', <Version('1.4')>] + >>> list(SpecifierSet(">=1.2.3").filter(["1.2", "1.5a1"])) + [] + >>> list(SpecifierSet(">=1.2.3").filter(["1.3", "1.5a1"], prereleases=True)) + ['1.3', '1.5a1'] + >>> list(SpecifierSet(">=1.2.3", prereleases=True).filter(["1.3", "1.5a1"])) + ['1.3', '1.5a1'] + + An "empty" SpecifierSet will filter items based on the presence of prerelease + versions in the set. + + >>> list(SpecifierSet("").filter(["1.3", "1.5a1"])) + ['1.3'] + >>> list(SpecifierSet("").filter(["1.5a1"])) + ['1.5a1'] + >>> list(SpecifierSet("", prereleases=True).filter(["1.3", "1.5a1"])) + ['1.3', '1.5a1'] + >>> list(SpecifierSet("").filter(["1.3", "1.5a1"], prereleases=True)) + ['1.3', '1.5a1'] + """ + # Determine if we're forcing a prerelease or not, if we're not forcing + # one for this particular filter call, then we'll use whatever the + # SpecifierSet thinks for whether or not we should support prereleases. + if prereleases is None: + prereleases = self.prereleases + + # If we have any specifiers, then we want to wrap our iterable in the + # filter method for each one, this will act as a logical AND amongst + # each specifier. + if self._specs: + for spec in self._specs: + iterable = spec.filter(iterable, prereleases=bool(prereleases)) + return iter(iterable) + # If we do not have any specifiers, then we need to have a rough filter + # which will filter out any pre-releases, unless there are no final + # releases.
+ else: + filtered: list[UnparsedVersionVar] = [] + found_prereleases: list[UnparsedVersionVar] = [] + + for item in iterable: + parsed_version = _coerce_version(item) + + # Store any item which is a pre-release for later unless we've + # already found a final version or we are accepting prereleases + if parsed_version.is_prerelease and not prereleases: + if not filtered: + found_prereleases.append(item) + else: + filtered.append(item) + + # If we've found no items except for pre-releases, then we'll go + # ahead and use the pre-releases + if not filtered and found_prereleases and prereleases is None: + return iter(found_prereleases) + + return iter(filtered) diff --git a/temp_venv/lib/python3.13/site-packages/packaging/tags.py b/temp_venv/lib/python3.13/site-packages/packaging/tags.py new file mode 100644 index 0000000000000000000000000000000000000000..8522f59c4f2d0039c1a02ea9aef66fe017dbdb20 --- /dev/null +++ b/temp_venv/lib/python3.13/site-packages/packaging/tags.py @@ -0,0 +1,656 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import annotations + +import logging +import platform +import re +import struct +import subprocess +import sys +import sysconfig +from importlib.machinery import EXTENSION_SUFFIXES +from typing import ( + Iterable, + Iterator, + Sequence, + Tuple, + cast, +) + +from . import _manylinux, _musllinux + +logger = logging.getLogger(__name__) + +PythonVersion = Sequence[int] +AppleVersion = Tuple[int, int] + +INTERPRETER_SHORT_NAMES: dict[str, str] = { + "python": "py", # Generic. + "cpython": "cp", + "pypy": "pp", + "ironpython": "ip", + "jython": "jy", +} + + +_32_BIT_INTERPRETER = struct.calcsize("P") == 4 + + +class Tag: + """ + A representation of the tag triple for a wheel. + + Instances are considered immutable and thus are hashable. Equality checking + is also supported. + """ + + __slots__ = ["_abi", "_hash", "_interpreter", "_platform"] + + def __init__(self, interpreter: str, abi: str, platform: str) -> None: + self._interpreter = interpreter.lower() + self._abi = abi.lower() + self._platform = platform.lower() + # The __hash__ of every single element in a Set[Tag] will be evaluated each time + # that a set calls its `.disjoint()` method, which may be called hundreds of + # times when scanning a page of links for packages with tags matching that + # Set[Tag]. Pre-computing the value here produces significant speedups for + # downstream consumers. + self._hash = hash((self._interpreter, self._abi, self._platform)) + + @property + def interpreter(self) -> str: + return self._interpreter + + @property + def abi(self) -> str: + return self._abi + + @property + def platform(self) -> str: + return self._platform + + def __eq__(self, other: object) -> bool: + if not isinstance(other, Tag): + return NotImplemented + + return ( + (self._hash == other._hash) # Short-circuit ASAP for perf reasons. + and (self._platform == other._platform) + and (self._abi == other._abi) + and (self._interpreter == other._interpreter) + ) + + def __hash__(self) -> int: + return self._hash + + def __str__(self) -> str: + return f"{self._interpreter}-{self._abi}-{self._platform}" + + def __repr__(self) -> str: + return f"<{self} @ {id(self)}>" + + +def parse_tag(tag: str) -> frozenset[Tag]: + """ + Parses the provided tag (e.g. `py3-none-any`) into a frozenset of Tag instances. 
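+ For example, the compressed tag set "py2.py3-none-any" expands to the two + tags "py2-none-any" and "py3-none-any".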
+ + Returning a set is required due to the possibility that the tag is a + compressed tag set. + """ + tags = set() + interpreters, abis, platforms = tag.split("-") + for interpreter in interpreters.split("."): + for abi in abis.split("."): + for platform_ in platforms.split("."): + tags.add(Tag(interpreter, abi, platform_)) + return frozenset(tags) + + + def _get_config_var(name: str, warn: bool = False) -> int | str | None: + value: int | str | None = sysconfig.get_config_var(name) + if value is None and warn: + logger.debug( + "Config variable '%s' is unset, Python ABI tag may be incorrect", name + ) + return value + + + def _normalize_string(string: str) -> str: + return string.replace(".", "_").replace("-", "_").replace(" ", "_") + + + def _is_threaded_cpython(abis: list[str]) -> bool: + """ + Determine if the ABI corresponds to a threaded (`--disable-gil`) build. + + The threaded builds are indicated by a "t" in the abiflags. + """ + if len(abis) == 0: + return False + # expect e.g., cp313 + m = re.match(r"cp\d+(.*)", abis[0]) + if not m: + return False + abiflags = m.group(1) + return "t" in abiflags + + + def _abi3_applies(python_version: PythonVersion, threading: bool) -> bool: + """ + Determine if the Python version supports abi3. + + PEP 384 was first implemented in Python 3.2. The threaded (`--disable-gil`) + builds do not support abi3. + """ + return len(python_version) > 1 and tuple(python_version) >= (3, 2) and not threading + + + def _cpython_abis(py_version: PythonVersion, warn: bool = False) -> list[str]: + py_version = tuple(py_version) # To allow for version comparison. + abis = [] + version = _version_nodot(py_version[:2]) + threading = debug = pymalloc = ucs4 = "" + with_debug = _get_config_var("Py_DEBUG", warn) + has_refcount = hasattr(sys, "gettotalrefcount") + # Windows doesn't set Py_DEBUG, so checking for support of debug-compiled + # extension modules is the best option. + # https://github.com/pypa/pip/issues/3383#issuecomment-173267692 + has_ext = "_d.pyd" in EXTENSION_SUFFIXES + if with_debug or (with_debug is None and (has_refcount or has_ext)): + debug = "d" + if py_version >= (3, 13) and _get_config_var("Py_GIL_DISABLED", warn): + threading = "t" + if py_version < (3, 8): + with_pymalloc = _get_config_var("WITH_PYMALLOC", warn) + if with_pymalloc or with_pymalloc is None: + pymalloc = "m" + if py_version < (3, 3): + unicode_size = _get_config_var("Py_UNICODE_SIZE", warn) + if unicode_size == 4 or ( + unicode_size is None and sys.maxunicode == 0x10FFFF + ): + ucs4 = "u" + elif debug: + # Debug builds can also load "normal" extension modules. + # We can also assume no UCS-4 or pymalloc requirement. + abis.append(f"cp{version}{threading}") + abis.insert(0, f"cp{version}{threading}{debug}{pymalloc}{ucs4}") + return abis + + + def cpython_tags( + python_version: PythonVersion | None = None, + abis: Iterable[str] | None = None, + platforms: Iterable[str] | None = None, + *, + warn: bool = False, +) -> Iterator[Tag]: + """ + Yields the tags for a CPython interpreter. + + The tags consist of: + - cp<python_version>-<abi>-<platform> + - cp<python_version>-abi3-<platform> + - cp<python_version>-none-<platform> + - cp<less than python_version>-abi3-<platform> # Older Python versions down to 3.2. + + If python_version only specifies a major version then user-provided ABIs and + the 'none' ABI will be used. + + If 'abi3' or 'none' are specified in 'abis' then they will be yielded at + their normal position and not at the beginning.
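+ + For example (an illustrative call, not upstream documentation), + cpython_tags((3, 13), abis=["cp313"], platforms=["linux_x86_64"]) yields + cp313-cp313-linux_x86_64, cp313-abi3-linux_x86_64, cp313-none-linux_x86_64, + and then cp312-abi3-linux_x86_64 down to cp32-abi3-linux_x86_64.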
+ """ + if not python_version: + python_version = sys.version_info[:2] + + interpreter = f"cp{_version_nodot(python_version[:2])}" + + if abis is None: + if len(python_version) > 1: + abis = _cpython_abis(python_version, warn) + else: + abis = [] + abis = list(abis) + # 'abi3' and 'none' are explicitly handled later. + for explicit_abi in ("abi3", "none"): + try: + abis.remove(explicit_abi) + except ValueError: + pass + + platforms = list(platforms or platform_tags()) + for abi in abis: + for platform_ in platforms: + yield Tag(interpreter, abi, platform_) + + threading = _is_threaded_cpython(abis) + use_abi3 = _abi3_applies(python_version, threading) + if use_abi3: + yield from (Tag(interpreter, "abi3", platform_) for platform_ in platforms) + yield from (Tag(interpreter, "none", platform_) for platform_ in platforms) + + if use_abi3: + for minor_version in range(python_version[1] - 1, 1, -1): + for platform_ in platforms: + version = _version_nodot((python_version[0], minor_version)) + interpreter = f"cp{version}" + yield Tag(interpreter, "abi3", platform_) + + +def _generic_abi() -> list[str]: + """ + Return the ABI tag based on EXT_SUFFIX. + """ + # The following are examples of `EXT_SUFFIX`. + # We want to keep the parts which are related to the ABI and remove the + # parts which are related to the platform: + # - linux: '.cpython-310-x86_64-linux-gnu.so' => cp310 + # - mac: '.cpython-310-darwin.so' => cp310 + # - win: '.cp310-win_amd64.pyd' => cp310 + # - win: '.pyd' => cp37 (uses _cpython_abis()) + # - pypy: '.pypy38-pp73-x86_64-linux-gnu.so' => pypy38_pp73 + # - graalpy: '.graalpy-38-native-x86_64-darwin.dylib' + # => graalpy_38_native + + ext_suffix = _get_config_var("EXT_SUFFIX", warn=True) + if not isinstance(ext_suffix, str) or ext_suffix[0] != ".": + raise SystemError("invalid sysconfig.get_config_var('EXT_SUFFIX')") + parts = ext_suffix.split(".") + if len(parts) < 3: + # CPython3.7 and earlier uses ".pyd" on Windows. + return _cpython_abis(sys.version_info[:2]) + soabi = parts[1] + if soabi.startswith("cpython"): + # non-windows + abi = "cp" + soabi.split("-")[1] + elif soabi.startswith("cp"): + # windows + abi = soabi.split("-")[0] + elif soabi.startswith("pypy"): + abi = "-".join(soabi.split("-")[:2]) + elif soabi.startswith("graalpy"): + abi = "-".join(soabi.split("-")[:3]) + elif soabi: + # pyston, ironpython, others? + abi = soabi + else: + return [] + return [_normalize_string(abi)] + + +def generic_tags( + interpreter: str | None = None, + abis: Iterable[str] | None = None, + platforms: Iterable[str] | None = None, + *, + warn: bool = False, +) -> Iterator[Tag]: + """ + Yields the tags for a generic interpreter. + + The tags consist of: + - -- + + The "none" ABI will be added if it was not explicitly provided. + """ + if not interpreter: + interp_name = interpreter_name() + interp_version = interpreter_version(warn=warn) + interpreter = "".join([interp_name, interp_version]) + if abis is None: + abis = _generic_abi() + else: + abis = list(abis) + platforms = list(platforms or platform_tags()) + if "none" not in abis: + abis.append("none") + for abi in abis: + for platform_ in platforms: + yield Tag(interpreter, abi, platform_) + + +def _py_interpreter_range(py_version: PythonVersion) -> Iterator[str]: + """ + Yields Python versions in descending order. + + After the latest version, the major-only version will be yielded, and then + all previous versions of that major version. 
+ """ + if len(py_version) > 1: + yield f"py{_version_nodot(py_version[:2])}" + yield f"py{py_version[0]}" + if len(py_version) > 1: + for minor in range(py_version[1] - 1, -1, -1): + yield f"py{_version_nodot((py_version[0], minor))}" + + +def compatible_tags( + python_version: PythonVersion | None = None, + interpreter: str | None = None, + platforms: Iterable[str] | None = None, +) -> Iterator[Tag]: + """ + Yields the sequence of tags that are compatible with a specific version of Python. + + The tags consist of: + - py*-none- + - -none-any # ... if `interpreter` is provided. + - py*-none-any + """ + if not python_version: + python_version = sys.version_info[:2] + platforms = list(platforms or platform_tags()) + for version in _py_interpreter_range(python_version): + for platform_ in platforms: + yield Tag(version, "none", platform_) + if interpreter: + yield Tag(interpreter, "none", "any") + for version in _py_interpreter_range(python_version): + yield Tag(version, "none", "any") + + +def _mac_arch(arch: str, is_32bit: bool = _32_BIT_INTERPRETER) -> str: + if not is_32bit: + return arch + + if arch.startswith("ppc"): + return "ppc" + + return "i386" + + +def _mac_binary_formats(version: AppleVersion, cpu_arch: str) -> list[str]: + formats = [cpu_arch] + if cpu_arch == "x86_64": + if version < (10, 4): + return [] + formats.extend(["intel", "fat64", "fat32"]) + + elif cpu_arch == "i386": + if version < (10, 4): + return [] + formats.extend(["intel", "fat32", "fat"]) + + elif cpu_arch == "ppc64": + # TODO: Need to care about 32-bit PPC for ppc64 through 10.2? + if version > (10, 5) or version < (10, 4): + return [] + formats.append("fat64") + + elif cpu_arch == "ppc": + if version > (10, 6): + return [] + formats.extend(["fat32", "fat"]) + + if cpu_arch in {"arm64", "x86_64"}: + formats.append("universal2") + + if cpu_arch in {"x86_64", "i386", "ppc64", "ppc", "intel"}: + formats.append("universal") + + return formats + + +def mac_platforms( + version: AppleVersion | None = None, arch: str | None = None +) -> Iterator[str]: + """ + Yields the platform tags for a macOS system. + + The `version` parameter is a two-item tuple specifying the macOS version to + generate platform tags for. The `arch` parameter is the CPU architecture to + generate platform tags for. Both parameters default to the appropriate value + for the current system. + """ + version_str, _, cpu_arch = platform.mac_ver() + if version is None: + version = cast("AppleVersion", tuple(map(int, version_str.split(".")[:2]))) + if version == (10, 16): + # When built against an older macOS SDK, Python will report macOS 10.16 + # instead of the real version. + version_str = subprocess.run( + [ + sys.executable, + "-sS", + "-c", + "import platform; print(platform.mac_ver()[0])", + ], + check=True, + env={"SYSTEM_VERSION_COMPAT": "0"}, + stdout=subprocess.PIPE, + text=True, + ).stdout + version = cast("AppleVersion", tuple(map(int, version_str.split(".")[:2]))) + else: + version = version + if arch is None: + arch = _mac_arch(cpu_arch) + else: + arch = arch + + if (10, 0) <= version and version < (11, 0): + # Prior to Mac OS 11, each yearly release of Mac OS bumped the + # "minor" version number. The major version was always 10. 
+ major_version = 10 + for minor_version in range(version[1], -1, -1): + compat_version = major_version, minor_version + binary_formats = _mac_binary_formats(compat_version, arch) + for binary_format in binary_formats: + yield f"macosx_{major_version}_{minor_version}_{binary_format}" + + if version >= (11, 0): + # Starting with Mac OS 11, each yearly release bumps the major version + # number. The minor versions are now the midyear updates. + minor_version = 0 + for major_version in range(version[0], 10, -1): + compat_version = major_version, minor_version + binary_formats = _mac_binary_formats(compat_version, arch) + for binary_format in binary_formats: + yield f"macosx_{major_version}_{minor_version}_{binary_format}" + + if version >= (11, 0): + # Mac OS 11 on x86_64 is compatible with binaries from previous releases. + # Arm64 support was introduced in 11.0, so no Arm binaries from previous + # releases exist. + # + # However, the "universal2" binary format can have a + # macOS version earlier than 11.0 when the x86_64 part of the binary supports + # that version of macOS. + major_version = 10 + if arch == "x86_64": + for minor_version in range(16, 3, -1): + compat_version = major_version, minor_version + binary_formats = _mac_binary_formats(compat_version, arch) + for binary_format in binary_formats: + yield f"macosx_{major_version}_{minor_version}_{binary_format}" + else: + for minor_version in range(16, 3, -1): + compat_version = major_version, minor_version + binary_format = "universal2" + yield f"macosx_{major_version}_{minor_version}_{binary_format}" + + + def ios_platforms( + version: AppleVersion | None = None, multiarch: str | None = None + ) -> Iterator[str]: + """ + Yields the platform tags for an iOS system. + + :param version: A two-item tuple specifying the iOS version to generate + platform tags for. Defaults to the current iOS version. + :param multiarch: The CPU architecture+ABI to generate platform tags for - + (the value used by `sys.implementation._multiarch` e.g., + `arm64_iphoneos` or `x86_64_iphonesimulator`). Defaults to the current + multiarch value. + """ + if version is None: + # if iOS is the current platform, ios_ver *must* be defined. However, + # it won't exist for CPython versions before 3.13, which causes a mypy + # error. + _, release, _, _ = platform.ios_ver() # type: ignore[attr-defined, unused-ignore] + version = cast("AppleVersion", tuple(map(int, release.split(".")[:2]))) + + if multiarch is None: + multiarch = sys.implementation._multiarch + multiarch = multiarch.replace("-", "_") + + ios_platform_template = "ios_{major}_{minor}_{multiarch}" + + # Consider any iOS major.minor version from the version requested, down to + # 12.0. 12.0 is the first iOS version that is known to have enough features + # to support CPython. Consider every possible minor release up to X.9. The + # highest the minor has ever gone is 8 (14.8 and 15.8) but having some extra + # candidates that won't ever match doesn't really hurt, and it saves us from + # having to keep an explicit list of known iOS versions in the code. Return + # the results in descending order of version number. + + # If the requested major version is less than 12, there won't be any matches. + if version[0] < 12: + return + + # Consider the actual X.Y version that was requested. + yield ios_platform_template.format( + major=version[0], minor=version[1], multiarch=multiarch + ) + + # Consider every minor version from X.0 to the minor version prior to the + # version requested by the platform.
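+ # (Editorial example: a request for 17.2 yields 17.1 and 17.0 here, then + # 16.9 down to 12.0 in the loop that follows.)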
+ for minor in range(version[1] - 1, -1, -1): + yield ios_platform_template.format( + major=version[0], minor=minor, multiarch=multiarch + ) + + for major in range(version[0] - 1, 11, -1): + for minor in range(9, -1, -1): + yield ios_platform_template.format( + major=major, minor=minor, multiarch=multiarch + ) + + + def android_platforms( + api_level: int | None = None, abi: str | None = None + ) -> Iterator[str]: + """ + Yields the :attr:`~Tag.platform` tags for Android. If this function is invoked on + non-Android platforms, the ``api_level`` and ``abi`` arguments are required. + + :param int api_level: The maximum `API level + <https://developer.android.com/tools/releases/platforms>`__ to return. Defaults + to the current system's version, as returned by ``platform.android_ver``. + :param str abi: The `Android ABI <https://developer.android.com/ndk/guides/abis>`__, + e.g. ``arm64_v8a``. Defaults to the current system's ABI, as returned by + ``sysconfig.get_platform``. Hyphens and periods will be replaced with + underscores. + """ + if platform.system() != "Android" and (api_level is None or abi is None): + raise TypeError( + "on non-Android platforms, the api_level and abi arguments are required" + ) + + if api_level is None: + # Python 3.13 was the first version to return platform.system() == "Android", + # and also the first version to define platform.android_ver(). + api_level = platform.android_ver().api_level # type: ignore[attr-defined] + + if abi is None: + abi = sysconfig.get_platform().split("-")[-1] + abi = _normalize_string(abi) + + # 16 is the minimum API level known to have enough features to support CPython + # without major patching. Yield every API level from the maximum down to the + # minimum, inclusive. + min_api_level = 16 + for ver in range(api_level, min_api_level - 1, -1): + yield f"android_{ver}_{abi}" + + + def _linux_platforms(is_32bit: bool = _32_BIT_INTERPRETER) -> Iterator[str]: + linux = _normalize_string(sysconfig.get_platform()) + if not linux.startswith("linux_"): + # we should never be here, just yield the sysconfig one and return + yield linux + return + if is_32bit: + if linux == "linux_x86_64": + linux = "linux_i686" + elif linux == "linux_aarch64": + linux = "linux_armv8l" + _, arch = linux.split("_", 1) + archs = {"armv8l": ["armv8l", "armv7l"]}.get(arch, [arch]) + yield from _manylinux.platform_tags(archs) + yield from _musllinux.platform_tags(archs) + for arch in archs: + yield f"linux_{arch}" + + + def _generic_platforms() -> Iterator[str]: + yield _normalize_string(sysconfig.get_platform()) + + + def platform_tags() -> Iterator[str]: + """ + Provides the platform tags for this installation. + """ + if platform.system() == "Darwin": + return mac_platforms() + elif platform.system() == "iOS": + return ios_platforms() + elif platform.system() == "Android": + return android_platforms() + elif platform.system() == "Linux": + return _linux_platforms() + else: + return _generic_platforms() + + + def interpreter_name() -> str: + """ + Returns the name of the running interpreter. + + Some implementations have a reserved, two-letter abbreviation which will + be returned when appropriate. + """ + name = sys.implementation.name + return INTERPRETER_SHORT_NAMES.get(name) or name + + + def interpreter_version(*, warn: bool = False) -> str: + """ + Returns the version of the running interpreter.
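+ + The version is returned without a dot, e.g. "313" for CPython 3.13.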
+    """
+    version = _get_config_var("py_version_nodot", warn=warn)
+    if version:
+        version = str(version)
+    else:
+        version = _version_nodot(sys.version_info[:2])
+    return version
+
+
+def _version_nodot(version: PythonVersion) -> str:
+    return "".join(map(str, version))
+
+
+def sys_tags(*, warn: bool = False) -> Iterator[Tag]:
+    """
+    Returns the sequence of tag triples for the running interpreter.
+
+    The order of the sequence corresponds to priority order for the
+    interpreter, from most to least important.
+    """
+
+    interp_name = interpreter_name()
+    if interp_name == "cp":
+        yield from cpython_tags(warn=warn)
+    else:
+        yield from generic_tags()
+
+    if interp_name == "pp":
+        interp = "pp3"
+    elif interp_name == "cp":
+        interp = "cp" + interpreter_version(warn=warn)
+    else:
+        interp = None
+    yield from compatible_tags(interpreter=interp)
diff --git a/temp_venv/lib/python3.13/site-packages/packaging/utils.py b/temp_venv/lib/python3.13/site-packages/packaging/utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..23450953df74eccd9c13cd2a955ce09d1f968565
--- /dev/null
+++ b/temp_venv/lib/python3.13/site-packages/packaging/utils.py
@@ -0,0 +1,163 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import annotations
+
+import functools
+import re
+from typing import NewType, Tuple, Union, cast
+
+from .tags import Tag, parse_tag
+from .version import InvalidVersion, Version, _TrimmedRelease
+
+BuildTag = Union[Tuple[()], Tuple[int, str]]
+NormalizedName = NewType("NormalizedName", str)
+
+
+class InvalidName(ValueError):
+    """
+    An invalid distribution name; users should refer to the packaging user guide.
+    """
+
+
+class InvalidWheelFilename(ValueError):
+    """
+    An invalid wheel filename was found, users should refer to PEP 427.
+    """
+
+
+class InvalidSdistFilename(ValueError):
+    """
+    An invalid sdist filename was found, users should refer to the packaging user guide.
+    """
+
+
+# Core metadata spec for `Name`
+_validate_regex = re.compile(
+    r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$", re.IGNORECASE
+)
+_canonicalize_regex = re.compile(r"[-_.]+")
+_normalized_regex = re.compile(r"^([a-z0-9]|[a-z0-9]([a-z0-9-](?!--))*[a-z0-9])$")
+# PEP 427: The build number must start with a digit.
+_build_tag_regex = re.compile(r"(\d+)(.*)")
+
+
+def canonicalize_name(name: str, *, validate: bool = False) -> NormalizedName:
+    if validate and not _validate_regex.match(name):
+        raise InvalidName(f"name is invalid: {name!r}")
+    # This is taken from PEP 503.
+    value = _canonicalize_regex.sub("-", name).lower()
+    return cast(NormalizedName, value)
+
+
+def is_normalized_name(name: str) -> bool:
+    return _normalized_regex.match(name) is not None
+
+
+@functools.singledispatch
+def canonicalize_version(
+    version: Version | str, *, strip_trailing_zero: bool = True
+) -> str:
+    """
+    Return a canonical form of a version as a string.
+
+    >>> canonicalize_version('1.0.1')
+    '1.0.1'
+
+    Per PEP 625, versions may have multiple canonical forms, differing
+    only by trailing zeros.
+
+    >>> canonicalize_version('1.0.0')
+    '1'
+    >>> canonicalize_version('1.0.0', strip_trailing_zero=False)
+    '1.0.0'
+
+    Invalid versions are returned unaltered.
+
+    >>> canonicalize_version('foo bar baz')
+    'foo bar baz'
+    """
+    return str(_TrimmedRelease(str(version)) if strip_trailing_zero else version)
+
+
+@canonicalize_version.register
+def _(version: str, *, strip_trailing_zero: bool = True) -> str:
+    try:
+        parsed = Version(version)
+    except InvalidVersion:
+        # Legacy versions cannot be normalized
+        return version
+    return canonicalize_version(parsed, strip_trailing_zero=strip_trailing_zero)
+
+
+def parse_wheel_filename(
+    filename: str,
+) -> tuple[NormalizedName, Version, BuildTag, frozenset[Tag]]:
+    if not filename.endswith(".whl"):
+        raise InvalidWheelFilename(
+            f"Invalid wheel filename (extension must be '.whl'): {filename!r}"
+        )
+
+    filename = filename[:-4]
+    dashes = filename.count("-")
+    if dashes not in (4, 5):
+        raise InvalidWheelFilename(
+            f"Invalid wheel filename (wrong number of parts): {filename!r}"
+        )
+
+    parts = filename.split("-", dashes - 2)
+    name_part = parts[0]
+    # See PEP 427 for the rules on escaping the project name.
+    if "__" in name_part or re.match(r"^[\w\d._]*$", name_part, re.UNICODE) is None:
+        raise InvalidWheelFilename(f"Invalid project name: {filename!r}")
+    name = canonicalize_name(name_part)
+
+    try:
+        version = Version(parts[1])
+    except InvalidVersion as e:
+        raise InvalidWheelFilename(
+            f"Invalid wheel filename (invalid version): {filename!r}"
+        ) from e
+
+    if dashes == 5:
+        build_part = parts[2]
+        build_match = _build_tag_regex.match(build_part)
+        if build_match is None:
+            raise InvalidWheelFilename(
+                f"Invalid build number: {build_part} in {filename!r}"
+            )
+        build = cast(BuildTag, (int(build_match.group(1)), build_match.group(2)))
+    else:
+        build = ()
+    tags = parse_tag(parts[-1])
+    return (name, version, build, tags)
+
+
+def parse_sdist_filename(filename: str) -> tuple[NormalizedName, Version]:
+    if filename.endswith(".tar.gz"):
+        file_stem = filename[: -len(".tar.gz")]
+    elif filename.endswith(".zip"):
+        file_stem = filename[: -len(".zip")]
+    else:
+        raise InvalidSdistFilename(
+            f"Invalid sdist filename (extension must be '.tar.gz' or '.zip'):"
+            f" {filename!r}"
+        )
+
+    # We are requiring a PEP 440 version, which cannot contain dashes,
+    # so we split on the last dash.
+    name_part, sep, version_part = file_stem.rpartition("-")
+    if not sep:
+        raise InvalidSdistFilename(f"Invalid sdist filename: {filename!r}")
+
+    name = canonicalize_name(name_part)
+
+    try:
+        version = Version(version_part)
+    except InvalidVersion as e:
+        raise InvalidSdistFilename(
+            f"Invalid sdist filename (invalid version): {filename!r}"
+        ) from e
+
+    return (name, version)
diff --git a/temp_venv/lib/python3.13/site-packages/packaging/version.py b/temp_venv/lib/python3.13/site-packages/packaging/version.py
new file mode 100644
index 0000000000000000000000000000000000000000..c9bbda20e463b8d9389ecd65f74af33810a02bdd
--- /dev/null
+++ b/temp_venv/lib/python3.13/site-packages/packaging/version.py
@@ -0,0 +1,582 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+"""
+.. testsetup::
+
+    from packaging.version import parse, Version
+"""
+
+from __future__ import annotations
+
+import itertools
+import re
+from typing import Any, Callable, NamedTuple, SupportsInt, Tuple, Union
+
+from ._structures import Infinity, InfinityType, NegativeInfinity, NegativeInfinityType
+
+__all__ = ["VERSION_PATTERN", "InvalidVersion", "Version", "parse"]
+
+LocalType = Tuple[Union[int, str], ...]
+
+CmpPrePostDevType = Union[InfinityType, NegativeInfinityType, Tuple[str, int]]
+CmpLocalType = Union[
+    NegativeInfinityType,
+    Tuple[Union[Tuple[int, str], Tuple[NegativeInfinityType, Union[int, str]]], ...],
+]
+CmpKey = Tuple[
+    int,
+    Tuple[int, ...],
+    CmpPrePostDevType,
+    CmpPrePostDevType,
+    CmpPrePostDevType,
+    CmpLocalType,
+]
+VersionComparisonMethod = Callable[[CmpKey, CmpKey], bool]
+
+
+class _Version(NamedTuple):
+    epoch: int
+    release: tuple[int, ...]
+    dev: tuple[str, int] | None
+    pre: tuple[str, int] | None
+    post: tuple[str, int] | None
+    local: LocalType | None
+
+
+def parse(version: str) -> Version:
+    """Parse the given version string.
+
+    >>> parse('1.0.dev1')
+    <Version('1.0.dev1')>
+
+    :param version: The version string to parse.
+    :raises InvalidVersion: When the version string is not a valid version.
+    """
+    return Version(version)
+
+
+class InvalidVersion(ValueError):
+    """Raised when a version string is not a valid version.
+
+    >>> Version("invalid")
+    Traceback (most recent call last):
+        ...
+    packaging.version.InvalidVersion: Invalid version: 'invalid'
+    """
+
+
+class _BaseVersion:
+    _key: tuple[Any, ...]
+
+    def __hash__(self) -> int:
+        return hash(self._key)
+
+    # Please keep the duplicated `isinstance` check
+    # in the six comparisons hereunder
+    # unless you find a way to avoid adding overhead function calls.
+    def __lt__(self, other: _BaseVersion) -> bool:
+        if not isinstance(other, _BaseVersion):
+            return NotImplemented
+
+        return self._key < other._key
+
+    def __le__(self, other: _BaseVersion) -> bool:
+        if not isinstance(other, _BaseVersion):
+            return NotImplemented
+
+        return self._key <= other._key
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, _BaseVersion):
+            return NotImplemented
+
+        return self._key == other._key
+
+    def __ge__(self, other: _BaseVersion) -> bool:
+        if not isinstance(other, _BaseVersion):
+            return NotImplemented
+
+        return self._key >= other._key
+
+    def __gt__(self, other: _BaseVersion) -> bool:
+        if not isinstance(other, _BaseVersion):
+            return NotImplemented
+
+        return self._key > other._key
+
+    def __ne__(self, other: object) -> bool:
+        if not isinstance(other, _BaseVersion):
+            return NotImplemented
+
+        return self._key != other._key
+
+
+# Deliberately not anchored to the start and end of the string, to make it
+# easier for 3rd party code to reuse
+_VERSION_PATTERN = r"""
+    v?
+    (?:
+        (?:(?P<epoch>[0-9]+)!)?                           # epoch
+        (?P<release>[0-9]+(?:\.[0-9]+)*)                  # release segment
+        (?P<pre>                                          # pre-release
+            [-_\.]?
+            (?P<pre_l>alpha|a|beta|b|preview|pre|c|rc)
+            [-_\.]?
+            (?P<pre_n>[0-9]+)?
+        )?
+        (?P<post>                                         # post release
+            (?:-(?P<post_n1>[0-9]+))
+            |
+            (?:
+                [-_\.]?
+                (?P<post_l>post|rev|r)
+                [-_\.]?
+                (?P<post_n2>[0-9]+)?
+            )
+        )?
+        (?P<dev>                                          # dev release
+            [-_\.]?
+            (?P<dev_l>dev)
+            [-_\.]?
+            (?P<dev_n>[0-9]+)?
+        )?
+    )
+    (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))?       # local version
+"""
+
+VERSION_PATTERN = _VERSION_PATTERN
+"""
+A string containing the regular expression used to match a valid version.
+
+The pattern is not anchored at either end, and is intended for embedding in larger
+expressions (for example, matching a version number as part of a file name). The
+regular expression should be compiled with the ``re.VERBOSE`` and ``re.IGNORECASE``
+flags set.
+
+:meta hide-value:
+"""
+
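+# Editor's illustration (not part of the upstream module): VERSION_PATTERN is
+# unanchored, so it can be embedded in a larger expression, but it only works
+# when compiled with both re.VERBOSE and re.IGNORECASE. A minimal sketch; the
+# names wheel_re and the "demo" filename are hypothetical, and it assumes this
+# vendored module is importable as packaging.version:
+#
+#     import re
+#     from packaging.version import VERSION_PATTERN
+#
+#     wheel_re = re.compile(
+#         r"^demo-(" + VERSION_PATTERN + r")-py3-none-any\.whl$",
+#         re.VERBOSE | re.IGNORECASE,
+#     )
+#     assert wheel_re.match("demo-1.0rc2-py3-none-any.whl") is not None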
+
+class Version(_BaseVersion):
+    """This class abstracts handling of a project's versions.
+
+    A :class:`Version` instance is comparison aware and can be compared and
+    sorted using the standard Python interfaces.
+
+    >>> v1 = Version("1.0a5")
+    >>> v2 = Version("1.0")
+    >>> v1
+    <Version('1.0a5')>
+    >>> v2
+    <Version('1.0')>
+    >>> v1 < v2
+    True
+    >>> v1 == v2
+    False
+    >>> v1 > v2
+    False
+    >>> v1 >= v2
+    False
+    >>> v1 <= v2
+    True
+    """
+
+    _regex = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.VERBOSE | re.IGNORECASE)
+    _key: CmpKey
+
+    def __init__(self, version: str) -> None:
+        """Initialize a Version object.
+
+        :param version:
+            The string representation of a version which will be parsed and normalized
+            before use.
+        :raises InvalidVersion:
+            If the ``version`` does not conform to PEP 440 in any way then this
+            exception will be raised.
+        """
+
+        # Validate the version and parse it into pieces
+        match = self._regex.search(version)
+        if not match:
+            raise InvalidVersion(f"Invalid version: {version!r}")
+
+        # Store the parsed out pieces of the version
+        self._version = _Version(
+            epoch=int(match.group("epoch")) if match.group("epoch") else 0,
+            release=tuple(int(i) for i in match.group("release").split(".")),
+            pre=_parse_letter_version(match.group("pre_l"), match.group("pre_n")),
+            post=_parse_letter_version(
+                match.group("post_l"), match.group("post_n1") or match.group("post_n2")
+            ),
+            dev=_parse_letter_version(match.group("dev_l"), match.group("dev_n")),
+            local=_parse_local_version(match.group("local")),
+        )
+
+        # Generate a key which will be used for sorting
+        self._key = _cmpkey(
+            self._version.epoch,
+            self._version.release,
+            self._version.pre,
+            self._version.post,
+            self._version.dev,
+            self._version.local,
+        )
+
+    def __repr__(self) -> str:
+        """A representation of the Version that shows all internal state.
+
+        >>> Version('1.0.0')
+        <Version('1.0.0')>
+        """
+        return f"<Version('{self}')>"
+
+    def __str__(self) -> str:
+        """A string representation of the version that can be round-tripped.
+
+        >>> str(Version("1.0a5"))
+        '1.0a5'
+        """
+        parts = []
+
+        # Epoch
+        if self.epoch != 0:
+            parts.append(f"{self.epoch}!")
+
+        # Release segment
+        parts.append(".".join(str(x) for x in self.release))
+
+        # Pre-release
+        if self.pre is not None:
+            parts.append("".join(str(x) for x in self.pre))
+
+        # Post-release
+        if self.post is not None:
+            parts.append(f".post{self.post}")
+
+        # Development release
+        if self.dev is not None:
+            parts.append(f".dev{self.dev}")
+
+        # Local version segment
+        if self.local is not None:
+            parts.append(f"+{self.local}")
+
+        return "".join(parts)
+
+    @property
+    def epoch(self) -> int:
+        """The epoch of the version.
+
+        >>> Version("2.0.0").epoch
+        0
+        >>> Version("1!2.0.0").epoch
+        1
+        """
+        return self._version.epoch
+
+    @property
+    def release(self) -> tuple[int, ...]:
+        """The components of the "release" segment of the version.
+
+        >>> Version("1.2.3").release
+        (1, 2, 3)
+        >>> Version("2.0.0").release
+        (2, 0, 0)
+        >>> Version("1!2.0.0.post0").release
+        (2, 0, 0)
+
+        Includes trailing zeroes but not the epoch or any pre-release / development /
+        post-release suffixes.
+        """
+        return self._version.release
+
+    @property
+    def pre(self) -> tuple[str, int] | None:
+        """The pre-release segment of the version.
+
+        >>> print(Version("1.2.3").pre)
+        None
+        >>> Version("1.2.3a1").pre
+        ('a', 1)
+        >>> Version("1.2.3b1").pre
+        ('b', 1)
+        >>> Version("1.2.3rc1").pre
+        ('rc', 1)
+        """
+        return self._version.pre
+
+    @property
+    def post(self) -> int | None:
+        """The post-release number of the version.
+
+        >>> print(Version("1.2.3").post)
+        None
+        >>> Version("1.2.3.post1").post
+        1
+        """
+        return self._version.post[1] if self._version.post else None
+
+    @property
+    def dev(self) -> int | None:
+        """The development number of the version.
+
+        >>> print(Version("1.2.3").dev)
+        None
+        >>> Version("1.2.3.dev1").dev
+        1
+        """
+        return self._version.dev[1] if self._version.dev else None
+
+    @property
+    def local(self) -> str | None:
+        """The local version segment of the version.
+
+        >>> print(Version("1.2.3").local)
+        None
+        >>> Version("1.2.3+abc").local
+        'abc'
+        """
+        if self._version.local:
+            return ".".join(str(x) for x in self._version.local)
+        else:
+            return None
+
+    @property
+    def public(self) -> str:
+        """The public portion of the version.
+
+        >>> Version("1.2.3").public
+        '1.2.3'
+        >>> Version("1.2.3+abc").public
+        '1.2.3'
+        >>> Version("1!1.2.3dev1+abc").public
+        '1!1.2.3.dev1'
+        """
+        return str(self).split("+", 1)[0]
+
+    @property
+    def base_version(self) -> str:
+        """The "base version" of the version.
+
+        >>> Version("1.2.3").base_version
+        '1.2.3'
+        >>> Version("1.2.3+abc").base_version
+        '1.2.3'
+        >>> Version("1!1.2.3dev1+abc").base_version
+        '1!1.2.3'
+
+        The "base version" is the public version of the project without any pre or post
+        release markers.
+        """
+        parts = []
+
+        # Epoch
+        if self.epoch != 0:
+            parts.append(f"{self.epoch}!")
+
+        # Release segment
+        parts.append(".".join(str(x) for x in self.release))
+
+        return "".join(parts)
+
+    @property
+    def is_prerelease(self) -> bool:
+        """Whether this version is a pre-release.
+
+        >>> Version("1.2.3").is_prerelease
+        False
+        >>> Version("1.2.3a1").is_prerelease
+        True
+        >>> Version("1.2.3b1").is_prerelease
+        True
+        >>> Version("1.2.3rc1").is_prerelease
+        True
+        >>> Version("1.2.3dev1").is_prerelease
+        True
+        """
+        return self.dev is not None or self.pre is not None
+
+    @property
+    def is_postrelease(self) -> bool:
+        """Whether this version is a post-release.
+
+        >>> Version("1.2.3").is_postrelease
+        False
+        >>> Version("1.2.3.post1").is_postrelease
+        True
+        """
+        return self.post is not None
+
+    @property
+    def is_devrelease(self) -> bool:
+        """Whether this version is a development release.
+
+        >>> Version("1.2.3").is_devrelease
+        False
+        >>> Version("1.2.3.dev1").is_devrelease
+        True
+        """
+        return self.dev is not None
+
+    @property
+    def major(self) -> int:
+        """The first item of :attr:`release` or ``0`` if unavailable.
+
+        >>> Version("1.2.3").major
+        1
+        """
+        return self.release[0] if len(self.release) >= 1 else 0
+
+    @property
+    def minor(self) -> int:
+        """The second item of :attr:`release` or ``0`` if unavailable.
+
+        >>> Version("1.2.3").minor
+        2
+        >>> Version("1").minor
+        0
+        """
+        return self.release[1] if len(self.release) >= 2 else 0
+
+    @property
+    def micro(self) -> int:
+        """The third item of :attr:`release` or ``0`` if unavailable.
+
+        >>> Version("1.2.3").micro
+        3
+        >>> Version("1").micro
+        0
+        """
+        return self.release[2] if len(self.release) >= 3 else 0
+
+
+class _TrimmedRelease(Version):
+    @property
+    def release(self) -> tuple[int, ...]:
+        """
+        Release segment without any trailing zeros.
+
+        >>> _TrimmedRelease('1.0.0').release
+        (1,)
+        >>> _TrimmedRelease('0.0').release
+        (0,)
+        """
+        rel = super().release
+        nonzeros = (index for index, val in enumerate(rel) if val)
+        last_nonzero = max(nonzeros, default=0)
+        return rel[: last_nonzero + 1]
+
+
+def _parse_letter_version(
+    letter: str | None, number: str | bytes | SupportsInt | None
+) -> tuple[str, int] | None:
+    if letter:
+        # We consider there to be an implicit 0 in a pre-release if there is
+        # not a numeral associated with it.
+        if number is None:
+            number = 0
+
+        # We normalize any letters to their lower case form
+        letter = letter.lower()
+
+        # We consider some words to be alternate spellings of other words and
+        # in those cases we want to normalize the spellings to our preferred
+        # spelling.
+        if letter == "alpha":
+            letter = "a"
+        elif letter == "beta":
+            letter = "b"
+        elif letter in ["c", "pre", "preview"]:
+            letter = "rc"
+        elif letter in ["rev", "r"]:
+            letter = "post"
+
+        return letter, int(number)
+
+    assert not letter
+    if number:
+        # We assume if we are given a number, but we are not given a letter
+        # then this is using the implicit post release syntax (e.g. 1.0-1)
+        letter = "post"
+
+        return letter, int(number)
+
+    return None
+
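+# Editor's note (illustrative sketch, not upstream code): the normalization
+# performed by _parse_letter_version above, shown as expected return values:
+#
+#     _parse_letter_version("alpha", None)  -> ("a", 0)     # implicit 0
+#     _parse_letter_version("rev", "4")     -> ("post", 4)  # "rev"/"r" -> post
+#     _parse_letter_version(None, "1")      -> ("post", 1)  # e.g. "1.0-1"
+#     _parse_letter_version(None, None)     -> None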
+
+_local_version_separators = re.compile(r"[\._-]")
+
+
+def _parse_local_version(local: str | None) -> LocalType | None:
+    """
+    Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
+    """
+    if local is not None:
+        return tuple(
+            part.lower() if not part.isdigit() else int(part)
+            for part in _local_version_separators.split(local)
+        )
+    return None
+
+
+def _cmpkey(
+    epoch: int,
+    release: tuple[int, ...],
+    pre: tuple[str, int] | None,
+    post: tuple[str, int] | None,
+    dev: tuple[str, int] | None,
+    local: LocalType | None,
+) -> CmpKey:
+    # When we compare a release version, we want to compare it with all of the
+    # trailing zeros removed. So we'll reverse the list, drop all of the now
+    # leading zeros until we come to something non-zero, take the rest and
+    # re-reverse it back into the correct order, and use that tuple as our
+    # sorting key.
+    _release = tuple(
+        reversed(list(itertools.dropwhile(lambda x: x == 0, reversed(release))))
+    )
+
+    # We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
+    # We'll do this by abusing the pre segment, but we _only_ want to do this
+    # if there is not a pre or a post segment. If we have one of those then
+    # the normal sorting rules will handle this case correctly.
+    if pre is None and post is None and dev is not None:
+        _pre: CmpPrePostDevType = NegativeInfinity
+    # Versions without a pre-release (except as noted above) should sort after
+    # those with one.
+    elif pre is None:
+        _pre = Infinity
+    else:
+        _pre = pre
+
+    # Versions without a post segment should sort before those with one.
+    if post is None:
+        _post: CmpPrePostDevType = NegativeInfinity
+
+    else:
+        _post = post
+
+    # Versions without a development segment should sort after those with one.
+    if dev is None:
+        _dev: CmpPrePostDevType = Infinity
+
+    else:
+        _dev = dev
+
+    if local is None:
+        # Versions without a local segment should sort before those with one.
+        _local: CmpLocalType = NegativeInfinity
+    else:
+        # Versions with a local segment need that segment parsed to implement
+        # the sorting rules in PEP 440.
+        # - Alpha numeric segments sort before numeric segments
+        # - Alpha numeric segments sort lexicographically
+        # - Numeric segments sort numerically
+        # - Shorter versions sort before longer versions when the prefixes
+        #   match exactly
+        _local = tuple(
+            (i, "") if isinstance(i, int) else (NegativeInfinity, i) for i in local
+        )
+
+    return epoch, _release, _pre, _post, _dev, _local
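+
+# Editor's note (illustrative sketch, not upstream code): the net effect of
+# the _cmpkey tricks above, assuming this module is importable as
+# packaging.version:
+#
+#     from packaging.version import Version
+#     versions = ["1.0.post1", "1.0", "1.0a0", "1.0.dev0", "1.0+local.1"]
+#     print(sorted(versions, key=Version))
+#     # ['1.0.dev0', '1.0a0', '1.0', '1.0+local.1', '1.0.post1']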
diff --git a/temp_venv/lib/python3.13/site-packages/pexpect-4.9.0.dist-info/INSTALLER b/temp_venv/lib/python3.13/site-packages/pexpect-4.9.0.dist-info/INSTALLER
new file mode 100644
index 0000000000000000000000000000000000000000..5c69047b2eb8235994febeeae1da4a82365a240a
--- /dev/null
+++ b/temp_venv/lib/python3.13/site-packages/pexpect-4.9.0.dist-info/INSTALLER
@@ -0,0 +1 @@
+uv
\ No newline at end of file
diff --git a/temp_venv/lib/python3.13/site-packages/pexpect-4.9.0.dist-info/LICENSE b/temp_venv/lib/python3.13/site-packages/pexpect-4.9.0.dist-info/LICENSE
new file mode 100644
index 0000000000000000000000000000000000000000..754db5afcb8260c5539b1e0ede3990b8ddbc3e29
--- /dev/null
+++ b/temp_venv/lib/python3.13/site-packages/pexpect-4.9.0.dist-info/LICENSE
@@ -0,0 +1,20 @@
+ISC LICENSE
+
+    This license is approved by the OSI and FSF as GPL-compatible.
+        http://opensource.org/licenses/isc-license.txt
+
+    Copyright (c) 2013-2014, Pexpect development team
+    Copyright (c) 2012, Noah Spurrier <noah@noah.org>
+
+    Permission to use, copy, modify, and/or distribute this software for any
+    purpose with or without fee is hereby granted, provided that the above
+    copyright notice and this permission notice appear in all copies.
+    
+    THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+    WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+    MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+    ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+    WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+    ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+    OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
diff --git a/temp_venv/lib/python3.13/site-packages/pexpect-4.9.0.dist-info/METADATA b/temp_venv/lib/python3.13/site-packages/pexpect-4.9.0.dist-info/METADATA
new file mode 100644
index 0000000000000000000000000000000000000000..c5ea4a1a59ae2f7d7d1bdb4efcc4c3d40caeb85e
--- /dev/null
+++ b/temp_venv/lib/python3.13/site-packages/pexpect-4.9.0.dist-info/METADATA
@@ -0,0 +1,52 @@
+Metadata-Version: 2.1
+Name: pexpect
+Version: 4.9.0
+Summary: Pexpect allows easy control of interactive console applications.
+Home-page: https://pexpect.readthedocs.io/
+Author: Noah Spurrier; Thomas Kluyver; Jeff Quast
+Author-email: noah@noah.org, thomas@kluyver.me.uk, contact@jeffquast.com
+License: ISC license
+Project-URL: Bug Tracker, https://github.com/pexpect/pexpect/issues
+Project-URL: Documentation, https://pexpect.readthedocs.io/
+Project-URL: Source Code, https://github.com/pexpect/pexpect
+Project-URL: History, https://pexpect.readthedocs.io/en/stable/history.html
+Platform: UNIX
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Environment :: Console
+Classifier: Intended Audience :: Developers
+Classifier: Intended Audience :: System Administrators
+Classifier: License :: OSI Approved :: ISC License (ISCL)
+Classifier: Operating System :: POSIX
+Classifier: Operating System :: MacOS :: MacOS X
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Topic :: Software Development
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Classifier: Topic :: Software Development :: Quality Assurance
+Classifier: Topic :: Software Development :: Testing
+Classifier: Topic :: System
+Classifier: Topic :: System :: Archiving :: Packaging
+Classifier: Topic :: System :: Installation/Setup
+Classifier: Topic :: System :: Shells
+Classifier: Topic :: System :: Software Distribution
+Classifier: Topic :: Terminals
+License-File: LICENSE
+Requires-Dist: ptyprocess (>=0.5)
+
+
+Pexpect is a pure Python module for spawning child applications; controlling
+them; and responding to expected patterns in their output. Pexpect works like
+Don Libes' Expect. Pexpect allows your script to spawn a child application and
+control it as if a human were typing commands.
+
+Pexpect can be used for automating interactive applications such as ssh, ftp,
+passwd, telnet, etc. It can be used to automate setup scripts for duplicating
+software package installations on different servers. It can be used for
+automated software testing. Pexpect is in the spirit of Don Libes' Expect, but
+Pexpect is pure Python.
+
+The main features of Pexpect require the pty module in the Python standard
+library, which is only available on Unix-like systems. Some features—waiting
+for patterns from file descriptors or subprocesses—are also available on
+Windows.
diff --git a/temp_venv/lib/python3.13/site-packages/pexpect-4.9.0.dist-info/RECORD b/temp_venv/lib/python3.13/site-packages/pexpect-4.9.0.dist-info/RECORD
new file mode 100644
index 0000000000000000000000000000000000000000..6532a75d504d58551d8836efee212b8799786004
--- /dev/null
+++ b/temp_venv/lib/python3.13/site-packages/pexpect-4.9.0.dist-info/RECORD
@@ -0,0 +1,26 @@
+pexpect-4.9.0.dist-info/INSTALLER,sha256=5hhM4Q4mYTT9z6QB6PGpUAW81PGNFrYrdXMj4oM_6ak,2
+pexpect-4.9.0.dist-info/LICENSE,sha256=Skg64cTcc4psi3P-tJB04YNdoCq1qmhvJnUCmQb6Nk0,987
+pexpect-4.9.0.dist-info/METADATA,sha256=LXxaxf3v3koBlOxSL7zAynDpnhoqlW7qREFo-FkWUKo,2463
+pexpect-4.9.0.dist-info/RECORD,,
+pexpect-4.9.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+pexpect-4.9.0.dist-info/WHEEL,sha256=bb2Ot9scclHKMOLDEHY6B2sicWOgugjFKaJsT7vwMQo,110
+pexpect-4.9.0.dist-info/top_level.txt,sha256=O-b3UY9VQZkW3yDAeFNatUOKO4GojVWO4TTHoI9-E7k,8
+pexpect/ANSI.py,sha256=aA-3tdXz_FZ4G7PAqFZi5g1KBGQ6PzJzS0gm3ALZKZw,12177
+pexpect/FSM.py,sha256=tluiyUGMyIH3q_wLG6Ak1NZVuXUAGNDjq6k6BK1q8RY,13419
+pexpect/__init__.py,sha256=SuQYzpVxpzqLwZ1f68ov5Tvcy8Qv_eD1_NyuSIDibaU,4089
+pexpect/_async.py,sha256=kNQ073RymFLQgpmNzseyIC50jhBsLtoxsEW-7TRrxys,907
+pexpect/_async_pre_await.py,sha256=vkv3IAhRhVKzxvXydnl7Q7m9MjVC24E0OuveVsKoYRs,3465
+pexpect/_async_w_await.py,sha256=3QtNwnRSxz5L6pCEA8-aYTzSx647cmALPYwk8A_fTt0,3802
+pexpect/bashrc.sh,sha256=uPuvSvtNmi1ZYW1--6L5deebMjfUEq_LE3-A25zwDyo,419
+pexpect/exceptions.py,sha256=A9C1PWbBc2j9AKvnv7UkPCawhFTEGYmeULW0vwbMvXQ,1068
+pexpect/expect.py,sha256=KKtBmx2MYa-yDE715XlHUcloKe5ndBD359a4OYVXD84,13827
+pexpect/fdpexpect.py,sha256=zdSiPvZlBSuqMk_BNBcpXWb3kGJk6MIVI-1NWZv2lU4,5991
+pexpect/popen_spawn.py,sha256=bxLlZLG8BBbRFmv3YLeR38g2ZxoLRIhE_-RJt8dorgE,6159
+pexpect/pty_spawn.py,sha256=ZygSYsdnVJ5acxiNM9gLvLrT2AVqgwJvbDcPaTxxv9E,37382
+pexpect/pxssh.py,sha256=808mJbdWlDocew12LrvNHz9-ba_a1ZfwB8XFRrcest4,24445
+pexpect/replwrap.py,sha256=_I5PDohJ5qWWiXzO9pS32kfcVllJyungubrn5_ATcVY,5951
+pexpect/run.py,sha256=PkTA_IGOHzLVK89tVIzpLJ7I2jANN5o7GDcH5k1Ly8c,6629
+pexpect/screen.py,sha256=-twD4sIEp83nzuYH9lRDzwHfesoTgVGWglsBYWOK7Ks,13704
+pexpect/socket_pexpect.py,sha256=SV1strRUbcplsL-VbMXgL3sn_q4TB_wRFzGyMzxjzRY,4814
+pexpect/spawnbase.py,sha256=SThkQQ25wiSA-7uqveL3hbkS8FnLFAfk-iXgX2OtOY8,21685
+pexpect/utils.py,sha256=1jIhzU7eBvY3pbW3LZoJhCOU2KWqgty5HgQ6VBYIp5U,6019
diff --git a/temp_venv/lib/python3.13/site-packages/pygments-2.19.1.dist-info/INSTALLER b/temp_venv/lib/python3.13/site-packages/pygments-2.19.1.dist-info/INSTALLER
new file mode 100644
index 0000000000000000000000000000000000000000..5c69047b2eb8235994febeeae1da4a82365a240a
--- /dev/null
+++ b/temp_venv/lib/python3.13/site-packages/pygments-2.19.1.dist-info/INSTALLER
@@ -0,0 +1 @@
+uv
\ No newline at end of file
diff --git a/temp_venv/lib/python3.13/site-packages/pygments-2.19.1.dist-info/METADATA b/temp_venv/lib/python3.13/site-packages/pygments-2.19.1.dist-info/METADATA
new file mode 100644
index 0000000000000000000000000000000000000000..8671c6d8f8260954cc0e7df87560bd70a2934343
--- /dev/null
+++ b/temp_venv/lib/python3.13/site-packages/pygments-2.19.1.dist-info/METADATA
@@ -0,0 +1,58 @@
+Metadata-Version: 2.4
+Name: Pygments
+Version: 2.19.1
+Summary: Pygments is a syntax highlighting package written in Python.
+Project-URL: Homepage, https://pygments.org
+Project-URL: Documentation, https://pygments.org/docs
+Project-URL: Source, https://github.com/pygments/pygments
+Project-URL: Bug Tracker, https://github.com/pygments/pygments/issues
+Project-URL: Changelog, https://github.com/pygments/pygments/blob/master/CHANGES
+Author-email: Georg Brandl <georg@python.org>
+Maintainer: Matthäus G. Chajdas
+Maintainer-email: Georg Brandl <georg@python.org>, Jean Abou Samra <jean@abou-samra.fr>
+License: BSD-2-Clause
+License-File: AUTHORS
+License-File: LICENSE
+Keywords: syntax highlighting
+Classifier: Development Status :: 6 - Mature
+Classifier: Intended Audience :: Developers
+Classifier: Intended Audience :: End Users/Desktop
+Classifier: Intended Audience :: System Administrators
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Topic :: Text Processing :: Filters
+Classifier: Topic :: Utilities
+Requires-Python: >=3.8
+Provides-Extra: plugins
+Provides-Extra: windows-terminal
+Requires-Dist: colorama>=0.4.6; extra == 'windows-terminal'
+Description-Content-Type: text/x-rst
+
+Pygments
+~~~~~~~~
+
+Pygments is a syntax highlighting package written in Python.
+
+It is a generic syntax highlighter suitable for use in code hosting, forums,
+wikis or other applications that need to prettify source code.  Highlights
+are:
+
+* a wide range of over 500 languages and other text formats is supported
+* special attention is paid to details, increasing quality by a fair amount
+* support for new languages and formats are added easily
+* a number of output formats, presently HTML, LaTeX, RTF, SVG, all image
+  formats that PIL supports and ANSI sequences
+* it is usable as a command-line tool and as a library
+
+Copyright 2006-2025 by the Pygments team, see ``AUTHORS``.
+Licensed under the BSD, see ``LICENSE`` for details.
diff --git a/temp_venv/lib/python3.13/site-packages/pygments-2.19.1.dist-info/RECORD b/temp_venv/lib/python3.13/site-packages/pygments-2.19.1.dist-info/RECORD
new file mode 100644
index 0000000000000000000000000000000000000000..287ab514a2c0b32c788869ece3dc0eeadc8bde8c
--- /dev/null
+++ b/temp_venv/lib/python3.13/site-packages/pygments-2.19.1.dist-info/RECORD
@@ -0,0 +1,346 @@
+../../../bin/pygmentize,sha256=4mKyPWv0yd_PSso-ZKqAWWI8a-d3GoD4wLy1_kS6wck,343
+pygments-2.19.1.dist-info/INSTALLER,sha256=5hhM4Q4mYTT9z6QB6PGpUAW81PGNFrYrdXMj4oM_6ak,2
+pygments-2.19.1.dist-info/METADATA,sha256=QGjhRksrfp_I-eQNvdNKViqR538H_0l-x473HqO-rxM,2512
+pygments-2.19.1.dist-info/RECORD,,
+pygments-2.19.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+pygments-2.19.1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+pygments-2.19.1.dist-info/entry_points.txt,sha256=uUXw-XhMKBEX4pWcCtpuTTnPhL3h7OEE2jWi51VQsa8,53
+pygments-2.19.1.dist-info/licenses/AUTHORS,sha256=BmDjGKbyFYAq3Icxq4XQxl_yfPzKP10oWX8wZHYZW9k,10824
+pygments-2.19.1.dist-info/licenses/LICENSE,sha256=qdZvHVJt8C4p3Oc0NtNOVuhjL0bCdbvf_HBWnogvnxc,1331
+pygments/__init__.py,sha256=H0XmVJ1Fe9qSEDBU1j1wZLT-Tfu7blFbDiFSfpwU63c,2959
+pygments/__main__.py,sha256=p8AJyoyCOMYGvzWHdnq0_A9qaaVqaj02nIu3xhJp1_4,348
+pygments/cmdline.py,sha256=4pL9Kpn2PUEKPobgrsQgg-vCx2NjsrapKzQ6LxQR7Q0,23536
+pygments/console.py,sha256=AagDWqwea2yBWf10KC9ptBgMpMjxKp8yABAmh-NQOVk,1718
+pygments/filter.py,sha256=YLtpTnZiu07nY3oK9nfR6E9Y1FBHhP5PX8gvkJWcfag,1910
+pygments/filters/__init__.py,sha256=B00KqPCQh5E0XhzaDK74Qa1E4fDSTlD6b0Pvr1v-vEQ,40344
+pygments/formatter.py,sha256=H_4J-moKkKfRWUOW9J0u7hhw6n1LiO-2Xu1q2B0sE5w,4366
+pygments/formatters/__init__.py,sha256=7OuvmoYLyoPzoOQV_brHG8GSKYB_wjFSkAQng6x2y9g,5349
+pygments/formatters/_mapping.py,sha256=1Cw37FuQlNacnxRKmtlPX4nyLoX9_ttko5ZwscNUZZ4,4176
+pygments/formatters/bbcode.py,sha256=s0Ka35OKuIchoSgEAGf6rj0rl2a9ym9L31JVNSRbZFQ,3296
+pygments/formatters/groff.py,sha256=pLcIHj4jJS_lRAVFnyJODKDu1Xlyl9_AEIdOtbl3DT0,5082
+pygments/formatters/html.py,sha256=FrHJ69FUliEyPY0zTfab0C1gPf7LXsKgeRlhwkniqIs,35953
+pygments/formatters/img.py,sha256=aRpFo8mBmWTL3sBUjRCWkeS3rc6FZrSFC4EksDrl53g,23301
+pygments/formatters/irc.py,sha256=R0Js0TYWySlI2yE9sW6tN4d4X-x3k9ZmudsijGPnLmU,4945
+pygments/formatters/latex.py,sha256=BRYtbLeW_YD1kwhhnFInhJIKylurnri8CF1lP069KWE,19258
+pygments/formatters/other.py,sha256=8pYW27sU_7XicLUqOEt2yWSO0h1IEUM3TIv34KODLwo,4986
+pygments/formatters/pangomarkup.py,sha256=pcFvEC7K1Me0EjGeOZth4oCnEY85bfqc77XzZASEPpY,2206
+pygments/formatters/rtf.py,sha256=kcKMCxTXu-2-hpgEftlGJRm7Ss-yA_Sy8OsHH_qzykA,11921
+pygments/formatters/svg.py,sha256=R6A2ME6JsMQWFiyn8wcKwFUOD6vsu-HLwiIztLu-77E,7138
+pygments/formatters/terminal.py,sha256=J_F_dFXwR9LHWvatIDnwqRYJyjVmSo1Zx8K_XDh6SyM,4626
+pygments/formatters/terminal256.py,sha256=7GQFLE5cfmeu53CAzANO74-kBk2BFkXfn5phmZjYkhM,11717
+pygments/lexer.py,sha256=ib-F_0GxHkwGpb6vWP0DeLMLc7EYgjo3hWFKN5IgOq0,35109
+pygments/lexers/__init__.py,sha256=6YhzxGKlWk38P6JpIJUQ1rVvV0DEZjEmdYsdMQ58hSk,12067
+pygments/lexers/_ada_builtins.py,sha256=CA_OnShtdc7wWh9oYcRlcrkDAQwYUKl6w7tdSbALQd4,1543
+pygments/lexers/_asy_builtins.py,sha256=cd9M00YH19w5ZL7aqucmC3nwpJGTS04U-01NLy5E2_4,27287
+pygments/lexers/_cl_builtins.py,sha256=kQeUIyZjP4kX0frkICDcKxBYQCLqzIDXa5WV5cevhDo,13994
+pygments/lexers/_cocoa_builtins.py,sha256=Ka1lLJe7JfWtdho4IFIB82X9yBvrbfHCCmEG-peXXhQ,105173
+pygments/lexers/_csound_builtins.py,sha256=qnQYKeI26ZHim316uqy_hDiRiCoHo2RHjD3sYBALyXs,18414
+pygments/lexers/_css_builtins.py,sha256=aD-dhLFXVd1Atn_bZd7gEdQn7Mhe60_VHpvZ340WzDI,12446
+pygments/lexers/_googlesql_builtins.py,sha256=IkrOk-T2v1yzbGzUEEQh5_Cf4uC_cmL_uuhwDpZlTug,16132
+pygments/lexers/_julia_builtins.py,sha256=N2WdSw5zgI2fhDat_i4YeVqurRTC_P8x71ez00SCN6U,11883
+pygments/lexers/_lasso_builtins.py,sha256=8q1gbsrMJeaeUhxIYKhaOxC9j_B-NBpq_XFj2Ze41X0,134510
+pygments/lexers/_lilypond_builtins.py,sha256=XTbGL1z1oKMoqWLEktG33jx5GdGTI9CpeO5NheEi4Y0,108094
+pygments/lexers/_lua_builtins.py,sha256=PhFdZV5-Tzz2j_q4lvG9lr84ELGfL41BhnrSDNNTaG4,8108
+pygments/lexers/_luau_builtins.py,sha256=-IDrU04kUVfjXwSQzMMpXmMYhNsQxZVVZk8cuAA0Lo0,955
+pygments/lexers/_mapping.py,sha256=9fv7xYOUAOr6LzfdFS4MDbPu78o4OQQH-2nsI1bNZf4,70438
+pygments/lexers/_mql_builtins.py,sha256=ybRQjlb7Cul0sDstnzxJl3h0qS6Ieqsr811fqrxyumU,24713
+pygments/lexers/_mysql_builtins.py,sha256=y0kAWZVAs0z2dTFJJV42OZpILgRnd8T3zSlBFv-g_oA,25838
+pygments/lexers/_openedge_builtins.py,sha256=Sz4j9-CPWIaxMa-2fZgY66j7igcu1ob1GR2UtI8zAkg,49398
+pygments/lexers/_php_builtins.py,sha256=Jd4BZpjMDELPi4EVoSxK1-8BFTc63HUwYfm1rLrGj0M,107922
+pygments/lexers/_postgres_builtins.py,sha256=Pqh4z0RBRbnW6rCQtWUdzWCJxNyqpJ7_0HOktxHDxk4,13343
+pygments/lexers/_qlik_builtins.py,sha256=xuJy9c9uZDXv6h8z582P5PrxqkxTZ_nS8gPl9OD9VN8,12595
+pygments/lexers/_scheme_builtins.py,sha256=2hNtJOJmP21lUsikpqMJ2gAmLT3Rwn_KEeqhXwCjgfk,32564
+pygments/lexers/_scilab_builtins.py,sha256=oZYPB1XPdIEz3pII11pFDe6extRRyWGA7pY06X8KZ8w,52411
+pygments/lexers/_sourcemod_builtins.py,sha256=H8AFLsNDdEpymIWOpDwbDJGCP1w-x-1gSlzPDioMF4o,26777
+pygments/lexers/_stan_builtins.py,sha256=dwi1hllM_NsaCv-aXJy7lEi57X5Hh5gSD97aCQyT9KM,13445
+pygments/lexers/_stata_builtins.py,sha256=Hqrr6j77zWU3cGGpBPohwexZci43YA4_sVYE4E1sNow,27227
+pygments/lexers/_tsql_builtins.py,sha256=Pi2RhTXcLE3glI9oxNhyVsOMn-fK_1TRxJ-EsYP5LcI,15460
+pygments/lexers/_usd_builtins.py,sha256=c9hbU1cwqBUCFIhNfu_Dob8ywv1rlPhi9w2OTj3kR8s,1658
+pygments/lexers/_vbscript_builtins.py,sha256=MqJ2ABywD21aSRtWYZRG64CCbGstC1kfsiHGJmZzxiw,4225
+pygments/lexers/_vim_builtins.py,sha256=bA4mH8t1mPPQfEiUCKEqRO1O0rL2DUG0Ux1Bt8ZSu0E,57066
+pygments/lexers/actionscript.py,sha256=JBngCe5UhYT_0dLD2j7PnPO0xRRJhmypEuQ-C5in8pY,11727
+pygments/lexers/ada.py,sha256=58k5ra1vGS4iLpW3h1ItY9ftzF3WevaeAAXzAYTiYkQ,5353
+pygments/lexers/agile.py,sha256=DN-7AVIqtG1MshA94rtSGYI_884hVHgzq405wD0_dl8,896
+pygments/lexers/algebra.py,sha256=yGTu9Tt-cQzAISQYIC5MS5a3z4QmL-tGcXnd_pkWGbk,9952
+pygments/lexers/ambient.py,sha256=UnzKpIlfSm3iitHvMd7XTMSY8TjZYYhKOC3AiARS_cE,2605
+pygments/lexers/amdgpu.py,sha256=S8qjn2UMLhBFm3Yn_c06XAGf8cl5x_ZeluelWG_-JAw,1723
+pygments/lexers/ampl.py,sha256=ZBRfDXm760gR1a1gqItnsHuoO3JdUcTBjJ5tFY9UtPA,4176
+pygments/lexers/apdlexer.py,sha256=Zr5-jgjxC8PKzRlEeclakZXPHci7FHBZghQ6wwiuT7A,30800
+pygments/lexers/apl.py,sha256=PTQMp-bxT5P-DbrEvFha10HBTcsDJ5srL3I1s9ljz58,3404
+pygments/lexers/archetype.py,sha256=pQVlP1Fb5OA8nn7QwmFaaaOSvvpoIsQVw43FVCQCve4,11538
+pygments/lexers/arrow.py,sha256=2PKdbWq3xQLF1KoDbWvSxpjwKRrznnDiArTflRGZzBo,3564
+pygments/lexers/arturo.py,sha256=U5MtRNHJtnBn4ZOeWmW6MKlVRG7SX6KhTRamDqzn9tA,11414
+pygments/lexers/asc.py,sha256=-DgZl9jccBDHPlDmjCsrEqx0-Q7ap7XVdNKtxLNWG1w,1693
+pygments/lexers/asm.py,sha256=xm2Y5mcT-sF3oQvair4SWs9EWTyndoaUoSsDy5v6shI,41967
+pygments/lexers/asn1.py,sha256=BlcloIX2bu6Q7BxGcksuhYFHGsXLVKyB4B9mFd4Pj6E,4262
+pygments/lexers/automation.py,sha256=Q61qon8EwpfakMh_2MS2E2zUUT16rG3UNIKPYjITeTs,19831
+pygments/lexers/bare.py,sha256=tWoei86JJX1k-ADhaXd5TgX6ItDTici9yFWpkTPhnfM,3020
+pygments/lexers/basic.py,sha256=qpVe5h8Fa7NJo1EihN-4R_UZpHO6my2Ssgkb-BktkKs,27989
+pygments/lexers/bdd.py,sha256=yysefcOFAEyk9kJ2y4EXmzJTecgLYUHlWixt_3YzPMU,1641
+pygments/lexers/berry.py,sha256=zxGowFb8HMIyN15-m8nmWnW6bPRR4esKtSEVugc9uXM,3209
+pygments/lexers/bibtex.py,sha256=yuNoPxwrJf9DCGUT17hxfDzbq_HtCLkQkRbBtiTVmeQ,4811
+pygments/lexers/blueprint.py,sha256=NzvWHMxCLDWt8hc6gB5jokltxVJgNa7Jwh4c61ng388,6188
+pygments/lexers/boa.py,sha256=dOot1XWNZThPIio2UyAX67K6EpISjSRCFjotD7dcnwE,3921
+pygments/lexers/bqn.py,sha256=nJiwrPKKbRF-qdai5tfqipwBkkko2P3weiZAjHUMimY,3671
+pygments/lexers/business.py,sha256=lRtekOJfsDkb12AGbuz10-G67OJrVJgCBtihTQ8_aoY,28345
+pygments/lexers/c_cpp.py,sha256=D7ZIswaHASlGBgoTlwnSqTQHf8_JyvvSt2L2q1W-F6g,18059
+pygments/lexers/c_like.py,sha256=FTGp17ds6X2rDZOHup2hH6BEn3gKK4nLm9pydNEhm0E,32021
+pygments/lexers/capnproto.py,sha256=XQJAh1WS-0ulqbTn9TdzR6gEgWLcuBqb4sj3jNsrhsY,2174
+pygments/lexers/carbon.py,sha256=av12YuTGZGpOa1Cmxp3lppx3LfSJUWbvOu0ixmUVll0,3211
+pygments/lexers/cddl.py,sha256=MKa70IwABgjBjYu15_Q9v8rsu2sr1a-i2jkiaPTI6sM,5076
+pygments/lexers/chapel.py,sha256=0n_fL3ehLC4pw4YKnmq9jxIXOJcxGPka1Wr1t1zsXPc,5156
+pygments/lexers/clean.py,sha256=dkDPAwF5BTALPeuKFoRKOSD3RfsKcGWbaRo6_G8LHng,6418
+pygments/lexers/codeql.py,sha256=ebvghn2zbrnETV4buVozMDmRCVKSdGiIN8ycLlHpGsE,2576
+pygments/lexers/comal.py,sha256=TC3NzcJ58ew5jw7qwK0kJ-okTA47psZje0yAIS39HR4,3179
+pygments/lexers/compiled.py,sha256=Slfo1sjWqcPawUwf0dIIZLBCL5pkOIoAX2S8Lxs02Mc,1426
+pygments/lexers/configs.py,sha256=wW8pY0Sa5a10pnAeTLGf48HhixQTVageIyHEf1aYMCc,50913
+pygments/lexers/console.py,sha256=-jAG120dupvV3kG3zC70brLJvSLwTFqMubBQuj_GVnU,4180
+pygments/lexers/cplint.py,sha256=DkbyE5EKydLgf6BRr1FhQrK-IeQPL7Zmjk0DVdlRFnQ,1389
+pygments/lexers/crystal.py,sha256=xU-RnpIkpjrquoxtOuOcP8fcesSJl4xhU7kO9m42LZY,15754
+pygments/lexers/csound.py,sha256=ioSw4Q04wdwjUAbnTZ1qLhUq1vxdWFxhh3QtEl5RAJc,16998
+pygments/lexers/css.py,sha256=JN1RBYsee-jrpHWrSmhN3TKc4TkOBn-_BEGpgTCzcqE,25376
+pygments/lexers/d.py,sha256=piOy0EJeiAwPHugiM3gVv0z7HNh3u2gZQoCUSASRbY4,9920
+pygments/lexers/dalvik.py,sha256=deFg2JPBktJ9mEGb9EgxNkmd6vaMjJFQVzUHo8NKIa8,4606
+pygments/lexers/data.py,sha256=o0x0SmB5ms_CPUPljEEEenOON4IQWn86DkwFjkJYCOg,27026
+pygments/lexers/dax.py,sha256=ASi73qmr7OA7cVZXF2GTYGt01Ly1vY8CgD_Pnpm8k-4,8098
+pygments/lexers/devicetree.py,sha256=RecSQCidt8DRE1QFCPUbwwR0hiRlNtsFihdGldeUn3k,4019
+pygments/lexers/diff.py,sha256=F6vxZ64wm5Nag_97de1H_3F700ZwCVnYjKvtT5jilww,5382
+pygments/lexers/dns.py,sha256=Hh5hJ7MXfrq36KgfyIRwK3X8o1LdR98IKERcV4eZ7HY,3891
+pygments/lexers/dotnet.py,sha256=NDE0kOmpe96GLO-zwNLazmj77E9ORGmKpa4ZMCXDXxQ,39441
+pygments/lexers/dsls.py,sha256=GnHKhGL5GxsRFnqC7-65NTPZLOZdmnllNrGP86x_fQE,36746
+pygments/lexers/dylan.py,sha256=7zZ1EbHWXeVHqTD36AqykKqo3fhuIh4sM-whcxUaH_Y,10409
+pygments/lexers/ecl.py,sha256=vhmpa2LBrHxsPkYcf3kPZ1ItVaLRDTebi186wY0xGZA,6371
+pygments/lexers/eiffel.py,sha256=5ydYIEFcgcMoEj4BlK31hZ0aJb8OX0RdAvuCNdlxwqw,2690
+pygments/lexers/elm.py,sha256=uRCddU8jK5vVkH6Y66y8KOsDJprIfrOgeYq3hv1PxAM,3152
+pygments/lexers/elpi.py,sha256=O9j_WKBPyvNFjCRuPciVpW4etVSnILm_T79BhCPZYmo,6877
+pygments/lexers/email.py,sha256=ZZL6yvwCRl1CEQyysuOu0lbabp5tjMutS7f3efFKGR4,4804
+pygments/lexers/erlang.py,sha256=bU11eVHvooLwmVknzN6Xkb2DMk7HbenqdNlYSzhThDM,19147
+pygments/lexers/esoteric.py,sha256=Jfp8UUKyKYsqLaqXRZT3GSM9dzkF65zduwfnH1GoGhU,10500
+pygments/lexers/ezhil.py,sha256=22r-xjvvBVpExTqCI-HycAwunDb1p5gY4tIfDmM0vDw,3272
+pygments/lexers/factor.py,sha256=urZ4En4uKFCLXdEkXLWg9EYUFGHQTTDCwNXtyq-ngok,19530
+pygments/lexers/fantom.py,sha256=JJ13-NwykD-iIESnuzCefCYeQDO95cHMJA8TasF4gHA,10231
+pygments/lexers/felix.py,sha256=F-v0si4zPtRelqzDQWXI1-tarCE-BvawziODxRU7378,9655
+pygments/lexers/fift.py,sha256=rOCwp3v5ocK5YOWvt7Td3Md--97_8e-7Sonx52uS8mA,1644
+pygments/lexers/floscript.py,sha256=aHh82k52jMuDuzl9LatrcSANJiXTCyjGU3SO53bwbb0,2667
+pygments/lexers/forth.py,sha256=ZMtsHdNbnS_0IdSYlfAlfTSPEr0MEsRo-YZriQNueTQ,7193
+pygments/lexers/fortran.py,sha256=1PE5dTxf4Df6LUeXFcmNtyeXWsC8tSiK5dYwPHIJeeQ,10382
+pygments/lexers/foxpro.py,sha256=CBkW62Fuibz3yfyelZCaEO8GGdFJWsuRhqwtsSeBwLM,26295
+pygments/lexers/freefem.py,sha256=LFBQk-m1-nNCgrl-VDH3QwnVWurvb7W29i06LoT207A,26913
+pygments/lexers/func.py,sha256=OR2rkM7gf9fKvad5WcFQln-_U_pb-RUCM9eQatToF4A,3700
+pygments/lexers/functional.py,sha256=fYT2AGZ642cRkIAId0rnXFBsx1c8LLEDRN_VuCEkUyM,693
+pygments/lexers/futhark.py,sha256=Vf1i4t-tR3zqaktVjhTzFNg_ts_9CcyA4ZDfDizbCmk,3743
+pygments/lexers/gcodelexer.py,sha256=4Xs9ax4-JZGupW_qSnHon39wQGpb-tNA3xorMKg841E,874
+pygments/lexers/gdscript.py,sha256=Ws7JKxy0M0IyZ_1iMfRvJPrizEwmeCNLDoeMIFaM-CU,7566
+pygments/lexers/gleam.py,sha256=XIlTcq6cB743pCqbNYo8PocSkjZyDPR6hHgdaJNJ1Vc,2392
+pygments/lexers/go.py,sha256=4LezefgyuqZWHzLZHieUkKTi-ssY6aHJxx7Z-LFaLK0,3783
+pygments/lexers/grammar_notation.py,sha256=LvzhRQHgwZzq9oceukZS_hwnKK58ee7Z5d0cwXOR734,8043
+pygments/lexers/graph.py,sha256=WFqoPA1c_hHYrV0i_F7-eUw3Co4_HmZY3GJ-TyDr670,4108
+pygments/lexers/graphics.py,sha256=tmF9NNALnvPnax8ywYC3pLOla45YXtp9UA0H-5EiTQY,39145
+pygments/lexers/graphql.py,sha256=O_zcrGrBaDaKTlUoJGRruxqk7CJi-NR92Y0Cs-KkCvw,5601
+pygments/lexers/graphviz.py,sha256=mzdXOMpwz9_V-be1eTAMyhkKCBl6UxCIXuq6C2yrtsw,1934
+pygments/lexers/gsql.py,sha256=VPZk9sb26-DumRkWfEaSTeoc0lx5xt5n-6eDDLezMtc,3990
+pygments/lexers/hare.py,sha256=PGCOuILktJsmtTpCZZKkMFtObfJuBpei8HM8HHuq1Tw,2649
+pygments/lexers/haskell.py,sha256=MYr74-PAC8kGJRX-dZmvZsHTc7a2u6yFS2B19LfDD7g,33262
+pygments/lexers/haxe.py,sha256=WHCy_nrXHnfLITfbdp3Ji3lqQU4HAsTUpXsLCp2_4sk,30974
+pygments/lexers/hdl.py,sha256=MOWxhmAuE4Ei0CKDqqaON7T8tl43geancrNYM136Z0U,22738
+pygments/lexers/hexdump.py,sha256=1lj9oJ-KiZXSVYvTMfGmEAQzNEW08WlMcC2I5aYvHK4,3653
+pygments/lexers/html.py,sha256=MxYTI4EeT7QxoGleCAyQq-8n_Sgly6tD95H5zanCNmk,21977
+pygments/lexers/idl.py,sha256=rcihUAGhfuGEaSW6pgFq6NzplT_pv0DagUoefg4zAmk,15449
+pygments/lexers/igor.py,sha256=wVefbUjb3ftaW3LCKGtX1JgLgiY4EmRor5gVOn8vQA8,31633
+pygments/lexers/inferno.py,sha256=ChE_5y5SLH_75Uv7D2dKWQMk2dlN6z1gY1IDjlJZ8rU,3135
+pygments/lexers/installers.py,sha256=ZHliit4Pxz1tYKOIjKkDXI5djTkpzYUMVIPR1xvUrL8,14435
+pygments/lexers/int_fiction.py,sha256=0ZzIa1sZDUQsltd1oHuS-BoNiOF8zKQfcVuDyK1Ttv8,56544
+pygments/lexers/iolang.py,sha256=L6dNDCLH0kxkIUi00fI4Z14QnRu79UcNDrgv02c5Zw8,1905
+pygments/lexers/j.py,sha256=DqNdwQGFLiZW3mCNLRg81gpmsy4Hgcai_9NP3LbWhNU,4853
+pygments/lexers/javascript.py,sha256=TGKQLSrCprCKfhLLGAq_0EOdvqvJKX9pOdKo7tCRurQ,63243
+pygments/lexers/jmespath.py,sha256=R5yA5LJ2nTIaDwnFIpSNGAThd0sAYFccwawA9xBptlg,2082
+pygments/lexers/jslt.py,sha256=OeYQf8O2_9FCaf9W6Q3a7rPdAFLthePCtVSgCrOTcl8,3700
+pygments/lexers/json5.py,sha256=8JZbc8EiTEZdKaIdQg3hXEh0mHWSzPlwd473a0nUuT0,2502
+pygments/lexers/jsonnet.py,sha256=bx2G6J4tJqGrJV1PyZrIWzWHXcoefCX-4lIxxtbn2gw,5636
+pygments/lexers/jsx.py,sha256=wGsoGSB40qAJrVfXwRPtan7OcK0O87RVsHHk0m6gogk,2693
+pygments/lexers/julia.py,sha256=0ZDJ9X83V5GqJzA6T6p0TTN8WHy2JAjvu-FSBXvfXdc,11710
+pygments/lexers/jvm.py,sha256=Yt1iQ3QodXRY-x_HUOGedhyuBBHn5jYH-I8NzOzHTlE,72667
+pygments/lexers/kuin.py,sha256=3dKKJVJlskgrvMKv2tY9NOsFfDjyo-3MLcJ1lFKdXSg,11405
+pygments/lexers/kusto.py,sha256=kaxkoPpEBDsBTCvCOkZZx7oGfv0jk_UNIRIRbfVAsBE,3477
+pygments/lexers/ldap.py,sha256=77vF4t_19x9V522cxRCM5d3HW8Ne3giYsFsMPVYYBw4,6551
+pygments/lexers/lean.py,sha256=7HWRgxFsxS1N9XKqw0vfKwaxl27s5YiVYtZeRUoTHFo,8570
+pygments/lexers/lilypond.py,sha256=yd2Tuv67um6EyCIr-VwBnlPhTHxMaQsBJ4nGgO5fjIk,9752
+pygments/lexers/lisp.py,sha256=EHUy1g4pzEsYPE-zGj2rAXm3YATE1j9dCQOr5-JPSkU,157668
+pygments/lexers/macaulay2.py,sha256=zkV-vxjQYa0Jj9TGfFP1iMgpTZ4ApQuAAIdJVGWb2is,33366
+pygments/lexers/make.py,sha256=YMI5DBCrxWca-pz9cVXcyfuHLcikPx9R_3pW_98Myqo,7831
+pygments/lexers/maple.py,sha256=Rs0dEmOMD3C1YQPd0mntN-vzReq4XfHegH6xV4lvJWo,7960
+pygments/lexers/markup.py,sha256=zWtxsyIx_1OxQzS6wLe8bEqglePv4RqvJjbia8AvV5c,65088
+pygments/lexers/math.py,sha256=P3ZK1ePd8ZnLdlmHezo2irCA8T2-nlHBoSaBoT5mEVI,695
+pygments/lexers/matlab.py,sha256=F9KO4qowIhfP8oVhCRRzE_1sqg4zmQbsB2NZH193PiM,133027
+pygments/lexers/maxima.py,sha256=a0h9Ggs9JEovTrzbJT-BLVbOqI29yPnaMZlkU5f_FeY,2715
+pygments/lexers/meson.py,sha256=BMrsDo6BH2lzTFw7JDwQ9SDNMTrRkXCNRDVf4aFHdsI,4336
+pygments/lexers/mime.py,sha256=yGrf3h37LK4b6ERBpFiL_qzn3JgOfGR5KLagnbWFl6c,7582
+pygments/lexers/minecraft.py,sha256=Nu88snDDPzM0D-742fFdUriczL-EE911pAd4_I4-pAw,13696
+pygments/lexers/mips.py,sha256=STKiZT67b3QERXXn7XKVxlPBu7vwbPC5EyCpuf3Jfbw,4656
+pygments/lexers/ml.py,sha256=t8sCv4BjvuBq6AihKKUwStEONIgdXCC2RMtO0RopNbM,35390
+pygments/lexers/modeling.py,sha256=M7B58bGB-Zwd1EmPxKqtRvg7TgNCyem3MVUHv0_H2SQ,13683
+pygments/lexers/modula2.py,sha256=NtpXBRoUCeHfflgB39LknSkCwhBHBKv2Er_pinjVsNE,53072
+pygments/lexers/mojo.py,sha256=8JRVoftN1E-W2woG0K-4n8PQXTUM9iY6Sl5sWb2uGNg,24233
+pygments/lexers/monte.py,sha256=baWU6zlXloenw9MO1MtEVGE9i3CfiXAYhqU621MIjRk,6289
+pygments/lexers/mosel.py,sha256=gjRdedhA1jTjoYoM1Gpaoog_I9o7TRbYMHk97N1TXwg,9297
+pygments/lexers/ncl.py,sha256=zJ6ahlitit4S0pBXc7Wu96PB7xOn59MwfR2HdY5_C60,63999
+pygments/lexers/nimrod.py,sha256=Q1NSqEkLC5wWt7xJyKC-vzWw_Iw2SfDNP_pyMFBuIfA,6413
+pygments/lexers/nit.py,sha256=p_hVD8GzMRl3CABVKHtYgnXFUQk0i5F2FbWFA6WXm6s,2725
+pygments/lexers/nix.py,sha256=NOrv20gdq-2A7eZ6c2gElPHv1Xx2pvv20-qOymL9GMg,4421
+pygments/lexers/numbair.py,sha256=fxkp2CXeXWKBMewfi1H4JSYkmm4kU58wZ2Sh9BDYAWQ,1758
+pygments/lexers/oberon.py,sha256=jw403qUUs7zpTHAs5CbLjb8qiuwtxLk0spDIYqGZwAw,4210
+pygments/lexers/objective.py,sha256=Fo1WB3JMj8sNeYnvB84H4_qwhOt4WNJtJWjVEOwrJGk,23297
+pygments/lexers/ooc.py,sha256=kD1XaJZaihDF_s-Vyu1Bx68S_9zFt2rhox7NF8LpOZM,3002
+pygments/lexers/openscad.py,sha256=h9I1k8kiuQmhX5vZm6VDSr2fa5Finy0sN8ZDIE-jx1c,3700
+pygments/lexers/other.py,sha256=WLVyqPsvm9oSXIbZwbfyJloS6HGgoFW5nVTaU1uQpTw,1763
+pygments/lexers/parasail.py,sha256=DWMGhtyQgGTXbIgQl_mID6CKqi-Dhbvs_dTkmvrZXfE,2719
+pygments/lexers/parsers.py,sha256=feNgxroPoWRf0NEsON2mtmKDUfslIQppukw6ndEsQ3M,26596
+pygments/lexers/pascal.py,sha256=N2tRAjlXnTxggAzzk2tOOAVzeC2MBzrXy97_HQl5n44,30989
+pygments/lexers/pawn.py,sha256=LWUYQYsebMMt2d5oxX1HYWvBqbakR1h7Av_z8Vw94Wg,8253
+pygments/lexers/pddl.py,sha256=Mk4_BzlROJCd0xR4KKRRSrbj0F7LLQcBRjmsmtWmrCg,2989
+pygments/lexers/perl.py,sha256=9BXn3tyHMA49NvzbM9E2czSCHjeU7bvaPLUcoZrhz-4,39192
+pygments/lexers/phix.py,sha256=hZqychqo5sFMBDESzDPXg1DYHQe_9sn294UfbjihaFk,23249
+pygments/lexers/php.py,sha256=l4hzQrlm0525i5dSw9Vmjcai3TzbPT6DkjzxPg9l6Zc,13061
+pygments/lexers/pointless.py,sha256=WSDjqQyGrNIGmTCdaMxl4zk7OZTlJAMzeUZ02kfgcTI,1974
+pygments/lexers/pony.py,sha256=EXrMkacqMZblI7v4AvBRQe-3Py8__bx5FOgjCLdfXxQ,3279
+pygments/lexers/praat.py,sha256=4UFK-nbC6WkZBhJgcQqEGqq9CocJkW7AmT_OJQbjWzk,12676
+pygments/lexers/procfile.py,sha256=05W2fyofLTP-FbEdSXD1eles-PPqVNfF6RWXjQdW2us,1155
+pygments/lexers/prolog.py,sha256=9Kc5YNUFqkfWu2sYoyzC3RX65abf1bm7oHr86z1s4kQ,12866
+pygments/lexers/promql.py,sha256=n-0vo-o8-ZasqP3Va4ujs562UfZSLfZF-RzT71yL0Tk,4738
+pygments/lexers/prql.py,sha256=PFReuvhbv4K5aeu6lvDfw4m-3hULkB3r43bKAy948os,8747
+pygments/lexers/ptx.py,sha256=KSHAvbiNVUntKilQ6EPYoLFocmJpRsBy_7fW6_Nrs1Y,4501
+pygments/lexers/python.py,sha256=WZe7fBAHKZ_BxPg8qIU26UGhk8qwUYyENJ3IyPW64mc,53805
+pygments/lexers/q.py,sha256=WQFUh3JrpK2j-VGW_Ytn3uJ5frUNmQIFnLtMVGRA9DI,6936
+pygments/lexers/qlik.py,sha256=2wqwdfIjrAz6RNBsP4MyeLX8Z7QpIGzxtf1CvaOlr_g,3693
+pygments/lexers/qvt.py,sha256=XMBnsWRrvCDf989OuDeb-KpszAkeETiACyaghZeL1ns,6103
+pygments/lexers/r.py,sha256=B6WgrD9SY1UTCV1fQBSlZbezPfpYsARn3FQIHcFYOiM,6474
+pygments/lexers/rdf.py,sha256=qUzxLna9v071bHhZAjdsBi8dKaJNk_h9g1ZRUAYCfoo,16056
+pygments/lexers/rebol.py,sha256=4u3N4kzui55HapopXDu3Kt0jczxDZ4buzwR7Mt4tQiM,18259
+pygments/lexers/rego.py,sha256=Rx5Gphbktr9ojg5DbqlyxHeQqqtF7g8W-oF0rmloDNY,1748
+pygments/lexers/resource.py,sha256=ioEzgWksB5HCjoz85XNkQPSd7n5kL0SZiuPkJP1hunQ,2927
+pygments/lexers/ride.py,sha256=kCWdxuR3PclVi4wiA0uUx4CYEFwuTqoMsKjhSW4X3yg,5035
+pygments/lexers/rita.py,sha256=Mj1QNxx1sWAZYC02kw8piVckaiw9B0MqQtiIiDFH0pA,1127
+pygments/lexers/rnc.py,sha256=g7ZD334PMGUqy_Ij64laSN1vJerwHqVkegfMCa3E-y8,1972
+pygments/lexers/roboconf.py,sha256=HbYuK5CqmQdd63SRY2nle01r7-p7mil0SnoauYDmEOY,2074
+pygments/lexers/robotframework.py,sha256=c4U1B9Q9ITBCTohqJTZOvkfyeVbenN4xhzSWIoZh5eU,18448
+pygments/lexers/ruby.py,sha256=uG617E5abBZcECRCqkhIfc-IbZcRb5cGuUZq_xpax90,22753
+pygments/lexers/rust.py,sha256=ZY-9vtsreBP0NfDd0WCouLSp_9MChAL8U8Abe-m9PB8,8260
+pygments/lexers/sas.py,sha256=C1Uz2s9DU6_s2kL-cB_PAGPtpyK5THlmhNmCumC1l48,9456
+pygments/lexers/savi.py,sha256=jrmruK0GnXktgBTWXW3oN3TXtofn3HBbkMlHnR84cko,4878
+pygments/lexers/scdoc.py,sha256=DXRmFDmYuc7h3gPAAVhfcL1OEbNBK5RdPpJqQzF3ZTk,2524
+pygments/lexers/scripting.py,sha256=jnJv_AFDyDrpzEPGNat5K6xzUBsEAiBr6N2_yLMt8LQ,81724
+pygments/lexers/sgf.py,sha256=w6C513ENaO2YCnqrduK7k03NaMDf-pgygvfzq2NaSRk,1985
+pygments/lexers/shell.py,sha256=dCS1zwkf5KwTog4__MnMC7h3Xmwv4_d3fnEV29tSwXI,36381
+pygments/lexers/sieve.py,sha256=eob-L84yf2jmhdNyYZUlbUJozdcd6GXcHW68lmAe8WE,2514
+pygments/lexers/slash.py,sha256=I-cRepmaxhL1SgYvD1hHX3gNBFI8NPszdU7hn1o5JlA,8484
+pygments/lexers/smalltalk.py,sha256=ue2PmqDK2sw0j75WdseiiENJBdZ1OwysH2Op1QN1r24,7204
+pygments/lexers/smithy.py,sha256=VREWoeuz7ANap_Uiopn7rs0Tnsfc-xBisDJKRGQY_y8,2659
+pygments/lexers/smv.py,sha256=He_VBSMbWONMWZmkrB5RYR0cfHVnMyKIXz68IFYl-a8,2805
+pygments/lexers/snobol.py,sha256=qDzb41xQQWMNmjB2MtZs23pFoFgZ2gbRZhK_Ir03r7I,2778
+pygments/lexers/solidity.py,sha256=Tixfnwku4Yezj6nNm8xVaw7EdV1qgAgdwahdTFP0St8,3163
+pygments/lexers/soong.py,sha256=Vm18vV4g6T8UPgjjY2yTRlSXGDpZowmuqQUBFfm4A9A,2339
+pygments/lexers/sophia.py,sha256=2YtYIT8iwAoW0B7TZuuoG_ZILhJV-2A7oBGat-98naE,3376
+pygments/lexers/special.py,sha256=8JuR2Vex8X-RWnC36S0HXTHWp2qmZclc90-TrLUWyaY,3585
+pygments/lexers/spice.py,sha256=m4nK0q4Sq_OFQez7kGWfki0No4ZV24YrONfHVj1Piqs,2790
+pygments/lexers/sql.py,sha256=2QvawM04xY96nvWadmHgHPDhOFbHGaFAQ_01rbNX_mc,48919
+pygments/lexers/srcinfo.py,sha256=B8vDs-sJogG3mWa5Hp_7JfHHUMyYRwGvKv6cKbFQXLM,1746
+pygments/lexers/stata.py,sha256=Zr9BC52D5O_3BbdW0N-tzoUmy0NTguL2sC-saXRVM-c,6415
+pygments/lexers/supercollider.py,sha256=_H5wDrn0DiGnlhB_cz6Rt_lo2TvqjSm0o6NPTd9R4Ko,3697
+pygments/lexers/tablegen.py,sha256=1JjedXYY18BNiY9JtNGLOtGfiwduNDZpQLBGTeQ6jAw,3987
+pygments/lexers/tact.py,sha256=X_lsxjFUMaC1TmYysXJq9tmAGifRnil83Bt1zA86Xdo,10809
+pygments/lexers/tal.py,sha256=xS9PlaWQOPj8MVr56fUNq31vUQKRWoLTlyWj9ZHm8AM,2904
+pygments/lexers/tcl.py,sha256=lK97ju4nikkt-oGOzIeyFEM98yq4dZSI8uEmYsq0R6c,5512
+pygments/lexers/teal.py,sha256=t3dqy_Arwv8_yExbX_xiFxv1TqJLPv4vh1MVKjKwS4Y,3522
+pygments/lexers/templates.py,sha256=BVdjYeoacIUuFyHTG39j4PxeNCe5E1oUURjH1rITrI4,75731
+pygments/lexers/teraterm.py,sha256=ciwztagW5Drg2gr17Qykrh6GwMsKy7e4xdQshX95GyQ,9718
+pygments/lexers/testing.py,sha256=YZgDgUEaLEYKSKEqpDsUi3Bn-Db_D42IlyiSsr1oX8U,10810
+pygments/lexers/text.py,sha256=nOCQPssIlKdVWU3PKxZiBPkf_KFM2V48IOssSyqhFY8,1068
+pygments/lexers/textedit.py,sha256=ttT4Ph-hIdgFLG6maRy_GskkziTFK0Wcg28yU0s6lek,7760
+pygments/lexers/textfmts.py,sha256=mi9KLEq4mrzDJbEc8G3VM-mSki_Tylkzodu47yH6z84,15524
+pygments/lexers/theorem.py,sha256=51ppBAEdhJmwU_lC916zMyjEoKLXqf89VAE_Lr0PNCc,17855
+pygments/lexers/thingsdb.py,sha256=x_fHNkLA-hIJyeIs6rg_X8n5OLYvFqaSu1FhI3apI5Y,6017
+pygments/lexers/tlb.py,sha256=ue2gqm45BI512lM13O8skAky9zAb7pLMrxZ8pbt5zRU,1450
+pygments/lexers/tls.py,sha256=_uQUVuMRDOhN-XUyGR5DIlVCk1CUZ1fIOSN4_WQYPKk,1540
+pygments/lexers/tnt.py,sha256=pK4LgoKON7u1xF66JYFncAPSbD8DZaeI_WTZ9HqEFlY,10456
+pygments/lexers/trafficscript.py,sha256=X3B8kgxS54ecuok9ic6Hkp-UMn5DvOmCK0p70Tz27Cw,1506
+pygments/lexers/typoscript.py,sha256=mBuePiVZUoAORPKsHwrx6fBWiy3fAIqG-2O67QmMiFI,8332
+pygments/lexers/typst.py,sha256=zIJBEhUXtWp5OiyAmvFA5m8d1EQG-ocwrJ677dvTUAk,7167
+pygments/lexers/ul4.py,sha256=rCaw0J9j3cdql9lX_HTilg65k9-9S118zOA6TAYfxaM,10499
+pygments/lexers/unicon.py,sha256=RAqoCnAAJBYOAGdR8ng0g6FtB39bGemLRlIqv5mcg9E,18625
+pygments/lexers/urbi.py,sha256=ajNP70NJg32jNnFDZsLvr_-4TToSGqRGkFyAPIJLfCU,6082
+pygments/lexers/usd.py,sha256=2eEGouolodYS402P_gtBrn4lLzpg1z8uHwPCKqjUb_k,3304
+pygments/lexers/varnish.py,sha256=dSh0Ku9SrjmlB29Fi_mWdWavN7M0cMKeepR4a34sOyI,7473
+pygments/lexers/verification.py,sha256=Qu433Q_h3EK3uS4bJoLRFZK0kIVwzX5AFKsa4Z-qnxA,3934
+pygments/lexers/verifpal.py,sha256=buyOOzCo_dGnoC40h0tthylHVVpgDt8qXu4olLvYy_4,2661
+pygments/lexers/vip.py,sha256=2lEV4cLV9p4E37wctBL7zkZ4ZU4p3HVsiLJFzB1bie0,5711
+pygments/lexers/vyper.py,sha256=Zq6sQIUBk6mBdpgOVgu3A6swGoBne0kDlRyjZznm2BY,5615
+pygments/lexers/web.py,sha256=4W9a7vcskrGJnxt4KmoE3SZydWB1qLq7lP2XS85J_m8,913
+pygments/lexers/webassembly.py,sha256=zgcMouzLawcbeFr6w_SOvGoUR68ZtqnnsbOcWEVleLk,5698
+pygments/lexers/webidl.py,sha256=ODtVmw4gVzI8HQWxuEckP6KMwm8WP2G2lSZEjagDXts,10516
+pygments/lexers/webmisc.py,sha256=-_-INDVdk47e2jlj-9bFcuLtntqVorBqIjlnwPfZFdI,40564
+pygments/lexers/wgsl.py,sha256=9igd9dzixGIgNewruv9mPnFms-c9BahkZcCCrZygv84,11880
+pygments/lexers/whiley.py,sha256=lMr750lA4MZsB4xqzVsIRtVMJIC3_dArhFYTHvOPwvA,4017
+pygments/lexers/wowtoc.py,sha256=8xxvf0xGeYtf4PE7KtkHZ_ly9xY_XXHrpCitdKE42Ro,4076
+pygments/lexers/wren.py,sha256=goGXnAMKKa13LLL40ybT3aMGPrk3gCRwZQFYAkKB_w0,3229
+pygments/lexers/x10.py,sha256=Q-AmgdF2E-N7mtOPpZ07CsxrTVnikyqC4uRRv6H75sk,1943
+pygments/lexers/xorg.py,sha256=9ttrBd3_Y2nXANsqtMposSgblYmMYqWXQ-Iz5RH9RsU,925
+pygments/lexers/yang.py,sha256=13CWbSaNr9giOHz4o0SXSklh0bfWt0ah14jJGpTvcn0,4499
+pygments/lexers/yara.py,sha256=jUSv78KTDfguCoAoAZKbYzQERkkyxBBWv5dInVrkDxo,2427
+pygments/lexers/zig.py,sha256=f-80MVOSp1KnczAMokQLVM-_wAEOD16EcGFnaCNlsN0,3976
+pygments/modeline.py,sha256=K5eSkR8GS1r5OkXXTHOcV0aM_6xpk9eWNEIAW-OOJ2g,1005
+pygments/plugin.py,sha256=tPx0rJCTIZ9ioRgLNYG4pifCbAwTRUZddvLw-NfAk2w,1891
+pygments/regexopt.py,sha256=wXaP9Gjp_hKAdnICqoDkRxAOQJSc4v3X6mcxx3z-TNs,3072
+pygments/scanner.py,sha256=nNcETRR1tRuiTaHmHSTTECVYFPcLf6mDZu1e4u91A9E,3092
+pygments/sphinxext.py,sha256=VEe_oHNgLoEGMHc2ROfbee2mF2PPREFyE6_m_JN5FvQ,7898
+pygments/style.py,sha256=Cpw9dCAyW3_JAwFRXOJXmtKb5ZwO2_5KSmlq6q4fZw4,6408
+pygments/styles/__init__.py,sha256=f9KCQXN4uKbe8aI8-L3qTC-_XPfT563FwTg6VTGVfwI,2006
+pygments/styles/_mapping.py,sha256=6lovFUE29tz6EsV3XYY4hgozJ7q1JL7cfO3UOlgnS8w,3312
+pygments/styles/abap.py,sha256=64Uwr8uPdEdcT-tE-Y2VveTXfH3SkqH9qdMgY49YHQI,749
+pygments/styles/algol.py,sha256=fCuk8ITTehvbJSufiaKlgnFsKbl-xFxxR82xhltc-cQ,2262
+pygments/styles/algol_nu.py,sha256=Gv9WfHJvYegGcUk1zcufQgsdXPNjCUNk8sAHyrSGGh4,2283
+pygments/styles/arduino.py,sha256=NoUB8xk7M1HGPoLfuySOLU0sVwoTuLcZqllXl2EO_iE,4557
+pygments/styles/autumn.py,sha256=fLLfjHXjxCl6crBAxEsBLH372ALMkFacA2bG6KFbJi4,2195
+pygments/styles/borland.py,sha256=_0ySKp4KGCSgtYjPe8uzD6gQhlmAIR4T43i-FoRYNOM,1611
+pygments/styles/bw.py,sha256=vhk8Xoj64fLPdA9IQU6mUVsYMel255jR-FDU7BjIHtI,1406
+pygments/styles/coffee.py,sha256=NqLt-fc7LONma1BGggbceVRY9uDE70WBuZXqK4zwaco,2308
+pygments/styles/colorful.py,sha256=mYcSbehtH7itH_QV9NqJp4Wna1X4lrwl2wkVXS2u-5A,2832
+pygments/styles/default.py,sha256=RTgG2zKWWUxPTDCFxhTnyZI_WZBIVgu5XsUpNvFisCA,2588
+pygments/styles/dracula.py,sha256=vRJmixBoSKV9o8NVQhXGViQqchhIYugfikLmvX0DoBw,2182
+pygments/styles/emacs.py,sha256=TiOG9oc83qToMCRMnJrXtWYqnzAqYycRz_50OoCKtxc,2535
+pygments/styles/friendly.py,sha256=oAi-l9anQTs9STDmUzXGDlOegatEOH4hpD0j6o6dZGM,2604
+pygments/styles/friendly_grayscale.py,sha256=a7Cqkzt6-uTiXvj6GoYBXzRvX5_zviCjjRB04Kf_-Q0,2828
+pygments/styles/fruity.py,sha256=GfSUTG0stlJr5Ow_saCaxbI2IB4-34Dp2TuRTpfUJBs,1324
+pygments/styles/gh_dark.py,sha256=ruNX3d4rf22rx-8HnwvGbNbXRQpXCNcHU1HNq6N4uNg,3590
+pygments/styles/gruvbox.py,sha256=KrFoHEoVnZW6XM9udyXncPomeGyZgIDsNWOH3kCrxFQ,3387
+pygments/styles/igor.py,sha256=fYYPhM0dRCvcDTMVrMVO5oFKnYm-8YVlsuVBoczFLtY,737
+pygments/styles/inkpot.py,sha256=jggSeX9NV15eOL2oJaVmZ6vmV7LWRzXJQRUqcWEqGRs,2404
+pygments/styles/lightbulb.py,sha256=Y8u1qdvlHfBqI2jJex55SkvVatVo_FjEUzE6h-X7m-0,3172
+pygments/styles/lilypond.py,sha256=Y6fp_sEL-zESmxAaMxzjtrKk90cuDC_DalNdC8wj0nw,2066
+pygments/styles/lovelace.py,sha256=cA9uhmbnzY04MccsiYSgMY7fvb4WMRbegWBUrGvXh1M,3178
+pygments/styles/manni.py,sha256=g9FyO7plTwfMm2cU4iiKgdlkMlvQLG6l2Lwkgz5ITS4,2443
+pygments/styles/material.py,sha256=LDmgomAbgtJDZhbv446_zIwgYh50UAqEEtgYNUns1rQ,4201
+pygments/styles/monokai.py,sha256=lrxTJpkBarV9gTLkBQryZ6oNSjekAVheJueKJP5iEYA,5184
+pygments/styles/murphy.py,sha256=-AKZiLkpiWej-otjHMsYCE-I-_IzCOLJY-_GBdKRZRw,2805
+pygments/styles/native.py,sha256=l6tezGSQTB8p_SyOXJ0PWI7KzCeEdtsPmVc4Yn4_CwU,2043
+pygments/styles/nord.py,sha256=GDt3WAaqaWsiCeqpIBPxd8TEUX708fGfwaA7S0w0oy0,5391
+pygments/styles/onedark.py,sha256=k80cZEppCEF-HLoxy_FEA0QmQDZze68nHVMNGyUVa28,1719
+pygments/styles/paraiso_dark.py,sha256=Jkrg4nUKIVNF8U4fPNV_Smq_g9NFbb9eiUrjYpVgQZg,5662
+pygments/styles/paraiso_light.py,sha256=MxN964ZEpze3wF0ss-igaa2I7E684MHe-Zq0rWPH3wo,5668
+pygments/styles/pastie.py,sha256=ZvAs9UpBNYFC-5PFrCRGYnm3FoPKb-eKR-ozbWZP-4g,2525
+pygments/styles/perldoc.py,sha256=HSxB93e4UpQkZspReQ34FeJbZ-59ksGvdaH-hToehi8,2230
+pygments/styles/rainbow_dash.py,sha256=4ugL18Or7aNtaLfPfCLFRiFy0Gu2RA4a9G2LQUE9SrM,2390
+pygments/styles/rrt.py,sha256=fgzfpC0PC_SCcLOMCNEIQTjPUMOncRe7SR10GfSRbXY,1006
+pygments/styles/sas.py,sha256=yzoXmbfQ2ND1WWq93b4vVGYkQSZHPqb4ymes9YYRT3w,1440
+pygments/styles/solarized.py,sha256=qupILFZn02WspnAF5SPYb-W8guo9xnUtjb1HeLw3XgE,4247
+pygments/styles/staroffice.py,sha256=CLbBeMoxay21Xyu3Af2p4xUXyG1_6ydCbvs5RJKYe5w,831
+pygments/styles/stata_dark.py,sha256=vX8SwHV__sG92F4CKribG08MJfSVq98dgs7gEA_n9yc,1257
+pygments/styles/stata_light.py,sha256=uV3GE-ylvffQ0yN3py1YAVqBB5wflIKZbceyK1Lqvrc,1289
+pygments/styles/tango.py,sha256=O2wcM4hHuU1Yt071M9CK7JPtiiSCqyxtT9tbiQICV28,7137
+pygments/styles/trac.py,sha256=9kMv1ZZyMKACWlx2fQVjRP0I2pgcRYCNrd7iGGZg9qk,1981
+pygments/styles/vim.py,sha256=J7_TqvrGkTX_XuTHW0In5wqPLAUPRWyr1122XueZWmM,2019
+pygments/styles/vs.py,sha256=s7YnzbIPuFU3LIke27mc4lAQSn2R3vbbHc1baMGSU_U,1130
+pygments/styles/xcode.py,sha256=PbQdzgGaA4a9LAU1i58alY9kM4IFlQX5jHQwOYmf_Rk,1504
+pygments/styles/zenburn.py,sha256=suZEKzBTCYdhf2cxNwcY7UATJK1tq5eYhGdBcXdf6MU,2203
+pygments/token.py,sha256=WbdWGhYm_Vosb0DDxW9lHNPgITXfWTsQmHt6cy9RbcM,6226
+pygments/unistring.py,sha256=al-_rBemRuGvinsrM6atNsHTmJ6DUbw24q2O2Ru1cBc,63208
+pygments/util.py,sha256=oRtSpiAo5jM9ulntkvVbgXUdiAW57jnuYGB7t9fYuhc,10031
diff --git a/temp_venv/lib/python3.13/site-packages/pygments-2.19.1.dist-info/REQUESTED b/temp_venv/lib/python3.13/site-packages/pygments-2.19.1.dist-info/REQUESTED
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/temp_venv/lib/python3.13/site-packages/pygments-2.19.1.dist-info/WHEEL b/temp_venv/lib/python3.13/site-packages/pygments-2.19.1.dist-info/WHEEL
new file mode 100644
index 0000000000000000000000000000000000000000..12228d414b6cfed7c39d3781c85c63256a1d7fb5
--- /dev/null
+++ b/temp_venv/lib/python3.13/site-packages/pygments-2.19.1.dist-info/WHEEL
@@ -0,0 +1,4 @@
+Wheel-Version: 1.0
+Generator: hatchling 1.27.0
+Root-Is-Purelib: true
+Tag: py3-none-any
diff --git a/temp_venv/lib/python3.13/site-packages/pygments-2.19.1.dist-info/entry_points.txt b/temp_venv/lib/python3.13/site-packages/pygments-2.19.1.dist-info/entry_points.txt
new file mode 100644
index 0000000000000000000000000000000000000000..15498e35f53320bd5e1de176928daabf26be0109
--- /dev/null
+++ b/temp_venv/lib/python3.13/site-packages/pygments-2.19.1.dist-info/entry_points.txt
@@ -0,0 +1,2 @@
+[console_scripts]
+pygmentize = pygments.cmdline:main
diff --git a/temp_venv/lib/python3.13/site-packages/typing_extensions.py b/temp_venv/lib/python3.13/site-packages/typing_extensions.py
new file mode 100644
index 0000000000000000000000000000000000000000..fa89c83efcdad1f5c81a4b35bbb64ea64ea6101f
--- /dev/null
+++ b/temp_venv/lib/python3.13/site-packages/typing_extensions.py
@@ -0,0 +1,4584 @@
+import abc
+import builtins
+import collections
+import collections.abc
+import contextlib
+import enum
+import functools
+import inspect
+import keyword
+import operator
+import sys
+import types as _types
+import typing
+import warnings
+
+__all__ = [
+    # Super-special typing primitives.
+    'Any',
+    'ClassVar',
+    'Concatenate',
+    'Final',
+    'LiteralString',
+    'ParamSpec',
+    'ParamSpecArgs',
+    'ParamSpecKwargs',
+    'Self',
+    'Type',
+    'TypeVar',
+    'TypeVarTuple',
+    'Unpack',
+
+    # ABCs (from collections.abc).
+    'Awaitable',
+    'AsyncIterator',
+    'AsyncIterable',
+    'Coroutine',
+    'AsyncGenerator',
+    'AsyncContextManager',
+    'Buffer',
+    'ChainMap',
+
+    # Concrete collection types.
+    'ContextManager',
+    'Counter',
+    'Deque',
+    'DefaultDict',
+    'NamedTuple',
+    'OrderedDict',
+    'TypedDict',
+
+    # Structural checks, a.k.a. protocols.
+    'SupportsAbs',
+    'SupportsBytes',
+    'SupportsComplex',
+    'SupportsFloat',
+    'SupportsIndex',
+    'SupportsInt',
+    'SupportsRound',
+
+    # One-off things.
+    'Annotated',
+    'assert_never',
+    'assert_type',
+    'clear_overloads',
+    'dataclass_transform',
+    'deprecated',
+    'Doc',
+    'evaluate_forward_ref',
+    'get_overloads',
+    'final',
+    'Format',
+    'get_annotations',
+    'get_args',
+    'get_origin',
+    'get_original_bases',
+    'get_protocol_members',
+    'get_type_hints',
+    'IntVar',
+    'is_protocol',
+    'is_typeddict',
+    'Literal',
+    'NewType',
+    'overload',
+    'override',
+    'Protocol',
+    'reveal_type',
+    'runtime',
+    'runtime_checkable',
+    'Text',
+    'TypeAlias',
+    'TypeAliasType',
+    'TypeForm',
+    'TypeGuard',
+    'TypeIs',
+    'TYPE_CHECKING',
+    'Never',
+    'NoReturn',
+    'ReadOnly',
+    'Required',
+    'NotRequired',
+    'NoDefault',
+    'NoExtraItems',
+
+    # Pure aliases, have always been in typing
+    'AbstractSet',
+    'AnyStr',
+    'BinaryIO',
+    'Callable',
+    'Collection',
+    'Container',
+    'Dict',
+    'ForwardRef',
+    'FrozenSet',
+    'Generator',
+    'Generic',
+    'Hashable',
+    'IO',
+    'ItemsView',
+    'Iterable',
+    'Iterator',
+    'KeysView',
+    'List',
+    'Mapping',
+    'MappingView',
+    'Match',
+    'MutableMapping',
+    'MutableSequence',
+    'MutableSet',
+    'Optional',
+    'Pattern',
+    'Reversible',
+    'Sequence',
+    'Set',
+    'Sized',
+    'TextIO',
+    'Tuple',
+    'Union',
+    'ValuesView',
+    'cast',
+    'no_type_check',
+    'no_type_check_decorator',
+]
+
+# for backward compatibility
+PEP_560 = True
+GenericMeta = type
+_PEP_696_IMPLEMENTED = sys.version_info >= (3, 13, 0, "beta")
+
+# Added with bpo-45166 to 3.10.1+ and some 3.9 versions
+_FORWARD_REF_HAS_CLASS = "__forward_is_class__" in typing.ForwardRef.__slots__
+
+# The functions below are modified copies of typing internal helpers.
+# They are needed by _ProtocolMeta and they provide support for PEP 646.
+
+
+class _Sentinel:
+    def __repr__(self):
+        return ""
+
+
+_marker = _Sentinel()
+
+
+if sys.version_info >= (3, 10):
+    def _should_collect_from_parameters(t):
+        return isinstance(
+            t, (typing._GenericAlias, _types.GenericAlias, _types.UnionType)
+        )
+elif sys.version_info >= (3, 9):
+    def _should_collect_from_parameters(t):
+        return isinstance(t, (typing._GenericAlias, _types.GenericAlias))
+else:
+    def _should_collect_from_parameters(t):
+        return isinstance(t, typing._GenericAlias) and not t._special
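+
+# In practice this asks "should type variables be collected from t's
+# parameters?": List[T] (and, on 3.9+, dict[str, T]; on 3.10+, T | str)
+# qualify, while bare types like `int` and unparameterized aliases do not.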
+
+
+NoReturn = typing.NoReturn
+
+# Some unconstrained type variables.  These are used by the container types.
+# (These are not for export.)
+T = typing.TypeVar('T')  # Any type.
+KT = typing.TypeVar('KT')  # Key type.
+VT = typing.TypeVar('VT')  # Value type.
+T_co = typing.TypeVar('T_co', covariant=True)  # Any type covariant containers.
+T_contra = typing.TypeVar('T_contra', contravariant=True)  # Ditto contravariant.
+
+
+if sys.version_info >= (3, 11):
+    from typing import Any
+else:
+
+    class _AnyMeta(type):
+        def __instancecheck__(self, obj):
+            if self is Any:
+                raise TypeError("typing_extensions.Any cannot be used with isinstance()")
+            return super().__instancecheck__(obj)
+
+        def __repr__(self):
+            if self is Any:
+                return "typing_extensions.Any"
+            return super().__repr__()
+
+    class Any(metaclass=_AnyMeta):
+        """Special type indicating an unconstrained type.
+        - Any is compatible with every type.
+        - Any is assumed to have all methods.
+        - All values are assumed to be instances of Any.
+        Note that all the above statements are true from the point of view of
+        static type checkers. At runtime, Any should not be used with instance
+        checks.
+        """
+        def __new__(cls, *args, **kwargs):
+            if cls is Any:
+                raise TypeError("Any cannot be instantiated")
+            return super().__new__(cls, *args, **kwargs)
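+
+    # Note: the _AnyMeta metaclass above makes `isinstance(x, Any)` raise
+    # TypeError; the backported Any is meaningful only to static type checkers.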
+
+
+ClassVar = typing.ClassVar
+
+
+class _ExtensionsSpecialForm(typing._SpecialForm, _root=True):
+    def __repr__(self):
+        return 'typing_extensions.' + self._name
+
+
+Final = typing.Final
+
+if sys.version_info >= (3, 11):
+    final = typing.final
+else:
+    # @final exists in 3.8+, but we backport it for all versions
+    # before 3.11 to keep support for the __final__ attribute.
+    # See https://bugs.python.org/issue46342
+    def final(f):
+        """This decorator can be used to indicate to type checkers that
+        the decorated method cannot be overridden, and decorated class
+        cannot be subclassed. For example:
+
+            class Base:
+                @final
+                def done(self) -> None:
+                    ...
+            class Sub(Base):
+                def done(self) -> None:  # Error reported by type checker
+                    ...
+            @final
+            class Leaf:
+                ...
+            class Other(Leaf):  # Error reported by type checker
+                ...
+
+        There is no runtime checking of these properties. The decorator
+        sets the ``__final__`` attribute to ``True`` on the decorated object
+        to allow runtime introspection.
+        """
+        try:
+            f.__final__ = True
+        except (AttributeError, TypeError):
+            # Skip the attribute silently if it is not writable.
+            # AttributeError happens if the object has __slots__ or a
+            # read-only property, TypeError if it's a builtin class.
+            pass
+        return f
+
+
+def IntVar(name):
+    return typing.TypeVar(name)
+
+
+# A Literal bug was fixed in 3.11.0, 3.10.1 and 3.9.8
+if sys.version_info >= (3, 10, 1):
+    Literal = typing.Literal
+else:
+    def _flatten_literal_params(parameters):
+        """An internal helper for Literal creation: flatten Literals among parameters"""
+        params = []
+        for p in parameters:
+            if isinstance(p, _LiteralGenericAlias):
+                params.extend(p.__args__)
+            else:
+                params.append(p)
+        return tuple(params)
+
+    def _value_and_type_iter(params):
+        for p in params:
+            yield p, type(p)
+
+    class _LiteralGenericAlias(typing._GenericAlias, _root=True):
+        def __eq__(self, other):
+            if not isinstance(other, _LiteralGenericAlias):
+                return NotImplemented
+            these_args_deduped = set(_value_and_type_iter(self.__args__))
+            other_args_deduped = set(_value_and_type_iter(other.__args__))
+            return these_args_deduped == other_args_deduped
+
+        def __hash__(self):
+            return hash(frozenset(_value_and_type_iter(self.__args__)))
+
+    class _LiteralForm(_ExtensionsSpecialForm, _root=True):
+        def __init__(self, doc: str):
+            self._name = 'Literal'
+            self._doc = self.__doc__ = doc
+
+        def __getitem__(self, parameters):
+            if not isinstance(parameters, tuple):
+                parameters = (parameters,)
+
+            parameters = _flatten_literal_params(parameters)
+
+            val_type_pairs = list(_value_and_type_iter(parameters))
+            try:
+                deduped_pairs = set(val_type_pairs)
+            except TypeError:
+                # unhashable parameters
+                pass
+            else:
+                # similar logic to typing._deduplicate on Python 3.9+
+                if len(deduped_pairs) < len(val_type_pairs):
+                    new_parameters = []
+                    for pair in val_type_pairs:
+                        if pair in deduped_pairs:
+                            new_parameters.append(pair[0])
+                            deduped_pairs.remove(pair)
+                    assert not deduped_pairs, deduped_pairs
+                    parameters = tuple(new_parameters)
+
+            return _LiteralGenericAlias(self, parameters)
+
+    Literal = _LiteralForm(doc="""\
+                           A type that can be used to indicate to type checkers
+                           that the corresponding value has a value literally equivalent
+                           to the provided parameter. For example:
+
+                               var: Literal[4] = 4
+
+                           The type checker understands that 'var' is literally equal to
+                           the value 4 and no other value.
+
+                           Literal[...] cannot be subclassed. There is no runtime
+                           checking verifying that the parameter is actually a value
+                           instead of a type.""")
+
+
+_overload_dummy = typing._overload_dummy
+
+
+if hasattr(typing, "get_overloads"):  # 3.11+
+    overload = typing.overload
+    get_overloads = typing.get_overloads
+    clear_overloads = typing.clear_overloads
+else:
+    # {module: {qualname: {firstlineno: func}}}
+    _overload_registry = collections.defaultdict(
+        functools.partial(collections.defaultdict, dict)
+    )
+
+    def overload(func):
+        """Decorator for overloaded functions/methods.
+
+        In a stub file, place two or more stub definitions for the same
+        function in a row, each decorated with @overload.  For example:
+
+        @overload
+        def utf8(value: None) -> None: ...
+        @overload
+        def utf8(value: bytes) -> bytes: ...
+        @overload
+        def utf8(value: str) -> bytes: ...
+
+        In a non-stub file (i.e. a regular .py file), do the same but
+        follow it with an implementation.  The implementation should *not*
+        be decorated with @overload.  For example:
+
+        @overload
+        def utf8(value: None) -> None: ...
+        @overload
+        def utf8(value: bytes) -> bytes: ...
+        @overload
+        def utf8(value: str) -> bytes: ...
+        def utf8(value):
+            # implementation goes here
+
+        The overloads for a function can be retrieved at runtime using the
+        get_overloads() function.
+        """
+        # classmethod and staticmethod
+        f = getattr(func, "__func__", func)
+        try:
+            _overload_registry[f.__module__][f.__qualname__][
+                f.__code__.co_firstlineno
+            ] = func
+        except AttributeError:
+            # Not a normal function; ignore.
+            pass
+        return _overload_dummy
+
+    def get_overloads(func):
+        """Return all defined overloads for *func* as a sequence."""
+        # classmethod and staticmethod
+        f = getattr(func, "__func__", func)
+        if f.__module__ not in _overload_registry:
+            return []
+        mod_dict = _overload_registry[f.__module__]
+        if f.__qualname__ not in mod_dict:
+            return []
+        return list(mod_dict[f.__qualname__].values())
+
+    def clear_overloads():
+        """Clear all overloads in the registry."""
+        _overload_registry.clear()
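+
+    # Example flow: two stub definitions of `def f(...)` each decorated with
+    # @overload are stored under (module, qualname, first line number), and
+    # get_overloads(f) later returns them in definition order.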
+
+
+# This is not a real generic class.  Don't use outside annotations.
+Type = typing.Type
+
+# Various ABCs mimicking those in collections.abc.
+# A few are simply re-exported for completeness.
+Awaitable = typing.Awaitable
+Coroutine = typing.Coroutine
+AsyncIterable = typing.AsyncIterable
+AsyncIterator = typing.AsyncIterator
+Deque = typing.Deque
+DefaultDict = typing.DefaultDict
+OrderedDict = typing.OrderedDict
+Counter = typing.Counter
+ChainMap = typing.ChainMap
+Text = typing.Text
+TYPE_CHECKING = typing.TYPE_CHECKING
+
+
+if sys.version_info >= (3, 13, 0, "beta"):
+    from typing import AsyncContextManager, AsyncGenerator, ContextManager, Generator
+else:
+    def _is_dunder(attr):
+        return attr.startswith('__') and attr.endswith('__')
+
+    # Python <3.9 doesn't have typing._SpecialGenericAlias
+    _special_generic_alias_base = getattr(
+        typing, "_SpecialGenericAlias", typing._GenericAlias
+    )
+
+    class _SpecialGenericAlias(_special_generic_alias_base, _root=True):
+        def __init__(self, origin, nparams, *, inst=True, name=None, defaults=()):
+            if _special_generic_alias_base is typing._GenericAlias:
+                # Python <3.9
+                self.__origin__ = origin
+                self._nparams = nparams
+                super().__init__(origin, nparams, special=True, inst=inst, name=name)
+            else:
+                # Python >= 3.9
+                super().__init__(origin, nparams, inst=inst, name=name)
+            self._defaults = defaults
+
+        def __setattr__(self, attr, val):
+            allowed_attrs = {'_name', '_inst', '_nparams', '_defaults'}
+            if _special_generic_alias_base is typing._GenericAlias:
+                # Python <3.9
+                allowed_attrs.add("__origin__")
+            if _is_dunder(attr) or attr in allowed_attrs:
+                object.__setattr__(self, attr, val)
+            else:
+                setattr(self.__origin__, attr, val)
+
+        @typing._tp_cache
+        def __getitem__(self, params):
+            if not isinstance(params, tuple):
+                params = (params,)
+            msg = "Parameters to generic types must be types."
+            params = tuple(typing._type_check(p, msg) for p in params)
+            if (
+                self._defaults
+                and len(params) < self._nparams
+                and len(params) + len(self._defaults) >= self._nparams
+            ):
+                params = (*params, *self._defaults[len(params) - self._nparams:])
+            actual_len = len(params)
+
+            if actual_len != self._nparams:
+                if self._defaults:
+                    expected = f"at least {self._nparams - len(self._defaults)}"
+                else:
+                    expected = str(self._nparams)
+                if not self._nparams:
+                    raise TypeError(f"{self} is not a generic class")
+                raise TypeError(
+                    f"Too {'many' if actual_len > self._nparams else 'few'}"
+                    f" arguments for {self};"
+                    f" actual {actual_len}, expected {expected}"
+                )
+            return self.copy_with(params)
+
+    _NoneType = type(None)
+    Generator = _SpecialGenericAlias(
+        collections.abc.Generator, 3, defaults=(_NoneType, _NoneType)
+    )
+    AsyncGenerator = _SpecialGenericAlias(
+        collections.abc.AsyncGenerator, 2, defaults=(_NoneType,)
+    )
+    ContextManager = _SpecialGenericAlias(
+        contextlib.AbstractContextManager,
+        2,
+        name="ContextManager",
+        defaults=(typing.Optional[bool],)
+    )
+    AsyncContextManager = _SpecialGenericAlias(
+        contextlib.AbstractAsyncContextManager,
+        2,
+        name="AsyncContextManager",
+        defaults=(typing.Optional[bool],)
+    )
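+
+    # With these defaults, Generator[int] means Generator[int, None, None] and
+    # AsyncGenerator[int] means AsyncGenerator[int, None], matching the 3.13
+    # behavior that this block backports.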
+
+
+_PROTO_ALLOWLIST = {
+    'collections.abc': [
+        'Callable', 'Awaitable', 'Iterable', 'Iterator', 'AsyncIterable',
+        'Hashable', 'Sized', 'Container', 'Collection', 'Reversible', 'Buffer',
+    ],
+    'contextlib': ['AbstractContextManager', 'AbstractAsyncContextManager'],
+    'typing_extensions': ['Buffer'],
+}
+
+
+_EXCLUDED_ATTRS = frozenset(typing.EXCLUDED_ATTRIBUTES) | {
+    "__match_args__", "__protocol_attrs__", "__non_callable_proto_members__",
+    "__final__",
+}
+
+
+def _get_protocol_attrs(cls):
+    attrs = set()
+    for base in cls.__mro__[:-1]:  # without object
+        if base.__name__ in {'Protocol', 'Generic'}:
+            continue
+        annotations = getattr(base, '__annotations__', {})
+        for attr in (*base.__dict__, *annotations):
+            if (not attr.startswith('_abc_') and attr not in _EXCLUDED_ATTRS):
+                attrs.add(attr)
+    return attrs
+
+
+def _caller(depth=2):
+    try:
+        return sys._getframe(depth).f_globals.get('__name__', '__main__')
+    except (AttributeError, ValueError):  # For platforms without _getframe()
+        return None
+
+
+# The `__match_args__` attribute was removed from protocol members in 3.13;
+# we want to backport this change to older Python versions.
+if sys.version_info >= (3, 13):
+    Protocol = typing.Protocol
+else:
+    def _allow_reckless_class_checks(depth=3):
+        """Allow instance and class checks for special stdlib modules.
+        The abc and functools modules indiscriminately call isinstance() and
+        issubclass() on the whole MRO of a user class, which may contain protocols.
+        """
+        return _caller(depth) in {'abc', 'functools', None}
+
+    def _no_init(self, *args, **kwargs):
+        if type(self)._is_protocol:
+            raise TypeError('Protocols cannot be instantiated')
+
+    def _type_check_issubclass_arg_1(arg):
+        """Raise TypeError if `arg` is not an instance of `type`
+        in `issubclass(arg, <protocol>)`.
+
+        In most cases, this is verified by type.__subclasscheck__.
+        Checking it again unnecessarily would slow down issubclass() checks,
+        so we don't perform this check unless we absolutely have to.
+
+        For various error paths, however,
+        we want to ensure that *this* error message is shown to the user
+        where relevant, rather than a typing.py-specific error message.
+        """
+        if not isinstance(arg, type):
+            # Same error message as for issubclass(1, int).
+            raise TypeError('issubclass() arg 1 must be a class')
+
+    # Inheriting from typing._ProtocolMeta isn't actually desirable,
+    # but is necessary to allow typing.Protocol and typing_extensions.Protocol
+    # to mix without getting TypeErrors about "metaclass conflict"
+    class _ProtocolMeta(type(typing.Protocol)):
+        # This metaclass is somewhat unfortunate,
+        # but is necessary for several reasons...
+        #
+        # NOTE: DO NOT call super() in any methods in this class
+        # That would call the methods on typing._ProtocolMeta on Python 3.8-3.11
+        # and those are slow
+        def __new__(mcls, name, bases, namespace, **kwargs):
+            if name == "Protocol" and len(bases) < 2:
+                pass
+            elif {Protocol, typing.Protocol} & set(bases):
+                for base in bases:
+                    if not (
+                        base in {object, typing.Generic, Protocol, typing.Protocol}
+                        or base.__name__ in _PROTO_ALLOWLIST.get(base.__module__, [])
+                        or is_protocol(base)
+                    ):
+                        raise TypeError(
+                            f"Protocols can only inherit from other protocols, "
+                            f"got {base!r}"
+                        )
+            return abc.ABCMeta.__new__(mcls, name, bases, namespace, **kwargs)
+
+        def __init__(cls, *args, **kwargs):
+            abc.ABCMeta.__init__(cls, *args, **kwargs)
+            if getattr(cls, "_is_protocol", False):
+                cls.__protocol_attrs__ = _get_protocol_attrs(cls)
+
+        def __subclasscheck__(cls, other):
+            if cls is Protocol:
+                return type.__subclasscheck__(cls, other)
+            if (
+                getattr(cls, '_is_protocol', False)
+                and not _allow_reckless_class_checks()
+            ):
+                if not getattr(cls, '_is_runtime_protocol', False):
+                    _type_check_issubclass_arg_1(other)
+                    raise TypeError(
+                        "Instance and class checks can only be used with "
+                        "@runtime_checkable protocols"
+                    )
+                if (
+                    # this attribute is set by @runtime_checkable:
+                    cls.__non_callable_proto_members__
+                    and cls.__dict__.get("__subclasshook__") is _proto_hook
+                ):
+                    _type_check_issubclass_arg_1(other)
+                    non_method_attrs = sorted(cls.__non_callable_proto_members__)
+                    raise TypeError(
+                        "Protocols with non-method members don't support issubclass()."
+                        f" Non-method members: {str(non_method_attrs)[1:-1]}."
+                    )
+            return abc.ABCMeta.__subclasscheck__(cls, other)
+
+        def __instancecheck__(cls, instance):
+            # We need this method for situations where attributes are
+            # assigned in __init__.
+            if cls is Protocol:
+                return type.__instancecheck__(cls, instance)
+            if not getattr(cls, "_is_protocol", False):
+                # i.e., it's a concrete subclass of a protocol
+                return abc.ABCMeta.__instancecheck__(cls, instance)
+
+            if (
+                not getattr(cls, '_is_runtime_protocol', False) and
+                not _allow_reckless_class_checks()
+            ):
+                raise TypeError("Instance and class checks can only be used with"
+                                " @runtime_checkable protocols")
+
+            if abc.ABCMeta.__instancecheck__(cls, instance):
+                return True
+
+            for attr in cls.__protocol_attrs__:
+                try:
+                    val = inspect.getattr_static(instance, attr)
+                except AttributeError:
+                    break
+                # this attribute is set by @runtime_checkable:
+                if val is None and attr not in cls.__non_callable_proto_members__:
+                    break
+            else:
+                return True
+
+            return False
+
+        def __eq__(cls, other):
+            # Hack so that typing.Generic.__class_getitem__
+            # treats typing_extensions.Protocol
+            # as equivalent to typing.Protocol
+            if abc.ABCMeta.__eq__(cls, other) is True:
+                return True
+            return cls is Protocol and other is typing.Protocol
+
+        # This has to be defined, or the abc-module cache
+        # complains about classes with this metaclass being unhashable,
+        # if we define only __eq__!
+        def __hash__(cls) -> int:
+            return type.__hash__(cls)
+
+    @classmethod
+    def _proto_hook(cls, other):
+        if not cls.__dict__.get('_is_protocol', False):
+            return NotImplemented
+
+        for attr in cls.__protocol_attrs__:
+            for base in other.__mro__:
+                # Check if the member appears in the class dictionary...
+                if attr in base.__dict__:
+                    if base.__dict__[attr] is None:
+                        return NotImplemented
+                    break
+
+                # ...or in annotations, if it is a sub-protocol.
+                annotations = getattr(base, '__annotations__', {})
+                if (
+                    isinstance(annotations, collections.abc.Mapping)
+                    and attr in annotations
+                    and is_protocol(other)
+                ):
+                    break
+            else:
+                return NotImplemented
+        return True
+
+    class Protocol(typing.Generic, metaclass=_ProtocolMeta):
+        __doc__ = typing.Protocol.__doc__
+        __slots__ = ()
+        _is_protocol = True
+        _is_runtime_protocol = False
+
+        def __init_subclass__(cls, *args, **kwargs):
+            super().__init_subclass__(*args, **kwargs)
+
+            # Determine if this is a protocol or a concrete subclass.
+            if not cls.__dict__.get('_is_protocol', False):
+                cls._is_protocol = any(b is Protocol for b in cls.__bases__)
+
+            # Set (or override) the protocol subclass hook.
+            if '__subclasshook__' not in cls.__dict__:
+                cls.__subclasshook__ = _proto_hook
+
+            # Prohibit instantiation for protocol classes
+            if cls._is_protocol and cls.__init__ is Protocol.__init__:
+                cls.__init__ = _no_init
+
+
+if sys.version_info >= (3, 13):
+    runtime_checkable = typing.runtime_checkable
+else:
+    def runtime_checkable(cls):
+        """Mark a protocol class as a runtime protocol.
+
+        Such a protocol can be used with isinstance() and issubclass().
+        Raise TypeError if applied to a non-protocol class.
+        This allows a simple-minded structural check, very similar to the
+        one-trick ponies in collections.abc such as Iterable.
+
+        For example::
+
+            @runtime_checkable
+            class Closable(Protocol):
+                def close(self): ...
+
+            assert isinstance(open('/some/file'), Closable)
+
+        Warning: this will check only the presence of the required methods,
+        not their type signatures!
+        """
+        if not issubclass(cls, typing.Generic) or not getattr(cls, '_is_protocol', False):
+            raise TypeError(f'@runtime_checkable can be only applied to protocol classes,'
+                            f' got {cls!r}')
+        cls._is_runtime_protocol = True
+
+        # typing.Protocol classes on <=3.11 break if we execute this block,
+        # because typing.Protocol classes on <=3.11 don't have a
+        # `__protocol_attrs__` attribute, and this block relies on the
+        # `__protocol_attrs__` attribute. Meanwhile, typing.Protocol classes on 3.12.2+
+        # break if we *don't* execute this block, because *they* assume that all
+        # protocol classes have a `__non_callable_proto_members__` attribute
+        # (which this block sets)
+        if isinstance(cls, _ProtocolMeta) or sys.version_info >= (3, 12, 2):
+            # PEP 544 prohibits using issubclass()
+            # with protocols that have non-method members.
+            # See gh-113320 for why we compute this attribute here,
+            # rather than in `_ProtocolMeta.__init__`
+            cls.__non_callable_proto_members__ = set()
+            for attr in cls.__protocol_attrs__:
+                try:
+                    is_callable = callable(getattr(cls, attr, None))
+                except Exception as e:
+                    raise TypeError(
+                        f"Failed to determine whether protocol member {attr!r} "
+                        "is a method member"
+                    ) from e
+                else:
+                    if not is_callable:
+                        cls.__non_callable_proto_members__.add(attr)
+
+        return cls
+
+
+# The "runtime" alias exists for backwards compatibility.
+runtime = runtime_checkable
+
+
+# Our version of runtime-checkable protocols is faster on Python 3.8-3.11
+if sys.version_info >= (3, 12):
+    SupportsInt = typing.SupportsInt
+    SupportsFloat = typing.SupportsFloat
+    SupportsComplex = typing.SupportsComplex
+    SupportsBytes = typing.SupportsBytes
+    SupportsIndex = typing.SupportsIndex
+    SupportsAbs = typing.SupportsAbs
+    SupportsRound = typing.SupportsRound
+else:
+    @runtime_checkable
+    class SupportsInt(Protocol):
+        """An ABC with one abstract method __int__."""
+        __slots__ = ()
+
+        @abc.abstractmethod
+        def __int__(self) -> int:
+            pass
+
+    @runtime_checkable
+    class SupportsFloat(Protocol):
+        """An ABC with one abstract method __float__."""
+        __slots__ = ()
+
+        @abc.abstractmethod
+        def __float__(self) -> float:
+            pass
+
+    @runtime_checkable
+    class SupportsComplex(Protocol):
+        """An ABC with one abstract method __complex__."""
+        __slots__ = ()
+
+        @abc.abstractmethod
+        def __complex__(self) -> complex:
+            pass
+
+    @runtime_checkable
+    class SupportsBytes(Protocol):
+        """An ABC with one abstract method __bytes__."""
+        __slots__ = ()
+
+        @abc.abstractmethod
+        def __bytes__(self) -> bytes:
+            pass
+
+    @runtime_checkable
+    class SupportsIndex(Protocol):
+        __slots__ = ()
+
+        @abc.abstractmethod
+        def __index__(self) -> int:
+            pass
+
+    @runtime_checkable
+    class SupportsAbs(Protocol[T_co]):
+        """
+        An ABC with one abstract method __abs__ that is covariant in its return type.
+        """
+        __slots__ = ()
+
+        @abc.abstractmethod
+        def __abs__(self) -> T_co:
+            pass
+
+    @runtime_checkable
+    class SupportsRound(Protocol[T_co]):
+        """
+        An ABC with one abstract method __round__ that is covariant in its return type.
+        """
+        __slots__ = ()
+
+        @abc.abstractmethod
+        def __round__(self, ndigits: int = 0) -> T_co:
+            pass
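+
+    # Because these protocols are @runtime_checkable, isinstance() works on
+    # them, e.g. isinstance(3.14, SupportsRound) is True; only the presence of
+    # the method is checked, never its signature.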
+
+
+def _ensure_subclassable(mro_entries):
+    def inner(func):
+        if sys.implementation.name == "pypy" and sys.version_info < (3, 9):
+            cls_dict = {
+                "__call__": staticmethod(func),
+                "__mro_entries__": staticmethod(mro_entries)
+            }
+            t = type(func.__name__, (), cls_dict)
+            return functools.update_wrapper(t(), func)
+        else:
+            func.__mro_entries__ = mro_entries
+            return func
+    return inner
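+
+# The decorator above attaches __mro_entries__ (PEP 560) so that the plain
+# TypedDict *function* defined later in this module can still appear in a
+# class's bases list; `class Point(TypedDict): ...` then resolves to the
+# _TypedDict placeholder type.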
+
+
+_NEEDS_SINGLETONMETA = (
+    not hasattr(typing, "NoDefault") or not hasattr(typing, "NoExtraItems")
+)
+
+if _NEEDS_SINGLETONMETA:
+    class SingletonMeta(type):
+        def __setattr__(cls, attr, value):
+            # TypeError is consistent with the behavior of NoneType
+            raise TypeError(
+                f"cannot set {attr!r} attribute of immutable type {cls.__name__!r}"
+            )
+
+
+if hasattr(typing, "NoDefault"):
+    NoDefault = typing.NoDefault
+else:
+    class NoDefaultType(metaclass=SingletonMeta):
+        """The type of the NoDefault singleton."""
+
+        __slots__ = ()
+
+        def __new__(cls):
+            return globals().get("NoDefault") or object.__new__(cls)
+
+        def __repr__(self):
+            return "typing_extensions.NoDefault"
+
+        def __reduce__(self):
+            return "NoDefault"
+
+    NoDefault = NoDefaultType()
+    del NoDefaultType
+
+if hasattr(typing, "NoExtraItems"):
+    NoExtraItems = typing.NoExtraItems
+else:
+    class NoExtraItemsType(metaclass=SingletonMeta):
+        """The type of the NoExtraItems singleton."""
+
+        __slots__ = ()
+
+        def __new__(cls):
+            return globals().get("NoExtraItems") or object.__new__(cls)
+
+        def __repr__(self):
+            return "typing_extensions.NoExtraItems"
+
+        def __reduce__(self):
+            return "NoExtraItems"
+
+    NoExtraItems = NoExtraItemsType()
+    del NoExtraItemsType
+
+if _NEEDS_SINGLETONMETA:
+    del SingletonMeta
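+
+# Both sentinels are process-wide singletons: their __new__ returns the
+# already-created instance when one exists, and the (now deleted)
+# SingletonMeta rejects attribute assignment, mirroring the immutability of
+# NoneType.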
+
+
+# Update this to something like >=3.13.0b1 if and when
+# PEP 728 is implemented in CPython
+_PEP_728_IMPLEMENTED = False
+
+if _PEP_728_IMPLEMENTED:
+    # The standard library TypedDict in Python 3.8 does not store runtime information
+    # about which (if any) keys are optional.  See https://bugs.python.org/issue38834
+    # The standard library TypedDict in Python 3.9.0/1 does not honour the "total"
+    # keyword with old-style TypedDict().  See https://bugs.python.org/issue42059
+    # The standard library TypedDict below Python 3.11 does not store runtime
+    # information about optional and required keys when using Required or NotRequired.
+    # Generic TypedDicts are also impossible using typing.TypedDict on Python <3.11.
+    # And on 3.12 we add __orig_bases__ to TypedDict
+    # to enable better runtime introspection.
+    # On 3.13 we deprecate some odd ways of creating TypedDicts.
+    # Also on 3.13, PEP 705 adds the ReadOnly[] qualifier.
+    # PEP 728 (still pending) makes more changes.
+    TypedDict = typing.TypedDict
+    _TypedDictMeta = typing._TypedDictMeta
+    is_typeddict = typing.is_typeddict
+else:
+    # 3.10.0 and later
+    _TAKES_MODULE = "module" in inspect.signature(typing._type_check).parameters
+
+    def _get_typeddict_qualifiers(annotation_type):
+        while True:
+            annotation_origin = get_origin(annotation_type)
+            if annotation_origin is Annotated:
+                annotation_args = get_args(annotation_type)
+                if annotation_args:
+                    annotation_type = annotation_args[0]
+                else:
+                    break
+            elif annotation_origin is Required:
+                yield Required
+                annotation_type, = get_args(annotation_type)
+            elif annotation_origin is NotRequired:
+                yield NotRequired
+                annotation_type, = get_args(annotation_type)
+            elif annotation_origin is ReadOnly:
+                yield ReadOnly
+                annotation_type, = get_args(annotation_type)
+            else:
+                break
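+
+    # For example, Annotated[Required[ReadOnly[int]], "meta"] is unwrapped
+    # from the outside in: Annotated is peeled away first, then Required and
+    # ReadOnly are yielded in turn.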
+
+    class _TypedDictMeta(type):
+
+        def __new__(cls, name, bases, ns, *, total=True, closed=None,
+                    extra_items=NoExtraItems):
+            """Create new typed dict class object.
+
+            This method is called when TypedDict is subclassed,
+            or when TypedDict is instantiated. This way
+            TypedDict supports all three syntax forms described in its docstring.
+            Subclasses and instances of TypedDict return actual dictionaries.
+            """
+            for base in bases:
+                if type(base) is not _TypedDictMeta and base is not typing.Generic:
+                    raise TypeError('cannot inherit from both a TypedDict type '
+                                    'and a non-TypedDict base class')
+            if closed is not None and extra_items is not NoExtraItems:
+                raise TypeError(f"Cannot combine closed={closed!r} and extra_items")
+
+            if any(issubclass(b, typing.Generic) for b in bases):
+                generic_base = (typing.Generic,)
+            else:
+                generic_base = ()
+
+            # typing.py generally doesn't let you inherit from plain Generic, unless
+            # the name of the class happens to be "Protocol"
+            tp_dict = type.__new__(_TypedDictMeta, "Protocol", (*generic_base, dict), ns)
+            tp_dict.__name__ = name
+            if tp_dict.__qualname__ == "Protocol":
+                tp_dict.__qualname__ = name
+
+            if not hasattr(tp_dict, '__orig_bases__'):
+                tp_dict.__orig_bases__ = bases
+
+            annotations = {}
+            if "__annotations__" in ns:
+                own_annotations = ns["__annotations__"]
+            elif "__annotate__" in ns:
+                # TODO: Use inspect.VALUE here, and make the annotations lazily evaluated
+                own_annotations = ns["__annotate__"](1)
+            else:
+                own_annotations = {}
+            msg = "TypedDict('Name', {f0: t0, f1: t1, ...}); each t must be a type"
+            if _TAKES_MODULE:
+                own_annotations = {
+                    n: typing._type_check(tp, msg, module=tp_dict.__module__)
+                    for n, tp in own_annotations.items()
+                }
+            else:
+                own_annotations = {
+                    n: typing._type_check(tp, msg)
+                    for n, tp in own_annotations.items()
+                }
+            required_keys = set()
+            optional_keys = set()
+            readonly_keys = set()
+            mutable_keys = set()
+            extra_items_type = extra_items
+
+            for base in bases:
+                base_dict = base.__dict__
+
+                annotations.update(base_dict.get('__annotations__', {}))
+                required_keys.update(base_dict.get('__required_keys__', ()))
+                optional_keys.update(base_dict.get('__optional_keys__', ()))
+                readonly_keys.update(base_dict.get('__readonly_keys__', ()))
+                mutable_keys.update(base_dict.get('__mutable_keys__', ()))
+
+            # This was specified in an earlier version of PEP 728. Support
+            # is retained for backwards compatibility, but only for Python
+            # 3.13 and lower.
+            if (closed and sys.version_info < (3, 14)
+                       and "__extra_items__" in own_annotations):
+                annotation_type = own_annotations.pop("__extra_items__")
+                qualifiers = set(_get_typeddict_qualifiers(annotation_type))
+                if Required in qualifiers:
+                    raise TypeError(
+                        "Special key __extra_items__ does not support "
+                        "Required"
+                    )
+                if NotRequired in qualifiers:
+                    raise TypeError(
+                        "Special key __extra_items__ does not support "
+                        "NotRequired"
+                    )
+                extra_items_type = annotation_type
+
+            annotations.update(own_annotations)
+            for annotation_key, annotation_type in own_annotations.items():
+                qualifiers = set(_get_typeddict_qualifiers(annotation_type))
+
+                if Required in qualifiers:
+                    required_keys.add(annotation_key)
+                elif NotRequired in qualifiers:
+                    optional_keys.add(annotation_key)
+                elif total:
+                    required_keys.add(annotation_key)
+                else:
+                    optional_keys.add(annotation_key)
+                if ReadOnly in qualifiers:
+                    mutable_keys.discard(annotation_key)
+                    readonly_keys.add(annotation_key)
+                else:
+                    mutable_keys.add(annotation_key)
+                    readonly_keys.discard(annotation_key)
+
+            tp_dict.__annotations__ = annotations
+            tp_dict.__required_keys__ = frozenset(required_keys)
+            tp_dict.__optional_keys__ = frozenset(optional_keys)
+            tp_dict.__readonly_keys__ = frozenset(readonly_keys)
+            tp_dict.__mutable_keys__ = frozenset(mutable_keys)
+            tp_dict.__total__ = total
+            tp_dict.__closed__ = closed
+            tp_dict.__extra_items__ = extra_items_type
+            return tp_dict
+
+        __call__ = dict  # static method
+
+        def __subclasscheck__(cls, other):
+            # Typed dicts are only for static structural subtyping.
+            raise TypeError('TypedDict does not support instance and class checks')
+
+        __instancecheck__ = __subclasscheck__
+
+    _TypedDict = type.__new__(_TypedDictMeta, 'TypedDict', (), {})
+
+    @_ensure_subclassable(lambda bases: (_TypedDict,))
+    def TypedDict(
+        typename,
+        fields=_marker,
+        /,
+        *,
+        total=True,
+        closed=None,
+        extra_items=NoExtraItems,
+        **kwargs
+    ):
+        """A simple typed namespace. At runtime it is equivalent to a plain dict.
+
+        TypedDict creates a dictionary type such that a type checker will expect all
+        instances to have a certain set of keys, where each key is
+        associated with a value of a consistent type. This expectation
+        is not checked at runtime.
+
+        Usage::
+
+            class Point2D(TypedDict):
+                x: int
+                y: int
+                label: str
+
+            a: Point2D = {'x': 1, 'y': 2, 'label': 'good'}  # OK
+            b: Point2D = {'z': 3, 'label': 'bad'}           # Fails type check
+
+            assert Point2D(x=1, y=2, label='first') == dict(x=1, y=2, label='first')
+
+        The type info can be accessed via the Point2D.__annotations__ dict, and
+        the Point2D.__required_keys__ and Point2D.__optional_keys__ frozensets.
+        TypedDict supports an additional equivalent form::
+
+            Point2D = TypedDict('Point2D', {'x': int, 'y': int, 'label': str})
+
+        By default, all keys must be present in a TypedDict. It is possible
+        to override this by specifying totality::
+
+            class Point2D(TypedDict, total=False):
+                x: int
+                y: int
+
+        This means that a Point2D TypedDict can have any of the keys omitted. A type
+        checker is only expected to support a literal False or True as the value of
+        the total argument. True is the default, and makes all items defined in the
+        class body be required.
+
+        The Required and NotRequired special forms can also be used to mark
+        individual keys as being required or not required::
+
+            class Point2D(TypedDict):
+                x: int  # the "x" key must always be present (Required is the default)
+                y: NotRequired[int]  # the "y" key can be omitted
+
+        See PEP 655 for more details on Required and NotRequired.
+        """
+        if fields is _marker or fields is None:
+            if fields is _marker:
+                deprecated_thing = "Failing to pass a value for the 'fields' parameter"
+            else:
+                deprecated_thing = "Passing `None` as the 'fields' parameter"
+
+            example = f"`{typename} = TypedDict({typename!r}, {{}})`"
+            deprecation_msg = (
+                f"{deprecated_thing} is deprecated and will be disallowed in "
+                "Python 3.15. To create a TypedDict class with 0 fields "
+                "using the functional syntax, pass an empty dictionary, e.g. "
+            ) + example + "."
+            warnings.warn(deprecation_msg, DeprecationWarning, stacklevel=2)
+            # Support a field called "closed"
+            if closed is not False and closed is not True and closed is not None:
+                kwargs["closed"] = closed
+                closed = None
+            # Or "extra_items"
+            if extra_items is not NoExtraItems:
+                kwargs["extra_items"] = extra_items
+                extra_items = NoExtraItems
+            fields = kwargs
+        elif kwargs:
+            raise TypeError("TypedDict takes either a dict or keyword arguments,"
+                            " but not both")
+        if kwargs:
+            if sys.version_info >= (3, 13):
+                raise TypeError("TypedDict takes no keyword arguments")
+            warnings.warn(
+                "The kwargs-based syntax for TypedDict definitions is deprecated "
+                "in Python 3.11, will be removed in Python 3.13, and may not be "
+                "understood by third-party type checkers.",
+                DeprecationWarning,
+                stacklevel=2,
+            )
+
+        ns = {'__annotations__': dict(fields)}
+        module = _caller()
+        if module is not None:
+            # Setting the correct module is necessary to make typed dict classes pickleable.
+            ns['__module__'] = module
+
+        td = _TypedDictMeta(typename, (), ns, total=total, closed=closed,
+                            extra_items=extra_items)
+        td.__orig_bases__ = (TypedDict,)
+        return td
+
+    if hasattr(typing, "_TypedDictMeta"):
+        _TYPEDDICT_TYPES = (typing._TypedDictMeta, _TypedDictMeta)
+    else:
+        _TYPEDDICT_TYPES = (_TypedDictMeta,)
+
+    def is_typeddict(tp):
+        """Check if an annotation is a TypedDict class
+
+        For example::
+            class Film(TypedDict):
+                title: str
+                year: int
+
+            is_typeddict(Film)  # => True
+            is_typeddict(Union[list, str])  # => False
+        """
+        # On 3.8, this would otherwise return True
+        if hasattr(typing, "TypedDict") and tp is typing.TypedDict:
+            return False
+        return isinstance(tp, _TYPEDDICT_TYPES)
+
+
+if hasattr(typing, "assert_type"):
+    assert_type = typing.assert_type
+
+else:
+    def assert_type(val, typ, /):
+        """Assert (to the type checker) that the value is of the given type.
+
+        When the type checker encounters a call to assert_type(), it
+        emits an error if the value is not of the specified type::
+
+            def greet(name: str) -> None:
+                assert_type(name, str)  # ok
+                assert_type(name, int)  # type checker error
+
+        At runtime this returns the first argument unchanged and otherwise
+        does nothing.
+        """
+        return val
+
+
+if hasattr(typing, "ReadOnly"):  # 3.13+
+    get_type_hints = typing.get_type_hints
+else:  # <=3.13
+    # replaces _strip_annotations()
+    def _strip_extras(t):
+        """Strips Annotated, Required and NotRequired from a given type."""
+        if isinstance(t, _AnnotatedAlias):
+            return _strip_extras(t.__origin__)
+        if hasattr(t, "__origin__") and t.__origin__ in (Required, NotRequired, ReadOnly):
+            return _strip_extras(t.__args__[0])
+        if isinstance(t, typing._GenericAlias):
+            stripped_args = tuple(_strip_extras(a) for a in t.__args__)
+            if stripped_args == t.__args__:
+                return t
+            return t.copy_with(stripped_args)
+        if hasattr(_types, "GenericAlias") and isinstance(t, _types.GenericAlias):
+            stripped_args = tuple(_strip_extras(a) for a in t.__args__)
+            if stripped_args == t.__args__:
+                return t
+            return _types.GenericAlias(t.__origin__, stripped_args)
+        if hasattr(_types, "UnionType") and isinstance(t, _types.UnionType):
+            stripped_args = tuple(_strip_extras(a) for a in t.__args__)
+            if stripped_args == t.__args__:
+                return t
+            return functools.reduce(operator.or_, stripped_args)
+
+        return t
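+
+    # Example: _strip_extras(Annotated[NotRequired[int], "meta"]) returns int,
+    # stripping both the Annotated wrapper and the NotRequired qualifier.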
+
+    def get_type_hints(obj, globalns=None, localns=None, include_extras=False):
+        """Return type hints for an object.
+
+        This is often the same as obj.__annotations__, but it handles
+        forward references encoded as string literals, adds Optional[t] if a
+        default value equal to None is set and recursively replaces all
+        'Annotated[T, ...]', 'Required[T]' or 'NotRequired[T]' with 'T'
+        (unless 'include_extras=True').
+
+        The argument may be a module, class, method, or function. The annotations
+        are returned as a dictionary. For classes, annotations include also
+        inherited members.
+
+        TypeError is raised if the argument is not of a type that can contain
+        annotations, and an empty dictionary is returned if no annotations are
+        present.
+
+        BEWARE -- the behavior of globalns and localns is counterintuitive
+        (unless you are familiar with how eval() and exec() work).  The
+        search order is locals first, then globals.
+
+        - If no dict arguments are passed, an attempt is made to use the
+          globals from obj (or the respective module's globals for classes),
+          and these are also used as the locals.  If the object does not appear
+          to have globals, an empty dictionary is used.
+
+        - If one dict argument is passed, it is used for both globals and
+          locals.
+
+        - If two dict arguments are passed, they specify globals and
+          locals, respectively.
+        """
+        if hasattr(typing, "Annotated"):  # 3.9+
+            hint = typing.get_type_hints(
+                obj, globalns=globalns, localns=localns, include_extras=True
+            )
+        else:  # 3.8
+            hint = typing.get_type_hints(obj, globalns=globalns, localns=localns)
+        if sys.version_info < (3, 11):
+            _clean_optional(obj, hint, globalns, localns)
+        if sys.version_info < (3, 9):
+            # In 3.8, eval_type does not flatten Optional[ForwardRef] correctly.
+            # This will recreate and cache Unions.
+            hint = {
+                k: (t
+                    if get_origin(t) != Union
+                    else Union[t.__args__])
+                for k, t in hint.items()
+            }
+        if include_extras:
+            return hint
+        return {k: _strip_extras(t) for k, t in hint.items()}
+
+    _NoneType = type(None)
+
+    def _could_be_inserted_optional(t):
+        """detects Union[..., None] pattern"""
+        # 3.8+ compatible checking before _UnionGenericAlias
+        if get_origin(t) is not Union:
+            return False
+        # Assume if last argument is not None they are user defined
+        if t.__args__[-1] is not _NoneType:
+            return False
+        return True
+
+    # < 3.11
+    def _clean_optional(obj, hints, globalns=None, localns=None):
+        # reverts injected Union[..., None] cases from typing.get_type_hints
+        # when a None default value is used.
+        # see https://github.com/python/typing_extensions/issues/310
+        if not hints or isinstance(obj, type):
+            return
+        defaults = typing._get_defaults(obj)  # avoid accessing __annotations__
+        if not defaults:
+            return
+        original_hints = obj.__annotations__
+        for name, value in hints.items():
+            # Not a Union[..., None], or replacement conditions not fulfilled
+            if (not _could_be_inserted_optional(value)
+                or name not in defaults
+                or defaults[name] is not None
+            ):
+                continue
+            original_value = original_hints[name]
+            # value=NoneType should have caused a skip above but check for safety
+            if original_value is None:
+                original_value = _NoneType
+            # Forward reference
+            if isinstance(original_value, str):
+                if globalns is None:
+                    if isinstance(obj, _types.ModuleType):
+                        globalns = obj.__dict__
+                    else:
+                        nsobj = obj
+                        # Find globalns for the unwrapped object.
+                        while hasattr(nsobj, '__wrapped__'):
+                            nsobj = nsobj.__wrapped__
+                        globalns = getattr(nsobj, '__globals__', {})
+                    if localns is None:
+                        localns = globalns
+                elif localns is None:
+                    localns = globalns
+                if sys.version_info < (3, 9):
+                    original_value = ForwardRef(original_value)
+                else:
+                    original_value = ForwardRef(
+                        original_value,
+                        is_argument=not isinstance(obj, _types.ModuleType)
+                    )
+            original_evaluated = typing._eval_type(original_value, globalns, localns)
+            if sys.version_info < (3, 9) and get_origin(original_evaluated) is Union:
+                # Union[str, None, "str"] is not reduced to Union[str, None]
+                original_evaluated = Union[original_evaluated.__args__]
+            # Compare whether the values differ. Note that even if they are
+            # equal, value may be cached by typing._tp_cache, unlike original_evaluated
+            if original_evaluated != value or (
+                # 3.10: ForwardRefs of UnionType might be turned into _UnionGenericAlias
+                hasattr(_types, "UnionType")
+                and isinstance(original_evaluated, _types.UnionType)
+                and not isinstance(value, _types.UnionType)
+            ):
+                hints[name] = original_evaluated
+
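+    # Sketch of the behavior reverted above (illustrative): on Python < 3.11,
+    # typing.get_type_hints turns
+    #
+    #     def f(x: int = None): ...
+    #
+    # into {'x': Optional[int]}; _clean_optional restores the annotation as
+    # written, {'x': int}, matching the 3.11+ behavior.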
+# Python 3.9+ has PEP 593 (Annotated)
+if hasattr(typing, 'Annotated'):
+    Annotated = typing.Annotated
+    # Not exported and not a public API, but needed for get_origin() and get_args()
+    # to work.
+    _AnnotatedAlias = typing._AnnotatedAlias
+# 3.8
+else:
+    class _AnnotatedAlias(typing._GenericAlias, _root=True):
+        """Runtime representation of an annotated type.
+
+        At its core 'Annotated[t, dec1, dec2, ...]' is an alias for the type 't'
+        with extra annotations. The alias behaves like a normal typing alias,
+        instantiating is the same as instantiating the underlying type, binding
+        it to types is also the same.
+        """
+        def __init__(self, origin, metadata):
+            if isinstance(origin, _AnnotatedAlias):
+                metadata = origin.__metadata__ + metadata
+                origin = origin.__origin__
+            super().__init__(origin, origin)
+            self.__metadata__ = metadata
+
+        def copy_with(self, params):
+            assert len(params) == 1
+            new_type = params[0]
+            return _AnnotatedAlias(new_type, self.__metadata__)
+
+        def __repr__(self):
+            return (f"typing_extensions.Annotated[{typing._type_repr(self.__origin__)}, "
+                    f"{', '.join(repr(a) for a in self.__metadata__)}]")
+
+        def __reduce__(self):
+            return operator.getitem, (
+                Annotated, (self.__origin__, *self.__metadata__)
+            )
+
+        def __eq__(self, other):
+            if not isinstance(other, _AnnotatedAlias):
+                return NotImplemented
+            if self.__origin__ != other.__origin__:
+                return False
+            return self.__metadata__ == other.__metadata__
+
+        def __hash__(self):
+            return hash((self.__origin__, self.__metadata__))
+
+    class Annotated:
+        """Add context specific metadata to a type.
+
+        Example: Annotated[int, runtime_check.Unsigned] indicates to the
+        hypothetical runtime_check module that this type is an unsigned int.
+        Every other consumer of this type can ignore this metadata and treat
+        this type as int.
+
+        The first argument to Annotated must be a valid type (and will be in
+        the __origin__ field), the remaining arguments are kept as a tuple in
+        the __metadata__ field.
+
+        Details:
+
+        - It's an error to call `Annotated` with less than two arguments.
+        - Nested Annotated types are flattened::
+
+            Annotated[Annotated[T, Ann1, Ann2], Ann3] == Annotated[T, Ann1, Ann2, Ann3]
+
+        - Instantiating an annotated type is equivalent to instantiating the
+        underlying type::
+
+            Annotated[C, Ann1](5) == C(5)
+
+        - Annotated can be used as a generic type alias::
+
+            Optimized = Annotated[T, runtime.Optimize()]
+            Optimized[int] == Annotated[int, runtime.Optimize()]
+
+            OptimizedList = Annotated[List[T], runtime.Optimize()]
+            OptimizedList[int] == Annotated[List[int], runtime.Optimize()]
+        """
+
+        __slots__ = ()
+
+        def __new__(cls, *args, **kwargs):
+            raise TypeError("Type Annotated cannot be instantiated.")
+
+        @typing._tp_cache
+        def __class_getitem__(cls, params):
+            if not isinstance(params, tuple) or len(params) < 2:
+                raise TypeError("Annotated[...] should be used "
+                                "with at least two arguments (a type and an "
+                                "annotation).")
+            allowed_special_forms = (ClassVar, Final)
+            if get_origin(params[0]) in allowed_special_forms:
+                origin = params[0]
+            else:
+                msg = "Annotated[t, ...]: t must be a type."
+                origin = typing._type_check(params[0], msg)
+            metadata = tuple(params[1:])
+            return _AnnotatedAlias(origin, metadata)
+
+        def __init_subclass__(cls, *args, **kwargs):
+            raise TypeError(
+                f"Cannot subclass {cls.__module__}.Annotated"
+            )
+
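+# A short usage sketch for the backport above (hypothetical alias name):
+#
+#     UserId = Annotated[int, "database key"]
+#
+#     UserId(5) == 5                             # instantiates the underlying int
+#     get_origin(UserId) is Annotated            # True
+#     get_args(UserId) == (int, "database key")  # True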
+# Python 3.8 has get_origin() and get_args() but those implementations aren't
+# Annotated-aware, so we can't use those. Python 3.9's versions don't support
+# ParamSpecArgs and ParamSpecKwargs, so only Python 3.10's versions will do.
+if sys.version_info[:2] >= (3, 10):
+    get_origin = typing.get_origin
+    get_args = typing.get_args
+# 3.8-3.9
+else:
+    try:
+        # 3.9+
+        from typing import _BaseGenericAlias
+    except ImportError:
+        _BaseGenericAlias = typing._GenericAlias
+    try:
+        # 3.9+
+        from typing import GenericAlias as _typing_GenericAlias
+    except ImportError:
+        _typing_GenericAlias = typing._GenericAlias
+
+    def get_origin(tp):
+        """Get the unsubscripted version of a type.
+
+        This supports generic types, Callable, Tuple, Union, Literal, Final, ClassVar
+        and Annotated. Return None for unsupported types. Examples::
+
+            get_origin(Literal[42]) is Literal
+            get_origin(int) is None
+            get_origin(ClassVar[int]) is ClassVar
+            get_origin(Generic) is Generic
+            get_origin(Generic[T]) is Generic
+            get_origin(Union[T, int]) is Union
+            get_origin(List[Tuple[T, T]][int]) == list
+            get_origin(P.args) is P
+        """
+        if isinstance(tp, _AnnotatedAlias):
+            return Annotated
+        if isinstance(tp, (typing._GenericAlias, _typing_GenericAlias, _BaseGenericAlias,
+                           ParamSpecArgs, ParamSpecKwargs)):
+            return tp.__origin__
+        if tp is typing.Generic:
+            return typing.Generic
+        return None
+
+    def get_args(tp):
+        """Get type arguments with all substitutions performed.
+
+        For unions, basic simplifications used by Union constructor are performed.
+        Examples::
+            get_args(Dict[str, int]) == (str, int)
+            get_args(int) == ()
+            get_args(Union[int, Union[T, int], str][int]) == (int, str)
+            get_args(Union[int, Tuple[T, int]][str]) == (int, Tuple[str, int])
+            get_args(Callable[[], T][int]) == ([], int)
+        """
+        if isinstance(tp, _AnnotatedAlias):
+            return (tp.__origin__, *tp.__metadata__)
+        if isinstance(tp, (typing._GenericAlias, _typing_GenericAlias)):
+            if getattr(tp, "_special", False):
+                return ()
+            res = tp.__args__
+            if get_origin(tp) is collections.abc.Callable and res[0] is not Ellipsis:
+                res = (list(res[:-1]), res[-1])
+            return res
+        return ()
+
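+    # Quick introspection sketch using the backports above (illustrative):
+    #
+    #     get_origin(typing.List[int])           # <class 'list'>
+    #     get_args(typing.Dict[str, int])        # (str, int)
+    #     get_origin(Annotated[int, "m"])        # Annotated
+    #     get_args(typing.Callable[[int], str])  # ([int], str)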
+
+# 3.10+
+if hasattr(typing, 'TypeAlias'):
+    TypeAlias = typing.TypeAlias
+# 3.9
+elif sys.version_info[:2] >= (3, 9):
+    @_ExtensionsSpecialForm
+    def TypeAlias(self, parameters):
+        """Special marker indicating that an assignment should
+        be recognized as a proper type alias definition by type
+        checkers.
+
+        For example::
+
+            Predicate: TypeAlias = Callable[..., bool]
+
+        It's invalid when used anywhere except as in the example above.
+        """
+        raise TypeError(f"{self} is not subscriptable")
+# 3.8
+else:
+    TypeAlias = _ExtensionsSpecialForm(
+        'TypeAlias',
+        doc="""Special marker indicating that an assignment should
+        be recognized as a proper type alias definition by type
+        checkers.
+
+        For example::
+
+            Predicate: TypeAlias = Callable[..., bool]
+
+        It's invalid when used anywhere except as in the example
+        above."""
+    )
+
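+# Usage sketch (illustrative): the annotation only affects static checkers;
+# at runtime TypeAlias is an inert marker.
+#
+#     from typing import Callable
+#
+#     Predicate: TypeAlias = Callable[..., bool]
+#
+#     def apply(p: Predicate, v: object) -> bool:
+#         return p(v)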
+
+def _set_default(type_param, default):
+    type_param.has_default = lambda: default is not NoDefault
+    type_param.__default__ = default
+
+
+def _set_module(typevarlike):
+    # for pickling:
+    def_mod = _caller(depth=3)
+    if def_mod != 'typing_extensions':
+        typevarlike.__module__ = def_mod
+
+
+class _DefaultMixin:
+    """Mixin for TypeVarLike defaults."""
+
+    __slots__ = ()
+    __init__ = _set_default
+
+
+# Classes using this metaclass must provide a _backported_typevarlike ClassVar
+class _TypeVarLikeMeta(type):
+    def __instancecheck__(cls, __instance: Any) -> bool:
+        return isinstance(__instance, cls._backported_typevarlike)
+
+
+if _PEP_696_IMPLEMENTED:
+    from typing import TypeVar
+else:
+    # Add default and infer_variance parameters from PEP 696 and 695
+    class TypeVar(metaclass=_TypeVarLikeMeta):
+        """Type variable."""
+
+        _backported_typevarlike = typing.TypeVar
+
+        def __new__(cls, name, *constraints, bound=None,
+                    covariant=False, contravariant=False,
+                    default=NoDefault, infer_variance=False):
+            if hasattr(typing, "TypeAliasType"):
+                # PEP 695 implemented (3.12+), can pass infer_variance to typing.TypeVar
+                typevar = typing.TypeVar(name, *constraints, bound=bound,
+                                         covariant=covariant, contravariant=contravariant,
+                                         infer_variance=infer_variance)
+            else:
+                typevar = typing.TypeVar(name, *constraints, bound=bound,
+                                         covariant=covariant, contravariant=contravariant)
+                if infer_variance and (covariant or contravariant):
+                    raise ValueError("Variance cannot be specified with infer_variance.")
+                typevar.__infer_variance__ = infer_variance
+
+            _set_default(typevar, default)
+            _set_module(typevar)
+
+            def _tvar_prepare_subst(alias, args):
+                if (
+                    typevar.has_default()
+                    and alias.__parameters__.index(typevar) == len(args)
+                ):
+                    args += (typevar.__default__,)
+                return args
+
+            typevar.__typing_prepare_subst__ = _tvar_prepare_subst
+            return typevar
+
+        def __init_subclass__(cls) -> None:
+            raise TypeError(f"type '{__name__}.TypeVar' is not an acceptable base type")
+
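+# Sketch of the PEP 696 `default` parameter added above (hypothetical names):
+#
+#     T = TypeVar("T", default=int)
+#
+#     class Box(Generic[T]): ...
+#
+#     Box[str]          # T is str
+#     Box               # bare; a checker treats this as Box[int]
+#     T.has_default()   # True
+#     T.__default__     # <class 'int'>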
+
+# Python 3.10+ has PEP 612
+if hasattr(typing, 'ParamSpecArgs'):
+    ParamSpecArgs = typing.ParamSpecArgs
+    ParamSpecKwargs = typing.ParamSpecKwargs
+# 3.8-3.9
+else:
+    class _Immutable:
+        """Mixin to indicate that object should not be copied."""
+        __slots__ = ()
+
+        def __copy__(self):
+            return self
+
+        def __deepcopy__(self, memo):
+            return self
+
+    class ParamSpecArgs(_Immutable):
+        """The args for a ParamSpec object.
+
+        Given a ParamSpec object P, P.args is an instance of ParamSpecArgs.
+
+        ParamSpecArgs objects have a reference back to their ParamSpec:
+
+        P.args.__origin__ is P
+
+        This type is meant for runtime introspection and has no special meaning to
+        static type checkers.
+        """
+        def __init__(self, origin):
+            self.__origin__ = origin
+
+        def __repr__(self):
+            return f"{self.__origin__.__name__}.args"
+
+        def __eq__(self, other):
+            if not isinstance(other, ParamSpecArgs):
+                return NotImplemented
+            return self.__origin__ == other.__origin__
+
+    class ParamSpecKwargs(_Immutable):
+        """The kwargs for a ParamSpec object.
+
+        Given a ParamSpec object P, P.kwargs is an instance of ParamSpecKwargs.
+
+        ParamSpecKwargs objects have a reference back to their ParamSpec:
+
+        P.kwargs.__origin__ is P
+
+        This type is meant for runtime introspection and has no special meaning to
+        static type checkers.
+        """
+        def __init__(self, origin):
+            self.__origin__ = origin
+
+        def __repr__(self):
+            return f"{self.__origin__.__name__}.kwargs"
+
+        def __eq__(self, other):
+            if not isinstance(other, ParamSpecKwargs):
+                return NotImplemented
+            return self.__origin__ == other.__origin__
+
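+# Introspection sketch (illustrative): P.args and P.kwargs keep a reference
+# back to their ParamSpec.
+#
+#     P = ParamSpec("P")
+#
+#     P.args.__origin__ is P      # True
+#     P.kwargs.__origin__ is P    # True
+#     repr(P.args)                # 'P.args'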
+
+if _PEP_696_IMPLEMENTED:
+    from typing import ParamSpec
+
+# 3.10+
+elif hasattr(typing, 'ParamSpec'):
+
+    # Add default parameter - PEP 696
+    class ParamSpec(metaclass=_TypeVarLikeMeta):
+        """Parameter specification."""
+
+        _backported_typevarlike = typing.ParamSpec
+
+        def __new__(cls, name, *, bound=None,
+                    covariant=False, contravariant=False,
+                    infer_variance=False, default=NoDefault):
+            if hasattr(typing, "TypeAliasType"):
+                # PEP 695 implemented, can pass infer_variance to typing.ParamSpec
+                paramspec = typing.ParamSpec(name, bound=bound,
+                                             covariant=covariant,
+                                             contravariant=contravariant,
+                                             infer_variance=infer_variance)
+            else:
+                paramspec = typing.ParamSpec(name, bound=bound,
+                                             covariant=covariant,
+                                             contravariant=contravariant)
+                paramspec.__infer_variance__ = infer_variance
+
+            _set_default(paramspec, default)
+            _set_module(paramspec)
+
+            def _paramspec_prepare_subst(alias, args):
+                params = alias.__parameters__
+                i = params.index(paramspec)
+                if i == len(args) and paramspec.has_default():
+                    args = [*args, paramspec.__default__]
+                if i >= len(args):
+                    raise TypeError(f"Too few arguments for {alias}")
+                # Special case where Z[[int, str, bool]] == Z[int, str, bool] in PEP 612.
+                if len(params) == 1 and not typing._is_param_expr(args[0]):
+                    assert i == 0
+                    args = (args,)
+                # Convert lists to tuples to help other libraries cache the results.
+                elif isinstance(args[i], list):
+                    args = (*args[:i], tuple(args[i]), *args[i + 1:])
+                return args
+
+            paramspec.__typing_prepare_subst__ = _paramspec_prepare_subst
+            return paramspec
+
+        def __init_subclass__(cls) -> None:
+            raise TypeError(f"type '{__name__}.ParamSpec' is not an acceptable base type")
+
+# 3.8-3.9
+else:
+
+    # Inherits from list as a workaround for Callable checks in Python < 3.9.2.
+    class ParamSpec(list, _DefaultMixin):
+        """Parameter specification variable.
+
+        Usage::
+
+           P = ParamSpec('P')
+
+        Parameter specification variables exist primarily for the benefit of static
+        type checkers.  They are used to forward the parameter types of one
+        callable to another callable, a pattern commonly found in higher order
+        functions and decorators.  They are only valid when used in ``Concatenate``,
+        or as the first argument to ``Callable``. In Python 3.10 and higher,
+        they are also supported in user-defined Generics at runtime.
+        See class Generic for more information on generic types.  An
+        example for annotating a decorator::
+
+           T = TypeVar('T')
+           P = ParamSpec('P')
+
+           def add_logging(f: Callable[P, T]) -> Callable[P, T]:
+               '''A type-safe decorator to add logging to a function.'''
+               def inner(*args: P.args, **kwargs: P.kwargs) -> T:
+                   logging.info(f'{f.__name__} was called')
+                   return f(*args, **kwargs)
+               return inner
+
+           @add_logging
+           def add_two(x: float, y: float) -> float:
+               '''Add two numbers together.'''
+               return x + y
+
+        Parameter specification variables defined with covariant=True or
+        contravariant=True can be used to declare covariant or contravariant
+        generic types.  These keyword arguments are valid, but their actual semantics
+        are yet to be decided.  See PEP 612 for details.
+
+        Parameter specification variables can be introspected. e.g.:
+
+           P.__name__ == 'P'
+           P.__bound__ == None
+           P.__covariant__ == False
+           P.__contravariant__ == False
+
+        Note that only parameter specification variables defined in global scope can
+        be pickled.
+        """
+
+        # Trick Generic __parameters__.
+        __class__ = typing.TypeVar
+
+        @property
+        def args(self):
+            return ParamSpecArgs(self)
+
+        @property
+        def kwargs(self):
+            return ParamSpecKwargs(self)
+
+        def __init__(self, name, *, bound=None, covariant=False, contravariant=False,
+                     infer_variance=False, default=NoDefault):
+            list.__init__(self, [self])
+            self.__name__ = name
+            self.__covariant__ = bool(covariant)
+            self.__contravariant__ = bool(contravariant)
+            self.__infer_variance__ = bool(infer_variance)
+            if bound:
+                self.__bound__ = typing._type_check(bound, 'Bound must be a type.')
+            else:
+                self.__bound__ = None
+            _DefaultMixin.__init__(self, default)
+
+            # for pickling:
+            def_mod = _caller()
+            if def_mod != 'typing_extensions':
+                self.__module__ = def_mod
+
+        def __repr__(self):
+            if self.__infer_variance__:
+                prefix = ''
+            elif self.__covariant__:
+                prefix = '+'
+            elif self.__contravariant__:
+                prefix = '-'
+            else:
+                prefix = '~'
+            return prefix + self.__name__
+
+        def __hash__(self):
+            return object.__hash__(self)
+
+        def __eq__(self, other):
+            return self is other
+
+        def __reduce__(self):
+            return self.__name__
+
+        # Hack to get typing._type_check to pass.
+        def __call__(self, *args, **kwargs):
+            pass
+
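+# Sketch of a ParamSpec with a PEP 696 default (hypothetical names):
+#
+#     P = ParamSpec("P", default=[int, str])
+#
+#     class Callback(Generic[P]): ...
+#
+#     P.has_default()   # True
+#     P.__default__     # [int, str]; applied when the subscript is omitted,
+#                       # see _paramspec_prepare_subst above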
+
+# 3.8-3.9
+if not hasattr(typing, 'Concatenate'):
+    # Inherits from list as a workaround for Callable checks in Python < 3.9.2.
+
+    # 3.9.0-1
+    if not hasattr(typing, '_type_convert'):
+        def _type_convert(arg, module=None, *, allow_special_forms=False):
+            """For converting None to type(None), and strings to ForwardRef."""
+            if arg is None:
+                return type(None)
+            if isinstance(arg, str):
+                if sys.version_info <= (3, 9, 6):
+                    return ForwardRef(arg)
+                if sys.version_info <= (3, 9, 7):
+                    return ForwardRef(arg, module=module)
+                return ForwardRef(arg, module=module, is_class=allow_special_forms)
+            return arg
+    else:
+        _type_convert = typing._type_convert
+
+    class _ConcatenateGenericAlias(list):
+
+        # Trick Generic into looking into this for __parameters__.
+        __class__ = typing._GenericAlias
+
+        # Flag in 3.8.
+        _special = False
+
+        def __init__(self, origin, args):
+            super().__init__(args)
+            self.__origin__ = origin
+            self.__args__ = args
+
+        def __repr__(self):
+            _type_repr = typing._type_repr
+            return (f'{_type_repr(self.__origin__)}'
+                    f'[{", ".join(_type_repr(arg) for arg in self.__args__)}]')
+
+        def __hash__(self):
+            return hash((self.__origin__, self.__args__))
+
+        # Hack to get typing._type_check to pass in Generic.
+        def __call__(self, *args, **kwargs):
+            pass
+
+        @property
+        def __parameters__(self):
+            return tuple(
+                tp for tp in self.__args__ if isinstance(tp, (typing.TypeVar, ParamSpec))
+            )
+
+        # 3.8; needed for typing._subst_tvars
+        # 3.9 used by __getitem__ below
+        def copy_with(self, params):
+            if isinstance(params[-1], _ConcatenateGenericAlias):
+                params = (*params[:-1], *params[-1].__args__)
+            elif isinstance(params[-1], (list, tuple)):
+                return (*params[:-1], *params[-1])
+            elif (not (params[-1] is ... or isinstance(params[-1], ParamSpec))):
+                raise TypeError("The last parameter to Concatenate should be a "
+                        "ParamSpec variable or ellipsis.")
+            return self.__class__(self.__origin__, params)
+
+        # 3.9; accessed during GenericAlias.__getitem__ when substituting
+        def __getitem__(self, args):
+            if self.__origin__ in (Generic, Protocol):
+                # Can't subscript Generic[...] or Protocol[...].
+                raise TypeError(f"Cannot subscript already-subscripted {self}")
+            if not self.__parameters__:
+                raise TypeError(f"{self} is not a generic class")
+
+            if not isinstance(args, tuple):
+                args = (args,)
+            args = _unpack_args(*(_type_convert(p) for p in args))
+            params = self.__parameters__
+            for param in params:
+                prepare = getattr(param, "__typing_prepare_subst__", None)
+                if prepare is not None:
+                    args = prepare(self, args)
+                # 3.8 - 3.9 & typing.ParamSpec
+                elif isinstance(param, ParamSpec):
+                    i = params.index(param)
+                    if (
+                        i == len(args)
+                        and getattr(param, '__default__', NoDefault) is not NoDefault
+                    ):
+                        args = [*args, param.__default__]
+                    if i >= len(args):
+                        raise TypeError(f"Too few arguments for {self}")
+                    # Special case for Z[[int, str, bool]] == Z[int, str, bool]
+                    if len(params) == 1 and not _is_param_expr(args[0]):
+                        assert i == 0
+                        args = (args,)
+                    elif (
+                        isinstance(args[i], list)
+                        # 3.8 - 3.9
+                        # This class inherits from list; do not convert it
+                        and not isinstance(args[i], _ConcatenateGenericAlias)
+                    ):
+                        args = (*args[:i], tuple(args[i]), *args[i + 1:])
+
+            alen = len(args)
+            plen = len(params)
+            if alen != plen:
+                raise TypeError(
+                    f"Too {'many' if alen > plen else 'few'} arguments for {self};"
+                    f" actual {alen}, expected {plen}"
+                )
+
+            subst = dict(zip(self.__parameters__, args))
+            # determine new args
+            new_args = []
+            for arg in self.__args__:
+                if isinstance(arg, type):
+                    new_args.append(arg)
+                    continue
+                if isinstance(arg, TypeVar):
+                    arg = subst[arg]
+                    if (
+                        (isinstance(arg, typing._GenericAlias) and _is_unpack(arg))
+                        or (
+                            hasattr(_types, "GenericAlias")
+                            and isinstance(arg, _types.GenericAlias)
+                            and getattr(arg, "__unpacked__", False)
+                        )
+                    ):
+                        raise TypeError(f"{arg} is not valid as type argument")
+
+                elif isinstance(arg,
+                    typing._GenericAlias
+                    if not hasattr(_types, "GenericAlias") else
+                    (typing._GenericAlias, _types.GenericAlias)
+                ):
+                    subparams = arg.__parameters__
+                    if subparams:
+                        subargs = tuple(subst[x] for x in subparams)
+                        arg = arg[subargs]
+                new_args.append(arg)
+            return self.copy_with(tuple(new_args))
+
+# 3.10+
+else:
+    _ConcatenateGenericAlias = typing._ConcatenateGenericAlias
+
+    # 3.10
+    if sys.version_info < (3, 11):
+
+        class _ConcatenateGenericAlias(typing._ConcatenateGenericAlias, _root=True):
+            # needed for checks in collections.abc.Callable to accept this class
+            __module__ = "typing"
+
+            def copy_with(self, params):
+                if isinstance(params[-1], (list, tuple)):
+                    return (*params[:-1], *params[-1])
+                if isinstance(params[-1], typing._ConcatenateGenericAlias):
+                    params = (*params[:-1], *params[-1].__args__)
+                elif not (params[-1] is ... or isinstance(params[-1], ParamSpec)):
+                    raise TypeError("The last parameter to Concatenate should be a "
+                            "ParamSpec variable or ellipsis.")
+                return super(typing._ConcatenateGenericAlias, self).copy_with(params)
+
+            def __getitem__(self, args):
+                value = super().__getitem__(args)
+                if isinstance(value, tuple) and any(_is_unpack(t) for t in value):
+                    return tuple(_unpack_args(*(n for n in value)))
+                return value
+
+
+# 3.8-3.9.2
+class _EllipsisDummy: ...
+
+
+# 3.8-3.10
+def _create_concatenate_alias(origin, parameters):
+    if parameters[-1] is ... and sys.version_info < (3, 9, 2):
+        # Hack: Arguments must be types, replace it with one.
+        parameters = (*parameters[:-1], _EllipsisDummy)
+    if sys.version_info >= (3, 10, 3):
+        concatenate = _ConcatenateGenericAlias(origin, parameters,
+                                        _typevar_types=(TypeVar, ParamSpec),
+                                        _paramspec_tvars=True)
+    else:
+        concatenate = _ConcatenateGenericAlias(origin, parameters)
+    if parameters[-1] is not _EllipsisDummy:
+        return concatenate
+    # Remove dummy again
+    concatenate.__args__ = tuple(p if p is not _EllipsisDummy else ...
+                                    for p in concatenate.__args__)
+    if sys.version_info < (3, 10):
+        # backport needs __args__ adjustment only
+        return concatenate
+    concatenate.__parameters__ = tuple(p for p in concatenate.__parameters__
+                                        if p is not _EllipsisDummy)
+    return concatenate
+
+
+# 3.8-3.10
+@typing._tp_cache
+def _concatenate_getitem(self, parameters):
+    if parameters == ():
+        raise TypeError("Cannot take a Concatenate of no types.")
+    if not isinstance(parameters, tuple):
+        parameters = (parameters,)
+    if not (parameters[-1] is ... or isinstance(parameters[-1], ParamSpec)):
+        raise TypeError("The last parameter to Concatenate should be a "
+                        "ParamSpec variable or ellipsis.")
+    msg = "Concatenate[arg, ...]: each arg must be a type."
+    parameters = (*(typing._type_check(p, msg) for p in parameters[:-1]),
+                    parameters[-1])
+    return _create_concatenate_alias(self, parameters)
+
+
+# 3.11+; Concatenate does not accept ellipsis in 3.10
+if sys.version_info >= (3, 11):
+    Concatenate = typing.Concatenate
+# 3.9-3.10
+elif sys.version_info[:2] >= (3, 9):
+    @_ExtensionsSpecialForm
+    def Concatenate(self, parameters):
+        """Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a
+        higher order function which adds, removes or transforms parameters of a
+        callable.
+
+        For example::
+
+           Callable[Concatenate[int, P], int]
+
+        See PEP 612 for detailed information.
+        """
+        return _concatenate_getitem(self, parameters)
+# 3.8
+else:
+    class _ConcatenateForm(_ExtensionsSpecialForm, _root=True):
+        def __getitem__(self, parameters):
+            return _concatenate_getitem(self, parameters)
+
+    Concatenate = _ConcatenateForm(
+        'Concatenate',
+        doc="""Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a
+        higher order function which adds, removes or transforms parameters of a
+        callable.
+
+        For example::
+
+           Callable[Concatenate[int, P], int]
+
+        See PEP 612 for detailed information.
+        """)
+
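+# Usage sketch (illustrative; `with_lock` and `Lock` are hypothetical stand-ins
+# for any prepended argument):
+#
+#     from threading import Lock
+#     from typing import Callable
+#
+#     P = ParamSpec("P")
+#
+#     def with_lock(f: Callable[Concatenate[Lock, P], int]) -> Callable[P, int]:
+#         def inner(*args: P.args, **kwargs: P.kwargs) -> int:
+#             return f(Lock(), *args, **kwargs)
+#         return inner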
+# 3.10+
+if hasattr(typing, 'TypeGuard'):
+    TypeGuard = typing.TypeGuard
+# 3.9
+elif sys.version_info[:2] >= (3, 9):
+    @_ExtensionsSpecialForm
+    def TypeGuard(self, parameters):
+        """Special typing form used to annotate the return type of a user-defined
+        type guard function.  ``TypeGuard`` only accepts a single type argument.
+        At runtime, functions marked this way should return a boolean.
+
+        ``TypeGuard`` aims to benefit *type narrowing* -- a technique used by static
+        type checkers to determine a more precise type of an expression within a
+        program's code flow.  Usually type narrowing is done by analyzing
+        conditional code flow and applying the narrowing to a block of code.  The
+        conditional expression here is sometimes referred to as a "type guard".
+
+        Sometimes it would be convenient to use a user-defined boolean function
+        as a type guard.  Such a function should use ``TypeGuard[...]`` as its
+        return type to alert static type checkers to this intention.
+
+        Using  ``-> TypeGuard`` tells the static type checker that for a given
+        function:
+
+        1. The return value is a boolean.
+        2. If the return value is ``True``, the type of its argument
+        is the type inside ``TypeGuard``.
+
+        For example::
+
+            def is_str(val: Union[str, float]):
+                # "isinstance" type guard
+                if isinstance(val, str):
+                    # Type of ``val`` is narrowed to ``str``
+                    ...
+                else:
+                    # Else, type of ``val`` is narrowed to ``float``.
+                    ...
+
+        Strict type narrowing is not enforced -- ``TypeB`` need not be a narrower
+        form of ``TypeA`` (it can even be a wider form) and this may lead to
+        type-unsafe results.  The main reason is to allow for things like
+        narrowing ``List[object]`` to ``List[str]`` even though the latter is not
+        a subtype of the former, since ``List`` is invariant.  The responsibility of
+        writing type-safe type guards is left to the user.
+
+        ``TypeGuard`` also works with type variables.  For more information, see
+        PEP 647 (User-Defined Type Guards).
+        """
+        item = typing._type_check(parameters, f'{self} accepts only a single type.')
+        return typing._GenericAlias(self, (item,))
+# 3.8
+else:
+    class _TypeGuardForm(_ExtensionsSpecialForm, _root=True):
+        def __getitem__(self, parameters):
+            item = typing._type_check(parameters,
+                                      f'{self._name} accepts only a single type')
+            return typing._GenericAlias(self, (item,))
+
+    TypeGuard = _TypeGuardForm(
+        'TypeGuard',
+        doc="""Special typing form used to annotate the return type of a user-defined
+        type guard function.  ``TypeGuard`` only accepts a single type argument.
+        At runtime, functions marked this way should return a boolean.
+
+        ``TypeGuard`` aims to benefit *type narrowing* -- a technique used by static
+        type checkers to determine a more precise type of an expression within a
+        program's code flow.  Usually type narrowing is done by analyzing
+        conditional code flow and applying the narrowing to a block of code.  The
+        conditional expression here is sometimes referred to as a "type guard".
+
+        Sometimes it would be convenient to use a user-defined boolean function
+        as a type guard.  Such a function should use ``TypeGuard[...]`` as its
+        return type to alert static type checkers to this intention.
+
+        Using  ``-> TypeGuard`` tells the static type checker that for a given
+        function:
+
+        1. The return value is a boolean.
+        2. If the return value is ``True``, the type of its argument
+        is the type inside ``TypeGuard``.
+
+        For example::
+
+            def is_str(val: Union[str, float]):
+                # "isinstance" type guard
+                if isinstance(val, str):
+                    # Type of ``val`` is narrowed to ``str``
+                    ...
+                else:
+                    # Else, type of ``val`` is narrowed to ``float``.
+                    ...
+
+        Strict type narrowing is not enforced -- ``TypeB`` need not be a narrower
+        form of ``TypeA`` (it can even be a wider form) and this may lead to
+        type-unsafe results.  The main reason is to allow for things like
+        narrowing ``List[object]`` to ``List[str]`` even though the latter is not
+        a subtype of the former, since ``List`` is invariant.  The responsibility of
+        writing type-safe type guards is left to the user.
+
+        ``TypeGuard`` also works with type variables.  For more information, see
+        PEP 647 (User-Defined Type Guards).
+        """)
+
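+# A short sketch of a user-defined type guard (hypothetical names):
+#
+#     from typing import List
+#
+#     def is_str_list(val: List[object]) -> TypeGuard[List[str]]:
+#         return all(isinstance(x, str) for x in val)
+#
+#     def greet(items: List[object]) -> None:
+#         if is_str_list(items):
+#             print(", ".join(items))  # checker narrows items to List[str]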
+# 3.13+
+if hasattr(typing, 'TypeIs'):
+    TypeIs = typing.TypeIs
+# 3.9
+elif sys.version_info[:2] >= (3, 9):
+    @_ExtensionsSpecialForm
+    def TypeIs(self, parameters):
+        """Special typing form used to annotate the return type of a user-defined
+        type narrower function.  ``TypeIs`` only accepts a single type argument.
+        At runtime, functions marked this way should return a boolean.
+
+        ``TypeIs`` aims to benefit *type narrowing* -- a technique used by static
+        type checkers to determine a more precise type of an expression within a
+        program's code flow.  Usually type narrowing is done by analyzing
+        conditional code flow and applying the narrowing to a block of code.  The
+        conditional expression here is sometimes referred to as a "type guard".
+
+        Sometimes it would be convenient to use a user-defined boolean function
+        as a type guard.  Such a function should use ``TypeIs[...]`` as its
+        return type to alert static type checkers to this intention.
+
+        Using  ``-> TypeIs`` tells the static type checker that for a given
+        function:
+
+        1. The return value is a boolean.
+        2. If the return value is ``True``, the type of its argument
+        is the intersection of the type inside ``TypeIs`` and the argument's
+        previously known type.
+
+        For example::
+
+            def is_awaitable(val: object) -> TypeIs[Awaitable[Any]]:
+                return hasattr(val, '__await__')
+
+            def f(val: Union[int, Awaitable[int]]) -> int:
+                if is_awaitable(val):
+                    assert_type(val, Awaitable[int])
+                else:
+                    assert_type(val, int)
+
+        ``TypeIs`` also works with type variables.  For more information, see
+        PEP 742 (Narrowing types with TypeIs).
+        """
+        item = typing._type_check(parameters, f'{self} accepts only a single type.')
+        return typing._GenericAlias(self, (item,))
+# 3.8
+else:
+    class _TypeIsForm(_ExtensionsSpecialForm, _root=True):
+        def __getitem__(self, parameters):
+            item = typing._type_check(parameters,
+                                      f'{self._name} accepts only a single type')
+            return typing._GenericAlias(self, (item,))
+
+    TypeIs = _TypeIsForm(
+        'TypeIs',
+        doc="""Special typing form used to annotate the return type of a user-defined
+        type narrower function.  ``TypeIs`` only accepts a single type argument.
+        At runtime, functions marked this way should return a boolean.
+
+        ``TypeIs`` aims to benefit *type narrowing* -- a technique used by static
+        type checkers to determine a more precise type of an expression within a
+        program's code flow.  Usually type narrowing is done by analyzing
+        conditional code flow and applying the narrowing to a block of code.  The
+        conditional expression here is sometimes referred to as a "type guard".
+
+        Sometimes it would be convenient to use a user-defined boolean function
+        as a type guard.  Such a function should use ``TypeIs[...]`` as its
+        return type to alert static type checkers to this intention.
+
+        Using  ``-> TypeIs`` tells the static type checker that for a given
+        function:
+
+        1. The return value is a boolean.
+        2. If the return value is ``True``, the type of its argument
+        is the intersection of the type inside ``TypeIs`` and the argument's
+        previously known type.
+
+        For example::
+
+            def is_awaitable(val: object) -> TypeIs[Awaitable[Any]]:
+                return hasattr(val, '__await__')
+
+            def f(val: Union[int, Awaitable[int]]) -> int:
+                if is_awaitable(val):
+                    assert_type(val, Awaitable[int])
+                else:
+                    assert_type(val, int)
+
+        ``TypeIs`` also works with type variables.  For more information, see
+        PEP 742 (Narrowing types with TypeIs).
+        """)
+
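+# Contrast with TypeGuard (illustrative): TypeIs narrows in both branches and
+# intersects with the previously known type, while TypeGuard narrows only the
+# True branch.
+#
+#     def is_int(val: object) -> TypeIs[int]:
+#         return isinstance(val, int)
+#
+#     def f(val: Union[int, str]) -> None:
+#         if is_int(val):
+#             ...  # val: int
+#         else:
+#             ...  # val: str (a TypeGuard would leave this branch unnarrowed)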
+# 3.14+?
+if hasattr(typing, 'TypeForm'):
+    TypeForm = typing.TypeForm
+# 3.9
+elif sys.version_info[:2] >= (3, 9):
+    class _TypeFormForm(_ExtensionsSpecialForm, _root=True):
+        # TypeForm(X) is equivalent to X but indicates to the type checker
+        # that the object is a TypeForm.
+        def __call__(self, obj, /):
+            return obj
+
+    @_TypeFormForm
+    def TypeForm(self, parameters):
+        """A special form representing the value that results from the evaluation
+        of a type expression. This value encodes the information supplied in the
+        type expression, and it represents the type described by that type expression.
+
+        When used in a type expression, TypeForm describes a set of type form objects.
+        It accepts a single type argument, which must be a valid type expression.
+        ``TypeForm[T]`` describes the set of all type form objects that represent
+        the type T or types that are assignable to T.
+
+        Usage:
+
+            def cast[T](typ: TypeForm[T], value: Any) -> T: ...
+
+            reveal_type(cast(int, "x"))  # int
+
+        See PEP 747 for more information.
+        """
+        item = typing._type_check(parameters, f'{self} accepts only a single type.')
+        return typing._GenericAlias(self, (item,))
+# 3.8
+else:
+    class _TypeFormForm(_ExtensionsSpecialForm, _root=True):
+        def __getitem__(self, parameters):
+            item = typing._type_check(parameters,
+                                      f'{self._name} accepts only a single type')
+            return typing._GenericAlias(self, (item,))
+
+        def __call__(self, obj, /):
+            return obj
+
+    TypeForm = _TypeFormForm(
+        'TypeForm',
+        doc="""A special form representing the value that results from the evaluation
+        of a type expression. This value encodes the information supplied in the
+        type expression, and it represents the type described by that type expression.
+
+        When used in a type expression, TypeForm describes a set of type form objects.
+        It accepts a single type argument, which must be a valid type expression.
+        ``TypeForm[T]`` describes the set of all type form objects that represent
+        the type T or types that are assignable to T.
+
+        Usage:
+
+            def cast[T](typ: TypeForm[T], value: Any) -> T: ...
+
+            reveal_type(cast(int, "x"))  # int
+
+        See PEP 747 for more information.
+        """)
+
+
+# Vendored from cpython typing._SpecialForm
+class _SpecialForm(typing._Final, _root=True):
+    __slots__ = ('_name', '__doc__', '_getitem')
+
+    def __init__(self, getitem):
+        self._getitem = getitem
+        self._name = getitem.__name__
+        self.__doc__ = getitem.__doc__
+
+    def __getattr__(self, item):
+        if item in {'__name__', '__qualname__'}:
+            return self._name
+
+        raise AttributeError(item)
+
+    def __mro_entries__(self, bases):
+        raise TypeError(f"Cannot subclass {self!r}")
+
+    def __repr__(self):
+        return f'typing_extensions.{self._name}'
+
+    def __reduce__(self):
+        return self._name
+
+    def __call__(self, *args, **kwds):
+        raise TypeError(f"Cannot instantiate {self!r}")
+
+    def __or__(self, other):
+        return typing.Union[self, other]
+
+    def __ror__(self, other):
+        return typing.Union[other, self]
+
+    def __instancecheck__(self, obj):
+        raise TypeError(f"{self} cannot be used with isinstance()")
+
+    def __subclasscheck__(self, cls):
+        raise TypeError(f"{self} cannot be used with issubclass()")
+
+    @typing._tp_cache
+    def __getitem__(self, parameters):
+        return self._getitem(self, parameters)
+
+
+if hasattr(typing, "LiteralString"):  # 3.11+
+    LiteralString = typing.LiteralString
+else:
+    @_SpecialForm
+    def LiteralString(self, params):
+        """Represents an arbitrary literal string.
+
+        Example::
+
+          from typing_extensions import LiteralString
+
+          def query(sql: LiteralString) -> ...:
+              ...
+
+          query("SELECT * FROM table")  # ok
+          query(f"SELECT * FROM {input()}")  # not ok
+
+        See PEP 675 for details.
+
+        """
+        raise TypeError(f"{self} is not subscriptable")
+
+
+if hasattr(typing, "Self"):  # 3.11+
+    Self = typing.Self
+else:
+    @_SpecialForm
+    def Self(self, params):
+        """Used to spell the type of "self" in classes.
+
+        Example::
+
+          from typing import Self
+
+          class ReturnsSelf:
+              def parse(self, data: bytes) -> Self:
+                  ...
+                  return self
+
+        """
+
+        raise TypeError(f"{self} is not subscriptable")
+
+
+if hasattr(typing, "Never"):  # 3.11+
+    Never = typing.Never
+else:
+    @_SpecialForm
+    def Never(self, params):
+        """The bottom type, a type that has no members.
+
+        This can be used to define a function that should never be
+        called, or a function that never returns::
+
+            from typing_extensions import Never
+
+            def never_call_me(arg: Never) -> None:
+                pass
+
+            def int_or_str(arg: int | str) -> None:
+                never_call_me(arg)  # type checker error
+                match arg:
+                    case int():
+                        print("It's an int")
+                    case str():
+                        print("It's a str")
+                    case _:
+                        never_call_me(arg)  # ok, arg is of type Never
+
+        """
+
+        raise TypeError(f"{self} is not subscriptable")
+
+
+if hasattr(typing, 'Required'):  # 3.11+
+    Required = typing.Required
+    NotRequired = typing.NotRequired
+elif sys.version_info[:2] >= (3, 9):  # 3.9-3.10
+    @_ExtensionsSpecialForm
+    def Required(self, parameters):
+        """A special typing construct to mark a key of a total=False TypedDict
+        as required. For example:
+
+            class Movie(TypedDict, total=False):
+                title: Required[str]
+                year: int
+
+            m = Movie(
+                title='The Matrix',  # typechecker error if key is omitted
+                year=1999,
+            )
+
+        There is no runtime checking that a required key is actually provided
+        when instantiating a related TypedDict.
+        """
+        item = typing._type_check(parameters, f'{self._name} accepts only a single type.')
+        return typing._GenericAlias(self, (item,))
+
+    @_ExtensionsSpecialForm
+    def NotRequired(self, parameters):
+        """A special typing construct to mark a key of a TypedDict as
+        potentially missing. For example:
+
+            class Movie(TypedDict):
+                title: str
+                year: NotRequired[int]
+
+            m = Movie(
+                title='The Matrix',  # typechecker error if key is omitted
+                year=1999,
+            )
+        """
+        item = typing._type_check(parameters, f'{self._name} accepts only a single type.')
+        return typing._GenericAlias(self, (item,))
+
+else:  # 3.8
+    class _RequiredForm(_ExtensionsSpecialForm, _root=True):
+        def __getitem__(self, parameters):
+            item = typing._type_check(parameters,
+                                      f'{self._name} accepts only a single type.')
+            return typing._GenericAlias(self, (item,))
+
+    Required = _RequiredForm(
+        'Required',
+        doc="""A special typing construct to mark a key of a total=False TypedDict
+        as required. For example:
+
+            class Movie(TypedDict, total=False):
+                title: Required[str]
+                year: int
+
+            m = Movie(
+                title='The Matrix',  # typechecker error if key is omitted
+                year=1999,
+            )
+
+        There is no runtime checking that a required key is actually provided
+        when instantiating a related TypedDict.
+        """)
+    NotRequired = _RequiredForm(
+        'NotRequired',
+        doc="""A special typing construct to mark a key of a TypedDict as
+        potentially missing. For example:
+
+            class Movie(TypedDict):
+                title: str
+                year: NotRequired[int]
+
+            m = Movie(
+                title='The Matrix',  # typechecker error if key is omitted
+                year=1999,
+            )
+        """)
+
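+# Sketch of how these interact with get_type_hints (hypothetical TypedDict):
+#
+#     class Movie(TypedDict, total=False):
+#         title: Required[str]
+#         year: int
+#
+#     get_type_hints(Movie)                       # {'title': str, 'year': int}
+#     get_type_hints(Movie, include_extras=True)  # keeps Required[str]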
+
+if hasattr(typing, 'ReadOnly'):
+    ReadOnly = typing.ReadOnly
+elif sys.version_info[:2] >= (3, 9):  # 3.9-3.12
+    @_ExtensionsSpecialForm
+    def ReadOnly(self, parameters):
+        """A special typing construct to mark an item of a TypedDict as read-only.
+
+        For example:
+
+            class Movie(TypedDict):
+                title: ReadOnly[str]
+                year: int
+
+            def mutate_movie(m: Movie) -> None:
+                m["year"] = 1992  # allowed
+                m["title"] = "The Matrix"  # typechecker error
+
+        There is no runtime checking for this property.
+        """
+        item = typing._type_check(parameters, f'{self._name} accepts only a single type.')
+        return typing._GenericAlias(self, (item,))
+
+else:  # 3.8
+    class _ReadOnlyForm(_ExtensionsSpecialForm, _root=True):
+        def __getitem__(self, parameters):
+            item = typing._type_check(parameters,
+                                      f'{self._name} accepts only a single type.')
+            return typing._GenericAlias(self, (item,))
+
+    ReadOnly = _ReadOnlyForm(
+        'ReadOnly',
+        doc="""A special typing construct to mark a key of a TypedDict as read-only.
+
+        For example:
+
+            class Movie(TypedDict):
+                title: ReadOnly[str]
+                year: int
+
+            def mutate_movie(m: Movie) -> None:
+                m["year"] = 1992  # allowed
+                m["title"] = "The Matrix"  # typechecker error
+
+        There is no runtime checking for this property.
+        """)
+
+
+_UNPACK_DOC = """\
+Type unpack operator.
+
+The type unpack operator takes the child types from some container type,
+such as `tuple[int, str]` or a `TypeVarTuple`, and 'pulls them out'. For
+example:
+
+  # For some generic class `Foo`:
+  Foo[Unpack[tuple[int, str]]]  # Equivalent to Foo[int, str]
+
+  Ts = TypeVarTuple('Ts')
+  # Specifies that `Bar` is generic in an arbitrary number of types.
+  # (Think of `Ts` as a tuple of an arbitrary number of individual
+  #  `TypeVar`s, which the `Unpack` is 'pulling out' directly into the
+  #  `Generic[]`.)
+  class Bar(Generic[Unpack[Ts]]): ...
+  Bar[int]  # Valid
+  Bar[int, str]  # Also valid
+
+From Python 3.11, this can also be done using the `*` operator:
+
+    Foo[*tuple[int, str]]
+    class Bar(Generic[*Ts]): ...
+
+The operator can also be used along with a `TypedDict` to annotate
+`**kwargs` in a function signature. For instance:
+
+  class Movie(TypedDict):
+    name: str
+    year: int
+
+  # This function expects two keyword arguments - *name* of type `str` and
+  # *year* of type `int`.
+  def foo(**kwargs: Unpack[Movie]): ...
+
+Note that there is only some runtime checking of this operator. Not
+everything the runtime allows may be accepted by static type checkers.
+
+For more information, see PEP 646 and PEP 692.
+"""
+
+
+if sys.version_info >= (3, 12):  # PEP 692 changed the repr of Unpack[]
+    Unpack = typing.Unpack
+
+    def _is_unpack(obj):
+        return get_origin(obj) is Unpack
+
+elif sys.version_info[:2] >= (3, 9):  # 3.9+
+    class _UnpackSpecialForm(_ExtensionsSpecialForm, _root=True):
+        def __init__(self, getitem):
+            super().__init__(getitem)
+            self.__doc__ = _UNPACK_DOC
+
+    class _UnpackAlias(typing._GenericAlias, _root=True):
+        if sys.version_info < (3, 11):
+            # needed for compatibility with Generic[Unpack[Ts]]
+            __class__ = typing.TypeVar
+
+        @property
+        def __typing_unpacked_tuple_args__(self):
+            assert self.__origin__ is Unpack
+            assert len(self.__args__) == 1
+            arg, = self.__args__
+            if isinstance(arg, (typing._GenericAlias, _types.GenericAlias)):
+                if arg.__origin__ is not tuple:
+                    raise TypeError("Unpack[...] must be used with a tuple type")
+                return arg.__args__
+            return None
+
+        @property
+        def __typing_is_unpacked_typevartuple__(self):
+            assert self.__origin__ is Unpack
+            assert len(self.__args__) == 1
+            return isinstance(self.__args__[0], TypeVarTuple)
+
+        def __getitem__(self, args):
+            if self.__typing_is_unpacked_typevartuple__:
+                return args
+            return super().__getitem__(args)
+
+    @_UnpackSpecialForm
+    def Unpack(self, parameters):
+        item = typing._type_check(parameters, f'{self._name} accepts only a single type.')
+        return _UnpackAlias(self, (item,))
+
+    def _is_unpack(obj):
+        return isinstance(obj, _UnpackAlias)
+
+else:  # 3.8
+    class _UnpackAlias(typing._GenericAlias, _root=True):
+        __class__ = typing.TypeVar
+
+        @property
+        def __typing_unpacked_tuple_args__(self):
+            assert self.__origin__ is Unpack
+            assert len(self.__args__) == 1
+            arg, = self.__args__
+            if isinstance(arg, typing._GenericAlias):
+                if arg.__origin__ is not tuple:
+                    raise TypeError("Unpack[...] must be used with a tuple type")
+                return arg.__args__
+            return None
+
+        @property
+        def __typing_is_unpacked_typevartuple__(self):
+            assert self.__origin__ is Unpack
+            assert len(self.__args__) == 1
+            return isinstance(self.__args__[0], TypeVarTuple)
+
+        def __getitem__(self, args):
+            if self.__typing_is_unpacked_typevartuple__:
+                return args
+            return super().__getitem__(args)
+
+    class _UnpackForm(_ExtensionsSpecialForm, _root=True):
+        def __getitem__(self, parameters):
+            item = typing._type_check(parameters,
+                                      f'{self._name} accepts only a single type.')
+            return _UnpackAlias(self, (item,))
+
+    Unpack = _UnpackForm('Unpack', doc=_UNPACK_DOC)
+
+    def _is_unpack(obj):
+        return isinstance(obj, _UnpackAlias)
+
+
+def _unpack_args(*args):
+    newargs = []
+    for arg in args:
+        subargs = getattr(arg, '__typing_unpacked_tuple_args__', None)
+        if subargs is not None and (not (subargs and subargs[-1] is ...)):
+            newargs.extend(subargs)
+        else:
+            newargs.append(arg)
+    return newargs
+
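+# What the helper above does (illustrative): fixed-length unpacks are
+# flattened in place, while arbitrary-length (`...`) unpacks are kept whole.
+#
+#     _unpack_args(int, Unpack[Tuple[str, float]])  # [int, str, float]
+#     _unpack_args(int, Unpack[Tuple[str, ...]])    # [int, Unpack[Tuple[str, ...]]]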
+
+if _PEP_696_IMPLEMENTED:
+    from typing import TypeVarTuple
+
+elif hasattr(typing, "TypeVarTuple"):  # 3.11+
+
+    # Add default parameter - PEP 696
+    class TypeVarTuple(metaclass=_TypeVarLikeMeta):
+        """Type variable tuple."""
+
+        _backported_typevarlike = typing.TypeVarTuple
+
+        def __new__(cls, name, *, default=NoDefault):
+            tvt = typing.TypeVarTuple(name)
+            _set_default(tvt, default)
+            _set_module(tvt)
+
+            def _typevartuple_prepare_subst(alias, args):
+                params = alias.__parameters__
+                typevartuple_index = params.index(tvt)
+                for param in params[typevartuple_index + 1:]:
+                    if isinstance(param, TypeVarTuple):
+                        raise TypeError(
+                            f"More than one TypeVarTuple parameter in {alias}"
+                        )
+
+                alen = len(args)
+                plen = len(params)
+                left = typevartuple_index
+                right = plen - typevartuple_index - 1
+                var_tuple_index = None
+                fillarg = None
+                for k, arg in enumerate(args):
+                    if not isinstance(arg, type):
+                        subargs = getattr(arg, '__typing_unpacked_tuple_args__', None)
+                        if subargs and len(subargs) == 2 and subargs[-1] is ...:
+                            if var_tuple_index is not None:
+                                raise TypeError(
+                                    "More than one unpacked "
+                                    "arbitrary-length tuple argument"
+                                )
+                            var_tuple_index = k
+                            fillarg = subargs[0]
+                if var_tuple_index is not None:
+                    left = min(left, var_tuple_index)
+                    right = min(right, alen - var_tuple_index - 1)
+                elif left + right > alen:
+                    raise TypeError(f"Too few arguments for {alias};"
+                                    f" actual {alen}, expected at least {plen - 1}")
+                if left == alen - right and tvt.has_default():
+                    replacement = _unpack_args(tvt.__default__)
+                else:
+                    replacement = args[left: alen - right]
+
+                return (
+                    *args[:left],
+                    *([fillarg] * (typevartuple_index - left)),
+                    replacement,
+                    *([fillarg] * (plen - right - left - typevartuple_index - 1)),
+                    *args[alen - right:],
+                )
+
+            tvt.__typing_prepare_subst__ = _typevartuple_prepare_subst
+            return tvt
+
+        def __init_subclass__(self, *args, **kwds):
+            raise TypeError("Cannot subclass special typing classes")
+
+else:  # <=3.10
+    class TypeVarTuple(_DefaultMixin):
+        """Type variable tuple.
+
+        Usage::
+
+            Ts = TypeVarTuple('Ts')
+
+        In the same way that a normal type variable is a stand-in for a single
+        type such as ``int``, a type variable *tuple* is a stand-in for a *tuple*
+        type such as ``Tuple[int, str]``.
+
+        Type variable tuples can be used in ``Generic`` declarations.
+        Consider the following example::
+
+            class Array(Generic[*Ts]): ...
+
+        The ``Ts`` type variable tuple here behaves like ``tuple[T1, T2]``,
+        where ``T1`` and ``T2`` are type variables. To use these type variables
+        as type parameters of ``Array``, we must *unpack* the type variable tuple using
+        the star operator: ``*Ts``. The signature of ``Array`` then behaves
+        as if we had simply written ``class Array(Generic[T1, T2]): ...``.
+        In contrast to ``Generic[T1, T2]``, however, ``Generic[*Ts]`` allows
+        us to parameterise the class with an *arbitrary* number of type parameters.
+
+        Type variable tuples can be used anywhere a normal ``TypeVar`` can.
+        This includes class definitions, as shown above, as well as function
+        signatures and variable annotations::
+
+            class Array(Generic[*Ts]):
+
+                def __init__(self, shape: Tuple[*Ts]):
+                    self._shape: Tuple[*Ts] = shape
+
+                def get_shape(self) -> Tuple[*Ts]:
+                    return self._shape
+
+            shape = (Height(480), Width(640))
+            x: Array[Height, Width] = Array(shape)
+            y = abs(x)  # Inferred type is Array[Height, Width]
+            z = x + x   #        ...    is Array[Height, Width]
+            x.get_shape()  #     ...    is tuple[Height, Width]
+
+        """
+
+        # Trick Generic __parameters__.
+        __class__ = typing.TypeVar
+
+        def __iter__(self):
+            yield self.__unpacked__
+
+        def __init__(self, name, *, default=NoDefault):
+            self.__name__ = name
+            _DefaultMixin.__init__(self, default)
+
+            # for pickling:
+            def_mod = _caller()
+            if def_mod != 'typing_extensions':
+                self.__module__ = def_mod
+
+            self.__unpacked__ = Unpack[self]
+
+        def __repr__(self):
+            return self.__name__
+
+        def __hash__(self):
+            return object.__hash__(self)
+
+        def __eq__(self, other):
+            return self is other
+
+        def __reduce__(self):
+            return self.__name__
+
+        def __init_subclass__(self, *args, **kwds):
+            if '_root' not in kwds:
+                raise TypeError("Cannot subclass special typing classes")
+
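+# A short sketch of the PEP 696 ``default`` parameter this backport adds
+# (illustrative only; the names are hypothetical):
+#
+#     Ts = TypeVarTuple('Ts', default=Unpack[Tuple[int, str]])
+#
+#     class Box(Generic[Unpack[Ts]]): ...
+#
+#     Box         # used bare, type checkers treat this as Box[int, str]
+#     Box[float]  # overrides the default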
+
+if hasattr(typing, "reveal_type"):  # 3.11+
+    reveal_type = typing.reveal_type
+else:  # <=3.10
+    def reveal_type(obj: T, /) -> T:
+        """Reveal the inferred type of a variable.
+
+        When a static type checker encounters a call to ``reveal_type()``,
+        it will emit the inferred type of the argument::
+
+            x: int = 1
+            reveal_type(x)
+
+        Running a static type checker (e.g., ``mypy``) on this example
+        will produce output similar to 'Revealed type is "builtins.int"'.
+
+        At runtime, the function prints the runtime type of the
+        argument and returns it unchanged.
+
+        """
+        print(f"Runtime type is {type(obj).__name__!r}", file=sys.stderr)
+        return obj
+
+
+if hasattr(typing, "_ASSERT_NEVER_REPR_MAX_LENGTH"):  # 3.11+
+    _ASSERT_NEVER_REPR_MAX_LENGTH = typing._ASSERT_NEVER_REPR_MAX_LENGTH
+else:  # <=3.10
+    _ASSERT_NEVER_REPR_MAX_LENGTH = 100
+
+
+if hasattr(typing, "assert_never"):  # 3.11+
+    assert_never = typing.assert_never
+else:  # <=3.10
+    def assert_never(arg: Never, /) -> Never:
+        """Assert to the type checker that a line of code is unreachable.
+
+        Example::
+
+            def int_or_str(arg: int | str) -> None:
+                match arg:
+                    case int():
+                        print("It's an int")
+                    case str():
+                        print("It's a str")
+                    case _:
+                        assert_never(arg)
+
+        If a type checker finds that a call to assert_never() is
+        reachable, it will emit an error.
+
+        At runtime, this raises an ``AssertionError`` when called.
+
+        """
+        value = repr(arg)
+        if len(value) > _ASSERT_NEVER_REPR_MAX_LENGTH:
+            value = value[:_ASSERT_NEVER_REPR_MAX_LENGTH] + '...'
+        raise AssertionError(f"Expected code to be unreachable, but got: {value}")
+
+
+if sys.version_info >= (3, 12):  # 3.12+
+    # dataclass_transform exists in 3.11 but lacks the frozen_default parameter
+    dataclass_transform = typing.dataclass_transform
+else:  # <=3.11
+    def dataclass_transform(
+        *,
+        eq_default: bool = True,
+        order_default: bool = False,
+        kw_only_default: bool = False,
+        frozen_default: bool = False,
+        field_specifiers: typing.Tuple[
+            typing.Union[typing.Type[typing.Any], typing.Callable[..., typing.Any]],
+            ...
+        ] = (),
+        **kwargs: typing.Any,
+    ) -> typing.Callable[[T], T]:
+        """Decorator that marks a function, class, or metaclass as providing
+        dataclass-like behavior.
+
+        Example:
+
+            from typing_extensions import dataclass_transform
+
+            _T = TypeVar("_T")
+
+            # Used on a decorator function
+            @dataclass_transform()
+            def create_model(cls: type[_T]) -> type[_T]:
+                ...
+                return cls
+
+            @create_model
+            class CustomerModel:
+                id: int
+                name: str
+
+            # Used on a base class
+            @dataclass_transform()
+            class ModelBase: ...
+
+            class CustomerModel(ModelBase):
+                id: int
+                name: str
+
+            # Used on a metaclass
+            @dataclass_transform()
+            class ModelMeta(type): ...
+
+            class ModelBase(metaclass=ModelMeta): ...
+
+            class CustomerModel(ModelBase):
+                id: int
+                name: str
+
+        Each of the ``CustomerModel`` classes defined in this example will now
+        behave similarly to a dataclass created with the ``@dataclasses.dataclass``
+        decorator. For example, the type checker will synthesize an ``__init__``
+        method.
+
+        The arguments to this decorator can be used to customize this behavior:
+        - ``eq_default`` indicates whether the ``eq`` parameter is assumed to be
+          True or False if it is omitted by the caller.
+        - ``order_default`` indicates whether the ``order`` parameter is
+          assumed to be True or False if it is omitted by the caller.
+        - ``kw_only_default`` indicates whether the ``kw_only`` parameter is
+          assumed to be True or False if it is omitted by the caller.
+        - ``frozen_default`` indicates whether the ``frozen`` parameter is
+          assumed to be True or False if it is omitted by the caller.
+        - ``field_specifiers`` specifies a static list of supported classes
+          or functions that describe fields, similar to ``dataclasses.field()``.
+
+        At runtime, this decorator records its arguments in the
+        ``__dataclass_transform__`` attribute on the decorated object.
+
+        See PEP 681 for details.
+
+        """
+        def decorator(cls_or_fn):
+            cls_or_fn.__dataclass_transform__ = {
+                "eq_default": eq_default,
+                "order_default": order_default,
+                "kw_only_default": kw_only_default,
+                "frozen_default": frozen_default,
+                "field_specifiers": field_specifiers,
+                "kwargs": kwargs,
+            }
+            return cls_or_fn
+        return decorator
+
+
+if hasattr(typing, "override"):  # 3.12+
+    override = typing.override
+else:  # <=3.11
+    _F = typing.TypeVar("_F", bound=typing.Callable[..., typing.Any])
+
+    def override(arg: _F, /) -> _F:
+        """Indicate that a method is intended to override a method in a base class.
+
+        Usage:
+
+            class Base:
+                def method(self) -> None:
+                    pass
+
+            class Child(Base):
+                @override
+                def method(self) -> None:
+                    super().method()
+
+        When this decorator is applied to a method, the type checker will
+        validate that it overrides a method with the same name on a base class.
+        This helps prevent bugs that may occur when a base class is changed
+        without an equivalent change to a child class.
+
+        There is no runtime checking of these properties. The decorator
+        sets the ``__override__`` attribute to ``True`` on the decorated object
+        to allow runtime introspection.
+
+        See PEP 698 for details.
+
+        """
+        try:
+            arg.__override__ = True
+        except (AttributeError, TypeError):
+            # Skip the attribute silently if it is not writable.
+            # AttributeError happens if the object has __slots__ or a
+            # read-only property, TypeError if it's a builtin class.
+            pass
+        return arg
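+    # Runtime introspection sketch (illustrative): the marker set above can be
+    # read back with getattr, e.g.
+    #
+    #     getattr(Child.method, "__override__", False)  # -> True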
+
+
+# Python 3.13.3+ contains a fix for the wrapped __new__
+if sys.version_info >= (3, 13, 3):
+    deprecated = warnings.deprecated
+else:
+    _T = typing.TypeVar("_T")
+
+    class deprecated:
+        """Indicate that a class, function or overload is deprecated.
+
+        When this decorator is applied to an object, the type checker
+        will generate a diagnostic on usage of the deprecated object.
+
+        Usage:
+
+            @deprecated("Use B instead")
+            class A:
+                pass
+
+            @deprecated("Use g instead")
+            def f():
+                pass
+
+            @overload
+            @deprecated("int support is deprecated")
+            def g(x: int) -> int: ...
+            @overload
+            def g(x: str) -> int: ...
+
+        The warning specified by *category* will be emitted at runtime
+        on use of deprecated objects. For functions, that happens on calls;
+        for classes, on instantiation and on creation of subclasses.
+        If the *category* is ``None``, no warning is emitted at runtime.
+        The *stacklevel* determines where the warning is emitted.
+        If it is ``1`` (the default), the warning is emitted at the
+        direct caller of the deprecated object; if it is higher,
+        it is emitted further up the stack.
+        Static type checker behavior is not affected by the *category*
+        and *stacklevel* arguments.
+
+        The deprecation message passed to the decorator is saved in the
+        ``__deprecated__`` attribute on the decorated object.
+        If applied to an overload, the decorator
+        must be after the ``@overload`` decorator for the attribute to
+        exist on the overload as returned by ``get_overloads()``.
+
+        See PEP 702 for details.
+
+        """
+        def __init__(
+            self,
+            message: str,
+            /,
+            *,
+            category: typing.Optional[typing.Type[Warning]] = DeprecationWarning,
+            stacklevel: int = 1,
+        ) -> None:
+            if not isinstance(message, str):
+                raise TypeError(
+                    "Expected an object of type str for 'message', not "
+                    f"{type(message).__name__!r}"
+                )
+            self.message = message
+            self.category = category
+            self.stacklevel = stacklevel
+
+        def __call__(self, arg: _T, /) -> _T:
+            # Make sure the inner functions created below don't
+            # retain a reference to self.
+            msg = self.message
+            category = self.category
+            stacklevel = self.stacklevel
+            if category is None:
+                arg.__deprecated__ = msg
+                return arg
+            elif isinstance(arg, type):
+                import functools
+                from types import MethodType
+
+                original_new = arg.__new__
+
+                @functools.wraps(original_new)
+                def __new__(cls, /, *args, **kwargs):
+                    if cls is arg:
+                        warnings.warn(msg, category=category, stacklevel=stacklevel + 1)
+                    if original_new is not object.__new__:
+                        return original_new(cls, *args, **kwargs)
+                    # Mirrors a similar check in object.__new__.
+                    elif cls.__init__ is object.__init__ and (args or kwargs):
+                        raise TypeError(f"{cls.__name__}() takes no arguments")
+                    else:
+                        return original_new(cls)
+
+                arg.__new__ = staticmethod(__new__)
+
+                original_init_subclass = arg.__init_subclass__
+                # We need slightly different behavior if __init_subclass__
+                # is a bound method (likely if it was implemented in Python)
+                if isinstance(original_init_subclass, MethodType):
+                    original_init_subclass = original_init_subclass.__func__
+
+                    @functools.wraps(original_init_subclass)
+                    def __init_subclass__(*args, **kwargs):
+                        warnings.warn(msg, category=category, stacklevel=stacklevel + 1)
+                        return original_init_subclass(*args, **kwargs)
+
+                    arg.__init_subclass__ = classmethod(__init_subclass__)
+                # Or otherwise, which likely means it's a builtin such as
+                # object's implementation of __init_subclass__.
+                else:
+                    @functools.wraps(original_init_subclass)
+                    def __init_subclass__(*args, **kwargs):
+                        warnings.warn(msg, category=category, stacklevel=stacklevel + 1)
+                        return original_init_subclass(*args, **kwargs)
+
+                    arg.__init_subclass__ = __init_subclass__
+
+                arg.__deprecated__ = __new__.__deprecated__ = msg
+                __init_subclass__.__deprecated__ = msg
+                return arg
+            elif callable(arg):
+                import asyncio.coroutines
+                import functools
+                import inspect
+
+                @functools.wraps(arg)
+                def wrapper(*args, **kwargs):
+                    warnings.warn(msg, category=category, stacklevel=stacklevel + 1)
+                    return arg(*args, **kwargs)
+
+                if asyncio.coroutines.iscoroutinefunction(arg):
+                    if sys.version_info >= (3, 12):
+                        wrapper = inspect.markcoroutinefunction(wrapper)
+                    else:
+                        wrapper._is_coroutine = asyncio.coroutines._is_coroutine
+
+                arg.__deprecated__ = wrapper.__deprecated__ = msg
+                return wrapper
+            else:
+                raise TypeError(
+                    "@deprecated decorator with non-None category must be applied to "
+                    f"a class or callable, not {arg!r}"
+                )
+
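+# Runtime behaviour sketch for the backported @deprecated (illustrative names):
+#
+#     @deprecated("Use g instead")
+#     def f(): ...
+#
+#     f()  # emits DeprecationWarning("Use g instead") at the caller's frame
+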
+if sys.version_info < (3, 10):
+    def _is_param_expr(arg):
+        return arg is ... or isinstance(
+            arg, (tuple, list, ParamSpec, _ConcatenateGenericAlias)
+        )
+else:
+    def _is_param_expr(arg):
+        return arg is ... or isinstance(
+            arg,
+            (
+                tuple,
+                list,
+                ParamSpec,
+                _ConcatenateGenericAlias,
+                typing._ConcatenateGenericAlias,
+            ),
+        )
+
+
+# We have to do some monkey patching to deal with the dual nature of
+# Unpack/TypeVarTuple:
+# - We want Unpack to be a kind of TypeVar so it gets accepted in
+#   Generic[Unpack[Ts]]
+# - We want it to *not* be treated as a TypeVar for the purposes of
+#   counting generic parameters, so that when we subscript a generic,
+#   the runtime doesn't try to substitute the Unpack with the subscripted type.
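+# For instance (illustrative): with Ts = TypeVarTuple('Ts') and T = TypeVar('T'),
+# a class C(Generic[Unpack[Ts], T]) must accept C[int, str, float] at runtime,
+# with Ts absorbing the surplus arguments instead of failing the count check.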
+if not hasattr(typing, "TypeVarTuple"):
+    def _check_generic(cls, parameters, elen=_marker):
+        """Check correct count for parameters of a generic cls (internal helper).
+
+        This gives a nice error message in case of count mismatch.
+        """
+        # If substituting a single ParamSpec with multiple arguments
+        # we do not check the count
+        if (inspect.isclass(cls) and issubclass(cls, typing.Generic)
+            and len(cls.__parameters__) == 1
+            and isinstance(cls.__parameters__[0], ParamSpec)
+            and parameters
+            and not _is_param_expr(parameters[0])
+        ):
+            # typing.Generic wraps the arguments for a lone ParamSpec itself;
+            # we cannot modify the caller's parameters here, so skip the check
+            return
+
+        if not elen:
+            raise TypeError(f"{cls} is not a generic class")
+        if elen is _marker:
+            if not hasattr(cls, "__parameters__") or not cls.__parameters__:
+                raise TypeError(f"{cls} is not a generic class")
+            elen = len(cls.__parameters__)
+        alen = len(parameters)
+        if alen != elen:
+            expect_val = elen
+            if hasattr(cls, "__parameters__"):
+                parameters = [p for p in cls.__parameters__ if not _is_unpack(p)]
+                num_tv_tuples = sum(isinstance(p, TypeVarTuple) for p in parameters)
+                if (num_tv_tuples > 0) and (alen >= elen - num_tv_tuples):
+                    return
+
+                # deal with TypeVarLike defaults
+                # required TypeVarLikes cannot appear after a defaulted one.
+                if alen < elen:
+                    # since we validate TypeVarLike default in _collect_type_vars
+                    # or _collect_parameters we can safely check parameters[alen]
+                    if (
+                        getattr(parameters[alen], '__default__', NoDefault)
+                        is not NoDefault
+                    ):
+                        return
+
+                    num_default_tv = sum(getattr(p, '__default__', NoDefault)
+                                         is not NoDefault for p in parameters)
+
+                    elen -= num_default_tv
+
+                    expect_val = f"at least {elen}"
+
+            things = "arguments" if sys.version_info >= (3, 10) else "parameters"
+            raise TypeError(f"Too {'many' if alen > elen else 'few'} {things}"
+                            f" for {cls}; actual {alen}, expected {expect_val}")
+else:
+    # Python 3.11+
+
+    def _check_generic(cls, parameters, elen):
+        """Check correct count for parameters of a generic cls (internal helper).
+
+        This gives a nice error message in case of count mismatch.
+        """
+        if not elen:
+            raise TypeError(f"{cls} is not a generic class")
+        alen = len(parameters)
+        if alen != elen:
+            expect_val = elen
+            if hasattr(cls, "__parameters__"):
+                parameters = [p for p in cls.__parameters__ if not _is_unpack(p)]
+
+                # deal with TypeVarLike defaults
+                # required TypeVarLikes cannot appear after a defaulted one.
+                if alen < elen:
+                    # since we validate TypeVarLike default in _collect_type_vars
+                    # or _collect_parameters we can safely check parameters[alen]
+                    if (
+                        getattr(parameters[alen], '__default__', NoDefault)
+                        is not NoDefault
+                    ):
+                        return
+
+                    num_default_tv = sum(getattr(p, '__default__', NoDefault)
+                                         is not NoDefault for p in parameters)
+
+                    elen -= num_default_tv
+
+                    expect_val = f"at least {elen}"
+
+            raise TypeError(f"Too {'many' if alen > elen else 'few'} arguments"
+                            f" for {cls}; actual {alen}, expected {expect_val}")
+
+if not _PEP_696_IMPLEMENTED:
+    typing._check_generic = _check_generic
+
+
+def _has_generic_or_protocol_as_origin() -> bool:
+    try:
+        frame = sys._getframe(2)
+    # - Catch AttributeError: not all Python implementations have sys._getframe()
+    # - Catch ValueError: maybe we're called from an unexpected module
+    #   and the call stack isn't deep enough
+    except (AttributeError, ValueError):
+        return False  # err on the side of leniency
+    else:
+        # If we somehow get invoked from outside typing.py,
+        # also err on the side of leniency
+        if frame.f_globals.get("__name__") != "typing":
+            return False
+        origin = frame.f_locals.get("origin")
+        # Cannot use "in" because origin may be an object with a buggy __eq__ that
+        # throws an error.
+        return origin is typing.Generic or origin is Protocol or origin is typing.Protocol
+
+
+_TYPEVARTUPLE_TYPES = {TypeVarTuple, getattr(typing, "TypeVarTuple", None)}
+
+
+def _is_unpacked_typevartuple(x) -> bool:
+    if get_origin(x) is not Unpack:
+        return False
+    args = get_args(x)
+    return (
+        bool(args)
+        and len(args) == 1
+        and type(args[0]) in _TYPEVARTUPLE_TYPES
+    )
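+# e.g. (illustrative): _is_unpacked_typevartuple(Unpack[Ts]) is True for a
+# TypeVarTuple Ts, while _is_unpacked_typevartuple(Unpack[Tuple[int, str]])
+# is False, since the single arg is a tuple alias rather than a TypeVarTuple.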
+
+
+# Python 3.11+ _collect_type_vars was renamed to _collect_parameters
+if hasattr(typing, '_collect_type_vars'):
+    def _collect_type_vars(types, typevar_types=None):
+        """Collect all type variable contained in types in order of
+        first appearance (lexicographic order). For example::
+
+            _collect_type_vars((T, List[S, T])) == (T, S)
+        """
+        if typevar_types is None:
+            typevar_types = typing.TypeVar
+        tvars = []
+
+        # A required TypeVarLike cannot appear after a TypeVarLike with a default
+        # if it was a direct call to `Generic[]` or `Protocol[]`
+        enforce_default_ordering = _has_generic_or_protocol_as_origin()
+        default_encountered = False
+
+        # Also, a TypeVarLike with a default cannot appear after a TypeVarTuple
+        type_var_tuple_encountered = False
+
+        for t in types:
+            if _is_unpacked_typevartuple(t):
+                type_var_tuple_encountered = True
+            elif (
+                isinstance(t, typevar_types) and not isinstance(t, _UnpackAlias)
+                and t not in tvars
+            ):
+                if enforce_default_ordering:
+                    has_default = getattr(t, '__default__', NoDefault) is not NoDefault
+                    if has_default:
+                        if type_var_tuple_encountered:
+                            raise TypeError('Type parameter with a default'
+                                            ' follows TypeVarTuple')
+                        default_encountered = True
+                    elif default_encountered:
+                        raise TypeError(f'Type parameter {t!r} without a default'
+                                        ' follows type parameter with a default')
+
+                tvars.append(t)
+            if _should_collect_from_parameters(t):
+                tvars.extend([t for t in t.__parameters__ if t not in tvars])
+            elif isinstance(t, tuple):
+                # Collect nested type_vars
+                # tuple wrapped by _prepare_paramspec_params(cls, params)
+                for x in t:
+                    for collected in _collect_type_vars([x]):
+                        if collected not in tvars:
+                            tvars.append(collected)
+        return tuple(tvars)
+
+    typing._collect_type_vars = _collect_type_vars
+else:
+    def _collect_parameters(args):
+        """Collect all type variables and parameter specifications in args
+        in order of first appearance (lexicographic order).
+
+        For example::
+
+            assert _collect_parameters((T, Callable[P, T])) == (T, P)
+        """
+        parameters = []
+
+        # A required TypeVarLike cannot appear after a TypeVarLike with default
+        # if it was a direct call to `Generic[]` or `Protocol[]`
+        enforce_default_ordering = _has_generic_or_protocol_as_origin()
+        default_encountered = False
+
+        # Also, a TypeVarLike with a default cannot appear after a TypeVarTuple
+        type_var_tuple_encountered = False
+
+        for t in args:
+            if isinstance(t, type):
+                # We don't want __parameters__ descriptor of a bare Python class.
+                pass
+            elif isinstance(t, tuple):
+                # `t` might be a tuple, when `ParamSpec` is substituted with
+                # `[T, int]`, or `[int, *Ts]`, etc.
+                for x in t:
+                    for collected in _collect_parameters([x]):
+                        if collected not in parameters:
+                            parameters.append(collected)
+            elif hasattr(t, '__typing_subst__'):
+                if t not in parameters:
+                    if enforce_default_ordering:
+                        has_default = (
+                            getattr(t, '__default__', NoDefault) is not NoDefault
+                        )
+
+                        if type_var_tuple_encountered and has_default:
+                            raise TypeError('Type parameter with a default'
+                                            ' follows TypeVarTuple')
+
+                        if has_default:
+                            default_encountered = True
+                        elif default_encountered:
+                            raise TypeError(f'Type parameter {t!r} without a default'
+                                            ' follows type parameter with a default')
+
+                    parameters.append(t)
+            else:
+                if _is_unpacked_typevartuple(t):
+                    type_var_tuple_encountered = True
+                for x in getattr(t, '__parameters__', ()):
+                    if x not in parameters:
+                        parameters.append(x)
+
+        return tuple(parameters)
+
+    if not _PEP_696_IMPLEMENTED:
+        typing._collect_parameters = _collect_parameters
+
+# Backport typing.NamedTuple as it exists in Python 3.13.
+# Support for defining generic `NamedTuple`s arrived in 3.11;
+# it was explicitly disallowed in 3.9-3.10, and only half-worked in <=3.8.
+# In 3.12, __orig_bases__ was added to call-based NamedTuples;
+# in 3.13, kwargs-based NamedTuples were deprecated.
+if sys.version_info >= (3, 13):
+    NamedTuple = typing.NamedTuple
+else:
+    def _make_nmtuple(name, types, module, defaults=()):
+        fields = [n for n, t in types]
+        annotations = {n: typing._type_check(t, f"field {n} annotation must be a type")
+                       for n, t in types}
+        nm_tpl = collections.namedtuple(name, fields,
+                                        defaults=defaults, module=module)
+        nm_tpl.__annotations__ = nm_tpl.__new__.__annotations__ = annotations
+        # The `_field_types` attribute was removed in 3.9;
+        # in earlier versions, it is the same as the `__annotations__` attribute
+        if sys.version_info < (3, 9):
+            nm_tpl._field_types = annotations
+        return nm_tpl
+
+    _prohibited_namedtuple_fields = typing._prohibited
+    _special_namedtuple_fields = frozenset({'__module__', '__name__', '__annotations__'})
+
+    class _NamedTupleMeta(type):
+        def __new__(cls, typename, bases, ns):
+            assert _NamedTuple in bases
+            for base in bases:
+                if base is not _NamedTuple and base is not typing.Generic:
+                    raise TypeError(
+                        'can only inherit from a NamedTuple type and Generic')
+            bases = tuple(tuple if base is _NamedTuple else base for base in bases)
+            if "__annotations__" in ns:
+                types = ns["__annotations__"]
+            elif "__annotate__" in ns:
+                # TODO: Use inspect.VALUE here, and make the annotations lazily evaluated
+                types = ns["__annotate__"](1)
+            else:
+                types = {}
+            default_names = []
+            for field_name in types:
+                if field_name in ns:
+                    default_names.append(field_name)
+                elif default_names:
+                    raise TypeError(f"Non-default namedtuple field {field_name} "
+                                    f"cannot follow default field"
+                                    f"{'s' if len(default_names) > 1 else ''} "
+                                    f"{', '.join(default_names)}")
+            nm_tpl = _make_nmtuple(
+                typename, types.items(),
+                defaults=[ns[n] for n in default_names],
+                module=ns['__module__']
+            )
+            nm_tpl.__bases__ = bases
+            if typing.Generic in bases:
+                if hasattr(typing, '_generic_class_getitem'):  # 3.12+
+                    nm_tpl.__class_getitem__ = classmethod(typing._generic_class_getitem)
+                else:
+                    class_getitem = typing.Generic.__class_getitem__.__func__
+                    nm_tpl.__class_getitem__ = classmethod(class_getitem)
+            # update from user namespace without overriding special namedtuple attributes
+            for key, val in ns.items():
+                if key in _prohibited_namedtuple_fields:
+                    raise AttributeError("Cannot overwrite NamedTuple attribute " + key)
+                elif key not in _special_namedtuple_fields:
+                    if key not in nm_tpl._fields:
+                        setattr(nm_tpl, key, ns[key])
+                    try:
+                        set_name = type(val).__set_name__
+                    except AttributeError:
+                        pass
+                    else:
+                        try:
+                            set_name(val, nm_tpl, key)
+                        except BaseException as e:
+                            msg = (
+                                f"Error calling __set_name__ on {type(val).__name__!r} "
+                                f"instance {key!r} in {typename!r}"
+                            )
+                            # BaseException.add_note() existed on py311,
+                            # but the __set_name__ machinery didn't start
+                            # using add_note() until py312.
+                            # Making sure exceptions are raised in the same way
+                            # as in "normal" classes seems most important here.
+                            if sys.version_info >= (3, 12):
+                                e.add_note(msg)
+                                raise
+                            else:
+                                raise RuntimeError(msg) from e
+
+            if typing.Generic in bases:
+                nm_tpl.__init_subclass__()
+            return nm_tpl
+
+    _NamedTuple = type.__new__(_NamedTupleMeta, 'NamedTuple', (), {})
+
+    def _namedtuple_mro_entries(bases):
+        assert NamedTuple in bases
+        return (_NamedTuple,)
+
+    @_ensure_subclassable(_namedtuple_mro_entries)
+    def NamedTuple(typename, fields=_marker, /, **kwargs):
+        """Typed version of namedtuple.
+
+        Usage::
+
+            class Employee(NamedTuple):
+                name: str
+                id: int
+
+        This is equivalent to::
+
+            Employee = collections.namedtuple('Employee', ['name', 'id'])
+
+        The resulting class has an extra __annotations__ attribute, giving a
+        dict that maps field names to types.  (The field names are also in
+        the _fields attribute, which is part of the namedtuple API.)
+        An alternative equivalent functional syntax is also accepted::
+
+            Employee = NamedTuple('Employee', [('name', str), ('id', int)])
+        """
+        if fields is _marker:
+            if kwargs:
+                deprecated_thing = "Creating NamedTuple classes using keyword arguments"
+                deprecation_msg = (
+                    "{name} is deprecated and will be disallowed in Python {remove}. "
+                    "Use the class-based or functional syntax instead."
+                )
+            else:
+                deprecated_thing = "Failing to pass a value for the 'fields' parameter"
+                example = f"`{typename} = NamedTuple({typename!r}, [])`"
+                deprecation_msg = (
+                    "{name} is deprecated and will be disallowed in Python {remove}. "
+                    "To create a NamedTuple class with 0 fields "
+                    "using the functional syntax, "
+                    "pass an empty list, e.g. "
+                ) + example + "."
+        elif fields is None:
+            if kwargs:
+                raise TypeError(
+                    "Cannot pass `None` as the 'fields' parameter "
+                    "and also specify fields using keyword arguments"
+                )
+            else:
+                deprecated_thing = "Passing `None` as the 'fields' parameter"
+                example = f"`{typename} = NamedTuple({typename!r}, [])`"
+                deprecation_msg = (
+                    "{name} is deprecated and will be disallowed in Python {remove}. "
+                    "To create a NamedTuple class with 0 fields "
+                    "using the functional syntax, "
+                    "pass an empty list, e.g. "
+                ) + example + "."
+        elif kwargs:
+            raise TypeError("Either list of fields or keywords"
+                            " can be provided to NamedTuple, not both")
+        if fields is _marker or fields is None:
+            warnings.warn(
+                deprecation_msg.format(name=deprecated_thing, remove="3.15"),
+                DeprecationWarning,
+                stacklevel=2,
+            )
+            fields = kwargs.items()
+        nt = _make_nmtuple(typename, fields, module=_caller())
+        nt.__orig_bases__ = (NamedTuple,)
+        return nt
+
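+# With the backport above, generic NamedTuples also work on 3.9/3.10
+# (illustrative sketch; Pair and T are hypothetical):
+#
+#     T = TypeVar("T")
+#
+#     class Pair(NamedTuple, Generic[T]):
+#         first: T
+#         second: T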
+
+if hasattr(collections.abc, "Buffer"):
+    Buffer = collections.abc.Buffer
+else:
+    class Buffer(abc.ABC):  # noqa: B024
+        """Base class for classes that implement the buffer protocol.
+
+        The buffer protocol allows Python objects to expose a low-level
+        memory buffer interface. Before Python 3.12, it is not possible
+        to implement the buffer protocol in pure Python code, or even
+        to check whether a class implements the buffer protocol. In
+        Python 3.12 and higher, the ``__buffer__`` method allows access
+        to the buffer protocol from Python code, and the
+        ``collections.abc.Buffer`` ABC allows checking whether a class
+        implements the buffer protocol.
+
+        To indicate support for the buffer protocol in earlier versions,
+        inherit from this ABC, either in a stub file or at runtime,
+        or use ABC registration. This ABC provides no methods, because
+        there are no Python-accessible methods shared by pre-3.12 buffer
+        classes. It is useful primarily for static checks.
+
+        """
+
+    # As a courtesy, register the most common stdlib buffer classes.
+    Buffer.register(memoryview)
+    Buffer.register(bytearray)
+    Buffer.register(bytes)
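+    # e.g. (illustrative): thanks to the registrations above,
+    #
+    #     isinstance(b"", Buffer)              # -> True
+    #     isinstance(memoryview(b""), Buffer)  # -> True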
+
+
+# Backport of types.get_original_bases, available on 3.12+ in CPython
+if hasattr(_types, "get_original_bases"):
+    get_original_bases = _types.get_original_bases
+else:
+    def get_original_bases(cls, /):
+        """Return the class's "original" bases prior to modification by `__mro_entries__`.
+
+        Examples::
+
+            from typing import TypeVar, Generic
+            from typing_extensions import NamedTuple, TypedDict
+
+            T = TypeVar("T")
+            class Foo(Generic[T]): ...
+            class Bar(Foo[int], float): ...
+            class Baz(list[str]): ...
+            Eggs = NamedTuple("Eggs", [("a", int), ("b", str)])
+            Spam = TypedDict("Spam", {"a": int, "b": str})
+
+            assert get_original_bases(Bar) == (Foo[int], float)
+            assert get_original_bases(Baz) == (list[str],)
+            assert get_original_bases(Eggs) == (NamedTuple,)
+            assert get_original_bases(Spam) == (TypedDict,)
+            assert get_original_bases(int) == (object,)
+        """
+        try:
+            return cls.__dict__.get("__orig_bases__", cls.__bases__)
+        except AttributeError:
+            raise TypeError(
+                f'Expected an instance of type, not {type(cls).__name__!r}'
+            ) from None
+
+
+# NewType is a class on Python 3.10+, making it pickleable
+# The error message for subclassing instances of NewType was improved on 3.11+
+if sys.version_info >= (3, 11):
+    NewType = typing.NewType
+else:
+    class NewType:
+        """NewType creates simple unique types with almost zero
+        runtime overhead. NewType(name, tp) is considered a subtype of tp
+        by static type checkers. At runtime, NewType(name, tp) returns
+        a dummy callable that simply returns its argument. Usage::
+
+            UserId = NewType('UserId', int)
+            def name_by_id(user_id: UserId) -> str:
+                ...
+            UserId('user')          # Fails type check
+            name_by_id(42)          # Fails type check
+            name_by_id(UserId(42))  # OK
+            num = UserId(5) + 1     # type: int
+        """
+
+        def __call__(self, obj, /):
+            return obj
+
+        def __init__(self, name, tp):
+            self.__qualname__ = name
+            if '.' in name:
+                name = name.rpartition('.')[-1]
+            self.__name__ = name
+            self.__supertype__ = tp
+            def_mod = _caller()
+            if def_mod != 'typing_extensions':
+                self.__module__ = def_mod
+
+        def __mro_entries__(self, bases):
+            # We defined __mro_entries__ to get a better error message
+            # if a user attempts to subclass a NewType instance. bpo-46170
+            supercls_name = self.__name__
+
+            class Dummy:
+                def __init_subclass__(cls):
+                    subcls_name = cls.__name__
+                    raise TypeError(
+                        f"Cannot subclass an instance of NewType. "
+                        f"Perhaps you were looking for: "
+                        f"`{subcls_name} = NewType({subcls_name!r}, {supercls_name})`"
+                    )
+
+            return (Dummy,)
+
+        def __repr__(self):
+            return f'{self.__module__}.{self.__qualname__}'
+
+        def __reduce__(self):
+            return self.__qualname__
+
+        if sys.version_info >= (3, 10):
+            # PEP 604 methods
+            # It doesn't make sense to have these methods on Python <3.10
+
+            def __or__(self, other):
+                return typing.Union[self, other]
+
+            def __ror__(self, other):
+                return typing.Union[other, self]
+
+
+if sys.version_info >= (3, 14):
+    TypeAliasType = typing.TypeAliasType
+# 3.8-3.13
+else:
+    if sys.version_info >= (3, 12):
+        # 3.12-3.13
+        def _is_unionable(obj):
+            """Corresponds to is_unionable() in unionobject.c in CPython."""
+            return obj is None or isinstance(obj, (
+                type,
+                _types.GenericAlias,
+                _types.UnionType,
+                typing.TypeAliasType,
+                TypeAliasType,
+            ))
+    else:
+        # 3.8-3.11
+        def _is_unionable(obj):
+            """Corresponds to is_unionable() in unionobject.c in CPython."""
+            return obj is None or isinstance(obj, (
+                type,
+                _types.GenericAlias,
+                _types.UnionType,
+                TypeAliasType,
+            ))
+
+    if sys.version_info < (3, 10):
+        # Copied and pasted from https://github.com/python/cpython/blob/986a4e1b6fcae7fe7a1d0a26aea446107dd58dd2/Objects/genericaliasobject.c#L568-L582,
+        # so that we emulate the behaviour of `types.GenericAlias`
+        # on the latest versions of CPython
+        _ATTRIBUTE_DELEGATION_EXCLUSIONS = frozenset({
+            "__class__",
+            "__bases__",
+            "__origin__",
+            "__args__",
+            "__unpacked__",
+            "__parameters__",
+            "__typing_unpacked_tuple_args__",
+            "__mro_entries__",
+            "__reduce_ex__",
+            "__reduce__",
+            "__copy__",
+            "__deepcopy__",
+        })
+
+        class _TypeAliasGenericAlias(typing._GenericAlias, _root=True):
+            def __getattr__(self, attr):
+                if attr in _ATTRIBUTE_DELEGATION_EXCLUSIONS:
+                    # normal lookup already failed (that is why __getattr__
+                    # ran), so re-running it raises a plain AttributeError
+                    return object.__getattribute__(self, attr)
+                return getattr(self.__origin__, attr)
+
+            if sys.version_info < (3, 9):
+                def __getitem__(self, item):
+                    result = super().__getitem__(item)
+                    result.__class__ = type(self)
+                    return result
+
+    class TypeAliasType:
+        """Create named, parameterized type aliases.
+
+        This provides a backport of the new `type` statement in Python 3.12:
+
+            type ListOrSet[T] = list[T] | set[T]
+
+        is equivalent to:
+
+            T = TypeVar("T")
+            ListOrSet = TypeAliasType("ListOrSet", list[T] | set[T], type_params=(T,))
+
+        The name ListOrSet can then be used as an alias for the type it refers to.
+
+        The type_params argument should contain all the type parameters used
+        in the value of the type alias. If the alias is not generic, this
+        argument is omitted.
+
+        Static type checkers should only support type aliases declared using
+        TypeAliasType that follow these rules:
+
+        - The first argument (the name) must be a string literal.
+        - The TypeAliasType instance must be immediately assigned to a variable
+          of the same name. (For example, 'X = TypeAliasType("Y", int)' is invalid,
+          as is 'X, Y = TypeAliasType("X", int), TypeAliasType("Y", int)').
+
+        """
+
+        def __init__(self, name: str, value, *, type_params=()):
+            if not isinstance(name, str):
+                raise TypeError("TypeAliasType name must be a string")
+            if not isinstance(type_params, tuple):
+                raise TypeError("type_params must be a tuple")
+            self.__value__ = value
+            self.__type_params__ = type_params
+
+            default_value_encountered = False
+            parameters = []
+            for type_param in type_params:
+                if (
+                    not isinstance(type_param, (TypeVar, TypeVarTuple, ParamSpec))
+                    # on 3.8-3.11, the backported Unpack passes
+                    # isinstance(type_param, TypeVar), so check for it explicitly
+                    or _is_unpack(type_param)
+                ):
+                    raise TypeError(f"Expected a type param, got {type_param!r}")
+                has_default = (
+                    getattr(type_param, '__default__', NoDefault) is not NoDefault
+                )
+                if default_value_encountered and not has_default:
+                    raise TypeError(f"non-default type parameter '{type_param!r}'"
+                                    " follows default type parameter")
+                if has_default:
+                    default_value_encountered = True
+                if isinstance(type_param, TypeVarTuple):
+                    parameters.extend(type_param)
+                else:
+                    parameters.append(type_param)
+            self.__parameters__ = tuple(parameters)
+            def_mod = _caller()
+            if def_mod != 'typing_extensions':
+                self.__module__ = def_mod
+            # Setting this attribute closes the TypeAliasType from further modification
+            self.__name__ = name
+
+        def __setattr__(self, name: str, value: object, /) -> None:
+            if hasattr(self, "__name__"):
+                self._raise_attribute_error(name)
+            super().__setattr__(name, value)
+
+        def __delattr__(self, name: str, /) -> Never:
+            self._raise_attribute_error(name)
+
+        def _raise_attribute_error(self, name: str) -> Never:
+            # Match the Python 3.12 error messages exactly
+            if name == "__name__":
+                raise AttributeError("readonly attribute")
+            elif name in {"__value__", "__type_params__", "__parameters__", "__module__"}:
+                raise AttributeError(
+                    f"attribute '{name}' of 'typing.TypeAliasType' objects "
+                    "is not writable"
+                )
+            else:
+                raise AttributeError(
+                    f"'typing.TypeAliasType' object has no attribute '{name}'"
+                )
+
+        def __repr__(self) -> str:
+            return self.__name__
+
+        if sys.version_info < (3, 11):
+            def _check_single_param(self, param, recursion=0):
+                # Allow [], [int], [int, str], [int, ...], [int, T]
+                if param is ...:
+                    return ...
+                if param is None:
+                    return None
+                # Note: in <=3.9, _ConcatenateGenericAlias inherits from list
+                if isinstance(param, list) and recursion == 0:
+                    return [self._check_single_param(arg, recursion+1)
+                            for arg in param]
+                return typing._type_check(
+                        param, f'Subscripting {self.__name__} requires a type.'
+                    )
+
+        def _check_parameters(self, parameters):
+            if sys.version_info < (3, 11):
+                return tuple(
+                    self._check_single_param(item)
+                    for item in parameters
+                )
+            return tuple(typing._type_check(
+                        item, f'Subscripting {self.__name__} requires a type.'
+                    )
+                    for item in parameters
+            )
+
+        def __getitem__(self, parameters):
+            if not self.__type_params__:
+                raise TypeError("Only generic type aliases are subscriptable")
+            if not isinstance(parameters, tuple):
+                parameters = (parameters,)
+            # Using _types.GenericAlias on 3.9 would create problems with Concatenate
+            if sys.version_info >= (3, 10):
+                return _types.GenericAlias(self, parameters)
+            type_vars = _collect_type_vars(parameters)
+            parameters = self._check_parameters(parameters)
+            alias = _TypeAliasGenericAlias(self, parameters)
+            # alias.__parameters__ is not complete if Concatenate is present
+            # as it is converted to a list from which no parameters are extracted.
+            if alias.__parameters__ != type_vars:
+                alias.__parameters__ = type_vars
+            return alias
+
+        def __reduce__(self):
+            return self.__name__
+
+        def __init_subclass__(cls, *args, **kwargs):
+            raise TypeError(
+                "type 'typing_extensions.TypeAliasType' is not an acceptable base type"
+            )
+
+        # The presence of this method convinces typing._type_check
+        # that TypeAliasTypes are types.
+        def __call__(self):
+            raise TypeError("Type alias is not callable")
+
+        if sys.version_info >= (3, 10):
+            def __or__(self, right):
+                # For forward compatibility with 3.12, reject Unions
+                # that are not accepted by the built-in Union.
+                if not _is_unionable(right):
+                    return NotImplemented
+                return typing.Union[self, right]
+
+            def __ror__(self, left):
+                if not _is_unionable(left):
+                    return NotImplemented
+                return typing.Union[left, self]
+
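+# Illustrative use of the backported TypeAliasType (hypothetical names):
+#
+#     T = TypeVar("T")
+#     ListOrSet = TypeAliasType("ListOrSet", Union[List[T], Set[T]], type_params=(T,))
+#
+#     ListOrSet[int]       # subscriptable because the alias is generic
+#     ListOrSet.__value__  # the aliased value, Union[List[T], Set[T]]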
+
+if hasattr(typing, "is_protocol"):
+    is_protocol = typing.is_protocol
+    get_protocol_members = typing.get_protocol_members
+else:
+    def is_protocol(tp: type, /) -> bool:
+        """Return True if the given type is a Protocol.
+
+        Example::
+
+            >>> from typing_extensions import Protocol, is_protocol
+            >>> class P(Protocol):
+            ...     def a(self) -> str: ...
+            ...     b: int
+            >>> is_protocol(P)
+            True
+            >>> is_protocol(int)
+            False
+        """
+        return (
+            isinstance(tp, type)
+            and getattr(tp, '_is_protocol', False)
+            and tp is not Protocol
+            and tp is not typing.Protocol
+        )
+
+    def get_protocol_members(tp: type, /) -> typing.FrozenSet[str]:
+        """Return the set of members defined in a Protocol.
+
+        Example::
+
+            >>> from typing_extensions import Protocol, get_protocol_members
+            >>> class P(Protocol):
+            ...     def a(self) -> str: ...
+            ...     b: int
+            >>> get_protocol_members(P)
+            frozenset({'a', 'b'})
+
+        Raise a TypeError for arguments that are not Protocols.
+        """
+        if not is_protocol(tp):
+            raise TypeError(f'{tp!r} is not a Protocol')
+        if hasattr(tp, '__protocol_attrs__'):
+            return frozenset(tp.__protocol_attrs__)
+        return frozenset(_get_protocol_attrs(tp))
+
+
+if hasattr(typing, "Doc"):
+    Doc = typing.Doc
+else:
+    class Doc:
+        """Define the documentation of a type annotation using ``Annotated``, to be
+        used in class attributes, function and method parameters, return values,
+        and variables.
+
+        The value should be a positional-only string literal to allow static tools
+        like editors and documentation generators to use it.
+
+        This complements docstrings.
+
+        The string value passed is available in the attribute ``documentation``.
+
+        Example::
+
+            >>> from typing_extensions import Annotated, Doc
+            >>> def hi(to: Annotated[str, Doc("Who to say hi to")]) -> None: ...
+        """
+        def __init__(self, documentation: str, /) -> None:
+            self.documentation = documentation
+
+        def __repr__(self) -> str:
+            return f"Doc({self.documentation!r})"
+
+        def __hash__(self) -> int:
+            return hash(self.documentation)
+
+        def __eq__(self, other: object) -> bool:
+            if not isinstance(other, Doc):
+                return NotImplemented
+            return self.documentation == other.documentation
+
+
+_CapsuleType = getattr(_types, "CapsuleType", None)
+
+if _CapsuleType is None:
+    try:
+        import _socket
+    except ImportError:
+        pass
+    else:
+        _CAPI = getattr(_socket, "CAPI", None)
+        if _CAPI is not None:
+            _CapsuleType = type(_CAPI)
+
+if _CapsuleType is not None:
+    CapsuleType = _CapsuleType
+    __all__.append("CapsuleType")
+
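+# Sketch (illustrative): when CapsuleType is available, it is the type of
+# C-API capsule objects; on the fallback path above, for example,
+# isinstance(_socket.CAPI, CapsuleType) would be True.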
+
+# Using this convoluted approach so that this keeps working
+# whether we end up using PEP 649 as written, PEP 749, or
+# some other variation: in any case, inspect.get_annotations
+# will continue to exist and will gain a `format` parameter.
+_PEP_649_OR_749_IMPLEMENTED = (
+    hasattr(inspect, 'get_annotations')
+    and inspect.get_annotations.__kwdefaults__ is not None
+    and "format" in inspect.get_annotations.__kwdefaults__
+)
+
+
+class Format(enum.IntEnum):
+    VALUE = 1
+    FORWARDREF = 2
+    STRING = 3
+
+
+if _PEP_649_OR_749_IMPLEMENTED:
+    get_annotations = inspect.get_annotations
+else:
+    def get_annotations(obj, *, globals=None, locals=None, eval_str=False,
+                        format=Format.VALUE):
+        """Compute the annotations dict for an object.
+
+        obj may be a callable, class, or module.
+        Passing in an object of any other type raises TypeError.
+
+        Returns a dict.  get_annotations() returns a new dict every time
+        it's called; calling it twice on the same object will return two
+        different but equivalent dicts.
+
+        This is a backport of `inspect.get_annotations`, which has been
+        in the standard library since Python 3.10. See the standard library
+        documentation for more:
+
+            https://docs.python.org/3/library/inspect.html#inspect.get_annotations
+
+        This backport adds the *format* argument introduced by PEP 649. The
+        three formats supported are:
+        * VALUE: the annotations are returned as-is. This is the default and
+          it is compatible with the behavior on previous Python versions.
+        * FORWARDREF: return annotations as-is if possible, but replace any
+          undefined names with ForwardRef objects. The implementation proposed by
+          PEP 649 relies on language changes that cannot be backported; the
+          typing-extensions implementation simply returns the same result as VALUE.
+        * STRING: return annotations as strings, in a format close to the original
+          source. Again, this behavior cannot be replicated directly in a backport.
+          As an approximation, typing-extensions retrieves the annotations under
+          VALUE semantics and then stringifies them.
+
+        The purpose of this backport is to allow users who would like to use
+        FORWARDREF or STRING semantics once PEP 649 is implemented, but who also
+        want to support earlier Python versions, to simply write:
+
+            typing_extensions.get_annotations(obj, format=Format.FORWARDREF)
+
+        """
+        format = Format(format)
+
+        if eval_str and format is not Format.VALUE:
+            raise ValueError("eval_str=True is only supported with format=Format.VALUE")
+
+        if isinstance(obj, type):
+            # class
+            obj_dict = getattr(obj, '__dict__', None)
+            if obj_dict and hasattr(obj_dict, 'get'):
+                ann = obj_dict.get('__annotations__', None)
+                if isinstance(ann, _types.GetSetDescriptorType):
+                    ann = None
+            else:
+                ann = None
+
+            obj_globals = None
+            module_name = getattr(obj, '__module__', None)
+            if module_name:
+                module = sys.modules.get(module_name, None)
+                if module:
+                    obj_globals = getattr(module, '__dict__', None)
+            obj_locals = dict(vars(obj))
+            unwrap = obj
+        elif isinstance(obj, _types.ModuleType):
+            # module
+            ann = getattr(obj, '__annotations__', None)
+            obj_globals = obj.__dict__
+            obj_locals = None
+            unwrap = None
+        elif callable(obj):
+            # this includes types.Function, types.BuiltinFunctionType,
+            # types.BuiltinMethodType, functools.partial, functools.singledispatch,
+            # "class funclike" from Lib/test/test_inspect... on and on it goes.
+            ann = getattr(obj, '__annotations__', None)
+            obj_globals = getattr(obj, '__globals__', None)
+            obj_locals = None
+            unwrap = obj
+        elif hasattr(obj, '__annotations__'):
+            ann = obj.__annotations__
+            obj_globals = obj_locals = unwrap = None
+        else:
+            raise TypeError(f"{obj!r} is not a module, class, or callable.")
+
+        if ann is None:
+            return {}
+
+        if not isinstance(ann, dict):
+            raise ValueError(f"{obj!r}.__annotations__ is neither a dict nor None")
+
+        if not ann:
+            return {}
+
+        if not eval_str:
+            if format is Format.STRING:
+                return {
+                    key: value if isinstance(value, str) else typing._type_repr(value)
+                    for key, value in ann.items()
+                }
+            return dict(ann)
+
+        if unwrap is not None:
+            while True:
+                if hasattr(unwrap, '__wrapped__'):
+                    unwrap = unwrap.__wrapped__
+                    continue
+                if isinstance(unwrap, functools.partial):
+                    unwrap = unwrap.func
+                    continue
+                break
+            if hasattr(unwrap, "__globals__"):
+                obj_globals = unwrap.__globals__
+
+        if globals is None:
+            globals = obj_globals
+        if locals is None:
+            locals = obj_locals or {}
+
+        # "Inject" type parameters into the local namespace
+        # (unless they are shadowed by assignments *in* the local namespace),
+        # as a way of emulating annotation scopes when calling `eval()`
+        if type_params := getattr(obj, "__type_params__", ()):
+            locals = {param.__name__: param for param in type_params} | locals
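+        # For example, given `class C[T]: x: "T"` (Python 3.12+), evaluating the
+        # "T" annotation succeeds because T is injected from C.__type_params__,
+        # even though it lives in an implicit annotation scope, not in vars(C).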
+
+        return_value = {
+            key: value if not isinstance(value, str) else eval(value, globals, locals)
+            for key, value in ann.items()
+        }
+        return return_value
+
+
+if hasattr(typing, "evaluate_forward_ref"):
+    evaluate_forward_ref = typing.evaluate_forward_ref
+else:
+    # Implements annotationlib.ForwardRef.evaluate
+    def _eval_with_owner(
+        forward_ref, *, owner=None, globals=None, locals=None, type_params=None
+    ):
+        if forward_ref.__forward_evaluated__:
+            return forward_ref.__forward_value__
+        if getattr(forward_ref, "__cell__", None) is not None:
+            try:
+                value = forward_ref.__cell__.cell_contents
+            except ValueError:
+                pass
+            else:
+                forward_ref.__forward_evaluated__ = True
+                forward_ref.__forward_value__ = value
+                return value
+        if owner is None:
+            owner = getattr(forward_ref, "__owner__", None)
+
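+        # Globals are resolved in priority order below: the explicit *globals*
+        # argument, the module named by __forward_module__, the ref's own
+        # __globals__, and finally a namespace inferred from *owner* (a class's
+        # module, a module's dict, or a callable's __globals__).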
+        if (
+            globals is None
+            and getattr(forward_ref, "__forward_module__", None) is not None
+        ):
+            globals = getattr(
+                sys.modules.get(forward_ref.__forward_module__, None), "__dict__", None
+            )
+        if globals is None:
+            globals = getattr(forward_ref, "__globals__", None)
+        if globals is None:
+            if isinstance(owner, type):
+                module_name = getattr(owner, "__module__", None)
+                if module_name:
+                    module = sys.modules.get(module_name, None)
+                    if module:
+                        globals = getattr(module, "__dict__", None)
+            elif isinstance(owner, _types.ModuleType):
+                globals = getattr(owner, "__dict__", None)
+            elif callable(owner):
+                globals = getattr(owner, "__globals__", None)
+
+        # If we pass None to eval() below, the globals of this module are used.
+        if globals is None:
+            globals = {}
+
+        if locals is None:
+            locals = {}
+            if isinstance(owner, type):
+                locals.update(vars(owner))
+
+        if type_params is None and owner is not None:
+            # "Inject" type parameters into the local namespace
+            # (unless they are shadowed by assignments *in* the local namespace),
+            # as a way of emulating annotation scopes when calling `eval()`
+            type_params = getattr(owner, "__type_params__", None)
+
+        # type parameters require some special handling,
+        # as they exist in their own scope
+        # but `eval()` does not have a dedicated parameter for that scope.
+        # For classes, names in type parameter scopes should override
+        # names in the global scope (which here are called `localns`!),
+        # but should in turn be overridden by names in the class scope
+        # (which here are called `globalns`!)
+        if type_params is not None:
+            globals = dict(globals)
+            locals = dict(locals)
+            for param in type_params:
+                param_name = param.__name__
+                if (
+                    _FORWARD_REF_HAS_CLASS and not forward_ref.__forward_is_class__
+                ) or param_name not in globals:
+                    globals[param_name] = param
+                    locals.pop(param_name, None)
+
+        arg = forward_ref.__forward_arg__
+        if arg.isidentifier() and not keyword.iskeyword(arg):
+            if arg in locals:
+                value = locals[arg]
+            elif arg in globals:
+                value = globals[arg]
+            elif hasattr(builtins, arg):
+                return getattr(builtins, arg)
+            else:
+                raise NameError(arg)
+        else:
+            code = forward_ref.__forward_code__
+            value = eval(code, globals, locals)
+        forward_ref.__forward_evaluated__ = True
+        forward_ref.__forward_value__ = value
+        return value
+
+    def _lax_type_check(
+        value, msg, is_argument=True, *, module=None, allow_special_forms=False
+    ):
+        """
+        A lax version of typing._type_check, approximating Python 3.11+ behavior.
+        """
+        if hasattr(typing, "_type_convert"):
+            if (
+                sys.version_info >= (3, 10, 3)
+                or (3, 9, 10) < sys.version_info[:3] < (3, 10)
+            ):
+                # allow_special_forms was introduced later, in cpython/#30926 (bpo-46539)
+                type_ = typing._type_convert(
+                    value,
+                    module=module,
+                    allow_special_forms=allow_special_forms,
+                )
+            # module was added with bpo-41249 before is_class (bpo-46539)
+            elif "__forward_module__" in typing.ForwardRef.__slots__:
+                type_ = typing._type_convert(value, module=module)
+            else:
+                type_ = typing._type_convert(value)
+        else:
+            if value is None:
+                return type(None)
+            if isinstance(value, str):
+                return ForwardRef(value)
+            type_ = value
+        invalid_generic_forms = (Generic, Protocol)
+        if not allow_special_forms:
+            invalid_generic_forms += (ClassVar,)
+            if is_argument:
+                invalid_generic_forms += (Final,)
+        if (
+            isinstance(type_, typing._GenericAlias)
+            and get_origin(type_) in invalid_generic_forms
+        ):
+            raise TypeError(f"{type_} is not valid as type argument") from None
+        if type_ in (Any, LiteralString, NoReturn, Never, Self, TypeAlias):
+            return type_
+        if allow_special_forms and type_ in (ClassVar, Final):
+            return type_
+        if (
+            isinstance(type_, (_SpecialForm, typing._SpecialForm))
+            or type_ in (Generic, Protocol)
+        ):
+            raise TypeError(f"Plain {type_} is not valid as type argument") from None
+        if type(type_) is tuple:  # lax version: reject only tuples, where the strict check requires callable(type_)
+            raise TypeError(f"{msg} Got {type_!r:.100}.")
+        return type_
+
+    def evaluate_forward_ref(
+        forward_ref,
+        *,
+        owner=None,
+        globals=None,
+        locals=None,
+        type_params=None,
+        format=Format.VALUE,
+        _recursive_guard=frozenset(),
+    ):
+        """Evaluate a forward reference as a type hint.
+
+        This is similar to calling the ForwardRef.evaluate() method,
+        but unlike that method, evaluate_forward_ref() also:
+
+        * Recursively evaluates forward references nested within the type hint.
+        * Rejects certain objects that are not valid type hints.
+        * Replaces type hints that evaluate to None with types.NoneType.
+        * Supports the *FORWARDREF* and *STRING* formats.
+
+        *forward_ref* must be an instance of ForwardRef. *owner*, if given,
+        should be the object that holds the annotations that the forward reference
+        derived from, such as a module, class object, or function. It is used to
+        infer the namespaces to use for looking up names. *globals* and *locals*
+        can also be explicitly given to provide the global and local namespaces.
+        *type_params* is a tuple of type parameters that are in scope when
+        evaluating the forward reference. This parameter must be provided (though
+        it may be an empty tuple) if *owner* is not given and the forward reference
+        does not already have an owner set. *format* specifies the format of the
+        annotation and is a member of the annotationlib.Format enum.
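+
+        A minimal sketch of the common cases:
+
+            ref = typing.ForwardRef("int")
+            evaluate_forward_ref(ref)                        # <class 'int'>
+            evaluate_forward_ref(ref, format=Format.STRING)  # 'int'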
+
+        """
+        if format == Format.STRING:
+            return forward_ref.__forward_arg__
+        if forward_ref.__forward_arg__ in _recursive_guard:
+            return forward_ref
+
+        # Evaluate the forward reference
+        try:
+            value = _eval_with_owner(
+                forward_ref,
+                owner=owner,
+                globals=globals,
+                locals=locals,
+                type_params=type_params,
+            )
+        except NameError:
+            if format == Format.FORWARDREF:
+                return forward_ref
+            else:
+                raise
+
+        msg = "Forward references must evaluate to types."
+        if not _FORWARD_REF_HAS_CLASS:
+            allow_special_forms = not forward_ref.__forward_is_argument__
+        else:
+            allow_special_forms = forward_ref.__forward_is_class__
+        type_ = _lax_type_check(
+            value,
+            msg,
+            is_argument=forward_ref.__forward_is_argument__,
+            allow_special_forms=allow_special_forms,
+        )
+
+        # Recursively evaluate the type
+        if isinstance(type_, ForwardRef):
+            if getattr(type_, "__forward_module__", True) is not None:
+                globals = None
+            return evaluate_forward_ref(
+                type_,
+                globals=globals,
+                locals=locals,
+                type_params=type_params,
+                owner=owner,
+                _recursive_guard=_recursive_guard,
+                format=format,
+            )
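+        # typing._eval_type grew parameters over time: 3.8 has no
+        # recursive_guard, 3.9-3.12.4 accept recursive_guard, 3.12.5+ also
+        # accept type_params, and 3.14 adds format and owner, hence the
+        # version ladder below.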
+        if sys.version_info < (3, 12, 5) and type_params:
+            # Make use of type_params
+            locals = dict(locals) if locals else {}
+            for tvar in type_params:
+                if tvar.__name__ not in locals:  # let's not overwrite a name that's already present
+                    locals[tvar.__name__] = tvar
+        if sys.version_info < (3, 9):
+            return typing._eval_type(
+                type_,
+                globals,
+                locals,
+            )
+        if sys.version_info < (3, 12, 5):
+            return typing._eval_type(
+                type_,
+                globals,
+                locals,
+                recursive_guard=_recursive_guard | {forward_ref.__forward_arg__},
+            )
+        if sys.version_info < (3, 14):
+            return typing._eval_type(
+                type_,
+                globals,
+                locals,
+                type_params,
+                recursive_guard=_recursive_guard | {forward_ref.__forward_arg__},
+            )
+        return typing._eval_type(
+            type_,
+            globals,
+            locals,
+            type_params,
+            recursive_guard=_recursive_guard | {forward_ref.__forward_arg__},
+            format=format,
+            owner=owner,
+        )
+
+
+# Aliases for items that have always been in typing.
+# Explicitly assign these (rather than using `from typing import *` at the top),
+# so that we get a CI error if one of these is deleted from typing.py
+# in a future version of Python.
+AbstractSet = typing.AbstractSet
+AnyStr = typing.AnyStr
+BinaryIO = typing.BinaryIO
+Callable = typing.Callable
+Collection = typing.Collection
+Container = typing.Container
+Dict = typing.Dict
+ForwardRef = typing.ForwardRef
+FrozenSet = typing.FrozenSet
+Generic = typing.Generic
+Hashable = typing.Hashable
+IO = typing.IO
+ItemsView = typing.ItemsView
+Iterable = typing.Iterable
+Iterator = typing.Iterator
+KeysView = typing.KeysView
+List = typing.List
+Mapping = typing.Mapping
+MappingView = typing.MappingView
+Match = typing.Match
+MutableMapping = typing.MutableMapping
+MutableSequence = typing.MutableSequence
+MutableSet = typing.MutableSet
+Optional = typing.Optional
+Pattern = typing.Pattern
+Reversible = typing.Reversible
+Sequence = typing.Sequence
+Set = typing.Set
+Sized = typing.Sized
+TextIO = typing.TextIO
+Tuple = typing.Tuple
+Union = typing.Union
+ValuesView = typing.ValuesView
+cast = typing.cast
+no_type_check = typing.no_type_check
+no_type_check_decorator = typing.no_type_check_decorator
diff --git a/temp_venv/pyvenv.cfg b/temp_venv/pyvenv.cfg
new file mode 100644
index 0000000000000000000000000000000000000000..c380c1b4e8bbdc51fb25b34ec3249cb21eea893d
--- /dev/null
+++ b/temp_venv/pyvenv.cfg
@@ -0,0 +1,5 @@
+home = /opt/homebrew/opt/python@3.13/bin
+implementation = CPython
+uv = 0.6.10
+version_info = 3.13.2
+include-system-site-packages = false
diff --git a/temp_venv/share/man/man1/ipython.1 b/temp_venv/share/man/man1/ipython.1
new file mode 100644
index 0000000000000000000000000000000000000000..0f4a191f3fa4c5678013589be9c9254e3f791c9c
--- /dev/null
+++ b/temp_venv/share/man/man1/ipython.1
@@ -0,0 +1,60 @@
+.\"                                      Hey, EMACS: -*- nroff -*-
+.\" First parameter, NAME, should be all caps
+.\" Second parameter, SECTION, should be 1-8, maybe w/ subsection
+.\" other parameters are allowed: see man(7), man(1)
+.TH IPYTHON 1 "July 15, 2011"
+.\" Please adjust this date whenever revising the manpage.
+.\"
+.\" Some roff macros, for reference:
+.\" .nh        disable hyphenation
+.\" .hy        enable hyphenation
+.\" .ad l      left justify
+.\" .ad b      justify to both left and right margins
+.\" .nf        disable filling
+.\" .fi        enable filling
+.\" .br        insert line break
+.\" .sp     insert n+1 empty lines
+.\" for manpage-specific macros, see man(7) and groff_man(7)
+.\" .SH        section heading
+.\" .SS        secondary section heading
+.\"
+.\"
+.\" To preview this page as plain text: nroff -man ipython.1
+.\"
+.SH NAME
+ipython \- Tools for Interactive Computing in Python.
+.SH SYNOPSIS
+.B ipython
+.RI [ options ] " files" ...
+
+.B ipython subcommand
+.RI [ options ] ...
+
+.SH DESCRIPTION
+An interactive Python shell with automatic history (input and output), dynamic
+object introspection, easier configuration, command completion, access to the
+system shell, integration with numerical and scientific computing tools,
+web notebook, Qt console, and more.
+
+For more information on how to use IPython, see 'ipython \-\-help',
+or 'ipython \-\-help\-all' for all available command\(hyline options.
+
+.SH "ENVIRONMENT VARIABLES"
+.sp
+.PP
+\fIIPYTHONDIR\fR
+.RS 4
+This is the location where IPython stores all its configuration files.  The default
+is $HOME/.ipython if IPYTHONDIR is not defined.
+
+You can see the computed value of IPYTHONDIR with `ipython locate`.
+
+.SH FILES
+
+IPython uses various configuration files stored in profiles within IPYTHONDIR.
+To generate the default configuration files and start configuring IPython,
+do 'ipython profile create', and edit '*_config.py' files located in
+IPYTHONDIR/profile_default.
+
+.SH AUTHORS
+IPython is written by the IPython Development Team.