index
int64 0
731k
| package
stringlengths 2
98
⌀ | name
stringlengths 1
76
| docstring
stringlengths 0
281k
⌀ | code
stringlengths 4
1.07M
⌀ | signature
stringlengths 2
42.8k
⌀ |
---|---|---|---|---|---|
725,808 | flask_admin.base | get_category_menu_item | null | def get_category_menu_item(self, name):
return self._menu_categories.get(name)
| (self, name) |
725,809 | flask_admin.base | init_app |
Register all views with the Flask application.
:param app:
Flask application instance
| def init_app(self, app, index_view=None,
endpoint=None, url=None):
"""
Register all views with the Flask application.
:param app:
Flask application instance
"""
self.app = app
self._init_extension()
# Register Index view
if index_view is not None:
self._set_admin_index_view(
index_view=index_view,
endpoint=endpoint,
url=url
)
# Register views
for view in self._views:
app.register_blueprint(view.create_blueprint(self))
| (self, app, index_view=None, endpoint=None, url=None) |
725,810 | flask_admin.base | menu |
Return the menu hierarchy.
| def menu(self):
"""
Return the menu hierarchy.
"""
return self._menu
| (self) |
725,811 | flask_admin.base | menu_links |
Return menu links.
| def menu_links(self):
"""
Return menu links.
"""
return self._menu_links
| (self) |
725,812 | flask_admin.base | AdminIndexView |
Default administrative interface index page when visiting the ``/admin/`` URL.
It can be overridden by passing your own view class to the ``Admin`` constructor::
class MyHomeView(AdminIndexView):
@expose('/')
def index(self):
arg1 = 'Hello'
return self.render('admin/myhome.html', arg1=arg1)
admin = Admin(index_view=MyHomeView())
Also, you can change the root url from /admin to / with the following::
admin = Admin(
app,
index_view=AdminIndexView(
name='Home',
template='admin/myhome.html',
url='/'
)
)
Default values for the index page are:
* If a name is not provided, 'Home' will be used.
* If an endpoint is not provided, will default to ``admin``
* Default URL route is ``/admin``.
* Automatically associates with static folder.
* Default template is ``admin/index.html``
| class AdminIndexView(BaseView):
"""
Default administrative interface index page when visiting the ``/admin/`` URL.
It can be overridden by passing your own view class to the ``Admin`` constructor::
class MyHomeView(AdminIndexView):
@expose('/')
def index(self):
arg1 = 'Hello'
return self.render('admin/myhome.html', arg1=arg1)
admin = Admin(index_view=MyHomeView())
Also, you can change the root url from /admin to / with the following::
admin = Admin(
app,
index_view=AdminIndexView(
name='Home',
template='admin/myhome.html',
url='/'
)
)
Default values for the index page are:
* If a name is not provided, 'Home' will be used.
* If an endpoint is not provided, will default to ``admin``
* Default URL route is ``/admin``.
* Automatically associates with static folder.
* Default template is ``admin/index.html``
"""
def __init__(self, name=None, category=None,
endpoint=None, url=None,
template='admin/index.html',
menu_class_name=None,
menu_icon_type=None,
menu_icon_value=None):
super(AdminIndexView, self).__init__(name or babel.lazy_gettext('Home'),
category,
endpoint or 'admin',
'/admin' if url is None else url,
'static',
menu_class_name=menu_class_name,
menu_icon_type=menu_icon_type,
menu_icon_value=menu_icon_value)
self._template = template
@expose()
def index(self):
return self.render(self._template)
| (name=None, category=None, endpoint=None, url=None, template='admin/index.html', menu_class_name=None, menu_icon_type=None, menu_icon_value=None) |
725,813 | flask_admin.base | __init__ | null | def __init__(self, name=None, category=None,
endpoint=None, url=None,
template='admin/index.html',
menu_class_name=None,
menu_icon_type=None,
menu_icon_value=None):
super(AdminIndexView, self).__init__(name or babel.lazy_gettext('Home'),
category,
endpoint or 'admin',
'/admin' if url is None else url,
'static',
menu_class_name=menu_class_name,
menu_icon_type=menu_icon_type,
menu_icon_value=menu_icon_value)
self._template = template
| (self, name=None, category=None, endpoint=None, url=None, template='admin/index.html', menu_class_name=None, menu_icon_type=None, menu_icon_value=None) |
725,814 | flask_admin.base | _get_endpoint |
Generate Flask endpoint name. By default converts class name to lower case if endpoint is
not explicitly provided.
| def _get_endpoint(self, endpoint):
"""
Generate Flask endpoint name. By default converts class name to lower case if endpoint is
not explicitly provided.
"""
if endpoint:
return endpoint
return self.__class__.__name__.lower()
| (self, endpoint) |
725,815 | flask_admin.base | _get_view_url |
Generate URL for the view. Override to change default behavior.
| def _get_view_url(self, admin, url):
"""
Generate URL for the view. Override to change default behavior.
"""
if url is None:
if admin.url != '/':
url = '%s/%s' % (admin.url, self.endpoint)
else:
if self == admin.index_view:
url = '/'
else:
url = '/%s' % self.endpoint
else:
if not url.startswith('/'):
url = '%s/%s' % (admin.url, url)
return url
| (self, admin, url) |
725,816 | flask_admin.base | _handle_view |
This method will be executed before calling any view method.
It will execute the ``inaccessible_callback`` if the view is not
accessible.
:param name:
View function name
:param kwargs:
View function arguments
| def _handle_view(self, name, **kwargs):
"""
This method will be executed before calling any view method.
It will execute the ``inaccessible_callback`` if the view is not
accessible.
:param name:
View function name
:param kwargs:
View function arguments
"""
if not self.is_accessible():
return self.inaccessible_callback(name, **kwargs)
| (self, name, **kwargs) |
725,817 | flask_admin.base | _prettify_class_name |
Split words in PascalCase string into separate words.
:param name:
String to prettify
| def _prettify_class_name(self, name):
"""
Split words in PascalCase string into separate words.
:param name:
String to prettify
"""
return h.prettify_class_name(name)
| (self, name) |
725,818 | flask_admin.base | _run_view |
This method will run actual view function.
While it is similar to _handle_view, can be used to change
arguments that are passed to the view.
:param fn:
View function
:param kwargs:
Arguments
| def _run_view(self, fn, *args, **kwargs):
"""
This method will run actual view function.
While it is similar to _handle_view, can be used to change
arguments that are passed to the view.
:param fn:
View function
:param kwargs:
Arguments
"""
try:
return fn(self, *args, **kwargs)
except TypeError:
return fn(cls=self, **kwargs)
| (self, fn, *args, **kwargs) |
725,819 | flask_admin.base | create_blueprint |
Create Flask blueprint.
| def create_blueprint(self, admin):
"""
Create Flask blueprint.
"""
# Store admin instance
self.admin = admin
# If the static_url_path is not provided, use the admin's
if not self.static_url_path:
self.static_url_path = admin.static_url_path
# Generate URL
self.url = self._get_view_url(admin, self.url)
# If we're working from the root of the site, set prefix to None
if self.url == '/':
self.url = None
# prevent admin static files from conflicting with flask static files
if not self.static_url_path:
self.static_folder = 'static'
self.static_url_path = '/static/admin'
# If name is not provided, use capitalized endpoint name
if self.name is None:
self.name = self._prettify_class_name(self.__class__.__name__)
# Create blueprint and register rules
self.blueprint = Blueprint(self.endpoint, __name__,
url_prefix=self.url,
subdomain=self.admin.subdomain,
template_folder=op.join('templates', self.admin.template_mode),
static_folder=self.static_folder,
static_url_path=self.static_url_path)
for url, name, methods in self._urls:
self.blueprint.add_url_rule(url,
name,
getattr(self, name),
methods=methods)
return self.blueprint
| (self, admin) |
725,820 | flask_admin.base | get_url |
Generate URL for the endpoint. If you want to customize URL generation
logic (persist some query string argument, for example), this is
right place to do it.
:param endpoint:
Flask endpoint name
:param kwargs:
Arguments for `url_for`
| def get_url(self, endpoint, **kwargs):
"""
Generate URL for the endpoint. If you want to customize URL generation
logic (persist some query string argument, for example), this is
right place to do it.
:param endpoint:
Flask endpoint name
:param kwargs:
Arguments for `url_for`
"""
return url_for(endpoint, **kwargs)
| (self, endpoint, **kwargs) |
725,821 | flask_admin.base | inaccessible_callback |
Handle the response to inaccessible views.
By default, it throw HTTP 403 error. Override this method to
customize the behaviour.
| def inaccessible_callback(self, name, **kwargs):
"""
Handle the response to inaccessible views.
By default, it throw HTTP 403 error. Override this method to
customize the behaviour.
"""
return abort(403)
| (self, name, **kwargs) |
725,822 | flask_admin.base | index | null | def _wrap_view(f):
# Avoid wrapping view method twice
if hasattr(f, '_wrapped'):
return f
@wraps(f)
def inner(self, *args, **kwargs):
# Store current admin view
h.set_current_view(self)
# Check if administrative piece is accessible
abort = self._handle_view(f.__name__, **kwargs)
if abort is not None:
return abort
return self._run_view(f, *args, **kwargs)
inner._wrapped = True
return inner
| (self) |
725,823 | flask_admin.base | is_accessible |
Override this method to add permission checks.
Flask-Admin does not make any assumptions about the authentication system used in your application, so it is
up to you to implement it.
By default, it will allow access for everyone.
| def is_accessible(self):
"""
Override this method to add permission checks.
Flask-Admin does not make any assumptions about the authentication system used in your application, so it is
up to you to implement it.
By default, it will allow access for everyone.
"""
return True
| (self) |
725,824 | flask_admin.base | is_visible |
Override this method if you want dynamically hide or show administrative views
from Flask-Admin menu structure
By default, item is visible in menu.
Please note that item should be both visible and accessible to be displayed in menu.
| def is_visible(self):
"""
Override this method if you want dynamically hide or show administrative views
from Flask-Admin menu structure
By default, item is visible in menu.
Please note that item should be both visible and accessible to be displayed in menu.
"""
return True
| (self) |
725,825 | flask_admin.base | render |
Render template
:param template:
Template path to render
:param kwargs:
Template arguments
| def render(self, template, **kwargs):
"""
Render template
:param template:
Template path to render
:param kwargs:
Template arguments
"""
# Store self as admin_view
kwargs['admin_view'] = self
kwargs['admin_base_template'] = self.admin.base_template
# Provide i18n support even if flask-babel is not installed
# or enabled.
kwargs['_gettext'] = babel.gettext
kwargs['_ngettext'] = babel.ngettext
kwargs['h'] = h
# Expose get_url helper
kwargs['get_url'] = self.get_url
# Expose config info
kwargs['config'] = current_app.config
# Contribute extra arguments
kwargs.update(self._template_args)
return render_template(template, **kwargs)
| (self, template, **kwargs) |
725,826 | flask_admin.base | BaseView |
Base administrative view.
Derive from this class to implement your administrative interface piece. For example::
from flask_admin import BaseView, expose
class MyView(BaseView):
@expose('/')
def index(self):
return 'Hello World!'
Icons can be added to the menu by using `menu_icon_type` and `menu_icon_value`. For example::
admin.add_view(MyView(name='My View', menu_icon_type='glyph', menu_icon_value='glyphicon-home'))
| class BaseView(with_metaclass(AdminViewMeta, BaseViewClass)):
"""
Base administrative view.
Derive from this class to implement your administrative interface piece. For example::
from flask_admin import BaseView, expose
class MyView(BaseView):
@expose('/')
def index(self):
return 'Hello World!'
Icons can be added to the menu by using `menu_icon_type` and `menu_icon_value`. For example::
admin.add_view(MyView(name='My View', menu_icon_type='glyph', menu_icon_value='glyphicon-home'))
"""
@property
def _template_args(self):
"""
Extra template arguments.
If you need to pass some extra parameters to the template,
you can override particular view function, contribute
arguments you want to pass to the template and call parent view.
These arguments are local for this request and will be discarded
in the next request.
Any value passed through ``_template_args`` will override whatever
parent view function passed to the template.
For example::
class MyAdmin(ModelView):
@expose('/')
def index(self):
self._template_args['name'] = 'foobar'
self._template_args['code'] = '12345'
super(MyAdmin, self).index()
"""
args = getattr(g, '_admin_template_args', None)
if args is None:
args = g._admin_template_args = dict()
return args
def __init__(self, name=None, category=None, endpoint=None, url=None,
static_folder=None, static_url_path=None,
menu_class_name=None, menu_icon_type=None, menu_icon_value=None):
"""
Constructor.
:param name:
Name of this view. If not provided, will default to the class name.
:param category:
View category. If not provided, this view will be shown as a top-level menu item. Otherwise, it will
be in a submenu.
:param endpoint:
Base endpoint name for the view. For example, if there's a view method called "index" and
endpoint is set to "myadmin", you can use `url_for('myadmin.index')` to get the URL to the
view method. Defaults to the class name in lower case.
:param url:
Base URL. If provided, affects how URLs are generated. For example, if the url parameter
is "test", the resulting URL will look like "/admin/test/". If not provided, will
use endpoint as a base url. However, if URL starts with '/', absolute path is assumed
and '/admin/' prefix won't be applied.
:param static_url_path:
Static URL Path. If provided, this specifies the path to the static url directory.
:param menu_class_name:
Optional class name for the menu item.
:param menu_icon_type:
Optional icon. Possible icon types:
- `flask_admin.consts.ICON_TYPE_GLYPH` - Bootstrap glyph icon
- `flask_admin.consts.ICON_TYPE_FONT_AWESOME` - Font Awesome icon
- `flask_admin.consts.ICON_TYPE_IMAGE` - Image relative to Flask static directory
- `flask_admin.consts.ICON_TYPE_IMAGE_URL` - Image with full URL
:param menu_icon_value:
Icon glyph name or URL, depending on `menu_icon_type` setting
"""
self.name = name
self.category = category
self.endpoint = self._get_endpoint(endpoint)
self.url = url
self.static_folder = static_folder
self.static_url_path = static_url_path
self.menu = None
self.menu_class_name = menu_class_name
self.menu_icon_type = menu_icon_type
self.menu_icon_value = menu_icon_value
# Initialized from create_blueprint
self.admin = None
self.blueprint = None
# Default view
if self._default_view is None:
raise Exception(u'Attempted to instantiate admin view %s without default view' % self.__class__.__name__)
def _get_endpoint(self, endpoint):
"""
Generate Flask endpoint name. By default converts class name to lower case if endpoint is
not explicitly provided.
"""
if endpoint:
return endpoint
return self.__class__.__name__.lower()
def _get_view_url(self, admin, url):
"""
Generate URL for the view. Override to change default behavior.
"""
if url is None:
if admin.url != '/':
url = '%s/%s' % (admin.url, self.endpoint)
else:
if self == admin.index_view:
url = '/'
else:
url = '/%s' % self.endpoint
else:
if not url.startswith('/'):
url = '%s/%s' % (admin.url, url)
return url
def create_blueprint(self, admin):
"""
Create Flask blueprint.
"""
# Store admin instance
self.admin = admin
# If the static_url_path is not provided, use the admin's
if not self.static_url_path:
self.static_url_path = admin.static_url_path
# Generate URL
self.url = self._get_view_url(admin, self.url)
# If we're working from the root of the site, set prefix to None
if self.url == '/':
self.url = None
# prevent admin static files from conflicting with flask static files
if not self.static_url_path:
self.static_folder = 'static'
self.static_url_path = '/static/admin'
# If name is not provided, use capitalized endpoint name
if self.name is None:
self.name = self._prettify_class_name(self.__class__.__name__)
# Create blueprint and register rules
self.blueprint = Blueprint(self.endpoint, __name__,
url_prefix=self.url,
subdomain=self.admin.subdomain,
template_folder=op.join('templates', self.admin.template_mode),
static_folder=self.static_folder,
static_url_path=self.static_url_path)
for url, name, methods in self._urls:
self.blueprint.add_url_rule(url,
name,
getattr(self, name),
methods=methods)
return self.blueprint
def render(self, template, **kwargs):
"""
Render template
:param template:
Template path to render
:param kwargs:
Template arguments
"""
# Store self as admin_view
kwargs['admin_view'] = self
kwargs['admin_base_template'] = self.admin.base_template
# Provide i18n support even if flask-babel is not installed
# or enabled.
kwargs['_gettext'] = babel.gettext
kwargs['_ngettext'] = babel.ngettext
kwargs['h'] = h
# Expose get_url helper
kwargs['get_url'] = self.get_url
# Expose config info
kwargs['config'] = current_app.config
# Contribute extra arguments
kwargs.update(self._template_args)
return render_template(template, **kwargs)
def _prettify_class_name(self, name):
"""
Split words in PascalCase string into separate words.
:param name:
String to prettify
"""
return h.prettify_class_name(name)
def is_visible(self):
"""
Override this method if you want dynamically hide or show administrative views
from Flask-Admin menu structure
By default, item is visible in menu.
Please note that item should be both visible and accessible to be displayed in menu.
"""
return True
def is_accessible(self):
"""
Override this method to add permission checks.
Flask-Admin does not make any assumptions about the authentication system used in your application, so it is
up to you to implement it.
By default, it will allow access for everyone.
"""
return True
def _handle_view(self, name, **kwargs):
"""
This method will be executed before calling any view method.
It will execute the ``inaccessible_callback`` if the view is not
accessible.
:param name:
View function name
:param kwargs:
View function arguments
"""
if not self.is_accessible():
return self.inaccessible_callback(name, **kwargs)
def _run_view(self, fn, *args, **kwargs):
"""
This method will run actual view function.
While it is similar to _handle_view, can be used to change
arguments that are passed to the view.
:param fn:
View function
:param kwargs:
Arguments
"""
try:
return fn(self, *args, **kwargs)
except TypeError:
return fn(cls=self, **kwargs)
def inaccessible_callback(self, name, **kwargs):
"""
Handle the response to inaccessible views.
By default, it throw HTTP 403 error. Override this method to
customize the behaviour.
"""
return abort(403)
def get_url(self, endpoint, **kwargs):
"""
Generate URL for the endpoint. If you want to customize URL generation
logic (persist some query string argument, for example), this is
right place to do it.
:param endpoint:
Flask endpoint name
:param kwargs:
Arguments for `url_for`
"""
return url_for(endpoint, **kwargs)
@property
def _debug(self):
if not self.admin or not self.admin.app:
return False
return self.admin.app.debug
| (name=None, category=None, endpoint=None, url=None, static_folder=None, static_url_path=None, menu_class_name=None, menu_icon_type=None, menu_icon_value=None) |
725,827 | flask_admin.base | __init__ |
Constructor.
:param name:
Name of this view. If not provided, will default to the class name.
:param category:
View category. If not provided, this view will be shown as a top-level menu item. Otherwise, it will
be in a submenu.
:param endpoint:
Base endpoint name for the view. For example, if there's a view method called "index" and
endpoint is set to "myadmin", you can use `url_for('myadmin.index')` to get the URL to the
view method. Defaults to the class name in lower case.
:param url:
Base URL. If provided, affects how URLs are generated. For example, if the url parameter
is "test", the resulting URL will look like "/admin/test/". If not provided, will
use endpoint as a base url. However, if URL starts with '/', absolute path is assumed
and '/admin/' prefix won't be applied.
:param static_url_path:
Static URL Path. If provided, this specifies the path to the static url directory.
:param menu_class_name:
Optional class name for the menu item.
:param menu_icon_type:
Optional icon. Possible icon types:
- `flask_admin.consts.ICON_TYPE_GLYPH` - Bootstrap glyph icon
- `flask_admin.consts.ICON_TYPE_FONT_AWESOME` - Font Awesome icon
- `flask_admin.consts.ICON_TYPE_IMAGE` - Image relative to Flask static directory
- `flask_admin.consts.ICON_TYPE_IMAGE_URL` - Image with full URL
:param menu_icon_value:
Icon glyph name or URL, depending on `menu_icon_type` setting
| def __init__(self, name=None, category=None, endpoint=None, url=None,
static_folder=None, static_url_path=None,
menu_class_name=None, menu_icon_type=None, menu_icon_value=None):
"""
Constructor.
:param name:
Name of this view. If not provided, will default to the class name.
:param category:
View category. If not provided, this view will be shown as a top-level menu item. Otherwise, it will
be in a submenu.
:param endpoint:
Base endpoint name for the view. For example, if there's a view method called "index" and
endpoint is set to "myadmin", you can use `url_for('myadmin.index')` to get the URL to the
view method. Defaults to the class name in lower case.
:param url:
Base URL. If provided, affects how URLs are generated. For example, if the url parameter
is "test", the resulting URL will look like "/admin/test/". If not provided, will
use endpoint as a base url. However, if URL starts with '/', absolute path is assumed
and '/admin/' prefix won't be applied.
:param static_url_path:
Static URL Path. If provided, this specifies the path to the static url directory.
:param menu_class_name:
Optional class name for the menu item.
:param menu_icon_type:
Optional icon. Possible icon types:
- `flask_admin.consts.ICON_TYPE_GLYPH` - Bootstrap glyph icon
- `flask_admin.consts.ICON_TYPE_FONT_AWESOME` - Font Awesome icon
- `flask_admin.consts.ICON_TYPE_IMAGE` - Image relative to Flask static directory
- `flask_admin.consts.ICON_TYPE_IMAGE_URL` - Image with full URL
:param menu_icon_value:
Icon glyph name or URL, depending on `menu_icon_type` setting
"""
self.name = name
self.category = category
self.endpoint = self._get_endpoint(endpoint)
self.url = url
self.static_folder = static_folder
self.static_url_path = static_url_path
self.menu = None
self.menu_class_name = menu_class_name
self.menu_icon_type = menu_icon_type
self.menu_icon_value = menu_icon_value
# Initialized from create_blueprint
self.admin = None
self.blueprint = None
# Default view
if self._default_view is None:
raise Exception(u'Attempted to instantiate admin view %s without default view' % self.__class__.__name__)
| (self, name=None, category=None, endpoint=None, url=None, static_folder=None, static_url_path=None, menu_class_name=None, menu_icon_type=None, menu_icon_value=None) |
725,842 | flask_admin.base | expose |
Use this decorator to expose views in your view classes.
:param url:
Relative URL for the view
:param methods:
Allowed HTTP methods. By default only GET is allowed.
| def expose(url='/', methods=('GET',)):
"""
Use this decorator to expose views in your view classes.
:param url:
Relative URL for the view
:param methods:
Allowed HTTP methods. By default only GET is allowed.
"""
def wrap(f):
if not hasattr(f, '_urls'):
f._urls = []
f._urls.append((url, methods))
return f
return wrap
| (url='/', methods=('GET',)) |
725,843 | flask_admin.base | expose_plugview |
Decorator to expose Flask's pluggable view classes
(``flask.views.View`` or ``flask.views.MethodView``).
:param url:
Relative URL for the view
.. versionadded:: 1.0.4
| def expose_plugview(url='/'):
"""
Decorator to expose Flask's pluggable view classes
(``flask.views.View`` or ``flask.views.MethodView``).
:param url:
Relative URL for the view
.. versionadded:: 1.0.4
"""
def wrap(v):
handler = expose(url, v.methods)
if hasattr(v, 'as_view'):
return handler(v.as_view(v.__name__))
else:
return handler(v)
return wrap
| (url='/') |
725,846 | autofaiss.external.quantize | build_index |
Reads embeddings and creates a quantized index from them.
The index is stored on the current machine at the given output path.
Parameters
----------
embeddings : Union[str, np.ndarray, List[str]]
Local path containing all preprocessed vectors and cached files.
This could be a single directory or multiple directories.
Files will be added if empty.
Or directly the Numpy array of embeddings
index_path: Optional(str)
Destination path of the quantized model.
index_infos_path: Optional(str)
Destination path of the metadata file.
ids_path: Optional(str)
Only useful when id_columns is not None and file_format=`parquet`. T
his will be the path (in any filesystem)
where the mapping files Ids->vector index will be store in parquet format
save_on_disk: bool
Whether to save the index on disk, default to True.
file_format: Optional(str)
npy or parquet ; default npy
embedding_column_name: Optional(str)
embeddings column name for parquet ; default embedding
id_columns: Optional(List[str])
Can only be used when file_format=`parquet`.
In this case these are the names of the columns containing the Ids of the vectors,
and separate files will be generated to map these ids to indices in the KNN index ;
default None
index_key: Optional(str)
Optional string to give to the index factory in order to create the index.
If None, an index is chosen based on an heuristic.
index_param: Optional(str)
Optional string with hyperparameters to set to the index.
If None, the hyper-parameters are chosen based on an heuristic.
max_index_query_time_ms: float
Bound on the query time for KNN search, this bound is approximative
max_index_memory_usage: str
Maximum size allowed for the index, this bound is strict
min_nearest_neighbors_to_retrieve: int
Minimum number of nearest neighbors to retrieve when querying the index.
Parameter used only during index hyperparameter finetuning step, it is
not taken into account to select the indexing algorithm.
This parameter has the priority over the max_index_query_time_ms constraint.
current_memory_available: str
Memory available on the machine creating the index, having more memory is a boost
because it reduces the swipe between RAM and disk.
use_gpu: bool
Experimental, gpu training is faster, not tested so far
metric_type: str
Similarity function used for query:
- "ip" for inner product
- "l2" for euclidian distance
nb_cores: Optional[int]
Number of cores to use. Will try to guess the right number if not provided
make_direct_map: bool
Create a direct map allowing reconstruction of embeddings. This is only needed for IVF indices.
Note that might increase the RAM usage (approximately 8GB for 1 billion embeddings)
should_be_memory_mappable: bool
If set to true, the created index will be selected only among the indices that can be memory-mapped on disk.
This makes it possible to use 50GB indices on a machine with only 1GB of RAM. Default to False
distributed: Optional[str]
If "pyspark", create the indices using pyspark.
Only "parquet" file format is supported.
temporary_indices_folder: str
Folder to save the temporary small indices that are generated by each spark executor.
Only used when distributed = "pyspark".
verbose: int
set verbosity of outputs via logging level, default is `logging.INFO`
nb_indices_to_keep: int
Number of indices to keep at most when distributed is "pyspark".
It allows you to build an index larger than `current_memory_available`
If it is not equal to 1,
- You are expected to have at most `nb_indices_to_keep` indices with the following names:
"{index_path}i" where i ranges from 1 to `nb_indices_to_keep`
- `build_index` returns a mapping from index path to metrics
Default to 1.
| def build_index(
embeddings: Union[str, np.ndarray, List[str]],
index_path: Optional[str] = "knn.index",
index_infos_path: Optional[str] = "index_infos.json",
ids_path: Optional[str] = None,
save_on_disk: bool = True,
file_format: str = "npy",
embedding_column_name: str = "embedding",
id_columns: Optional[List[str]] = None,
index_key: Optional[str] = None,
index_param: Optional[str] = None,
max_index_query_time_ms: float = 10.0,
max_index_memory_usage: str = "16G",
min_nearest_neighbors_to_retrieve: int = 20,
current_memory_available: str = "32G",
use_gpu: bool = False,
metric_type: str = "ip",
nb_cores: Optional[int] = None,
make_direct_map: bool = False,
should_be_memory_mappable: bool = False,
distributed: Optional[str] = None,
temporary_indices_folder: str = "hdfs://root/tmp/distributed_autofaiss_indices",
verbose: int = logging.INFO,
nb_indices_to_keep: int = 1,
) -> Tuple[Optional[faiss.Index], Optional[Dict[str, str]]]:
"""
Reads embeddings and creates a quantized index from them.
The index is stored on the current machine at the given output path.
Parameters
----------
embeddings : Union[str, np.ndarray, List[str]]
Local path containing all preprocessed vectors and cached files.
This could be a single directory or multiple directories.
Files will be added if empty.
Or directly the Numpy array of embeddings
index_path: Optional(str)
Destination path of the quantized model.
index_infos_path: Optional(str)
Destination path of the metadata file.
ids_path: Optional(str)
Only useful when id_columns is not None and file_format=`parquet`. T
his will be the path (in any filesystem)
where the mapping files Ids->vector index will be store in parquet format
save_on_disk: bool
Whether to save the index on disk, default to True.
file_format: Optional(str)
npy or parquet ; default npy
embedding_column_name: Optional(str)
embeddings column name for parquet ; default embedding
id_columns: Optional(List[str])
Can only be used when file_format=`parquet`.
In this case these are the names of the columns containing the Ids of the vectors,
and separate files will be generated to map these ids to indices in the KNN index ;
default None
index_key: Optional(str)
Optional string to give to the index factory in order to create the index.
If None, an index is chosen based on an heuristic.
index_param: Optional(str)
Optional string with hyperparameters to set to the index.
If None, the hyper-parameters are chosen based on an heuristic.
max_index_query_time_ms: float
Bound on the query time for KNN search, this bound is approximative
max_index_memory_usage: str
Maximum size allowed for the index, this bound is strict
min_nearest_neighbors_to_retrieve: int
Minimum number of nearest neighbors to retrieve when querying the index.
Parameter used only during index hyperparameter finetuning step, it is
not taken into account to select the indexing algorithm.
This parameter has the priority over the max_index_query_time_ms constraint.
current_memory_available: str
Memory available on the machine creating the index, having more memory is a boost
because it reduces the swipe between RAM and disk.
use_gpu: bool
Experimental, gpu training is faster, not tested so far
metric_type: str
Similarity function used for query:
- "ip" for inner product
- "l2" for euclidian distance
nb_cores: Optional[int]
Number of cores to use. Will try to guess the right number if not provided
make_direct_map: bool
Create a direct map allowing reconstruction of embeddings. This is only needed for IVF indices.
Note that might increase the RAM usage (approximately 8GB for 1 billion embeddings)
should_be_memory_mappable: bool
If set to true, the created index will be selected only among the indices that can be memory-mapped on disk.
This makes it possible to use 50GB indices on a machine with only 1GB of RAM. Default to False
distributed: Optional[str]
If "pyspark", create the indices using pyspark.
Only "parquet" file format is supported.
temporary_indices_folder: str
Folder to save the temporary small indices that are generated by each spark executor.
Only used when distributed = "pyspark".
verbose: int
set verbosity of outputs via logging level, default is `logging.INFO`
nb_indices_to_keep: int
Number of indices to keep at most when distributed is "pyspark".
It allows you to build an index larger than `current_memory_available`
If it is not equal to 1,
- You are expected to have at most `nb_indices_to_keep` indices with the following names:
"{index_path}i" where i ranges from 1 to `nb_indices_to_keep`
- `build_index` returns a mapping from index path to metrics
Default to 1.
"""
setup_logging(verbose)
# if using distributed mode, it doesn't make sense to use indices that are not memory mappable
if distributed == "pyspark":
should_be_memory_mappable = True
if index_path:
index_path = make_path_absolute(index_path)
elif save_on_disk:
logger.error("Please specify a index_path if you set save_on_disk as True")
return None, None
if index_infos_path:
index_infos_path = make_path_absolute(index_infos_path)
elif save_on_disk:
logger.error("Please specify a index_infos_path if you set save_on_disk as True")
return None, None
if ids_path:
ids_path = make_path_absolute(ids_path)
if nb_indices_to_keep < 1:
logger.error("Please specify nb_indices_to_keep an integer value larger or equal to 1")
return None, None
elif nb_indices_to_keep > 1:
if distributed is None:
logger.error('nb_indices_to_keep can only be larger than 1 when distributed is "pyspark"')
return None, None
if not save_on_disk:
logger.error("Please set save_on_disk to True when nb_indices_to_keep is larger than 1")
return None, None
current_bytes = cast_memory_to_bytes(current_memory_available)
max_index_bytes = cast_memory_to_bytes(max_index_memory_usage)
memory_left = current_bytes - max_index_bytes
if nb_indices_to_keep == 1 and memory_left < current_bytes * 0.1:
logger.error(
"You do not have enough memory to build this index, "
"please increase current_memory_available or decrease max_index_memory_usage"
)
return None, None
if nb_cores is None:
nb_cores = multiprocessing.cpu_count()
logger.info(f"Using {nb_cores} omp threads (processes), consider increasing --nb_cores if you have more")
faiss.omp_set_num_threads(nb_cores)
if isinstance(embeddings, np.ndarray):
tmp_dir_embeddings = tempfile.TemporaryDirectory() # pylint: disable=consider-using-with
np.save(os.path.join(tmp_dir_embeddings.name, "emb.npy"), embeddings)
embeddings_path = tmp_dir_embeddings.name
else:
embeddings_path = embeddings # type: ignore
with Timeit("Launching the whole pipeline"):
with Timeit("Reading total number of vectors and dimension"):
embedding_reader = EmbeddingReader(
embeddings_path,
file_format=file_format,
embedding_column=embedding_column_name,
meta_columns=id_columns,
)
nb_vectors = embedding_reader.count
vec_dim = embedding_reader.dimension
logger.info(f"There are {nb_vectors} embeddings of dim {vec_dim}")
with Timeit("Compute estimated construction time of the index", indent=1):
for log_lines in get_estimated_construction_time_infos(nb_vectors, vec_dim, indent=2).split("\n"):
logger.info(log_lines)
with Timeit("Checking that your have enough memory available to create the index", indent=1):
necessary_mem, index_key_used = estimate_memory_required_for_index_creation(
nb_vectors, vec_dim, index_key, max_index_memory_usage, make_direct_map, nb_indices_to_keep
)
logger.info(
f"{cast_bytes_to_memory_string(necessary_mem)} of memory "
"will be needed to build the index (more might be used if you have more)"
)
prefix = "(default) " if index_key is None else ""
if necessary_mem > cast_memory_to_bytes(current_memory_available):
r = (
f"The current memory available on your machine ({current_memory_available}) is not "
f"enough to create the {prefix}index {index_key_used} that requires "
f"{cast_bytes_to_memory_string(necessary_mem)} to train. "
"You can decrease the number of clusters of you index since the Kmeans algorithm "
"used for clusterisation is responsible for this high memory usage."
"Consider increasing the options current_memory_available or decreasing max_index_memory_usage"
)
logger.error(r)
return None, None
if index_key is None:
with Timeit("Selecting most promising index types given data characteristics", indent=1):
best_index_keys = get_optimal_index_keys_v2(
nb_vectors,
vec_dim,
max_index_memory_usage,
make_direct_map=make_direct_map,
should_be_memory_mappable=should_be_memory_mappable,
use_gpu=use_gpu,
)
if not best_index_keys:
return None, None
index_key = best_index_keys[0]
if id_columns is not None:
logger.info(f"Id columns provided {id_columns} - will be reading the corresponding columns")
if ids_path is not None:
logger.info(f"\tWill be writing the Ids DataFrame in parquet format to {ids_path}")
fs, _ = fsspec.core.url_to_fs(ids_path, use_listings_cache=False)
if fs.exists(ids_path):
fs.rm(ids_path, recursive=True)
fs.mkdirs(ids_path)
else:
logger.error(
"\tAs ids_path=None - the Ids DataFrame will not be written and will be ignored subsequently"
)
logger.error("\tPlease provide a value ids_path for the Ids to be written")
write_ids_df_to_parquet_fn = get_write_ids_df_to_parquet_fn(ids_path) if ids_path and id_columns else None
optimize_index_fn = get_optimize_index_fn(
embedding_reader=embedding_reader,
index_key=index_key,
index_path=index_path,
index_infos_path=index_infos_path,
use_gpu=use_gpu,
save_on_disk=save_on_disk,
max_index_query_time_ms=max_index_query_time_ms,
min_nearest_neighbors_to_retrieve=min_nearest_neighbors_to_retrieve,
index_param=index_param,
make_direct_map=make_direct_map,
)
with Timeit("Creating the index", indent=1):
index, metric_infos = create_index(
embedding_reader,
index_key,
metric_type,
current_memory_available,
use_gpu=use_gpu,
embedding_ids_df_handler=write_ids_df_to_parquet_fn,
make_direct_map=make_direct_map,
distributed_engine=distributed,
temporary_indices_folder=temporary_indices_folder,
nb_indices_to_keep=nb_indices_to_keep,
index_optimizer=optimize_index_fn,
)
if metric_infos:
_log_output_dict(metric_infos)
return index, metric_infos
| (embeddings: Union[str, numpy.ndarray, List[str]], index_path: Optional[str] = 'knn.index', index_infos_path: Optional[str] = 'index_infos.json', ids_path: Optional[str] = None, save_on_disk: bool = True, file_format: str = 'npy', embedding_column_name: str = 'embedding', id_columns: Optional[List[str]] = None, index_key: Optional[str] = None, index_param: Optional[str] = None, max_index_query_time_ms: float = 10.0, max_index_memory_usage: str = '16G', min_nearest_neighbors_to_retrieve: int = 20, current_memory_available: str = '32G', use_gpu: bool = False, metric_type: str = 'ip', nb_cores: Optional[int] = None, make_direct_map: bool = False, should_be_memory_mappable: bool = False, distributed: Optional[str] = None, temporary_indices_folder: str = 'hdfs://root/tmp/distributed_autofaiss_indices', verbose: int = 20, nb_indices_to_keep: int = 1) -> Tuple[Optional[faiss.swigfaiss_avx512.Index], Optional[Dict[str, str]]] |
725,847 | autofaiss.external.quantize | build_partitioned_indexes |
Create partitioned indexes from a partitioned parquet dataset,
i.e. create one index per parquet partition
Only supported with PySpark. A PySpark session must be active before calling this function
Parameters
----------
partitions : str
List of partitions containing embeddings
output_root_dir: str
Output root directory where indexes, metrics and ids will be written
embedding_column_name: str
Parquet dataset column name containing embeddings
index_key: Optional(str)
Optional string to give to the index factory in order to create the index.
If None, an index is chosen based on an heuristic.
index_path: Optional(str)
Optional path to an index that will be used to add embeddings.
This index must be pre-trained if it needs a training
id_columns: Optional(List[str])
Parquet dataset column name(s) that are used as IDs for embeddings.
A mapping from these IDs to faiss indices will be written in separate files.
max_index_query_time_ms: float
Bound on the query time for KNN search, this bound is approximative
max_index_memory_usage: str
Maximum size allowed for the index, this bound is strict
min_nearest_neighbors_to_retrieve: int
Minimum number of nearest neighbors to retrieve when querying the index.
Parameter used only during index hyperparameter finetuning step, it is
not taken into account to select the indexing algorithm.
This parameter has the priority over the max_index_query_time_ms constraint.
current_memory_available: str
Memory available on the machine creating the index, having more memory is a boost
        because it reduces the swapping between RAM and disk.
use_gpu: bool
Experimental, gpu training is faster, not tested so far
metric_type: str
Similarity function used for query:
- "ip" for inner product
- "l2" for euclidean distance
nb_cores: Optional[int]
Number of cores to use. Will try to guess the right number if not provided
make_direct_map: bool
Create a direct map allowing reconstruction of embeddings. This is only needed for IVF indices.
Note that might increase the RAM usage (approximately 8GB for 1 billion embeddings)
should_be_memory_mappable: bool
If set to true, the created index will be selected only among the indices that can be memory-mapped on disk.
This makes it possible to use 50GB indices on a machine with only 1GB of RAM. Default to False
temp_root_dir: str
Temporary directory that will be used to store intermediate results/computation
verbose: int
set verbosity of outputs via logging level, default is `logging.INFO`
nb_splits_per_big_index: int
Number of indices to split a big index into.
This allows you building indices bigger than `current_memory_available`.
big_index_threshold: int
Threshold used to define big indexes.
Indexes with more `than big_index_threshold` embeddings are considered big indexes.
maximum_nb_threads: int
Maximum number of threads to parallelize index creation
def build_partitioned_indexes(
    partitions: List[str],
    output_root_dir: str,
    embedding_column_name: str = "embedding",
    index_key: Optional[str] = None,
    index_path: Optional[str] = None,
    id_columns: Optional[List[str]] = None,
    max_index_query_time_ms: float = 10.0,
    max_index_memory_usage: str = "16G",
    min_nearest_neighbors_to_retrieve: int = 20,
    current_memory_available: str = "32G",
    use_gpu: bool = False,
    metric_type: str = "ip",
    nb_cores: Optional[int] = None,
    make_direct_map: bool = False,
    should_be_memory_mappable: bool = False,
    temp_root_dir: str = "hdfs://root/tmp/distributed_autofaiss_indices",
    verbose: int = logging.INFO,
    nb_splits_per_big_index: int = 1,
    big_index_threshold: int = 5_000_000,
    maximum_nb_threads: int = 256,
) -> List[Optional[Dict[str, str]]]:
    """
    Create partitioned indexes from a partitioned parquet dataset,
    i.e. create one index per parquet partition

    Only supported with PySpark. A PySpark session must be active before calling this function

    Parameters
    ----------
    partitions : str
        List of partitions containing embeddings
    output_root_dir: str
        Output root directory where indexes, metrics and ids will be written
    embedding_column_name: str
        Parquet dataset column name containing embeddings
    index_key: Optional(str)
        Optional string to give to the index factory in order to create the index.
        If None, an index is chosen based on an heuristic.
    index_path: Optional(str)
        Optional path to an index that will be used to add embeddings.
        This index must be pre-trained if it needs a training
    id_columns: Optional(List[str])
        Parquet dataset column name(s) that are used as IDs for embeddings.
        A mapping from these IDs to faiss indices will be written in separate files.
    max_index_query_time_ms: float
        Bound on the query time for KNN search, this bound is approximative
    max_index_memory_usage: str
        Maximum size allowed for the index, this bound is strict
    min_nearest_neighbors_to_retrieve: int
        Minimum number of nearest neighbors to retrieve when querying the index.
        Parameter used only during index hyperparameter finetuning step, it is
        not taken into account to select the indexing algorithm.
        This parameter has the priority over the max_index_query_time_ms constraint.
    current_memory_available: str
        Memory available on the machine creating the index, having more memory is a boost
        because it reduces the swapping between RAM and disk.
    use_gpu: bool
        Experimental, gpu training is faster, not tested so far
    metric_type: str
        Similarity function used for query:
            - "ip" for inner product
            - "l2" for euclidean distance
    nb_cores: Optional[int]
        Number of cores to use. Will try to guess the right number if not provided
    make_direct_map: bool
        Create a direct map allowing reconstruction of embeddings. This is only needed for IVF indices.
        Note that might increase the RAM usage (approximately 8GB for 1 billion embeddings)
    should_be_memory_mappable: bool
        If set to true, the created index will be selected only among the indices that can be memory-mapped on disk.
        This makes it possible to use 50GB indices on a machine with only 1GB of RAM. Default to False
    temp_root_dir: str
        Temporary directory that will be used to store intermediate results/computation
    verbose: int
        set verbosity of outputs via logging level, default is `logging.INFO`
    nb_splits_per_big_index: int
        Number of indices to split a big index into.
        This allows you building indices bigger than `current_memory_available`.
    big_index_threshold: int
        Threshold used to define big indexes.
        Indexes with more `than big_index_threshold` embeddings are considered big indexes.
    maximum_nb_threads: int
        Maximum number of threads to parallelize index creation

    Raises
    ------
    ValueError
        If any of the sanity checks on the arguments fails.
    """
    setup_logging(verbose)

    # Sanity checks on the arguments before doing any heavy work
    if not partitions:
        raise ValueError("partitions can't be empty")
    check_not_null_not_empty("output_root_dir", output_root_dir)
    check_not_null_not_empty("embedding_column_name", embedding_column_name)
    if nb_splits_per_big_index < 1:
        # Bug fix: the message previously referenced the unrelated
        # `nb_indices_to_keep` parameter name.
        raise ValueError(f"nb_splits_per_big_index must be > 0; Got {nb_splits_per_big_index}")
    if big_index_threshold < 1:
        raise ValueError(f"big_index_threshold must be > 0; Got {big_index_threshold}")
    if index_path is not None and not index_key:
        raise ValueError(
            "Please provide the index key of the input index; "
            f"Got index_key: {index_key} and index_path: {index_path}"
        )

    if index_key:
        # Validate the provided index key early by instantiating an empty
        # index with the dataset's embedding dimension.
        n_dimensions = EmbeddingReader(
            partitions[0], file_format="parquet", embedding_column=embedding_column_name
        ).dimension
        # Create an empty index to validate the index key
        create_empty_index(n_dimensions, index_key=index_key, metric_type=metric_type)

    # Create partitioned indexes
    return create_partitioned_indexes(
        partitions=partitions,
        output_root_dir=output_root_dir,
        embedding_column_name=embedding_column_name,
        index_key=index_key,
        index_path=index_path,
        id_columns=id_columns,
        should_be_memory_mappable=should_be_memory_mappable,
        max_index_query_time_ms=max_index_query_time_ms,
        max_index_memory_usage=max_index_memory_usage,
        min_nearest_neighbors_to_retrieve=min_nearest_neighbors_to_retrieve,
        current_memory_available=current_memory_available,
        use_gpu=use_gpu,
        metric_type=metric_type,
        nb_cores=nb_cores,
        make_direct_map=make_direct_map,
        temp_root_dir=temp_root_dir,
        nb_splits_per_big_index=nb_splits_per_big_index,
        big_index_threshold=big_index_threshold,
        maximum_nb_threads=maximum_nb_threads,
    )
| (partitions: List[str], output_root_dir: str, embedding_column_name: str = 'embedding', index_key: Optional[str] = None, index_path: Optional[str] = None, id_columns: Optional[List[str]] = None, max_index_query_time_ms: float = 10.0, max_index_memory_usage: str = '16G', min_nearest_neighbors_to_retrieve: int = 20, current_memory_available: str = '32G', use_gpu: bool = False, metric_type: str = 'ip', nb_cores: Optional[int] = None, make_direct_map: bool = False, should_be_memory_mappable: bool = False, temp_root_dir: str = 'hdfs://root/tmp/distributed_autofaiss_indices', verbose: int = 20, nb_splits_per_big_index: int = 1, big_index_threshold: int = 5000000, maximum_nb_threads: int = 256) -> List[Optional[Dict[str, str]]] |
725,851 | autofaiss.external.quantize | score_index |
Compute metrics on a given index, use cached ground truth for fast scoring the next times.
Parameters
----------
index_path : Union[str, faiss.Index]
Path to .index file. Or in memory index
embeddings: Union[str, np.ndarray]
Path containing all preprocessed vectors and cached files. Can also be an in memory array.
save_on_disk: bool
Whether to save on disk
output_index_info_path : str
Path to index infos .json
current_memory_available: str
Memory available on the current machine, having more memory is a boost
        because it reduces the swapping between RAM and disk.
verbose: int
set verbosity of outputs via logging level, default is `logging.INFO`
Returns
-------
metric_infos: Optional[Dict[str, Union[str, float, int]]]
Metric infos of the index.
def score_index(
    index_path: Union[str, faiss.Index],
    embeddings: Union[str, np.ndarray],
    save_on_disk: bool = True,
    output_index_info_path: str = "infos.json",
    current_memory_available: str = "32G",
    verbose: int = logging.INFO,
) -> Optional[Dict[str, Union[str, float, int]]]:
    """
    Compute metrics on a given index, use cached ground truth for fast scoring the next times.

    Parameters
    ----------
    index_path : Union[str, faiss.Index]
        Path to .index file. Or in memory index
    embeddings: Union[str, np.ndarray]
        Path containing all preprocessed vectors and cached files. Can also be an in memory array.
    save_on_disk: bool
        Whether to save on disk
    output_index_info_path : str
        Path to index infos .json
    current_memory_available: str
        Memory available on the current machine, having more memory is a boost
        because it reduces the swapping between RAM and disk.
    verbose: int
        set verbosity of outputs via logging level, default is `logging.INFO`

    Returns
    -------
    metric_infos: Optional[Dict[str, Union[str, float, int]]]
        Metric infos of the index, or None when there is not enough memory
        left to compute the medium metrics.
    """
    setup_logging(verbose)
    faiss.omp_set_num_threads(multiprocessing.cpu_count())

    if isinstance(index_path, str):
        index_path = make_path_absolute(index_path)
        with fsspec.open(index_path, "rb").open() as f:
            index = faiss.read_index(faiss.PyCallbackIOReader(f.read))
        fs, path_in_fs = fsspec.core.url_to_fs(index_path, use_listings_cache=False)
        index_memory = fs.size(path_in_fs)
    else:
        index = index_path
        # Serialize the in-memory index to a temporary file only to measure
        # its on-disk size (needed for the memory budget check below).
        with tempfile.NamedTemporaryFile("wb") as f:
            faiss.write_index(index, faiss.PyCallbackIOWriter(f.write))
            fs, path_in_fs = fsspec.core.url_to_fs(f.name, use_listings_cache=False)
            index_memory = fs.size(path_in_fs)

    if isinstance(embeddings, np.ndarray):
        # In-memory embeddings are dumped to a temporary .npy so the reader
        # can stream them like on-disk embeddings.
        tmp_dir_embeddings = tempfile.TemporaryDirectory()  # pylint: disable=consider-using-with
        np.save(os.path.join(tmp_dir_embeddings.name, "emb.npy"), embeddings)
        embeddings_path = tmp_dir_embeddings.name
    else:
        embeddings_path = embeddings

    embedding_reader = EmbeddingReader(embeddings_path, file_format="npy")

    infos: Dict[str, Union[str, float, int]] = {}

    with Timeit("Compute fast metrics"):
        infos.update(compute_fast_metrics(embedding_reader, index))

    logger.info("Intermediate recap:")
    _log_output_dict(infos)

    current_in_bytes = cast_memory_to_bytes(current_memory_available)
    memory_left = current_in_bytes - index_memory

    if memory_left < current_in_bytes * 0.1:
        # Bug fix: a space was missing between the formatted size and
        # "is needed" (implicit string concatenation), producing messages
        # like "16.5Gis needed".
        logger.info(
            f"Not enough memory, at least {cast_bytes_to_memory_string(index_memory * 1.1)} "
            "is needed, please increase current_memory_available"
        )
        return None

    with Timeit("Compute medium metrics"):
        infos.update(compute_medium_metrics(embedding_reader, index, memory_left))

    logger.info("Performances recap:")
    _log_output_dict(infos)

    if save_on_disk:
        with fsspec.open(output_index_info_path, "w").open() as f:
            json.dump(infos, f)

    return infos
| (index_path: Union[str, faiss.swigfaiss_avx512.Index], embeddings: Union[str, numpy.ndarray], save_on_disk: bool = True, output_index_info_path: str = 'infos.json', current_memory_available: str = '32G', verbose: int = 20) -> Optional[Dict[str, Union[str, float, int]]] |
725,852 | autofaiss.external.quantize | tune_index |
Set hyperparameters to the given index.
If an index_param is given, set this hyperparameters to the index,
    otherwise perform a greedy heuristic to make the best out of the max_index_query_time_ms constraint
Parameters
----------
index_path : Union[str, faiss.Index]
Path to .index file
Can also be an index
index_key: str
String to give to the index factory in order to create the index.
index_param: Optional(str)
Optional string with hyperparameters to set to the index.
If None, the hyper-parameters are chosen based on an heuristic.
output_index_path: str
Path to the newly created .index file
save_on_disk: bool
Whether to save the index on disk, default to True.
min_nearest_neighbors_to_retrieve: int
Minimum number of nearest neighbors to retrieve when querying the index.
max_index_query_time_ms: float
Query speed constraint for the index to create.
use_gpu: bool
Experimental, gpu training is faster, not tested so far.
verbose: int
set verbosity of outputs via logging level, default is `logging.INFO`
Returns
-------
index
The faiss index
def tune_index(
    index_path: Union[str, faiss.Index],
    index_key: str,
    index_param: Optional[str] = None,
    output_index_path: Optional[str] = "tuned_knn.index",
    save_on_disk: bool = True,
    min_nearest_neighbors_to_retrieve: int = 20,
    max_index_query_time_ms: float = 10.0,
    use_gpu: bool = False,
    verbose: int = logging.INFO,
) -> faiss.Index:
    """
    Set hyperparameters on the given index.

    If ``index_param`` is given, those hyperparameters are applied as-is;
    otherwise a greedy heuristic searches for hyperparameters that make the
    best out of the ``max_index_query_time_ms`` constraint.

    Parameters
    ----------
    index_path : Union[str, faiss.Index]
        Path to a .index file, or an already-loaded index.
    index_key: str
        String to give to the index factory in order to create the index.
    index_param: Optional(str)
        Optional string with hyperparameters to set to the index.
        If None, the hyper-parameters are chosen based on an heuristic.
    output_index_path: str
        Path to the newly created .index file
    save_on_disk: bool
        Whether to save the index on disk, default to True.
    min_nearest_neighbors_to_retrieve: int
        Minimum number of nearest neighbors to retrieve when querying the index.
    max_index_query_time_ms: float
        Query speed constraint for the index to create.
    use_gpu: bool
        Experimental, gpu training is faster, not tested so far.
    verbose: int
        set verbosity of outputs via logging level, default is `logging.INFO`

    Returns
    -------
    index
        The faiss index
    """
    setup_logging(verbose)

    # Load the index from storage when a path was given; otherwise use the
    # provided in-memory index directly.
    if isinstance(index_path, str):
        with fsspec.open(make_path_absolute(index_path), "rb").open() as index_file:
            index = faiss.read_index(faiss.PyCallbackIOReader(index_file.read))
    else:
        index = index_path

    # No explicit hyperparameters: search for the best ones under the
    # query-time constraint.
    if index_param is None:
        with Timeit("Compute best hyperparameters"):
            index_param = get_optimal_hyperparameters(
                index,
                index_key,
                max_speed_ms=max_index_query_time_ms,
                min_nearest_neighbors_to_retrieve=min_nearest_neighbors_to_retrieve,
            )

    with Timeit("Set search hyperparameters for the index"):
        set_search_hyperparameters(index, index_param, use_gpu)
    logger.info(f"The optimal hyperparameters are {index_param}.")

    if save_on_disk:
        with fsspec.open(output_index_path, "wb").open() as out_file:
            faiss.write_index(index, faiss.PyCallbackIOWriter(out_file.write))
        logger.info("The index with these parameters has been saved on disk.")

    return index
| (index_path: Union[str, faiss.swigfaiss_avx512.Index], index_key: str, index_param: Optional[str] = None, output_index_path: Optional[str] = 'tuned_knn.index', save_on_disk: bool = True, min_nearest_neighbors_to_retrieve: int = 20, max_index_query_time_ms: float = 10.0, use_gpu: bool = False, verbose: int = 20) -> faiss.swigfaiss_avx512.Index |
725,855 | osqp.interface | OSQP | null | class OSQP(object):
    def __init__(self):
        """Create a wrapper around a fresh ``_osqp.OSQP`` C solver object."""
        self._model = _osqp.OSQP()
    def version(self):
        """Return the version string reported by the underlying C solver."""
        return self._model.version()
    def setup(self, P=None, q=None, A=None, l=None, u=None, **settings):
        """
        Setup OSQP solver problem of the form

        minimize     1/2 x' * P * x + q' * x
        subject to   l <= A * x <= u

        solver settings can be specified as additional keyword arguments

        Parameters
        ----------
        P : sparse matrix, optional
            Quadratic part of the cost.
        q : ndarray, optional
            Linear part of the cost.
        A : sparse matrix, optional
            Constraint matrix.
        l, u : ndarray, optional
            Lower and upper constraint bounds.
        settings
            Solver settings forwarded to the underlying C solver.
        """
        # TODO(bart): this will be unnecessary when the derivative will be in C
        # Keep the raw problem data around so adjoint_derivative can use it.
        self._derivative_cache = {'P': P, 'q': q, 'A': A, 'l': l, 'u': u}

        unpacked_data, settings = utils.prepare_data(P, q, A, l, u, **settings)
        self._model.setup(*unpacked_data, **settings)
    def update(self, q=None, l=None, u=None,
               Px=None, Px_idx=np.array([]), Ax=None, Ax_idx=np.array([])):
        """
        Update OSQP problem arguments

        Parameters
        ----------
        q : ndarray, optional
            New linear cost vector (length n).
        l, u : ndarray, optional
            New lower/upper bound vectors (length m); values are clipped
            to -OSQP_INFTY / +OSQP_INFTY respectively.
        Px, Ax : ndarray, optional
            New values for the stored nonzeros of P (resp. A); with empty
            index arrays they replace the whole ``.data`` array of the
            cached matrix.
        Px_idx, Ax_idx : ndarray, optional
            Positions in ``P.data`` (resp. ``A.data``) that `Px`/`Ax`
            refer to; empty (the default) means "all nonzeros".
            NOTE: the ``np.array([])`` defaults are shared mutable objects;
            they are only read here, never mutated.

        Raises
        ------
        TypeError
            If `l` or `u` is not a numpy.ndarray.
        ValueError
            If lengths are inconsistent with the problem dimensions, or if
            no updatable data was provided at all.
        """
        # get problem dimensions
        (n, m) = self._model.dimensions()

        # check consistency of the input arguments
        if q is not None and len(q) != n:
            raise ValueError("q must have length n")
        if l is not None:
            if not isinstance(l, np.ndarray):
                raise TypeError("l must be numpy.ndarray, not %s" %
                                type(l).__name__)
            elif len(l) != m:
                raise ValueError("l must have length m")
            # Convert values to -OSQP_INFTY
            l = np.maximum(l, -_osqp.constant('OSQP_INFTY'))
        if u is not None:
            if not isinstance(u, np.ndarray):
                raise TypeError("u must be numpy.ndarray, not %s" %
                                type(u).__name__)
            elif len(u) != m:
                raise ValueError("u must have length m")
            # Convert values to OSQP_INFTY
            u = np.minimum(u, _osqp.constant('OSQP_INFTY'))
        if Ax is None:
            if len(Ax_idx) > 0:
                raise ValueError("Vector Ax has not been specified")
        else:
            if len(Ax_idx) > 0 and len(Ax) != len(Ax_idx):
                raise ValueError("Ax and Ax_idx must have the same lengths")
        if Px is None:
            if len(Px_idx) > 0:
                raise ValueError("Vector Px has not been specified")
        else:
            if len(Px_idx) > 0 and len(Px) != len(Px_idx):
                raise ValueError("Px and Px_idx must have the same lengths")
        if q is None and l is None and u is None and Px is None and Ax is None:
            raise ValueError("No updatable data has been specified")

        # update linear cost
        if q is not None:
            self._model.update_lin_cost(q)

        # update lower bound
        if l is not None and u is None:
            self._model.update_lower_bound(l)

        # update upper bound
        if u is not None and l is None:
            self._model.update_upper_bound(u)

        # update bounds
        if l is not None and u is not None:
            self._model.update_bounds(l, u)

        # update matrix P
        if Px is not None and Ax is None:
            self._model.update_P(Px, Px_idx, len(Px))

        # update matrix A
        if Ax is not None and Px is None:
            self._model.update_A(Ax, Ax_idx, len(Ax))

        # update matrices P and A
        if Px is not None and Ax is not None:
            self._model.update_P_A(Px, Px_idx, len(Px), Ax, Ax_idx, len(Ax))

        # TODO(bart): this will be unnecessary when the derivative will be in C
        # update problem data in self._derivative_cache
        if q is not None:
            self._derivative_cache["q"] = q
        if l is not None:
            self._derivative_cache["l"] = l
        if u is not None:
            self._derivative_cache["u"] = u
        if Px is not None:
            if Px_idx.size == 0:
                self._derivative_cache["P"].data = Px
            else:
                self._derivative_cache["P"].data[Px_idx] = Px
        if Ax is not None:
            if Ax_idx.size == 0:
                self._derivative_cache["A"].data = Ax
            else:
                self._derivative_cache["A"].data[Ax_idx] = Ax

        # delete results from self._derivative_cache to prohibit
        # taking the derivative of unsolved problems
        if "results" in self._derivative_cache.keys():
            del self._derivative_cache["results"]
def update_settings(self, **kwargs):
"""
Update OSQP solver settings
It is possible to change: 'max_iter', 'eps_abs', 'eps_rel',
'eps_prim_inf', 'eps_dual_inf', 'rho'
'alpha', 'delta', 'polish',
'polish_refine_iter',
'verbose', 'scaled_termination',
'check_termination', 'time_limit',
"""
# get arguments
max_iter = kwargs.pop('max_iter', None)
eps_abs = kwargs.pop('eps_abs', None)
eps_rel = kwargs.pop('eps_rel', None)
eps_prim_inf = kwargs.pop('eps_prim_inf', None)
eps_dual_inf = kwargs.pop('eps_dual_inf', None)
rho = kwargs.pop('rho', None)
alpha = kwargs.pop('alpha', None)
delta = kwargs.pop('delta', None)
polish = kwargs.pop('polish', None)
polish_refine_iter = kwargs.pop('polish_refine_iter', None)
verbose = kwargs.pop('verbose', None)
scaled_termination = kwargs.pop('scaled_termination', None)
check_termination = kwargs.pop('check_termination', None)
warm_start = kwargs.pop('warm_start', None)
time_limit = kwargs.pop('time_limit', None)
# update them
if max_iter is not None:
self._model.update_max_iter(max_iter)
if eps_abs is not None:
self._model.update_eps_abs(eps_abs)
if eps_rel is not None:
self._model.update_eps_rel(eps_rel)
if eps_prim_inf is not None:
self._model.update_eps_prim_inf(eps_prim_inf)
if eps_dual_inf is not None:
self._model.update_eps_dual_inf(eps_dual_inf)
if rho is not None:
self._model.update_rho(rho)
if alpha is not None:
self._model.update_alpha(alpha)
if delta is not None:
self._model.update_delta(delta)
if polish is not None:
self._model.update_polish(polish)
if polish_refine_iter is not None:
self._model.update_polish_refine_iter(polish_refine_iter)
if verbose is not None:
self._model.update_verbose(verbose)
if scaled_termination is not None:
self._model.update_scaled_termination(scaled_termination)
if check_termination is not None:
self._model.update_check_termination(check_termination)
if warm_start is not None:
self._model.update_warm_start(warm_start)
if time_limit is not None:
self._model.update_time_limit(time_limit)
if max_iter is None and \
eps_abs is None and \
eps_rel is None and \
eps_prim_inf is None and \
eps_dual_inf is None and \
rho is None and \
alpha is None and \
delta is None and \
polish is None and \
polish_refine_iter is None and \
verbose is None and \
scaled_termination is None and \
check_termination is None and \
warm_start is None:
raise ValueError("No updatable settings has been specified!")
    def solve(self):
        """
        Solve QP Problem

        Returns
        -------
        The results object produced by the underlying C solver; the cached
        copy is later read by ``adjoint_derivative`` (which uses its ``x``,
        ``y`` and ``info.status`` fields).
        """
        # Solve QP
        results = self._model.solve()

        # TODO(bart): this will be unnecessary when the derivative will be in C
        # Cache the results so the solution can be differentiated later.
        self._derivative_cache['results'] = results
        return results
def warm_start(self, x=None, y=None):
"""
Warm start primal or dual variables
"""
# get problem dimensions
(n, m) = self._model.dimensions()
if x is not None:
if len(x) != n:
raise ValueError("Wrong dimension for variable x")
if y is None:
self._model.warm_start_x(x)
if y is not None:
if len(y) != m:
raise ValueError("Wrong dimension for variable y")
if x is None:
self._model.warm_start_y(y)
if x is not None and y is not None:
self._model.warm_start(x, y)
if x is None and y is None:
raise ValueError("Unrecognized fields")
    def codegen(self, folder, project_type='', parameters='vectors',
                python_ext_name='emosqp', force_rewrite=False, compile_python_ext=True,
                FLOAT=False, LONG=True):
        """
        Generate embeddable C code for the problem

        Parameters
        ----------
        folder : str
            Output folder for the generated code.
        project_type : str
            One of '', 'Makefile', 'MinGW Makefiles', 'Unix Makefiles',
            'CodeBlocks', 'Xcode'; 'Makefile' is translated to the
            platform-appropriate generator below.
        parameters : str
            'vectors' (embedded mode 1) or 'matrices' (embedded mode 2).
        python_ext_name : str
            Name of the generated Python extension module.
        force_rewrite : bool
            Passed to the code generator to overwrite existing output.
        compile_python_ext : bool
            Whether to compile the generated Python extension.
        FLOAT, LONG : bool
            Emit single-precision floats / long integers in the generated
            code (mapped to the 'ON'/'OFF' CMake-style flags below).

        Raises
        ------
        ValueError
            On an unknown 'parameters' or 'project_type' value.
        """
        # Check parameters arguments
        if parameters == 'vectors':
            embedded = 1
        elif parameters == 'matrices':
            embedded = 2
        else:
            raise ValueError("Unknown value of 'parameters' argument.")

        # Set float and long flags
        if FLOAT:
            float_flag = 'ON'
        else:
            float_flag = 'OFF'
        if LONG:
            long_flag = 'ON'
        else:
            long_flag = 'OFF'

        # Check project_type argument
        expectedProject = ('', 'Makefile', 'MinGW Makefiles',
                           'Unix Makefiles', 'CodeBlocks', 'Xcode')
        if project_type not in expectedProject:
            raise ValueError("Unknown value of 'project_type' argument.")

        # Resolve the generic 'Makefile' request per platform
        if project_type == 'Makefile':
            if system() == 'Windows':
                project_type = 'MinGW Makefiles'
            elif system() == 'Linux' or system() == 'Darwin':
                project_type = 'Unix Makefiles'

        # Convert workspace to Python
        sys.stdout.write("Getting workspace from OSQP object... \t\t\t\t")
        sys.stdout.flush()
        work = self._model._get_workspace()
        print("[done]")

        # Generate code with codegen module
        cg.codegen(work, folder, python_ext_name, project_type, compile_python_ext,
                   embedded, force_rewrite, float_flag, long_flag)
def derivative_iterative_refinement(self, rhs, max_iter=20, tol=1e-12):
M = self._derivative_cache['M']
# Prefactor
solver = self._derivative_cache['solver']
sol = solver.solve(rhs)
for k in range(max_iter):
delta_sol = solver.solve(rhs - M @ sol)
sol = sol + delta_sol
if np.linalg.norm(M @ sol - rhs) < tol:
break
if k == max_iter - 1:
warn("max_iter iterative refinement reached.")
return sol
def adjoint_derivative(self, dx=None, dy_u=None, dy_l=None,
                       P_idx=None, A_idx=None, eps_iter_ref=1e-04):
    """
    Compute adjoint derivative after solve.

    Propagates incoming gradients with respect to the primal solution
    (``dx``) and the split dual variables (``dy_u``, ``dy_l``) back to
    the problem data, returning the tuple ``(dP, dq, dA, dl, du)``.

    Parameters
    ----------
    dx : numpy.ndarray or None
        Gradient w.r.t. the primal solution x; defaults to zeros(n).
    dy_u, dy_l : numpy.ndarray or None
        Gradients w.r.t. the upper/lower dual parts; default zeros(m).
    P_idx, A_idx : tuple of arrays or None
        (rows, cols) of the entries of P and A for which derivatives
        are requested; default to all structural nonzeros.
    eps_iter_ref : float
        Regularization added to the adjoint system before
        factorization; its effect is removed by iterative refinement.

    Raises
    ------
    ValueError
        If the problem has not been solved, or was not solved to
        optimality.
    """
    P, q = self._derivative_cache['P'], self._derivative_cache['q']
    A = self._derivative_cache['A']
    l, u = self._derivative_cache['l'], self._derivative_cache['u']

    try:
        results = self._derivative_cache['results']
    except KeyError:
        raise ValueError("Problem has not been solved. "
                         "You cannot take derivatives. "
                         "Please call the solve function.")

    if results.info.status != "solved":
        raise ValueError("Problem has not been solved to optimality. "
                         "You cannot take derivatives")

    m, n = A.shape
    x = results.x
    y = results.y
    # Split the dual variable into its upper- and lower-bound parts.
    y_u = np.maximum(y, 0)
    y_l = -np.minimum(y, 0)

    if A_idx is None:
        A_idx = A.nonzero()
    if P_idx is None:
        P_idx = P.nonzero()

    # Default all incoming gradients to zero. Fix: dx previously had no
    # zero default (unlike dy_u / dy_l), so calling without dx crashed
    # inside np.concatenate below.
    if dx is None:
        dx = np.zeros(n)
    if dy_u is None:
        dy_u = np.zeros(m)
    if dy_l is None:
        dy_l = np.zeros(m)

    # Build and factorize the (regularized) adjoint system matrix once;
    # it is cached for subsequent calls on the same solve.
    if 'M' not in self._derivative_cache:
        # Multiply second-third row by diag(y_u)^-1 and diag(y_l)^-1
        # to make the matrix symmetric
        inv_dia_y_u = spa.diags(np.reciprocal(y_u + 1e-20))
        inv_dia_y_l = spa.diags(np.reciprocal(y_l + 1e-20))
        M = spa.bmat([
            [P, A.T, -A.T],
            [A, spa.diags(A @ x - u) @ inv_dia_y_u, None],
            [-A, None, spa.diags(l - A @ x) @ inv_dia_y_l]
        ], format='csc')
        delta = spa.bmat([[eps_iter_ref * spa.eye(n), None],
                          [None, -eps_iter_ref * spa.eye(2 * m)]],
                         format='csc')
        self._derivative_cache['M'] = M
        self._derivative_cache['solver'] = qdldl.Solver(M + delta)

    rhs = - np.concatenate([dx, dy_u, dy_l])

    r_sol = self.derivative_iterative_refinement(rhs)
    r_x, r_yu, r_yl = np.split(r_sol, [n, n + m])

    # Extract derivatives for the constraints
    rows, cols = A_idx
    dA_vals = (y_u[rows] - y_l[rows]) * r_x[cols] + \
        (r_yu[rows] - r_yl[rows]) * x[cols]
    dA = spa.csc_matrix((dA_vals, (rows, cols)), shape=A.shape)
    du = - r_yu
    dl = r_yl

    # Extract derivatives for the cost (P, q); the P contribution is
    # symmetrized across (rows, cols).
    rows, cols = P_idx
    dP_vals = .5 * (r_x[rows] * x[cols] + r_x[cols] * x[rows])
    dP = spa.csc_matrix((dP_vals, P_idx), shape=P.shape)
    dq = r_x

    return (dP, dq, dA, dl, du)
| () |
725,856 | osqp.interface | __init__ | null | def __init__(self):
self._model = _osqp.OSQP()
| (self) |
725,857 | osqp.interface | adjoint_derivative |
Compute adjoint derivative after solve.
| def adjoint_derivative(self, dx=None, dy_u=None, dy_l=None,
P_idx=None, A_idx=None, eps_iter_ref=1e-04):
"""
Compute adjoint derivative after solve.
"""
P, q = self._derivative_cache['P'], self._derivative_cache['q']
A = self._derivative_cache['A']
l, u = self._derivative_cache['l'], self._derivative_cache['u']
try:
results = self._derivative_cache['results']
except KeyError:
raise ValueError("Problem has not been solved. "
"You cannot take derivatives. "
"Please call the solve function.")
if results.info.status != "solved":
raise ValueError("Problem has not been solved to optimality. "
"You cannot take derivatives")
m, n = A.shape
x = results.x
y = results.y
y_u = np.maximum(y, 0)
y_l = -np.minimum(y, 0)
if A_idx is None:
A_idx = A.nonzero()
if P_idx is None:
P_idx = P.nonzero()
if dy_u is None:
dy_u = np.zeros(m)
if dy_l is None:
dy_l = np.zeros(m)
# Make sure M matrix exists
if 'M' not in self._derivative_cache:
# Multiply second-third row by diag(y_u)^-1 and diag(y_l)^-1
# to make the matrix symmetric
inv_dia_y_u = spa.diags(np.reciprocal(y_u + 1e-20))
inv_dia_y_l = spa.diags(np.reciprocal(y_l + 1e-20))
M = spa.bmat([
[P, A.T, -A.T],
[A, spa.diags(A @ x - u) @ inv_dia_y_u, None],
[-A, None, spa.diags(l - A @ x) @ inv_dia_y_l]
], format='csc')
delta = spa.bmat([[eps_iter_ref * spa.eye(n), None],
[None, -eps_iter_ref * spa.eye(2 * m)]],
format='csc')
self._derivative_cache['M'] = M
self._derivative_cache['solver'] = qdldl.Solver(M + delta)
rhs = - np.concatenate([dx, dy_u, dy_l])
r_sol = self.derivative_iterative_refinement(rhs)
r_x, r_yu, r_yl = np.split(r_sol, [n, n+m])
# Extract derivatives for the constraints
rows, cols = A_idx
dA_vals = (y_u[rows] - y_l[rows]) * r_x[cols] + \
(r_yu[rows] - r_yl[rows]) * x[cols]
dA = spa.csc_matrix((dA_vals, (rows, cols)), shape=A.shape)
du = - r_yu
dl = r_yl
# Extract derivatives for the cost (P, q)
rows, cols = P_idx
dP_vals = .5 * (r_x[rows] * x[cols] + r_x[cols] * x[rows])
dP = spa.csc_matrix((dP_vals, P_idx), shape=P.shape)
dq = r_x
return (dP, dq, dA, dl, du)
| (self, dx=None, dy_u=None, dy_l=None, P_idx=None, A_idx=None, eps_iter_ref=0.0001) |
725,858 | osqp.interface | codegen |
Generate embeddable C code for the problem
| def codegen(self, folder, project_type='', parameters='vectors',
python_ext_name='emosqp', force_rewrite=False, compile_python_ext=True,
FLOAT=False, LONG=True):
"""
Generate embeddable C code for the problem
"""
# Check parameters arguments
if parameters == 'vectors':
embedded = 1
elif parameters == 'matrices':
embedded = 2
else:
raise ValueError("Unknown value of 'parameters' argument.")
# Set float and long flags
if FLOAT:
float_flag = 'ON'
else:
float_flag = 'OFF'
if LONG:
long_flag = 'ON'
else:
long_flag = 'OFF'
# Check project_type argument
expectedProject = ('', 'Makefile', 'MinGW Makefiles',
'Unix Makefiles', 'CodeBlocks', 'Xcode')
if project_type not in expectedProject:
raise ValueError("Unknown value of 'project_type' argument.")
if project_type == 'Makefile':
if system() == 'Windows':
project_type = 'MinGW Makefiles'
elif system() == 'Linux' or system() == 'Darwin':
project_type = 'Unix Makefiles'
# Convert workspace to Python
sys.stdout.write("Getting workspace from OSQP object... \t\t\t\t")
sys.stdout.flush()
work = self._model._get_workspace()
print("[done]")
# Generate code with codegen module
cg.codegen(work, folder, python_ext_name, project_type, compile_python_ext,
embedded, force_rewrite, float_flag, long_flag)
| (self, folder, project_type='', parameters='vectors', python_ext_name='emosqp', force_rewrite=False, compile_python_ext=True, FLOAT=False, LONG=True) |
725,859 | osqp.interface | derivative_iterative_refinement | null | def derivative_iterative_refinement(self, rhs, max_iter=20, tol=1e-12):
M = self._derivative_cache['M']
# Prefactor
solver = self._derivative_cache['solver']
sol = solver.solve(rhs)
for k in range(max_iter):
delta_sol = solver.solve(rhs - M @ sol)
sol = sol + delta_sol
if np.linalg.norm(M @ sol - rhs) < tol:
break
if k == max_iter - 1:
warn("max_iter iterative refinement reached.")
return sol
| (self, rhs, max_iter=20, tol=1e-12) |
725,860 | osqp.interface | setup |
Setup OSQP solver problem of the form
minimize 1/2 x' * P * x + q' * x
subject to l <= A * x <= u
solver settings can be specified as additional keyword arguments
| def setup(self, P=None, q=None, A=None, l=None, u=None, **settings):
"""
Setup OSQP solver problem of the form
minimize 1/2 x' * P * x + q' * x
subject to l <= A * x <= u
solver settings can be specified as additional keyword arguments
"""
# TODO(bart): this will be unnecessary when the derivative will be in C
self._derivative_cache = {'P': P, 'q': q, 'A': A, 'l': l, 'u': u}
unpacked_data, settings = utils.prepare_data(P, q, A, l, u, **settings)
self._model.setup(*unpacked_data, **settings)
| (self, P=None, q=None, A=None, l=None, u=None, **settings) |
725,861 | osqp.interface | solve |
Solve QP Problem
| def solve(self):
"""
Solve QP Problem
"""
# Solve QP
results = self._model.solve()
# TODO(bart): this will be unnecessary when the derivative will be in C
self._derivative_cache['results'] = results
return results
| (self) |
725,862 | osqp.interface | update |
Update OSQP problem arguments
| def update(self, q=None, l=None, u=None,
Px=None, Px_idx=np.array([]), Ax=None, Ax_idx=np.array([])):
"""
Update OSQP problem arguments
"""
# get problem dimensions
(n, m) = self._model.dimensions()
# check consistency of the input arguments
if q is not None and len(q) != n:
raise ValueError("q must have length n")
if l is not None:
if not isinstance(l, np.ndarray):
raise TypeError("l must be numpy.ndarray, not %s" %
type(l).__name__)
elif len(l) != m:
raise ValueError("l must have length m")
# Convert values to -OSQP_INFTY
l = np.maximum(l, -_osqp.constant('OSQP_INFTY'))
if u is not None:
if not isinstance(u, np.ndarray):
raise TypeError("u must be numpy.ndarray, not %s" %
type(u).__name__)
elif len(u) != m:
raise ValueError("u must have length m")
# Convert values to OSQP_INFTY
u = np.minimum(u, _osqp.constant('OSQP_INFTY'))
if Ax is None:
if len(Ax_idx) > 0:
raise ValueError("Vector Ax has not been specified")
else:
if len(Ax_idx) > 0 and len(Ax) != len(Ax_idx):
raise ValueError("Ax and Ax_idx must have the same lengths")
if Px is None:
if len(Px_idx) > 0:
raise ValueError("Vector Px has not been specified")
else:
if len(Px_idx) > 0 and len(Px) != len(Px_idx):
raise ValueError("Px and Px_idx must have the same lengths")
if q is None and l is None and u is None and Px is None and Ax is None:
raise ValueError("No updatable data has been specified")
# update linear cost
if q is not None:
self._model.update_lin_cost(q)
# update lower bound
if l is not None and u is None:
self._model.update_lower_bound(l)
# update upper bound
if u is not None and l is None:
self._model.update_upper_bound(u)
# update bounds
if l is not None and u is not None:
self._model.update_bounds(l, u)
# update matrix P
if Px is not None and Ax is None:
self._model.update_P(Px, Px_idx, len(Px))
# update matrix A
if Ax is not None and Px is None:
self._model.update_A(Ax, Ax_idx, len(Ax))
# update matrices P and A
if Px is not None and Ax is not None:
self._model.update_P_A(Px, Px_idx, len(Px), Ax, Ax_idx, len(Ax))
# TODO(bart): this will be unnecessary when the derivative will be in C
# update problem data in self._derivative_cache
if q is not None:
self._derivative_cache["q"] = q
if l is not None:
self._derivative_cache["l"] = l
if u is not None:
self._derivative_cache["u"] = u
if Px is not None:
if Px_idx.size == 0:
self._derivative_cache["P"].data = Px
else:
self._derivative_cache["P"].data[Px_idx] = Px
if Ax is not None:
if Ax_idx.size == 0:
self._derivative_cache["A"].data = Ax
else:
self._derivative_cache["A"].data[Ax_idx] = Ax
# delete results from self._derivative_cache to prohibit
# taking the derivative of unsolved problems
if "results" in self._derivative_cache.keys():
del self._derivative_cache["results"]
| (self, q=None, l=None, u=None, Px=None, Px_idx=array([], dtype=float64), Ax=None, Ax_idx=array([], dtype=float64)) |
725,863 | osqp.interface | update_settings |
Update OSQP solver settings
It is possible to change: 'max_iter', 'eps_abs', 'eps_rel',
'eps_prim_inf', 'eps_dual_inf', 'rho'
'alpha', 'delta', 'polish',
'polish_refine_iter',
'verbose', 'scaled_termination',
'check_termination', 'time_limit',
| def update_settings(self, **kwargs):
"""
Update OSQP solver settings
It is possible to change: 'max_iter', 'eps_abs', 'eps_rel',
'eps_prim_inf', 'eps_dual_inf', 'rho'
'alpha', 'delta', 'polish',
'polish_refine_iter',
'verbose', 'scaled_termination',
'check_termination', 'time_limit',
"""
# get arguments
max_iter = kwargs.pop('max_iter', None)
eps_abs = kwargs.pop('eps_abs', None)
eps_rel = kwargs.pop('eps_rel', None)
eps_prim_inf = kwargs.pop('eps_prim_inf', None)
eps_dual_inf = kwargs.pop('eps_dual_inf', None)
rho = kwargs.pop('rho', None)
alpha = kwargs.pop('alpha', None)
delta = kwargs.pop('delta', None)
polish = kwargs.pop('polish', None)
polish_refine_iter = kwargs.pop('polish_refine_iter', None)
verbose = kwargs.pop('verbose', None)
scaled_termination = kwargs.pop('scaled_termination', None)
check_termination = kwargs.pop('check_termination', None)
warm_start = kwargs.pop('warm_start', None)
time_limit = kwargs.pop('time_limit', None)
# update them
if max_iter is not None:
self._model.update_max_iter(max_iter)
if eps_abs is not None:
self._model.update_eps_abs(eps_abs)
if eps_rel is not None:
self._model.update_eps_rel(eps_rel)
if eps_prim_inf is not None:
self._model.update_eps_prim_inf(eps_prim_inf)
if eps_dual_inf is not None:
self._model.update_eps_dual_inf(eps_dual_inf)
if rho is not None:
self._model.update_rho(rho)
if alpha is not None:
self._model.update_alpha(alpha)
if delta is not None:
self._model.update_delta(delta)
if polish is not None:
self._model.update_polish(polish)
if polish_refine_iter is not None:
self._model.update_polish_refine_iter(polish_refine_iter)
if verbose is not None:
self._model.update_verbose(verbose)
if scaled_termination is not None:
self._model.update_scaled_termination(scaled_termination)
if check_termination is not None:
self._model.update_check_termination(check_termination)
if warm_start is not None:
self._model.update_warm_start(warm_start)
if time_limit is not None:
self._model.update_time_limit(time_limit)
if max_iter is None and \
eps_abs is None and \
eps_rel is None and \
eps_prim_inf is None and \
eps_dual_inf is None and \
rho is None and \
alpha is None and \
delta is None and \
polish is None and \
polish_refine_iter is None and \
verbose is None and \
scaled_termination is None and \
check_termination is None and \
warm_start is None:
raise ValueError("No updatable settings has been specified!")
| (self, **kwargs) |
725,864 | osqp.interface | version | null | def version(self):
return self._model.version()
| (self) |
725,865 | osqp.interface | warm_start |
Warm start primal or dual variables
| def warm_start(self, x=None, y=None):
"""
Warm start primal or dual variables
"""
# get problem dimensions
(n, m) = self._model.dimensions()
if x is not None:
if len(x) != n:
raise ValueError("Wrong dimension for variable x")
if y is None:
self._model.warm_start_x(x)
if y is not None:
if len(y) != m:
raise ValueError("Wrong dimension for variable y")
if x is None:
self._model.warm_start_y(y)
if x is not None and y is not None:
self._model.warm_start(x, y)
if x is None and y is None:
raise ValueError("Unrecognized fields")
| (self, x=None, y=None) |
725,871 | qsm_forward.qsm_forward | ReconParams |
A class used to represent reconstruction parameters.
Attributes
----------
subject : str
The ID of the subject.
session : str
The ID of the session.
acq : str
The acquisition name.
run : int
The run number.
TR : float
Repetition time (in seconds).
TEs : np.array
Echo times (in seconds).
flip_angle : int
Flip angle (in degrees).
B0 : int
Magnetic field strength (in Tesla).
B0_dir : np.array
B0 field direction.
phase_offset : int
Phase offset (in radians).
generate_phase_offset : bool
Boolean to control phase offset generation.
generate_shim_field : bool
Boolean to control shim field generation.
voxel_size : np.array
Voxel size (in mm).
peak_snr : float
Peak signal-to-noise ratio.
random_seed : int
Random seed to use for noise.
suffix : string
The BIDS-compliant suffix that defines the weighting of the images (e.g. T1w, T2starw, PD).
save_phase : bool
Boolean to control whether phase images are saved.
| class ReconParams:
"""
A class used to represent reconstruction parameters.
Attributes
----------
subject : str
The ID of the subject.
session : str
The ID of the session.
acq : str
The acquisition name.
run : int
The run number.
TR : float
Repetition time (in seconds).
TEs : np.array
Echo times (in seconds).
flip_angle : int
Flip angle (in degrees).
B0 : int
Magnetic field strength (in Tesla).
B0_dir : np.array
B0 field direction.
phase_offset : int
Phase offset (in radians).
generate_phase_offset : bool
Boolean to control phase offset generation.
generate_shim_field : bool
Boolean to control shim field generation.
voxel_size : np.array
Voxel size (in mm).
peak_snr : float
Peak signal-to-noise ratio.
random_seed : int
Random seed to use for noise.
suffix : string
The BIDS-compliant suffix that defines the weighting of the images (e.g. T1w, T2starw, PD).
save_phase : bool
Boolean to control whether phase images are saved.
"""
def __init__(
self,
subject="1",
session=None,
acq=None,
run=None,
TR=50e-3,
TEs=np.array([ 4e-3, 12e-3, 20e-3, 28e-3 ]),
flip_angle=15,
B0=7,
B0_dir=np.array([0, 0, 1]),
phase_offset=0,
generate_phase_offset=True,
generate_shim_field=True,
voxel_size=np.array([1.0, 1.0, 1.0]),
peak_snr=np.inf,
random_seed=None,
suffix="T2starw",
save_phase=True
):
self.subject = subject
self.session = session
self.acq = acq
self.run = run
self.TR = TR
self.TEs = TEs
self.flip_angle = flip_angle
self.B0 = B0
self.B0_dir = B0_dir
self.phase_offset = phase_offset
self.generate_phase_offset = generate_phase_offset
self.generate_shim_field = generate_shim_field
self.voxel_size = voxel_size
self.peak_snr = peak_snr
self.random_seed = random_seed
self.suffix = suffix
self.save_phase = save_phase
| (subject='1', session=None, acq=None, run=None, TR=0.05, TEs=array([0.004, 0.012, 0.02 , 0.028]), flip_angle=15, B0=7, B0_dir=array([0, 0, 1]), phase_offset=0, generate_phase_offset=True, generate_shim_field=True, voxel_size=array([1., 1., 1.]), peak_snr=inf, random_seed=None, suffix='T2starw', save_phase=True) |
725,872 | qsm_forward.qsm_forward | __init__ | null | def __init__(
self,
subject="1",
session=None,
acq=None,
run=None,
TR=50e-3,
TEs=np.array([ 4e-3, 12e-3, 20e-3, 28e-3 ]),
flip_angle=15,
B0=7,
B0_dir=np.array([0, 0, 1]),
phase_offset=0,
generate_phase_offset=True,
generate_shim_field=True,
voxel_size=np.array([1.0, 1.0, 1.0]),
peak_snr=np.inf,
random_seed=None,
suffix="T2starw",
save_phase=True
):
self.subject = subject
self.session = session
self.acq = acq
self.run = run
self.TR = TR
self.TEs = TEs
self.flip_angle = flip_angle
self.B0 = B0
self.B0_dir = B0_dir
self.phase_offset = phase_offset
self.generate_phase_offset = generate_phase_offset
self.generate_shim_field = generate_shim_field
self.voxel_size = voxel_size
self.peak_snr = peak_snr
self.random_seed = random_seed
self.suffix = suffix
self.save_phase = save_phase
| (self, subject='1', session=None, acq=None, run=None, TR=0.05, TEs=array([0.004, 0.012, 0.02 , 0.028]), flip_angle=15, B0=7, B0_dir=array([0, 0, 1]), phase_offset=0, generate_phase_offset=True, generate_shim_field=True, voxel_size=array([1., 1., 1.]), peak_snr=inf, random_seed=None, suffix='T2starw', save_phase=True) |
725,873 | qsm_forward.qsm_forward | TissueParams |
A class used to represent tissue parameters.
Attributes
----------
root_dir : str or None
The path to the root directory containing the tissue parameter files.
chi_path : str or ndarray
The path to the Chi file or a 3D numpy array containing Chi values.
M0_path : str or ndarray
The path to the M0 file or a 3D numpy array containing M0 values.
R1_path : str or ndarray
The path to the R1 file or a 3D numpy array containing R1 values.
R2star_path : str or ndarray
The path to the R2* file or a 3D numpy array containing R2* values.
mask_path : str or ndarray
The path to the brain mask file or a 3D numpy array containing brain mask values.
seg_path : str or ndarray
The path to the segmentation file or a 3D numpy array containing segmentation values.
| class TissueParams:
"""
A class used to represent tissue parameters.
Attributes
----------
root_dir : str or None
The path to the root directory containing the tissue parameter files.
chi_path : str or ndarray
The path to the Chi file or a 3D numpy array containing Chi values.
M0_path : str or ndarray
The path to the M0 file or a 3D numpy array containing M0 values.
R1_path : str or ndarray
The path to the R1 file or a 3D numpy array containing R1 values.
R2star_path : str or ndarray
The path to the R2* file or a 3D numpy array containing R2* values.
mask_path : str or ndarray
The path to the brain mask file or a 3D numpy array containing brain mask values.
seg_path : str or ndarray
The path to the segmentation file or a 3D numpy array containing segmentation values.
"""
def __init__(
self,
root_dir = "",
chi = "chimodel/ChiModelMIX.nii",
M0 = "maps/M0.nii.gz",
R1 = "maps/R1.nii.gz",
R2star = "maps/R2star.nii.gz",
mask = "masks/BrainMask.nii.gz",
seg = "masks/SegmentedModel.nii.gz",
apply_mask = False
):
if isinstance(chi, str) and not os.path.exists(os.path.join(root_dir, chi)):
raise ValueError(f"Path to chi is invalid! ({os.path.join(root_dir, chi)})")
self._chi = os.path.join(root_dir, chi) if isinstance(chi, str) and os.path.exists(os.path.join(root_dir, chi)) else chi if not isinstance(chi, str) else None
self._M0 = os.path.join(root_dir, M0) if isinstance(M0, str) and os.path.exists(os.path.join(root_dir, M0)) else M0 if not isinstance(M0, str) else None
self._R1 = os.path.join(root_dir, R1) if isinstance(R1, str) and os.path.exists(os.path.join(root_dir, R1)) else R1 if not isinstance(R1, str) else None
self._R2star = os.path.join(root_dir, R2star) if isinstance(R2star, str) and os.path.exists(os.path.join(root_dir, R2star)) else R2star if not isinstance(R2star, str) else None
self._mask = os.path.join(root_dir, mask) if isinstance(mask, str) and os.path.exists(os.path.join(root_dir, mask)) else mask if not isinstance(mask, str) else None
self._seg = os.path.join(root_dir, seg) if isinstance(seg, str) and os.path.exists(os.path.join(root_dir, seg)) else seg if not isinstance(seg, str) else None
self._apply_mask = apply_mask
self._affine = None
def set_affine(self, affine):
self._affine = affine
def _load(self, nii_path):
nii = nib.load(nii_path)
if self._affine is not None:
nii = nib.Nifti1Image(dataobj=nii.get_fdata(), affine=self._affine, header=nii.header)
return nii
@property
def voxel_size(self):
zooms = self.nii_header.get_zooms()
return zooms if len(zooms) == 3 else np.array([zooms[0] for i in range(3)])
@property
def nii_header(self):
if isinstance(self._chi, str):
return self._load(self._chi).header
header = nib.Nifti1Header()
header.set_data_shape(self._chi.shape)
return header
@property
def nii_affine(self):
if self._affine is not None:
return self._affine
if isinstance(self._chi, str):
return self._load(self._chi).affine
return np.eye(4)
def _do_apply_mask(self, nii): return nib.Nifti1Image(dataobj=nii.get_fdata() * self.mask.get_fdata(), affine=self.nii_affine, header=nii.header) if self._apply_mask else nii
@property
def chi(self): return self._do_apply_mask(self._load(self._chi) if isinstance(self._chi, str) else nib.Nifti1Image(self._chi, affine=self.nii_affine, header=self.nii_header))
@property
def mask(self): return self._load(self._mask) if isinstance(self._mask, str) else nib.Nifti1Image(self._mask or np.array(self._chi != 0), affine=self.nii_affine, header=self.nii_header)
@property
def M0(self): return self._do_apply_mask(self._load(self._M0) if isinstance(self._M0, str) else nib.Nifti1Image(self._M0 or np.array(self.mask.get_fdata() * 1), affine=self.nii_affine, header=self.nii_header))
@property
def R1(self): return self._do_apply_mask(self._load(self._R1) if isinstance(self._R1, str) else nib.Nifti1Image(self._R1 or np.array(self.mask.get_fdata() * 1), affine=self.nii_affine, header=self.nii_header))
@property
def R2star(self): return self._do_apply_mask(self._load(self._R2star) if isinstance(self._R2star, str) else nib.Nifti1Image(self._R2star or np.array(self.mask.get_fdata() * 50), affine=self.nii_affine, header=self.nii_header))
@property
def seg(self): return self._load(self._seg) if isinstance(self._seg, str) else nib.Nifti1Image(self._seg or self.mask.get_fdata(), affine=self.nii_affine, header=self.nii_header)
| (root_dir='', chi='chimodel/ChiModelMIX.nii', M0='maps/M0.nii.gz', R1='maps/R1.nii.gz', R2star='maps/R2star.nii.gz', mask='masks/BrainMask.nii.gz', seg='masks/SegmentedModel.nii.gz', apply_mask=False) |
725,874 | qsm_forward.qsm_forward | __init__ | null | def __init__(
self,
root_dir = "",
chi = "chimodel/ChiModelMIX.nii",
M0 = "maps/M0.nii.gz",
R1 = "maps/R1.nii.gz",
R2star = "maps/R2star.nii.gz",
mask = "masks/BrainMask.nii.gz",
seg = "masks/SegmentedModel.nii.gz",
apply_mask = False
):
if isinstance(chi, str) and not os.path.exists(os.path.join(root_dir, chi)):
raise ValueError(f"Path to chi is invalid! ({os.path.join(root_dir, chi)})")
self._chi = os.path.join(root_dir, chi) if isinstance(chi, str) and os.path.exists(os.path.join(root_dir, chi)) else chi if not isinstance(chi, str) else None
self._M0 = os.path.join(root_dir, M0) if isinstance(M0, str) and os.path.exists(os.path.join(root_dir, M0)) else M0 if not isinstance(M0, str) else None
self._R1 = os.path.join(root_dir, R1) if isinstance(R1, str) and os.path.exists(os.path.join(root_dir, R1)) else R1 if not isinstance(R1, str) else None
self._R2star = os.path.join(root_dir, R2star) if isinstance(R2star, str) and os.path.exists(os.path.join(root_dir, R2star)) else R2star if not isinstance(R2star, str) else None
self._mask = os.path.join(root_dir, mask) if isinstance(mask, str) and os.path.exists(os.path.join(root_dir, mask)) else mask if not isinstance(mask, str) else None
self._seg = os.path.join(root_dir, seg) if isinstance(seg, str) and os.path.exists(os.path.join(root_dir, seg)) else seg if not isinstance(seg, str) else None
self._apply_mask = apply_mask
self._affine = None
| (self, root_dir='', chi='chimodel/ChiModelMIX.nii', M0='maps/M0.nii.gz', R1='maps/R1.nii.gz', R2star='maps/R2star.nii.gz', mask='masks/BrainMask.nii.gz', seg='masks/SegmentedModel.nii.gz', apply_mask=False) |
725,875 | qsm_forward.qsm_forward | _do_apply_mask | null | def _do_apply_mask(self, nii): return nib.Nifti1Image(dataobj=nii.get_fdata() * self.mask.get_fdata(), affine=self.nii_affine, header=nii.header) if self._apply_mask else nii
| (self, nii) |
725,876 | qsm_forward.qsm_forward | _load | null | def _load(self, nii_path):
nii = nib.load(nii_path)
if self._affine is not None:
nii = nib.Nifti1Image(dataobj=nii.get_fdata(), affine=self._affine, header=nii.header)
return nii
| (self, nii_path) |
725,877 | qsm_forward.qsm_forward | set_affine | null | def set_affine(self, affine):
self._affine = affine
| (self, affine) |
725,878 | qsm_forward.qsm_forward | add_noise |
Add complex Gaussian noise to a signal.
Parameters
----------
sig : numpy.ndarray
The input signal to which noise will be added.
peak_snr : float, optional
The peak signal-to-noise ratio, by default np.inf
rng : numpy.random.Generator, optional
A random number Generator. If None, a new Generator will be created.
Returns
-------
numpy.ndarray
The input signal with added noise.
| def add_noise(sig, peak_snr=np.inf, rng=None):
"""
Add complex Gaussian noise to a signal.
Parameters
----------
sig : numpy.ndarray
The input signal to which noise will be added.
peak_snr : float, optional
The peak signal-to-noise ratio, by default np.inf
rng : numpy.random.Generator, optional
A random number Generator. If None, a new Generator will be created.
Returns
-------
numpy.ndarray
The input signal with added noise.
"""
# Create a new RNG if one was not provided
if rng is None:
rng = np.random.default_rng()
noise = rng.standard_normal(sig.shape) + 1j * rng.standard_normal(sig.shape)
sig_noisy = sig + (noise * np.max(np.abs(sig))) / peak_snr
return sig_noisy
| (sig, peak_snr=inf, rng=None) |
725,879 | qsm_forward.qsm_forward | crop_imagespace |
Crop a nD matrix around its center.
Parameters
----------
x : numpy.ndarray
The input n-dimensional matrix.
shape : tuple of int
The desired shape after cropping.
Returns
-------
numpy.ndarray
The cropped matrix.
| def crop_imagespace(x, shape):
"""
Crop a nD matrix around its center.
Parameters
----------
x : numpy.ndarray
The input n-dimensional matrix.
shape : tuple of int
The desired shape after cropping.
Returns
-------
numpy.ndarray
The cropped matrix.
"""
if np.array_equal(x.shape, np.array(shape)):
return x
m = np.array(x.shape)
s = np.array(shape)
if s.size < m.size:
s = np.concatenate((s, np.ones(m.size - s.size, dtype=int)))
if np.array_equal(m, s):
res = x
return res
idx = []
for n in range(s.size):
start = np.floor_divide(m[n], 2) + np.ceil(-s[n] / 2)
end = np.floor_divide(m[n], 2) + np.ceil(s[n] / 2)
idx.append(slice(int(start), int(end)))
res = x[tuple(idx)]
return res
| (x, shape) |
725,880 | qsm_forward.qsm_forward | crop_kspace |
Crop a 3D volume in k-space and apply optional scaling and Gibbs ringing correction.
Parameters
----------
volume : numpy.ndarray
The input 3D volume.
dims : tuple of int
The desired dimensions after cropping.
scaling : bool, optional
Whether to scale the cropped volume to maintain the total energy. Default is True.
gibbs_correction : bool, optional
Whether to apply Gibbs ringing correction. Default is True.
Returns
-------
numpy.ndarray
The cropped volume.
| def crop_kspace(volume, dims, scaling=True, gibbs_correction=True):
"""
Crop a 3D volume in k-space and apply optional scaling and Gibbs ringing correction.
Parameters
----------
volume : numpy.ndarray
The input 3D volume.
dims : tuple of int
The desired dimensions after cropping.
scaling : bool, optional
Whether to scale the cropped volume to maintain the total energy. Default is True.
gibbs_correction : bool, optional
Whether to apply Gibbs ringing correction. Default is True.
Returns
-------
numpy.ndarray
The cropped volume.
"""
if np.array_equal(volume.shape, dims):
return volume
working_volume = np.fft.ifftn(np.fft.ifftshift(crop_imagespace(np.fft.fftshift(np.fft.fftn(volume)), dims)))
# gibbs correction is only needed for non-complex volumes
if not np.iscomplexobj(volume):
working_volume = np.real(working_volume)
if gibbs_correction:
working_volume = gibbs_removal(gibbs_removal(working_volume, slice_axis=2), slice_axis=1)
if scaling:
working_volume *= np.prod(dims) / np.prod(volume.shape)
return working_volume
| (volume, dims, scaling=True, gibbs_correction=True) |
725,881 | qsm_forward.qsm_forward | generate_bids |
Simulate T2*-weighted magnitude and phase images and save the outputs in the BIDS-compliant format.
This function simulates a T2*-weighted MRI signal based on a ground truth susceptibility map,
and saves the outputs (images, JSON headers) in the BIDS-compliant format in the specified
directory.
Parameters
----------
tissue_params : TissueParams
Provides paths to different tissue parameter files or the 3D numpy arrays themselves.
recon_params : ReconParams
Provides parameters for the simulated reconstruction.
bids_dir : str
The directory where the BIDS-formatted outputs will be saved.
save_chi : bool
Whether to save the cropped chi map to the BIDS directory. Default is True.
save_mask : bool
Whether to save the cropped mask to the BIDS directory. Default is True.
save_segmentation : bool
Whether to save the cropped segmentation to the BIDS directory. Default is True.
save_field : bool
Whether to save the cropped field map to the BIDS directory. Default is False.
save_shimmed_field : bool
Whether to save the cropped and shimmed field map to the BIDS directory. Default is False.
save_shimmed_offset_field : bool
Whether to save the cropped, shimmed and offset field map to the BIDS directory. Default is False.
Returns
-------
None
Outputs are saved as files in the bids_dir directory.
def generate_bids(tissue_params: TissueParams, recon_params: ReconParams, bids_dir, save_chi=True, save_mask=True, save_segmentation=True, save_field=False, save_shimmed_field=False, save_shimmed_offset_field=False):
    """
    Simulate T2*-weighted magnitude and phase images and save the outputs in the BIDS-compliant format.

    This function simulates a T2*-weighted MRI signal based on a ground truth susceptibility map,
    and saves the outputs (images, JSON headers) in the BIDS-compliant format in the specified
    directory.

    Parameters
    ----------
    tissue_params : TissueParams
        Provides paths to different tissue parameter files or the 3D numpy arrays themselves.
    recon_params : ReconParams
        Provides parameters for the simulated reconstruction.
    bids_dir : str
        The directory where the BIDS-formatted outputs will be saved.
    save_chi : bool
        Whether to save the cropped chi map to the BIDS directory. Default is True.
    save_mask : bool
        Whether to save the cropped mask to the BIDS directory. Default is True.
    save_segmentation : bool
        Whether to save the cropped segmentation to the BIDS directory. Default is True.
    save_field : bool
        Whether to save the cropped field map to the BIDS directory. Default is False.
    save_shimmed_field : bool
        Whether to save the cropped and shimmed field map to the BIDS directory. Default is False.
    save_shimmed_offset_field : bool
        Whether to save the cropped, shimmed and offset field map to the BIDS directory. Default is False.

    Returns
    -------
    None
        Outputs are saved as files in the bids_dir directory.
    """
    # create output directories
    print("Creating output directory...")
    os.makedirs(bids_dir, exist_ok=True)

    # recon name: BIDS entity string built from subject/session/acq/run
    recon_name = f"sub-{recon_params.subject}"
    if recon_params.session: recon_name += f"_ses-{recon_params.session}"
    if recon_params.acq: recon_name += f"_acq-{recon_params.acq}"
    if recon_params.run: recon_name += f"_run-{recon_params.run}"

    # subject directory (raw data)
    subject_dir = os.path.join(bids_dir, f"sub-{recon_params.subject}")
    if recon_params.session: subject_dir = os.path.join(subject_dir, f"ses-{recon_params.session}")

    # derivatives directory (simulated ground-truth maps)
    subject_dir_deriv = os.path.join(bids_dir, "derivatives", "qsm-forward", f"sub-{recon_params.subject}")
    if recon_params.session: subject_dir_deriv = os.path.join(subject_dir_deriv, f"ses-{recon_params.session}")

    os.makedirs(os.path.join(subject_dir, 'anat'), exist_ok=True)
    os.makedirs(os.path.join(subject_dir_deriv, 'anat'), exist_ok=True)

    # random number generator for noise etc.
    rng = np.random.default_rng(recon_params.random_seed)

    # adjust affine for B0 direction
    affine = adjust_affine_for_B0_direction(tissue_params.nii_affine.copy(), recon_params.B0_dir)
    tissue_params.set_affine(affine)

    # image-space resizing
    print("Image-space resizing of chi...")
    chi_downsampled_nii = resize(tissue_params.chi, recon_params.voxel_size)
    if save_chi: nib.save(chi_downsampled_nii, filename=os.path.join(subject_dir_deriv, "anat", f"{recon_name}_Chimap.nii"))
    print("Image-space cropping of mask...")
    # nearest-neighbour interpolation preserves binary/integer label values
    if save_mask: nib.save(resize(tissue_params.mask, recon_params.voxel_size, 'nearest'), filename=os.path.join(subject_dir_deriv, "anat", f"{recon_name}_mask.nii"))
    print("Image-space cropping of segmentation...")
    if save_segmentation: nib.save(resize(tissue_params.seg, recon_params.voxel_size, 'nearest'), filename=os.path.join(subject_dir_deriv, "anat", f"{recon_name}_dseg.nii"))

    # calculate field (forward dipole convolution of the full-resolution chi map)
    print("Computing field model...")
    field = generate_field(tissue_params.chi.get_fdata(), voxel_size=tissue_params.voxel_size, B0_dir=recon_params.B0_dir)
    if save_field: nib.save(resize(nib.Nifti1Image(dataobj=np.array(field, dtype=np.float32), affine=tissue_params.nii_affine, header=tissue_params.nii_header), recon_params.voxel_size), filename=os.path.join(subject_dir_deriv, "anat", f"{recon_name}_fieldmap.nii"))

    # simulate shim field: replace `field` with the residual after polynomial shimming
    if recon_params.generate_shim_field:
        print("Computing shim fields...")
        _, field, _ = generate_shimmed_field(field, tissue_params.mask.get_fdata(), order=2)
        if save_shimmed_field: nib.save(resize(nib.Nifti1Image(dataobj=np.array(field, dtype=np.float32), affine=tissue_params.nii_affine, header=tissue_params.nii_header), recon_params.voxel_size), filename=os.path.join(subject_dir_deriv, "anat", f"{recon_name}_desc-shimmed_fieldmap.nii"))

    # phase offset (constant user value plus an optional spatially varying term)
    phase_offset = recon_params.phase_offset
    if recon_params.generate_phase_offset:
        print("Computing phase offset...")
        phase_offset = recon_params.phase_offset + generate_phase_offset(tissue_params.M0.get_fdata(), tissue_params.mask.get_fdata(), tissue_params.M0.get_fdata().shape)
    # NOTE(review): this saves `field` (which does not include the phase offset);
    # indentation was reconstructed -- confirm whether this belongs inside the offset branch
    if save_shimmed_offset_field: nib.save(resize(nib.Nifti1Image(dataobj=np.array(field, dtype=np.float32), affine=tissue_params.nii_affine, header=tissue_params.nii_header), recon_params.voxel_size), filename=os.path.join(subject_dir_deriv, "anat", f"{recon_name}_desc-shimmed-offset_fieldmap.nii"))

    # signal model: one simulated acquisition per echo time
    multiecho = len(recon_params.TEs) > 1
    for i in range(len(recon_params.TEs)):
        print(f"Computing MR signal for echo {i+1}...")
        recon_name_i = f"{recon_name}_echo-{i+1}" if multiecho else recon_name
        sigHR = generate_signal(
            field=field,
            B0=recon_params.B0,
            TR=recon_params.TR,
            TE=recon_params.TEs[i],
            flip_angle=recon_params.flip_angle,
            phase_offset=phase_offset,
            R1=tissue_params.R1.get_fdata(),
            R2star=tissue_params.R2star.get_fdata(),
            M0=tissue_params.M0.get_fdata()
        )

        # k-space cropping of sigHR: downsample to the requested reconstruction voxel size
        print(f"k-space cropping of MR signal for echo {i+1}...")
        resolution = np.array(np.round((np.array(tissue_params.voxel_size) / recon_params.voxel_size) * np.array(tissue_params.nii_header.get_data_shape())), dtype=int)
        sigHR_cropped = crop_kspace(sigHR, resolution)
        del sigHR  # free the full-resolution complex volume early

        # noise
        print(f"Simulating noise for echo {i+1}...")
        sigHR_cropped_noisy = add_noise(sigHR_cropped, peak_snr=recon_params.peak_snr, rng=rng)
        del sigHR_cropped

        # save nifti images (the part- entity is only used when phase is also saved)
        mag_filename = f"{recon_name_i}" + ("_part-mag" if recon_params.save_phase else "") + f"_{recon_params.suffix}"
        phs_filename = f"{recon_name_i}" + ("_part-phase" if recon_params.save_phase else "") + f"_{recon_params.suffix}"
        nib.save(nib.Nifti1Image(dataobj=np.abs(sigHR_cropped_noisy), affine=chi_downsampled_nii.affine, header=chi_downsampled_nii.header), filename=os.path.join(subject_dir, "anat", f"{mag_filename}.nii"))
        if recon_params.save_phase: nib.save(nib.Nifti1Image(dataobj=np.angle(sigHR_cropped_noisy), affine=chi_downsampled_nii.affine, header=chi_downsampled_nii.header), filename=os.path.join(subject_dir, "anat", f"{phs_filename}.nii"))

        # json header (BIDS sidecar metadata)
        print(f"Creating JSON headers...")
        json_dict = {
            'EchoTime': recon_params.TEs[i],
            'MagneticFieldStrength': recon_params.B0,
            'EchoNumber': i+1,
            'ProtocolName': recon_params.suffix,
            'ConversionSoftware': 'qsm-forward',
            'RepetitionTime': recon_params.TR,
            'FlipAngle': recon_params.flip_angle,
            'B0_dir': recon_params.B0_dir.tolist(),
            'PhaseOffset': recon_params.generate_phase_offset or phase_offset != 0,
            # NOTE(review): 'ShimmField' looks like a typo for 'ShimmedField', but it is an
            # emitted JSON key -- renaming would change output files; confirm before fixing
            'ShimmField': recon_params.generate_shim_field,
            'VoxelSize': recon_params.voxel_size.tolist(),
            'PeakSNR': recon_params.peak_snr if recon_params.peak_snr != np.inf else "inf"
        }
        json_dict_phs = json_dict.copy()
        json_dict_phs['ImageType'] = ['P', 'PHASE']
        json_dict_mag = json_dict.copy()
        json_dict_mag['ImageType'] = ['M', 'MAGNITUDE']
        with open(os.path.join(subject_dir, "anat", f"{mag_filename}.json"), 'w') as mag_json_file:
            json.dump(json_dict_mag, mag_json_file)
        if recon_params.save_phase:
            with open(os.path.join(subject_dir, "anat", f"{phs_filename}.json"), 'w') as phs_json_file:
                json.dump(json_dict_phs, phs_json_file)

    print("Done!")
| (tissue_params: qsm_forward.qsm_forward.TissueParams, recon_params: qsm_forward.qsm_forward.ReconParams, bids_dir, save_chi=True, save_mask=True, save_segmentation=True, save_field=False, save_shimmed_field=False, save_shimmed_offset_field=False) |
725,882 | qsm_forward.qsm_forward | generate_field |
Perform the forward convolution operation.
This function performs the forward convolution step of the QSM simulation.
Parameters
----------
chi : numpy.ndarray
The susceptibility distribution array.
Returns
-------
numpy.ndarray
The resulting magnetic field array after the forward convolution operation.
def generate_field(chi, voxel_size=(1, 1, 1), B0_dir=(0, 0, 1)):
    """
    Perform the forward convolution operation.

    This function performs the forward convolution step of the QSM simulation:
    the susceptibility map is multiplied by a dipole kernel in k-space.

    Parameters
    ----------
    chi : numpy.ndarray
        The susceptibility distribution array (3D).
    voxel_size : sequence of float, optional
        Voxel dimensions used to build the dipole kernel. Default is (1, 1, 1).
    B0_dir : sequence of float, optional
        Unit direction of the main magnetic field. Default is (0, 0, 1).

    Returns
    -------
    numpy.ndarray
        The resulting magnetic field array after the forward convolution operation,
        with the same shape as ``chi``.
    """
    # NOTE: defaults changed from mutable lists to tuples (never mutated; behavior unchanged)
    dims = np.array(chi.shape)
    # dipole kernel sized for the zero-padded (2x) volume
    D = _generate_3d_dipole_kernel(data_shape=dims, voxel_size=voxel_size, B0_dir=B0_dir)

    # pad to double size, filling the padding with the corner value to limit wrap-around artefacts
    chitemp = np.ones(2 * dims) * chi[-1, -1, -1]
    chitemp[:dims[0], :dims[1], :dims[2]] = chi
    field = np.real(np.fft.ifftn(np.fft.fftn(chitemp) * D))

    # crop back to the original volume
    return field[:dims[0], :dims[1], :dims[2]]
| (chi, voxel_size=[1, 1, 1], B0_dir=[0, 0, 1]) |
725,883 | qsm_forward.qsm_forward | generate_phase_offset |
Generate a suitable phase offset.
Parameters
----------
M0 : numpy.ndarray
The initial magnetization.
mask : numpy.ndarray
A binary mask that indicates the internal region of interest.
dims : tuple of int
The dimensions of the input image.
Returns
-------
numpy.ndarray
The phase offset of the input image.
def generate_phase_offset(M0, mask, dims):
    """
    Generate a suitable phase offset.

    Parameters
    ----------
    M0 : numpy.ndarray
        The initial magnetization.
    mask : numpy.ndarray
        A binary mask that indicates the internal region of interest.
    dims : tuple of int
        The dimensions of the input image.

    Returns
    -------
    numpy.ndarray
        The phase offset of the input image.
    """
    centre, width = _center_of_mass(M0)
    # default 'xy' meshgrid indexing: the first argument varies along axis 1,
    # the second along axis 0 -- hence the dims[1]/dims[0] ordering below
    x, y, z = np.meshgrid(
        np.arange(1, dims[1] + 1) - centre[1],
        np.arange(1, dims[0] + 1) - centre[0],
        np.arange(1, dims[2] + 1) - centre[2]
    )
    # normalized squared radial distance from the centre of mass
    radial = (x / width[1])**2 + (y / width[0])**2 + (z / width[2])**2
    inside = radial[mask != 0]
    # scale so the in-mask spread spans pi radians (negated)
    return -radial / (np.max(inside) - np.min(inside)) * np.pi
| (M0, mask, dims) |
725,884 | qsm_forward.qsm_forward | generate_shimmed_field |
Simulate field shimming by fitting the field with second- and third-order Legendre polynomials.
Parameters
----------
field : numpy.ndarray
3D array representing the magnetic field to fit.
mask : numpy.ndarray
3D binary array. Must be the same shape as `field`. A True value at a coordinate will
include that point in the fit.
order : int, optional
The order of the polynomial to fit. Must be 0, 1, or 2. Default is 2.
Returns
-------
FIT3D : numpy.ndarray
3D array representing the fitted field.
Residuals : numpy.ndarray
3D array representing the residuals of the fit.
b : numpy.ndarray
1D array representing the coefficients of the fitted polynomial.
Raises
------
ValueError
If `field` and `mask` shapes are not the same.
def generate_shimmed_field(field, mask, order=2):
    """
    Simulate field shimming by fitting the field with second- and third-order Legendre polynomials.

    Parameters
    ----------
    field : numpy.ndarray
        3D array representing the magnetic field to fit.
    mask : numpy.ndarray
        3D binary array. Must be the same shape as `field`. A True value at a coordinate will
        include that point in the fit.
    order : int, optional
        The order of the polynomial to fit. Must be 0, 1, or 2. Default is 2.

    Returns
    -------
    FIT3D : numpy.ndarray
        3D array representing the fitted field.
    Residuals : numpy.ndarray
        3D array representing the residuals of the fit.
    b : numpy.ndarray
        1D array representing the coefficients of the fitted polynomial.

    Raises
    ------
    ValueError
        If `field` and `mask` shapes are not the same.
    """
    dim = field.shape
    ## for volume fitting
    #mask = np.ones(mask.shape)
    # fit only the voxels inside the mask
    indices = np.nonzero(mask)
    x1, y1, z1 = indices
    R = field[indices]
    b = None
    # require enough samples relative to the number of polynomial terms
    if len(indices[0]) > (3*order)**2:
        # least-squares fit via pseudo-inverse of the polynomial design matrix
        model = _create_model(x1, y1, z1, dim, order)
        b = np.linalg.pinv(model) @ R
        # NOTE(review): `temp` (in-mask residual) is computed but never used
        temp = R - model @ b
        del model, R

        # evaluate the fitted polynomial over the full volume
        indices = np.meshgrid(*[range(d) for d in dim], indexing='ij')
        x1, y1, z1 = [ind.flatten() for ind in indices]
        model = _create_model(x1, y1, z1, dim, order)
        Fit = model @ b
        del model

        FIT3D = Fit.reshape(dim)
        Residuals = (field-FIT3D)
    else:
        # too few in-mask voxels to fit: return a zero shim and masked residuals
        FIT3D = np.zeros_like(field)
        Residuals = (field-FIT3D) * mask
    return FIT3D, Residuals, b
| (field, mask, order=2) |
725,885 | qsm_forward.qsm_forward | generate_signal |
Compute the MRI signal based on the given parameters.
Parameters
----------
field : numpy.ndarray
The magnetic field distribution.
B0 : float, optional
The main magnetic field strength. Default is 3.
TR : float, optional
The repetition time. Default is 1.
TE : float, optional
The echo time. Default is 30e-3.
flip_angle : float, optional
The flip angle in degrees. Default is 90.
phase_offset : float, optional
The phase offset. Default is 0.
R1 : float or numpy.ndarray, optional
The longitudinal relaxation rate. Can be a single value or a 3D numpy array. Default is 1.
R2star : float or numpy.ndarray, optional
The effective transverse relaxation rate. Can be a single value or a 3D numpy array. Default is 50.
M0 : float or numpy.ndarray, optional
The equilibrium magnetization. Can be a single value or a 3D numpy array. Default is 1.
Returns
-------
numpy.ndarray
The computed MRI signal.
def generate_signal(field, B0=3, TR=1, TE=30e-3, flip_angle=90, phase_offset=0, R1=1, R2star=50, M0=1):
    """
    Compute the MRI signal based on the given parameters.

    Parameters
    ----------
    field : numpy.ndarray
        The magnetic field distribution.
    B0 : float, optional
        The main magnetic field strength. Default is 3.
    TR : float, optional
        The repetition time. Default is 1.
    TE : float, optional
        The echo time. Default is 30e-3.
    flip_angle : float, optional
        The flip angle in degrees. Default is 90.
    phase_offset : float, optional
        The phase offset. Default is 0.
    R1 : float or numpy.ndarray, optional
        The longitudinal relaxation rate. Can be a single value or a 3D numpy array. Default is 1.
    R2star : float or numpy.ndarray, optional
        The effective transverse relaxation rate. Can be a single value or a 3D numpy array. Default is 50.
    M0 : float or numpy.ndarray, optional
        The equilibrium magnetization. Can be a single value or a 3D numpy array. Default is 1.

    Returns
    -------
    numpy.ndarray
        The computed MRI signal (complex array); NaN entries are replaced with 0.
    """
    # Steady-state spoiled gradient-echo signal: phase accrual from the field
    # (42.58 MHz/T gyromagnetic ratio * B0 * TE) plus offset, T2* decay, and
    # T1 saturation (Ernst-equation flip-angle weighting).
    sigHR = M0 * np.exp(1j * (2 * np.pi * field * B0 * 42.58 * TE + phase_offset)) * np.exp(-TE * R2star) \
        * (1 - np.exp(-TR * R1)) * np.sin(np.deg2rad(flip_angle)) / (1 - np.cos(np.deg2rad(flip_angle)) * np.exp(-TR * R1))
    # BUGFIX: the original statement `sigHR[np.isnan(sigHR)]` was a no-op
    # expression missing the assignment; zero out NaN voxels as intended.
    sigHR[np.isnan(sigHR)] = 0
    return sigHR
| (field, B0=3, TR=1, TE=0.03, flip_angle=90, phase_offset=0, R1=1, R2star=50, M0=1) |
def generate_susceptibility_phantom(resolution, background, large_cylinder_val, small_cylinder_radii, small_cylinder_vals):
    """
    Build a 3D susceptibility phantom: one large cylinder containing a ring of
    smaller cylinders, all embedded in a uniform background.

    Parameters
    ----------
    resolution : tuple of int
        Shape of the output volume.
    background : float
        Value assigned outside all cylinders.
    large_cylinder_val : float
        Value assigned inside the large cylinder.
    small_cylinder_radii : sequence of float
        Radii of the small cylinders (same length as ``small_cylinder_vals``).
    small_cylinder_vals : sequence of float
        Values assigned inside each small cylinder.

    Returns
    -------
    numpy.ndarray
        Float volume of shape ``resolution``.
    """
    assert len(small_cylinder_radii) == len(small_cylinder_vals), "Number of small cylinders and their values should be the same"

    # uniform background volume
    vol = np.full(resolution, fill_value=background, dtype=float)

    centre = [dim // 2 for dim in resolution]
    outer_radius = min(centre[1:]) * 0.75

    # index grids: ax0 varies along axis 0, ax1 along axis 1, ax2 along axis 2
    ax0, ax1, ax2 = np.indices(resolution)

    # axial extent of the cylinders along the last index axis
    # NOTE(review): limits are derived from resolution[0] but compared against the
    # axis-2 index, and the cross-section uses centre[2] with the axis-0 grid --
    # equivalent only for cubic volumes; confirm intent before changing
    outer_lo = (1 - 0.75) / 2 * resolution[0]
    outer_hi = (1 + 0.75) / 2 * resolution[0]
    inner_lo = (1 - 0.6) / 2 * resolution[0]
    inner_hi = (1 + 0.6) / 2 * resolution[0]

    # large cylinder
    in_outer = ((ax0 - centre[2]) ** 2 + (ax1 - centre[1]) ** 2 < outer_radius ** 2) \
        & (ax2 >= outer_lo) & (ax2 < outer_hi)
    vol[in_outer] = large_cylinder_val

    # small cylinders, evenly spaced on a circle of half the outer radius
    step = 2 * np.pi / len(small_cylinder_radii)
    for idx, (radius, value) in enumerate(zip(small_cylinder_radii, small_cylinder_vals)):
        c0 = centre[2] + outer_radius / 2 * np.cos(idx * step)
        c1 = centre[1] + outer_radius / 2 * np.sin(idx * step)
        in_small = ((ax0 - c0) ** 2 + (ax1 - c1) ** 2 < radius ** 2) \
            & (ax2 >= inner_lo) & (ax2 < inner_hi)
        vol[in_small] = value

    return vol
| (resolution, background, large_cylinder_val, small_cylinder_radii, small_cylinder_vals) |
725,888 | qsm_forward.qsm_forward | resize |
Resize a Nifti image to a voxel size.
Parameters
----------
nii : nibabel.nifti1.Nifti1Image
The input Nifti image.
voxel_size : list of float
The desired voxel size after resizing.
interpolation : str
Can be 'continuous', 'linear', or 'nearest'. Indicates the resample method. Default='continuous'.
Returns
-------
nibabel.nifti1.Nifti1Image
The resized Nifti image.
def resize(nii, voxel_size, interpolation='continuous'):
    """
    Resize a Nifti image to a voxel size.

    Parameters
    ----------
    nii : nibabel.nifti1.Nifti1Image
        The input Nifti image.
    voxel_size : list of float
        The desired voxel size after resizing.
    interpolation : str
        Can be 'continuous', 'linear', or 'nearest'. Indicates the resample method. Default='continuous'.

    Returns
    -------
    nibabel.nifti1.Nifti1Image
        The resized Nifti image (the input object itself when no resizing is needed).
    """
    original_shape = np.array(nii.header.get_data_shape())
    target_shape = np.array(np.round((np.array(nii.header.get_zooms()) / voxel_size) * original_shape), dtype=int)
    if np.array_equal(original_shape, target_shape):
        # already at the requested resolution; skip the resample entirely
        return nii

    # New affine: keep the original rotation/translation, rescale the diagonal
    # to the requested voxel size (preserving the sign of each diagonal entry).
    # The original code computed the diagonal twice with equivalent formulas
    # (the second loop overwrote the first); only the final formula is kept.
    new_affine = np.eye(4)
    new_affine[:3, :3] = nii.affine[:3, :3]
    new_affine[:3, 3] = nii.affine[:3, 3]
    for i in range(3):
        new_affine[i, i] = voxel_size[i] * (nii.affine[i, i] / nii.header.get_zooms()[i])

    return resample_img(
        nii,
        target_affine=new_affine,
        target_shape=target_shape,
        interpolation=interpolation
    )
| (nii, voxel_size, interpolation='continuous') |
725,892 | validator_collection.checkers | are_dicts_equivalent | Indicate if :ref:`dicts <python:dict>` passed to this function have identical
keys and values.
:param args: One or more values, passed as positional arguments.
:param strict_typing: If ``True``, will only identify items as equivalent if they have
identical sub-typing. If ``False``, related sub-types will be returned as equivalent.
Defaults to ``True``.
:type strict_typing: :class:`bool <python:bool>`
:param missing_as_none: If ``True``, will treat missing keys in one value and
:obj:`None <python:None>` keys in the other as equivalent. If ``False``, missing and
:obj:`None <python:None>` keys will fail. Defaults to ``False``.
:type missing_as_none: :class:`bool <python:bool>`
:returns: ``True`` if ``args`` have identical keys/values, and ``False`` if not.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
def is_type(obj,
            type_,
            **kwargs):
    """Indicate if ``obj`` is a type in ``type_``.

    .. hint::

      This checker is particularly useful when you want to evaluate whether
      ``obj`` is of a particular type, but importing that type directly to use
      in :func:`isinstance() <python:isinstance>` would cause a circular import
      error.

      To use this checker in that kind of situation, you can instead pass the
      *name* of the type you want to check as a string in ``type_``. The checker
      will evaluate it and see whether ``obj`` is of a type or inherits from a
      type whose name matches the string you passed.

    :param obj: The object whose type should be checked.
    :type obj: :class:`object <python:object>`

    :param type_: The type(s) to check against.
    :type type_: :class:`type <python:type>` / iterable of :class:`type <python:type>` /
      :class:`str <python:str>` with type name / iterable of :class:`str <python:str>`
      with type name

    :returns: ``True`` if ``obj`` is a type in ``type_``. Otherwise, ``False``.
    :rtype: :class:`bool <python:bool>`

    :raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
      keyword parameters passed to the underlying validator

    """
    # normalize a single type / type-name into a list so both forms iterate uniformly
    if not is_iterable(type_):
        type_ = [type_]

    return_value = False
    for check_for_type in type_:
        if isinstance(check_for_type, type):
            # actual type object: standard isinstance check, falling back to
            # issubclass when obj is itself a class
            return_value = isinstance(obj, check_for_type)
            if not return_value:
                try:
                    return_value = issubclass(obj, check_for_type)
                except TypeError:
                    # obj is an instance, not a class
                    pass
        elif obj.__class__.__name__ == check_for_type:
            # type given as a string: direct class-name match
            return_value = True
        else:
            # walk the base classes looking for a matching class name
            return_value = _check_base_classes(obj.__class__.__bases__,
                                               check_for_type)
            if not return_value:
                try:
                    # NOTE(review): check_for_type is a str here, so issubclass
                    # always raises TypeError -- this appears to be dead code
                    return_value = issubclass(obj, check_for_type)
                except TypeError:
                    pass

        # stop at the first matching entry
        if return_value is True:
            break

    return return_value
| (*args, **kwargs) |
725,893 | validator_collection.checkers | are_equivalent | Indicate if arguments passed to this function are equivalent.
.. hint::
This checker operates recursively on the members contained within iterables
and :class:`dict <python:dict>` objects.
.. caution::
If you only pass one argument to this checker - even if it is an iterable -
the checker will *always* return ``True``.
To evaluate members of an iterable for equivalence, you should instead
unpack the iterable into the function like so:
.. code-block:: python
obj = [1, 1, 1, 2]
result = are_equivalent(*obj)
# Will return ``False`` by unpacking and evaluating the iterable's members
result = are_equivalent(obj)
# Will always return True
:param args: One or more values, passed as positional arguments.
:param strict_typing: If ``True``, will only identify items as equivalent if they have
identical sub-typing. If ``False``, related sub-types will be returned as equivalent.
Defaults to ``True``.
:type strict_typing: :class:`bool <python:bool>`
:returns: ``True`` if ``args`` are equivalent, and ``False`` if not.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
def is_type(obj,
            type_,
            **kwargs):
    """Indicate if ``obj`` is a type in ``type_``.

    .. hint::

      This checker is particularly useful when you want to evaluate whether
      ``obj`` is of a particular type, but importing that type directly to use
      in :func:`isinstance() <python:isinstance>` would cause a circular import
      error.

      To use this checker in that kind of situation, you can instead pass the
      *name* of the type you want to check as a string in ``type_``. The checker
      will evaluate it and see whether ``obj`` is of a type or inherits from a
      type whose name matches the string you passed.

    :param obj: The object whose type should be checked.
    :type obj: :class:`object <python:object>`

    :param type_: The type(s) to check against.
    :type type_: :class:`type <python:type>` / iterable of :class:`type <python:type>` /
      :class:`str <python:str>` with type name / iterable of :class:`str <python:str>`
      with type name

    :returns: ``True`` if ``obj`` is a type in ``type_``. Otherwise, ``False``.
    :rtype: :class:`bool <python:bool>`

    :raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
      keyword parameters passed to the underlying validator

    """
    # wrap a bare type / type-name so the loop below handles both forms
    if not is_iterable(type_):
        type_ = [type_]

    return_value = False
    for check_for_type in type_:
        if isinstance(check_for_type, type):
            # real type object: isinstance first, then issubclass for class objects
            return_value = isinstance(obj, check_for_type)
            if not return_value:
                try:
                    return_value = issubclass(obj, check_for_type)
                except TypeError:
                    # obj is an instance rather than a class
                    pass
        elif obj.__class__.__name__ == check_for_type:
            # string type name: matches the object's own class name
            return_value = True
        else:
            # search the inheritance chain for a matching class name
            return_value = _check_base_classes(obj.__class__.__bases__,
                                               check_for_type)
            if not return_value:
                try:
                    # NOTE(review): issubclass with a str second argument always
                    # raises TypeError, so this fallback appears unreachable
                    return_value = issubclass(obj, check_for_type)
                except TypeError:
                    pass

        # short-circuit on the first match
        if return_value is True:
            break

    return return_value
| (*args, **kwargs) |
725,894 | validator_collection.validators | bytesIO | Validate that ``value`` is a :class:`BytesIO <python:io.BytesIO>` object.
:param value: The value to validate.
:param allow_empty: If ``True``, returns :obj:`None <python:None>` if
``value`` is empty. If ``False``, raises a
:class:`EmptyValueError <validator_collection.errors.EmptyValueError>`
if ``value`` is empty. Defaults to ``False``.
:type allow_empty: :class:`bool <python:bool>`
:returns: ``value`` / :obj:`None <python:None>`
:rtype: :class:`BytesIO <python:io.BytesIO>` / :obj:`None <python:None>`
:raises EmptyValueError: if ``value`` is empty and ``allow_empty`` is ``False``
:raises NotBytesIOError: if ``value`` is not a :class:`BytesIO <python:io.BytesIO>`
object.
| # -*- coding: utf-8 -*-
# The lack of a module docstring for this module is **INTENTIONAL**.
# The module is imported into the documentation using Sphinx's autodoc
# extension, and its member function documentation is automatically incorporated
# there as needed.
import decimal as decimal_
import fractions
import io
import math
import os
import uuid as uuid_
import datetime as datetime_
import string as string_
import sys
from ast import parse
import jsonschema
from validator_collection._compat import numeric_types, integer_types, datetime_types,\
date_types, time_types, timestamp_types, tzinfo_types, POSITIVE_INFINITY, \
NEGATIVE_INFINITY, TimeZone, json_, is_py2, is_py3, dict_, float_, basestring, re
from validator_collection._decorators import disable_on_env
from validator_collection import errors
URL_UNSAFE_CHARACTERS = ('[', ']', '{', '}', '|', '^', '%', '~')
URL_REGEX = re.compile(
r"^"
# protocol identifier
r"(?:(?:https?|ftp)://)"
# user:pass authentication
r"(?:\S+(?::\S*)?@)?"
r"(?:"
# IP address exclusion
# private & local networks
r"(?!(?:10|127)(?:\.\d{1,3}){3})"
r"(?!(?:169\.254|192\.168)(?:\.\d{1,3}){2})"
r"(?!172\.(?:1[6-9]|2\d|3[0-1])(?:\.\d{1,3}){2})"
# IP address dotted notation octets
# excludes loopback network 0.0.0.0
# excludes reserved space >= 224.0.0.0
# excludes network & broadcast addresses
# (first & last IP address of each class)
r"(?:[1-9]\d?|1\d\d|2[01]\d|22[0-3])"
r"(?:\.(?:1?\d{1,2}|2[0-4]\d|25[0-5])){2}"
r"(?:\.(?:[1-9]\d?|1\d\d|2[0-4]\d|25[0-4]))"
r"|"
r"(?:"
r"(?:localhost|invalid|test|example)|("
# host name
r"(?:(?:[A-z\u00a1-\uffff0-9]-*_*)*[A-z\u00a1-\uffff0-9]+)"
# domain name
r"(?:\.(?:[A-z\u00a1-\uffff0-9]-*)*[A-z\u00a1-\uffff0-9]+)*"
# TLD identifier
r"(?:\.(?:[A-z\u00a1-\uffff]{2,}))"
r")))"
# port number
r"(?::\d{2,5})?"
# resource path
r"(?:/\S*)?"
r"$"
, re.UNICODE)
URL_SPECIAL_IP_REGEX = re.compile(
r"^"
# protocol identifier
r"(?:(?:https?|ftp)://)"
# user:pass authentication
r"(?:\S+(?::\S*)?@)?"
r"(?:"
# IP address dotted notation octets
# excludes loopback network 0.0.0.0
# excludes reserved space >= 224.0.0.0
# excludes network & broadcast addresses
# (first & last IP address of each class)
r"(?:[1-9]\d?|1\d\d|2[01]\d|22[0-3])"
r"(?:\.(?:1?\d{1,2}|2[0-4]\d|25[0-5])){2}"
r"(?:\.(?:[1-9]\d?|1\d\d|2[0-4]\d|25[0-4]))"
r"|"
# host name
r"(?:(?:[a-z\u00a1-\uffff0-9]-*)*[a-z\u00a1-\uffff0-9]+)"
# domain name
r"(?:\.(?:[a-z\u00a1-\uffff0-9]-*)*[a-z\u00a1-\uffff0-9]+)*"
# TLD identifier
r"(?:\.(?:[a-z\u00a1-\uffff]{2,}))"
r")"
# port number
r"(?::\d{2,5})?"
# resource path
r"(?:/\S*)?"
r"$"
, re.UNICODE)
DOMAIN_REGEX = re.compile(
r"\b((?=[a-z\u00a1-\uffff0-9-]{1,63}\.)(xn--)?[a-z\u00a1-\uffff0-9]+"
r"(-[a-z\u00a1-\uffff0-9]+)*\.)+[a-z]{2,63}\b",
re.UNICODE|re.IGNORECASE
)
URL_PROTOCOLS = ('http://',
'https://',
'ftp://')
SPECIAL_USE_DOMAIN_NAMES = ('localhost',
'invalid',
'test',
'example')
EMAIL_REGEX = re.compile(
r"(?:[a-z0-9!#$%&'*+/=?^_`{|}~-]+(?:\.[a-z0-9!#$%&'*+/=?^_`{|}~-]+)*|\""
r"(?:[\x01-\x08\x0b\x0c\x0e-\x1f\x21\x23-\x5b\x5d-\x7f]|\\[\x01-\x09\x0b\x0c\x0e-\x7f])*\")"
r"@(?:(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\.)+[a-z0-9](?:[a-z0-9-]*[a-z0-9])"
r"?|\[(?:(?:(2(5[0-5]|[0-4][0-9])|1[0-9][0-9]|[1-9]?[0-9]))\.){3}"
r"(?:(2(5[0-5]|[0-4][0-9])|1[0-9][0-9]|[1-9]?[0-9])|[a-z0-9-]*[a-z0-9]:"
r"(?:[\x01-\x08\x0b\x0c\x0e-\x1f\x21-\x5a\x53-\x7f]|\\[\x01-\x09\x0b\x0c\x0e-\x7f])+)\])"
)
VARIABLE_NAME_REGEX = re.compile(
r"(^[a-zA-Z_])([a-zA-Z0-9_]*)"
)
MAC_ADDRESS_REGEX = re.compile(r'^(?:[0-9a-fA-F]{2}:){5}[0-9a-fA-F]{2}$')
IPV6_REGEX = re.compile(
'^(?:(?:[0-9A-Fa-f]{1,4}:){6}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|::(?:[0-9A-Fa-f]{1,4}:){5}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:[0-9A-Fa-f]{1,4})?::(?:[0-9A-Fa-f]{1,4}:){4}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4})?::(?:[0-9A-Fa-f]{1,4}:){3}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:(?:[0-9A-Fa-f]{1,4}:){,2}[0-9A-Fa-f]{1,4})?::(?:[0-9A-Fa-f]{1,4}:){2}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:(?:[0-9A-Fa-f]{1,4}:){,3}[0-9A-Fa-f]{1,4})?::[0-9A-Fa-f]{1,4}:(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:(?:[0-9A-Fa-f]{1,4}:){,4}[0-9A-Fa-f]{1,4})?::(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:(?:[0-9A-Fa-f]{1,4}:){,5}[0-9A-Fa-f]{1,4})?::[0-9A-Fa-f]{1,4}|(?:(?:[0-9A-Fa-f]{1,4}:){,6}[0-9A-Fa-f]{1,4})?::)(?:%25(?:[A-Za-z0-9\\-._~]|%[0-9A-Fa-f]{2})+)?$'
)
TIMEDELTA_REGEX = re.compile(r'((?P<days>\d+) days?, )?(?P<hours>\d+):'
r'(?P<minutes>\d+):(?P<seconds>\d+(\.\d+)?)')
MIME_TYPE_REGEX = re.compile(r"^multipart|[-\w.]+/[-\w.\+]+$")
# pylint: disable=W0613
## CORE
@disable_on_env
def uuid(value,
         allow_empty = False,
         **kwargs):
    """Validate that ``value`` is a valid :class:`UUID <python:uuid.UUID>`.

    :param value: The value to validate.

    :param allow_empty: If ``True``, returns :obj:`None <python:None>` if
      ``value`` is empty. If ``False``, raises a
      :class:`EmptyValueError <validator_collection.errors.EmptyValueError>`
      if ``value`` is empty. Defaults to ``False``.
    :type allow_empty: :class:`bool <python:bool>`

    :returns: ``value`` coerced to a :class:`UUID <python:uuid.UUID>` object /
      :obj:`None <python:None>`
    :rtype: :class:`UUID <python:uuid.UUID>` / :obj:`None <python:None>`

    :raises EmptyValueError: if ``value`` is empty and ``allow_empty`` is ``False``
    :raises CannotCoerceError: if ``value`` cannot be coerced to a
      :class:`UUID <python:uuid.UUID>`

    """
    if not value and not allow_empty:
        raise errors.EmptyValueError('value (%s) was empty' % value)
    elif not value:
        return None

    # already a UUID: nothing to coerce
    if isinstance(value, uuid_.UUID):
        return value

    try:
        value = uuid_.UUID(value)
    except ValueError:
        # BUGFIX: the original raised with an un-interpolated '%s' placeholder;
        # interpolate the offending value into the message as intended
        raise errors.CannotCoerceError('value (%s) cannot be coerced to a valid UUID' % value)

    return value
| (value, allow_empty=False, **kwargs) |
725,896 | validator_collection.validators | date | Validate that ``value`` is a valid date.
:param value: The value to validate.
:type value: :class:`str <python:str>` / :class:`datetime <python:datetime.datetime>`
/ :class:`date <python:datetime.date>` / :obj:`None <python:None>`
:param allow_empty: If ``True``, returns :obj:`None <python:None>` if
``value`` is empty. If ``False``, raises a
:class:`EmptyValueError <validator_collection.errors.EmptyValueError>`
if ``value`` is empty. Defaults to ``False``.
:type allow_empty: :class:`bool <python:bool>`
:param minimum: If supplied, will make sure that ``value`` is on or after this value.
:type minimum: :class:`datetime <python:datetime.datetime>` /
:class:`date <python:datetime.date>` / compliant :class:`str <python:str>`
/ :obj:`None <python:None>`
:param maximum: If supplied, will make sure that ``value`` is on or before this
value.
:type maximum: :class:`datetime <python:datetime.datetime>` /
:class:`date <python:datetime.date>` / compliant :class:`str <python:str>`
/ :obj:`None <python:None>`
:param coerce_value: If ``True``, will attempt to coerce ``value`` to a
:class:`date <python:datetime.date>` if it is a timestamp value. If ``False``,
will not.
:type coerce_value: :class:`bool <python:bool>`
:returns: ``value`` / :obj:`None <python:None>`
:rtype: :class:`date <python:datetime.date>` / :obj:`None <python:None>`
:raises EmptyValueError: if ``value`` is empty and ``allow_empty`` is ``False``
:raises CannotCoerceError: if ``value`` cannot be coerced to a
:class:`date <python:datetime.date>` and is not :obj:`None <python:None>`
:raises MinimumValueError: if ``minimum`` is supplied but ``value`` occurs before
``minimum``
:raises MaximumValueError: if ``maximum`` is supplied but ``value`` occurs after
``maximum``
| # -*- coding: utf-8 -*-
# The lack of a module docstring for this module is **INTENTIONAL**.
# The module is imported into the documentation using Sphinx's autodoc
# extension, and its member function documentation is automatically incorporated
# there as needed.
import decimal as decimal_
import fractions
import io
import math
import os
import uuid as uuid_
import datetime as datetime_
import string as string_
import sys
from ast import parse
import jsonschema
from validator_collection._compat import numeric_types, integer_types, datetime_types,\
date_types, time_types, timestamp_types, tzinfo_types, POSITIVE_INFINITY, \
NEGATIVE_INFINITY, TimeZone, json_, is_py2, is_py3, dict_, float_, basestring, re
from validator_collection._decorators import disable_on_env
from validator_collection import errors
URL_UNSAFE_CHARACTERS = ('[', ']', '{', '}', '|', '^', '%', '~')
URL_REGEX = re.compile(
r"^"
# protocol identifier
r"(?:(?:https?|ftp)://)"
# user:pass authentication
r"(?:\S+(?::\S*)?@)?"
r"(?:"
# IP address exclusion
# private & local networks
r"(?!(?:10|127)(?:\.\d{1,3}){3})"
r"(?!(?:169\.254|192\.168)(?:\.\d{1,3}){2})"
r"(?!172\.(?:1[6-9]|2\d|3[0-1])(?:\.\d{1,3}){2})"
# IP address dotted notation octets
# excludes loopback network 0.0.0.0
# excludes reserved space >= 224.0.0.0
# excludes network & broadcast addresses
# (first & last IP address of each class)
r"(?:[1-9]\d?|1\d\d|2[01]\d|22[0-3])"
r"(?:\.(?:1?\d{1,2}|2[0-4]\d|25[0-5])){2}"
r"(?:\.(?:[1-9]\d?|1\d\d|2[0-4]\d|25[0-4]))"
r"|"
r"(?:"
r"(?:localhost|invalid|test|example)|("
# host name
r"(?:(?:[A-z\u00a1-\uffff0-9]-*_*)*[A-z\u00a1-\uffff0-9]+)"
# domain name
r"(?:\.(?:[A-z\u00a1-\uffff0-9]-*)*[A-z\u00a1-\uffff0-9]+)*"
# TLD identifier
r"(?:\.(?:[A-z\u00a1-\uffff]{2,}))"
r")))"
# port number
r"(?::\d{2,5})?"
# resource path
r"(?:/\S*)?"
r"$"
, re.UNICODE)
URL_SPECIAL_IP_REGEX = re.compile(
r"^"
# protocol identifier
r"(?:(?:https?|ftp)://)"
# user:pass authentication
r"(?:\S+(?::\S*)?@)?"
r"(?:"
# IP address dotted notation octets
# excludes loopback network 0.0.0.0
# excludes reserved space >= 224.0.0.0
# excludes network & broadcast addresses
# (first & last IP address of each class)
r"(?:[1-9]\d?|1\d\d|2[01]\d|22[0-3])"
r"(?:\.(?:1?\d{1,2}|2[0-4]\d|25[0-5])){2}"
r"(?:\.(?:[1-9]\d?|1\d\d|2[0-4]\d|25[0-4]))"
r"|"
# host name
r"(?:(?:[a-z\u00a1-\uffff0-9]-*)*[a-z\u00a1-\uffff0-9]+)"
# domain name
r"(?:\.(?:[a-z\u00a1-\uffff0-9]-*)*[a-z\u00a1-\uffff0-9]+)*"
# TLD identifier
r"(?:\.(?:[a-z\u00a1-\uffff]{2,}))"
r")"
# port number
r"(?::\d{2,5})?"
# resource path
r"(?:/\S*)?"
r"$"
, re.UNICODE)
DOMAIN_REGEX = re.compile(
r"\b((?=[a-z\u00a1-\uffff0-9-]{1,63}\.)(xn--)?[a-z\u00a1-\uffff0-9]+"
r"(-[a-z\u00a1-\uffff0-9]+)*\.)+[a-z]{2,63}\b",
re.UNICODE|re.IGNORECASE
)
# URL schemes accepted when validating URLs.
# NOTE(review): presumably consumed by the module's url() validator — confirm
# against the full module, which is not entirely visible here.
URL_PROTOCOLS = ('http://',
                 'https://',
                 'ftp://')
# Special-use / reserved domain names (cf. RFC 2606 / RFC 6761) that are
# matched explicitly by URL_REGEX above even though they carry no public TLD.
SPECIAL_USE_DOMAIN_NAMES = ('localhost',
                            'invalid',
                            'test',
                            'example')
EMAIL_REGEX = re.compile(
r"(?:[a-z0-9!#$%&'*+/=?^_`{|}~-]+(?:\.[a-z0-9!#$%&'*+/=?^_`{|}~-]+)*|\""
r"(?:[\x01-\x08\x0b\x0c\x0e-\x1f\x21\x23-\x5b\x5d-\x7f]|\\[\x01-\x09\x0b\x0c\x0e-\x7f])*\")"
r"@(?:(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\.)+[a-z0-9](?:[a-z0-9-]*[a-z0-9])"
r"?|\[(?:(?:(2(5[0-5]|[0-4][0-9])|1[0-9][0-9]|[1-9]?[0-9]))\.){3}"
r"(?:(2(5[0-5]|[0-4][0-9])|1[0-9][0-9]|[1-9]?[0-9])|[a-z0-9-]*[a-z0-9]:"
r"(?:[\x01-\x08\x0b\x0c\x0e-\x1f\x21-\x5a\x53-\x7f]|\\[\x01-\x09\x0b\x0c\x0e-\x7f])+)\])"
)
VARIABLE_NAME_REGEX = re.compile(
r"(^[a-zA-Z_])([a-zA-Z0-9_]*)"
)
MAC_ADDRESS_REGEX = re.compile(r'^(?:[0-9a-fA-F]{2}:){5}[0-9a-fA-F]{2}$')
IPV6_REGEX = re.compile(
'^(?:(?:[0-9A-Fa-f]{1,4}:){6}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|::(?:[0-9A-Fa-f]{1,4}:){5}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:[0-9A-Fa-f]{1,4})?::(?:[0-9A-Fa-f]{1,4}:){4}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4})?::(?:[0-9A-Fa-f]{1,4}:){3}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:(?:[0-9A-Fa-f]{1,4}:){,2}[0-9A-Fa-f]{1,4})?::(?:[0-9A-Fa-f]{1,4}:){2}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:(?:[0-9A-Fa-f]{1,4}:){,3}[0-9A-Fa-f]{1,4})?::[0-9A-Fa-f]{1,4}:(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:(?:[0-9A-Fa-f]{1,4}:){,4}[0-9A-Fa-f]{1,4})?::(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:(?:[0-9A-Fa-f]{1,4}:){,5}[0-9A-Fa-f]{1,4})?::[0-9A-Fa-f]{1,4}|(?:(?:[0-9A-Fa-f]{1,4}:){,6}[0-9A-Fa-f]{1,4})?::)(?:%25(?:[A-Za-z0-9\\-._~]|%[0-9A-Fa-f]{2})+)?$'
)
TIMEDELTA_REGEX = re.compile(r'((?P<days>\d+) days?, )?(?P<hours>\d+):'
r'(?P<minutes>\d+):(?P<seconds>\d+(\.\d+)?)')
MIME_TYPE_REGEX = re.compile(r"^multipart|[-\w.]+/[-\w.\+]+$")
# pylint: disable=W0613
## CORE
@disable_on_env
def uuid(value,
         allow_empty = False,
         **kwargs):
    """Validate that ``value`` is a valid :class:`UUID <python:uuid.UUID>`.

    :param value: The value to validate.

    :param allow_empty: If ``True``, returns :obj:`None <python:None>` if
      ``value`` is empty. If ``False``, raises a
      :class:`EmptyValueError <validator_collection.errors.EmptyValueError>`
      if ``value`` is empty. Defaults to ``False``.
    :type allow_empty: :class:`bool <python:bool>`

    :returns: ``value`` coerced to a :class:`UUID <python:uuid.UUID>` object /
      :obj:`None <python:None>`
    :rtype: :class:`UUID <python:uuid.UUID>` / :obj:`None <python:None>`

    :raises EmptyValueError: if ``value`` is empty and ``allow_empty`` is ``False``
    :raises CannotCoerceError: if ``value`` cannot be coerced to a
      :class:`UUID <python:uuid.UUID>`

    """
    if not value and not allow_empty:
        raise errors.EmptyValueError('value (%s) was empty' % value)
    elif not value:
        return None

    # Already a UUID instance: nothing to coerce.
    if isinstance(value, uuid_.UUID):
        return value

    try:
        value = uuid_.UUID(value)
    except (ValueError, TypeError, AttributeError):
        # BUG FIX: the original message left the '%s' placeholder
        # uninterpolated (no '% value'). Also catch TypeError/AttributeError,
        # which uuid.UUID() raises for non-string inputs (e.g. ints), so the
        # documented CannotCoerceError contract holds for all bad inputs.
        raise errors.CannotCoerceError(
            'value (%s) cannot be coerced to a valid UUID' % value
        )

    return value
| (value, allow_empty=False, minimum=None, maximum=None, coerce_value=True, **kwargs) |
725,897 | validator_collection.validators | datetime | Validate that ``value`` is a valid datetime.
.. caution::
If supplying a string, the string needs to be in an ISO 8601-format to pass
validation. If it is not in an ISO 8601-format, validation will fail.
:param value: The value to validate.
:type value: :class:`str <python:str>` / :class:`datetime <python:datetime.datetime>`
/ :class:`date <python:datetime.date>` / :obj:`None <python:None>`
:param allow_empty: If ``True``, returns :obj:`None <python:None>` if
``value`` is empty. If ``False``, raises a
:class:`EmptyValueError <validator_collection.errors.EmptyValueError>`
if ``value`` is empty. Defaults to ``False``.
:type allow_empty: :class:`bool <python:bool>`
:param minimum: If supplied, will make sure that ``value`` is on or after this value.
:type minimum: :class:`datetime <python:datetime.datetime>` /
:class:`date <python:datetime.date>` / compliant :class:`str <python:str>` /
:obj:`None <python:None>`
:param maximum: If supplied, will make sure that ``value`` is on or before this
value.
:type maximum: :class:`datetime <python:datetime.datetime>` /
:class:`date <python:datetime.date>` / compliant :class:`str <python:str>` /
:obj:`None <python:None>`
:param coerce_value: If ``True``, will coerce dates to
:class:`datetime <python:datetime.datetime>` objects with times of 00:00:00. If ``False``, will error
if ``value`` is not an unambiguous timestamp. Defaults to ``True``.
:type coerce_value: :class:`bool <python:bool>`
:returns: ``value`` / :obj:`None <python:None>`
:rtype: :class:`datetime <python:datetime.datetime>` / :obj:`None <python:None>`
:raises EmptyValueError: if ``value`` is empty and ``allow_empty`` is ``False``
:raises CannotCoerceError: if ``value`` cannot be coerced to a
:class:`datetime <python:datetime.datetime>` value and is not
:obj:`None <python:None>`
:raises MinimumValueError: if ``minimum`` is supplied but ``value`` occurs
before ``minimum``
:raises MaximumValueError: if ``maximum`` is supplied but ``value`` occurs
after ``minimum``
| # -*- coding: utf-8 -*-
# The lack of a module docstring for this module is **INTENTIONAL**.
# The module is imported into the documentation using Sphinx's autodoc
# extension, and its member function documentation is automatically incorporated
# there as needed.
import decimal as decimal_
import fractions
import io
import math
import os
import uuid as uuid_
import datetime as datetime_
import string as string_
import sys
from ast import parse
import jsonschema
from validator_collection._compat import numeric_types, integer_types, datetime_types,\
date_types, time_types, timestamp_types, tzinfo_types, POSITIVE_INFINITY, \
NEGATIVE_INFINITY, TimeZone, json_, is_py2, is_py3, dict_, float_, basestring, re
from validator_collection._decorators import disable_on_env
from validator_collection import errors
URL_UNSAFE_CHARACTERS = ('[', ']', '{', '}', '|', '^', '%', '~')
URL_REGEX = re.compile(
r"^"
# protocol identifier
r"(?:(?:https?|ftp)://)"
# user:pass authentication
r"(?:\S+(?::\S*)?@)?"
r"(?:"
# IP address exclusion
# private & local networks
r"(?!(?:10|127)(?:\.\d{1,3}){3})"
r"(?!(?:169\.254|192\.168)(?:\.\d{1,3}){2})"
r"(?!172\.(?:1[6-9]|2\d|3[0-1])(?:\.\d{1,3}){2})"
# IP address dotted notation octets
# excludes loopback network 0.0.0.0
# excludes reserved space >= 224.0.0.0
# excludes network & broadcast addresses
# (first & last IP address of each class)
r"(?:[1-9]\d?|1\d\d|2[01]\d|22[0-3])"
r"(?:\.(?:1?\d{1,2}|2[0-4]\d|25[0-5])){2}"
r"(?:\.(?:[1-9]\d?|1\d\d|2[0-4]\d|25[0-4]))"
r"|"
r"(?:"
r"(?:localhost|invalid|test|example)|("
# host name
r"(?:(?:[A-z\u00a1-\uffff0-9]-*_*)*[A-z\u00a1-\uffff0-9]+)"
# domain name
r"(?:\.(?:[A-z\u00a1-\uffff0-9]-*)*[A-z\u00a1-\uffff0-9]+)*"
# TLD identifier
r"(?:\.(?:[A-z\u00a1-\uffff]{2,}))"
r")))"
# port number
r"(?::\d{2,5})?"
# resource path
r"(?:/\S*)?"
r"$"
, re.UNICODE)
URL_SPECIAL_IP_REGEX = re.compile(
r"^"
# protocol identifier
r"(?:(?:https?|ftp)://)"
# user:pass authentication
r"(?:\S+(?::\S*)?@)?"
r"(?:"
# IP address dotted notation octets
# excludes loopback network 0.0.0.0
# excludes reserved space >= 224.0.0.0
# excludes network & broadcast addresses
# (first & last IP address of each class)
r"(?:[1-9]\d?|1\d\d|2[01]\d|22[0-3])"
r"(?:\.(?:1?\d{1,2}|2[0-4]\d|25[0-5])){2}"
r"(?:\.(?:[1-9]\d?|1\d\d|2[0-4]\d|25[0-4]))"
r"|"
# host name
r"(?:(?:[a-z\u00a1-\uffff0-9]-*)*[a-z\u00a1-\uffff0-9]+)"
# domain name
r"(?:\.(?:[a-z\u00a1-\uffff0-9]-*)*[a-z\u00a1-\uffff0-9]+)*"
# TLD identifier
r"(?:\.(?:[a-z\u00a1-\uffff]{2,}))"
r")"
# port number
r"(?::\d{2,5})?"
# resource path
r"(?:/\S*)?"
r"$"
, re.UNICODE)
DOMAIN_REGEX = re.compile(
r"\b((?=[a-z\u00a1-\uffff0-9-]{1,63}\.)(xn--)?[a-z\u00a1-\uffff0-9]+"
r"(-[a-z\u00a1-\uffff0-9]+)*\.)+[a-z]{2,63}\b",
re.UNICODE|re.IGNORECASE
)
URL_PROTOCOLS = ('http://',
'https://',
'ftp://')
SPECIAL_USE_DOMAIN_NAMES = ('localhost',
'invalid',
'test',
'example')
EMAIL_REGEX = re.compile(
r"(?:[a-z0-9!#$%&'*+/=?^_`{|}~-]+(?:\.[a-z0-9!#$%&'*+/=?^_`{|}~-]+)*|\""
r"(?:[\x01-\x08\x0b\x0c\x0e-\x1f\x21\x23-\x5b\x5d-\x7f]|\\[\x01-\x09\x0b\x0c\x0e-\x7f])*\")"
r"@(?:(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\.)+[a-z0-9](?:[a-z0-9-]*[a-z0-9])"
r"?|\[(?:(?:(2(5[0-5]|[0-4][0-9])|1[0-9][0-9]|[1-9]?[0-9]))\.){3}"
r"(?:(2(5[0-5]|[0-4][0-9])|1[0-9][0-9]|[1-9]?[0-9])|[a-z0-9-]*[a-z0-9]:"
r"(?:[\x01-\x08\x0b\x0c\x0e-\x1f\x21-\x5a\x53-\x7f]|\\[\x01-\x09\x0b\x0c\x0e-\x7f])+)\])"
)
VARIABLE_NAME_REGEX = re.compile(
r"(^[a-zA-Z_])([a-zA-Z0-9_]*)"
)
MAC_ADDRESS_REGEX = re.compile(r'^(?:[0-9a-fA-F]{2}:){5}[0-9a-fA-F]{2}$')
IPV6_REGEX = re.compile(
'^(?:(?:[0-9A-Fa-f]{1,4}:){6}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|::(?:[0-9A-Fa-f]{1,4}:){5}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:[0-9A-Fa-f]{1,4})?::(?:[0-9A-Fa-f]{1,4}:){4}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4})?::(?:[0-9A-Fa-f]{1,4}:){3}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:(?:[0-9A-Fa-f]{1,4}:){,2}[0-9A-Fa-f]{1,4})?::(?:[0-9A-Fa-f]{1,4}:){2}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:(?:[0-9A-Fa-f]{1,4}:){,3}[0-9A-Fa-f]{1,4})?::[0-9A-Fa-f]{1,4}:(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:(?:[0-9A-Fa-f]{1,4}:){,4}[0-9A-Fa-f]{1,4})?::(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:(?:[0-9A-Fa-f]{1,4}:){,5}[0-9A-Fa-f]{1,4})?::[0-9A-Fa-f]{1,4}|(?:(?:[0-9A-Fa-f]{1,4}:){,6}[0-9A-Fa-f]{1,4})?::)(?:%25(?:[A-Za-z0-9\\-._~]|%[0-9A-Fa-f]{2})+)?$'
)
TIMEDELTA_REGEX = re.compile(r'((?P<days>\d+) days?, )?(?P<hours>\d+):'
r'(?P<minutes>\d+):(?P<seconds>\d+(\.\d+)?)')
MIME_TYPE_REGEX = re.compile(r"^multipart|[-\w.]+/[-\w.\+]+$")
# pylint: disable=W0613
## CORE
@disable_on_env
def uuid(value,
         allow_empty = False,
         **kwargs):
    """Validate that ``value`` is a valid :class:`UUID <python:uuid.UUID>`.

    :param value: The value to validate.

    :param allow_empty: If ``True``, returns :obj:`None <python:None>` if
      ``value`` is empty. If ``False``, raises a
      :class:`EmptyValueError <validator_collection.errors.EmptyValueError>`
      if ``value`` is empty. Defaults to ``False``.
    :type allow_empty: :class:`bool <python:bool>`

    :returns: ``value`` coerced to a :class:`UUID <python:uuid.UUID>` object /
      :obj:`None <python:None>`
    :rtype: :class:`UUID <python:uuid.UUID>` / :obj:`None <python:None>`

    :raises EmptyValueError: if ``value`` is empty and ``allow_empty`` is ``False``
    :raises CannotCoerceError: if ``value`` cannot be coerced to a
      :class:`UUID <python:uuid.UUID>`

    """
    if not value and not allow_empty:
        raise errors.EmptyValueError('value (%s) was empty' % value)
    elif not value:
        return None

    # Already a UUID instance: nothing to coerce.
    if isinstance(value, uuid_.UUID):
        return value

    try:
        value = uuid_.UUID(value)
    except (ValueError, TypeError, AttributeError):
        # BUG FIX: the original message left the '%s' placeholder
        # uninterpolated (no '% value'). Also catch TypeError/AttributeError,
        # which uuid.UUID() raises for non-string inputs (e.g. ints), so the
        # documented CannotCoerceError contract holds for all bad inputs.
        raise errors.CannotCoerceError(
            'value (%s) cannot be coerced to a valid UUID' % value
        )

    return value
| (value, allow_empty=False, minimum=None, maximum=None, coerce_value=True, **kwargs) |
725,898 | validator_collection.validators | decimal | Validate that ``value`` is a :class:`Decimal <python:decimal.Decimal>`.
:param value: The value to validate.
:param allow_empty: If ``True``, returns :obj:`None <python:None>` if ``value``
is :obj:`None <python:None>`. If ``False``, raises a
:class:`EmptyValueError <validator_collection.errors.EmptyValueError>` if
``value`` is :obj:`None <python:None>`. Defaults to ``False``.
:type allow_empty: :class:`bool <python:bool>`
:param minimum: If supplied, will make sure that ``value`` is greater than or
equal to this value.
:type minimum: numeric
:param maximum: If supplied, will make sure that ``value`` is less than or
equal to this value.
:type maximum: numeric
:returns: ``value`` / :obj:`None <python:None>`
:rtype: :class:`Decimal <python:decimal.Decimal>` / :obj:`None <python:None>`
:raises EmptyValueError: if ``value`` is :obj:`None <python:None>` and
``allow_empty`` is ``False``
:raises MinimumValueError: if ``minimum`` is supplied and ``value`` is less than the
``minimum``
:raises MaximumValueError: if ``maximum`` is supplied and ``value`` is more than the
``maximum``
:raises CannotCoerceError: if unable to coerce ``value`` to a
:class:`Decimal <python:decimal.Decimal>`
| # -*- coding: utf-8 -*-
# The lack of a module docstring for this module is **INTENTIONAL**.
# The module is imported into the documentation using Sphinx's autodoc
# extension, and its member function documentation is automatically incorporated
# there as needed.
import decimal as decimal_
import fractions
import io
import math
import os
import uuid as uuid_
import datetime as datetime_
import string as string_
import sys
from ast import parse
import jsonschema
from validator_collection._compat import numeric_types, integer_types, datetime_types,\
date_types, time_types, timestamp_types, tzinfo_types, POSITIVE_INFINITY, \
NEGATIVE_INFINITY, TimeZone, json_, is_py2, is_py3, dict_, float_, basestring, re
from validator_collection._decorators import disable_on_env
from validator_collection import errors
URL_UNSAFE_CHARACTERS = ('[', ']', '{', '}', '|', '^', '%', '~')
URL_REGEX = re.compile(
r"^"
# protocol identifier
r"(?:(?:https?|ftp)://)"
# user:pass authentication
r"(?:\S+(?::\S*)?@)?"
r"(?:"
# IP address exclusion
# private & local networks
r"(?!(?:10|127)(?:\.\d{1,3}){3})"
r"(?!(?:169\.254|192\.168)(?:\.\d{1,3}){2})"
r"(?!172\.(?:1[6-9]|2\d|3[0-1])(?:\.\d{1,3}){2})"
# IP address dotted notation octets
# excludes loopback network 0.0.0.0
# excludes reserved space >= 224.0.0.0
# excludes network & broadcast addresses
# (first & last IP address of each class)
r"(?:[1-9]\d?|1\d\d|2[01]\d|22[0-3])"
r"(?:\.(?:1?\d{1,2}|2[0-4]\d|25[0-5])){2}"
r"(?:\.(?:[1-9]\d?|1\d\d|2[0-4]\d|25[0-4]))"
r"|"
r"(?:"
r"(?:localhost|invalid|test|example)|("
# host name
r"(?:(?:[A-z\u00a1-\uffff0-9]-*_*)*[A-z\u00a1-\uffff0-9]+)"
# domain name
r"(?:\.(?:[A-z\u00a1-\uffff0-9]-*)*[A-z\u00a1-\uffff0-9]+)*"
# TLD identifier
r"(?:\.(?:[A-z\u00a1-\uffff]{2,}))"
r")))"
# port number
r"(?::\d{2,5})?"
# resource path
r"(?:/\S*)?"
r"$"
, re.UNICODE)
URL_SPECIAL_IP_REGEX = re.compile(
r"^"
# protocol identifier
r"(?:(?:https?|ftp)://)"
# user:pass authentication
r"(?:\S+(?::\S*)?@)?"
r"(?:"
# IP address dotted notation octets
# excludes loopback network 0.0.0.0
# excludes reserved space >= 224.0.0.0
# excludes network & broadcast addresses
# (first & last IP address of each class)
r"(?:[1-9]\d?|1\d\d|2[01]\d|22[0-3])"
r"(?:\.(?:1?\d{1,2}|2[0-4]\d|25[0-5])){2}"
r"(?:\.(?:[1-9]\d?|1\d\d|2[0-4]\d|25[0-4]))"
r"|"
# host name
r"(?:(?:[a-z\u00a1-\uffff0-9]-*)*[a-z\u00a1-\uffff0-9]+)"
# domain name
r"(?:\.(?:[a-z\u00a1-\uffff0-9]-*)*[a-z\u00a1-\uffff0-9]+)*"
# TLD identifier
r"(?:\.(?:[a-z\u00a1-\uffff]{2,}))"
r")"
# port number
r"(?::\d{2,5})?"
# resource path
r"(?:/\S*)?"
r"$"
, re.UNICODE)
DOMAIN_REGEX = re.compile(
r"\b((?=[a-z\u00a1-\uffff0-9-]{1,63}\.)(xn--)?[a-z\u00a1-\uffff0-9]+"
r"(-[a-z\u00a1-\uffff0-9]+)*\.)+[a-z]{2,63}\b",
re.UNICODE|re.IGNORECASE
)
URL_PROTOCOLS = ('http://',
'https://',
'ftp://')
SPECIAL_USE_DOMAIN_NAMES = ('localhost',
'invalid',
'test',
'example')
EMAIL_REGEX = re.compile(
r"(?:[a-z0-9!#$%&'*+/=?^_`{|}~-]+(?:\.[a-z0-9!#$%&'*+/=?^_`{|}~-]+)*|\""
r"(?:[\x01-\x08\x0b\x0c\x0e-\x1f\x21\x23-\x5b\x5d-\x7f]|\\[\x01-\x09\x0b\x0c\x0e-\x7f])*\")"
r"@(?:(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\.)+[a-z0-9](?:[a-z0-9-]*[a-z0-9])"
r"?|\[(?:(?:(2(5[0-5]|[0-4][0-9])|1[0-9][0-9]|[1-9]?[0-9]))\.){3}"
r"(?:(2(5[0-5]|[0-4][0-9])|1[0-9][0-9]|[1-9]?[0-9])|[a-z0-9-]*[a-z0-9]:"
r"(?:[\x01-\x08\x0b\x0c\x0e-\x1f\x21-\x5a\x53-\x7f]|\\[\x01-\x09\x0b\x0c\x0e-\x7f])+)\])"
)
VARIABLE_NAME_REGEX = re.compile(
r"(^[a-zA-Z_])([a-zA-Z0-9_]*)"
)
MAC_ADDRESS_REGEX = re.compile(r'^(?:[0-9a-fA-F]{2}:){5}[0-9a-fA-F]{2}$')
IPV6_REGEX = re.compile(
'^(?:(?:[0-9A-Fa-f]{1,4}:){6}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|::(?:[0-9A-Fa-f]{1,4}:){5}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:[0-9A-Fa-f]{1,4})?::(?:[0-9A-Fa-f]{1,4}:){4}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4})?::(?:[0-9A-Fa-f]{1,4}:){3}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:(?:[0-9A-Fa-f]{1,4}:){,2}[0-9A-Fa-f]{1,4})?::(?:[0-9A-Fa-f]{1,4}:){2}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:(?:[0-9A-Fa-f]{1,4}:){,3}[0-9A-Fa-f]{1,4})?::[0-9A-Fa-f]{1,4}:(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:(?:[0-9A-Fa-f]{1,4}:){,4}[0-9A-Fa-f]{1,4})?::(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:(?:[0-9A-Fa-f]{1,4}:){,5}[0-9A-Fa-f]{1,4})?::[0-9A-Fa-f]{1,4}|(?:(?:[0-9A-Fa-f]{1,4}:){,6}[0-9A-Fa-f]{1,4})?::)(?:%25(?:[A-Za-z0-9\\-._~]|%[0-9A-Fa-f]{2})+)?$'
)
TIMEDELTA_REGEX = re.compile(r'((?P<days>\d+) days?, )?(?P<hours>\d+):'
r'(?P<minutes>\d+):(?P<seconds>\d+(\.\d+)?)')
MIME_TYPE_REGEX = re.compile(r"^multipart|[-\w.]+/[-\w.\+]+$")
# pylint: disable=W0613
## CORE
@disable_on_env
def uuid(value,
         allow_empty = False,
         **kwargs):
    """Validate that ``value`` is a valid :class:`UUID <python:uuid.UUID>`.

    :param value: The value to validate.

    :param allow_empty: If ``True``, returns :obj:`None <python:None>` if
      ``value`` is empty. If ``False``, raises a
      :class:`EmptyValueError <validator_collection.errors.EmptyValueError>`
      if ``value`` is empty. Defaults to ``False``.
    :type allow_empty: :class:`bool <python:bool>`

    :returns: ``value`` coerced to a :class:`UUID <python:uuid.UUID>` object /
      :obj:`None <python:None>`
    :rtype: :class:`UUID <python:uuid.UUID>` / :obj:`None <python:None>`

    :raises EmptyValueError: if ``value`` is empty and ``allow_empty`` is ``False``
    :raises CannotCoerceError: if ``value`` cannot be coerced to a
      :class:`UUID <python:uuid.UUID>`

    """
    if not value and not allow_empty:
        raise errors.EmptyValueError('value (%s) was empty' % value)
    elif not value:
        return None

    # Already a UUID instance: nothing to coerce.
    if isinstance(value, uuid_.UUID):
        return value

    try:
        value = uuid_.UUID(value)
    except (ValueError, TypeError, AttributeError):
        # BUG FIX: the original message left the '%s' placeholder
        # uninterpolated (no '% value'). Also catch TypeError/AttributeError,
        # which uuid.UUID() raises for non-string inputs (e.g. ints), so the
        # documented CannotCoerceError contract holds for all bad inputs.
        raise errors.CannotCoerceError(
            'value (%s) cannot be coerced to a valid UUID' % value
        )

    return value
| (value, allow_empty=False, minimum=None, maximum=None, **kwargs) |
725,899 | validator_collection.validators | dict | Validate that ``value`` is a :class:`dict <python:dict>`.
.. hint::
If ``value`` is a string, this validator will assume it is a JSON
object and try to convert it into a :class:`dict <python:dict>`
You can override the JSON serializer used by passing it to the
``json_serializer`` property. By default, will utilize the Python
:class:`json <json>` encoder/decoder.
:param value: The value to validate.
:param allow_empty: If ``True``, returns :obj:`None <python:None>` if
``value`` is empty. If ``False``, raises a
:class:`EmptyValueError <validator_collection.errors.EmptyValueError>`
if ``value`` is empty. Defaults to ``False``.
:type allow_empty: :class:`bool <python:bool>`
:param json_serializer: The JSON encoder/decoder to use to deserialize a
string passed in ``value``. If not supplied, will default to the Python
:class:`json <python:json>` encoder/decoder.
:type json_serializer: callable
:returns: ``value`` / :obj:`None <python:None>`
:rtype: :class:`dict <python:dict>` / :obj:`None <python:None>`
:raises EmptyValueError: if ``value`` is empty and ``allow_empty`` is ``False``
:raises CannotCoerceError: if ``value`` cannot be coerced to a
:class:`dict <python:dict>`
:raises NotADictError: if ``value`` is not a :class:`dict <python:dict>`
| # -*- coding: utf-8 -*-
# The lack of a module docstring for this module is **INTENTIONAL**.
# The module is imported into the documentation using Sphinx's autodoc
# extension, and its member function documentation is automatically incorporated
# there as needed.
import decimal as decimal_
import fractions
import io
import math
import os
import uuid as uuid_
import datetime as datetime_
import string as string_
import sys
from ast import parse
import jsonschema
from validator_collection._compat import numeric_types, integer_types, datetime_types,\
date_types, time_types, timestamp_types, tzinfo_types, POSITIVE_INFINITY, \
NEGATIVE_INFINITY, TimeZone, json_, is_py2, is_py3, dict_, float_, basestring, re
from validator_collection._decorators import disable_on_env
from validator_collection import errors
URL_UNSAFE_CHARACTERS = ('[', ']', '{', '}', '|', '^', '%', '~')
URL_REGEX = re.compile(
r"^"
# protocol identifier
r"(?:(?:https?|ftp)://)"
# user:pass authentication
r"(?:\S+(?::\S*)?@)?"
r"(?:"
# IP address exclusion
# private & local networks
r"(?!(?:10|127)(?:\.\d{1,3}){3})"
r"(?!(?:169\.254|192\.168)(?:\.\d{1,3}){2})"
r"(?!172\.(?:1[6-9]|2\d|3[0-1])(?:\.\d{1,3}){2})"
# IP address dotted notation octets
# excludes loopback network 0.0.0.0
# excludes reserved space >= 224.0.0.0
# excludes network & broadcast addresses
# (first & last IP address of each class)
r"(?:[1-9]\d?|1\d\d|2[01]\d|22[0-3])"
r"(?:\.(?:1?\d{1,2}|2[0-4]\d|25[0-5])){2}"
r"(?:\.(?:[1-9]\d?|1\d\d|2[0-4]\d|25[0-4]))"
r"|"
r"(?:"
r"(?:localhost|invalid|test|example)|("
# host name
r"(?:(?:[A-z\u00a1-\uffff0-9]-*_*)*[A-z\u00a1-\uffff0-9]+)"
# domain name
r"(?:\.(?:[A-z\u00a1-\uffff0-9]-*)*[A-z\u00a1-\uffff0-9]+)*"
# TLD identifier
r"(?:\.(?:[A-z\u00a1-\uffff]{2,}))"
r")))"
# port number
r"(?::\d{2,5})?"
# resource path
r"(?:/\S*)?"
r"$"
, re.UNICODE)
URL_SPECIAL_IP_REGEX = re.compile(
r"^"
# protocol identifier
r"(?:(?:https?|ftp)://)"
# user:pass authentication
r"(?:\S+(?::\S*)?@)?"
r"(?:"
# IP address dotted notation octets
# excludes loopback network 0.0.0.0
# excludes reserved space >= 224.0.0.0
# excludes network & broadcast addresses
# (first & last IP address of each class)
r"(?:[1-9]\d?|1\d\d|2[01]\d|22[0-3])"
r"(?:\.(?:1?\d{1,2}|2[0-4]\d|25[0-5])){2}"
r"(?:\.(?:[1-9]\d?|1\d\d|2[0-4]\d|25[0-4]))"
r"|"
# host name
r"(?:(?:[a-z\u00a1-\uffff0-9]-*)*[a-z\u00a1-\uffff0-9]+)"
# domain name
r"(?:\.(?:[a-z\u00a1-\uffff0-9]-*)*[a-z\u00a1-\uffff0-9]+)*"
# TLD identifier
r"(?:\.(?:[a-z\u00a1-\uffff]{2,}))"
r")"
# port number
r"(?::\d{2,5})?"
# resource path
r"(?:/\S*)?"
r"$"
, re.UNICODE)
DOMAIN_REGEX = re.compile(
r"\b((?=[a-z\u00a1-\uffff0-9-]{1,63}\.)(xn--)?[a-z\u00a1-\uffff0-9]+"
r"(-[a-z\u00a1-\uffff0-9]+)*\.)+[a-z]{2,63}\b",
re.UNICODE|re.IGNORECASE
)
URL_PROTOCOLS = ('http://',
'https://',
'ftp://')
SPECIAL_USE_DOMAIN_NAMES = ('localhost',
'invalid',
'test',
'example')
EMAIL_REGEX = re.compile(
r"(?:[a-z0-9!#$%&'*+/=?^_`{|}~-]+(?:\.[a-z0-9!#$%&'*+/=?^_`{|}~-]+)*|\""
r"(?:[\x01-\x08\x0b\x0c\x0e-\x1f\x21\x23-\x5b\x5d-\x7f]|\\[\x01-\x09\x0b\x0c\x0e-\x7f])*\")"
r"@(?:(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\.)+[a-z0-9](?:[a-z0-9-]*[a-z0-9])"
r"?|\[(?:(?:(2(5[0-5]|[0-4][0-9])|1[0-9][0-9]|[1-9]?[0-9]))\.){3}"
r"(?:(2(5[0-5]|[0-4][0-9])|1[0-9][0-9]|[1-9]?[0-9])|[a-z0-9-]*[a-z0-9]:"
r"(?:[\x01-\x08\x0b\x0c\x0e-\x1f\x21-\x5a\x53-\x7f]|\\[\x01-\x09\x0b\x0c\x0e-\x7f])+)\])"
)
VARIABLE_NAME_REGEX = re.compile(
r"(^[a-zA-Z_])([a-zA-Z0-9_]*)"
)
MAC_ADDRESS_REGEX = re.compile(r'^(?:[0-9a-fA-F]{2}:){5}[0-9a-fA-F]{2}$')
IPV6_REGEX = re.compile(
'^(?:(?:[0-9A-Fa-f]{1,4}:){6}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|::(?:[0-9A-Fa-f]{1,4}:){5}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:[0-9A-Fa-f]{1,4})?::(?:[0-9A-Fa-f]{1,4}:){4}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4})?::(?:[0-9A-Fa-f]{1,4}:){3}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:(?:[0-9A-Fa-f]{1,4}:){,2}[0-9A-Fa-f]{1,4})?::(?:[0-9A-Fa-f]{1,4}:){2}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:(?:[0-9A-Fa-f]{1,4}:){,3}[0-9A-Fa-f]{1,4})?::[0-9A-Fa-f]{1,4}:(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:(?:[0-9A-Fa-f]{1,4}:){,4}[0-9A-Fa-f]{1,4})?::(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:(?:[0-9A-Fa-f]{1,4}:){,5}[0-9A-Fa-f]{1,4})?::[0-9A-Fa-f]{1,4}|(?:(?:[0-9A-Fa-f]{1,4}:){,6}[0-9A-Fa-f]{1,4})?::)(?:%25(?:[A-Za-z0-9\\-._~]|%[0-9A-Fa-f]{2})+)?$'
)
TIMEDELTA_REGEX = re.compile(r'((?P<days>\d+) days?, )?(?P<hours>\d+):'
r'(?P<minutes>\d+):(?P<seconds>\d+(\.\d+)?)')
MIME_TYPE_REGEX = re.compile(r"^multipart|[-\w.]+/[-\w.\+]+$")
# pylint: disable=W0613
## CORE
@disable_on_env
def uuid(value,
         allow_empty = False,
         **kwargs):
    """Validate that ``value`` is a valid :class:`UUID <python:uuid.UUID>`.

    :param value: The value to validate.

    :param allow_empty: If ``True``, returns :obj:`None <python:None>` if
      ``value`` is empty. If ``False``, raises a
      :class:`EmptyValueError <validator_collection.errors.EmptyValueError>`
      if ``value`` is empty. Defaults to ``False``.
    :type allow_empty: :class:`bool <python:bool>`

    :returns: ``value`` coerced to a :class:`UUID <python:uuid.UUID>` object /
      :obj:`None <python:None>`
    :rtype: :class:`UUID <python:uuid.UUID>` / :obj:`None <python:None>`

    :raises EmptyValueError: if ``value`` is empty and ``allow_empty`` is ``False``
    :raises CannotCoerceError: if ``value`` cannot be coerced to a
      :class:`UUID <python:uuid.UUID>`

    """
    if not value and not allow_empty:
        raise errors.EmptyValueError('value (%s) was empty' % value)
    elif not value:
        return None

    # Already a UUID instance: nothing to coerce.
    if isinstance(value, uuid_.UUID):
        return value

    try:
        value = uuid_.UUID(value)
    except (ValueError, TypeError, AttributeError):
        # BUG FIX: the original message left the '%s' placeholder
        # uninterpolated (no '% value'). Also catch TypeError/AttributeError,
        # which uuid.UUID() raises for non-string inputs (e.g. ints), so the
        # documented CannotCoerceError contract holds for all bad inputs.
        raise errors.CannotCoerceError(
            'value (%s) cannot be coerced to a valid UUID' % value
        )

    return value
| (value, allow_empty=False, json_serializer=None, **kwargs) |
725,900 | validator_collection.validators | directory_exists | Validate that ``value`` is a valid directory that exists on the local
filesystem.
:param value: The value to validate.
:param allow_empty: If ``True``, returns :obj:`None <python:None>` if
``value`` is empty. If ``False``, raises a
:class:`EmptyValueError <validator_collection.errors.EmptyValueError>`
if ``value`` is empty. Defaults to ``False``.
:type allow_empty: :class:`bool <python:bool>`
:returns: The file name represented by ``value``.
:rtype: Path-like object / :obj:`None <python:None>`
:raises EmptyValueError: if ``allow_empty`` is ``False`` and ``value``
is empty
:raises NotPathlikeError: if ``value`` is not a path-like object
:raises PathExistsError: if ``value`` does not exist on the local filesystem
:raises NotADirectoryError: if ``value`` is not a valid directory
| # -*- coding: utf-8 -*-
# The lack of a module docstring for this module is **INTENTIONAL**.
# The module is imported into the documentation using Sphinx's autodoc
# extension, and its member function documentation is automatically incorporated
# there as needed.
import decimal as decimal_
import fractions
import io
import math
import os
import uuid as uuid_
import datetime as datetime_
import string as string_
import sys
from ast import parse
import jsonschema
from validator_collection._compat import numeric_types, integer_types, datetime_types,\
date_types, time_types, timestamp_types, tzinfo_types, POSITIVE_INFINITY, \
NEGATIVE_INFINITY, TimeZone, json_, is_py2, is_py3, dict_, float_, basestring, re
from validator_collection._decorators import disable_on_env
from validator_collection import errors
URL_UNSAFE_CHARACTERS = ('[', ']', '{', '}', '|', '^', '%', '~')
URL_REGEX = re.compile(
r"^"
# protocol identifier
r"(?:(?:https?|ftp)://)"
# user:pass authentication
r"(?:\S+(?::\S*)?@)?"
r"(?:"
# IP address exclusion
# private & local networks
r"(?!(?:10|127)(?:\.\d{1,3}){3})"
r"(?!(?:169\.254|192\.168)(?:\.\d{1,3}){2})"
r"(?!172\.(?:1[6-9]|2\d|3[0-1])(?:\.\d{1,3}){2})"
# IP address dotted notation octets
# excludes loopback network 0.0.0.0
# excludes reserved space >= 224.0.0.0
# excludes network & broadcast addresses
# (first & last IP address of each class)
r"(?:[1-9]\d?|1\d\d|2[01]\d|22[0-3])"
r"(?:\.(?:1?\d{1,2}|2[0-4]\d|25[0-5])){2}"
r"(?:\.(?:[1-9]\d?|1\d\d|2[0-4]\d|25[0-4]))"
r"|"
r"(?:"
r"(?:localhost|invalid|test|example)|("
# host name
r"(?:(?:[A-z\u00a1-\uffff0-9]-*_*)*[A-z\u00a1-\uffff0-9]+)"
# domain name
r"(?:\.(?:[A-z\u00a1-\uffff0-9]-*)*[A-z\u00a1-\uffff0-9]+)*"
# TLD identifier
r"(?:\.(?:[A-z\u00a1-\uffff]{2,}))"
r")))"
# port number
r"(?::\d{2,5})?"
# resource path
r"(?:/\S*)?"
r"$"
, re.UNICODE)
URL_SPECIAL_IP_REGEX = re.compile(
r"^"
# protocol identifier
r"(?:(?:https?|ftp)://)"
# user:pass authentication
r"(?:\S+(?::\S*)?@)?"
r"(?:"
# IP address dotted notation octets
# excludes loopback network 0.0.0.0
# excludes reserved space >= 224.0.0.0
# excludes network & broadcast addresses
# (first & last IP address of each class)
r"(?:[1-9]\d?|1\d\d|2[01]\d|22[0-3])"
r"(?:\.(?:1?\d{1,2}|2[0-4]\d|25[0-5])){2}"
r"(?:\.(?:[1-9]\d?|1\d\d|2[0-4]\d|25[0-4]))"
r"|"
# host name
r"(?:(?:[a-z\u00a1-\uffff0-9]-*)*[a-z\u00a1-\uffff0-9]+)"
# domain name
r"(?:\.(?:[a-z\u00a1-\uffff0-9]-*)*[a-z\u00a1-\uffff0-9]+)*"
# TLD identifier
r"(?:\.(?:[a-z\u00a1-\uffff]{2,}))"
r")"
# port number
r"(?::\d{2,5})?"
# resource path
r"(?:/\S*)?"
r"$"
, re.UNICODE)
DOMAIN_REGEX = re.compile(
r"\b((?=[a-z\u00a1-\uffff0-9-]{1,63}\.)(xn--)?[a-z\u00a1-\uffff0-9]+"
r"(-[a-z\u00a1-\uffff0-9]+)*\.)+[a-z]{2,63}\b",
re.UNICODE|re.IGNORECASE
)
URL_PROTOCOLS = ('http://',
'https://',
'ftp://')
SPECIAL_USE_DOMAIN_NAMES = ('localhost',
'invalid',
'test',
'example')
EMAIL_REGEX = re.compile(
r"(?:[a-z0-9!#$%&'*+/=?^_`{|}~-]+(?:\.[a-z0-9!#$%&'*+/=?^_`{|}~-]+)*|\""
r"(?:[\x01-\x08\x0b\x0c\x0e-\x1f\x21\x23-\x5b\x5d-\x7f]|\\[\x01-\x09\x0b\x0c\x0e-\x7f])*\")"
r"@(?:(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\.)+[a-z0-9](?:[a-z0-9-]*[a-z0-9])"
r"?|\[(?:(?:(2(5[0-5]|[0-4][0-9])|1[0-9][0-9]|[1-9]?[0-9]))\.){3}"
r"(?:(2(5[0-5]|[0-4][0-9])|1[0-9][0-9]|[1-9]?[0-9])|[a-z0-9-]*[a-z0-9]:"
r"(?:[\x01-\x08\x0b\x0c\x0e-\x1f\x21-\x5a\x53-\x7f]|\\[\x01-\x09\x0b\x0c\x0e-\x7f])+)\])"
)
VARIABLE_NAME_REGEX = re.compile(
r"(^[a-zA-Z_])([a-zA-Z0-9_]*)"
)
MAC_ADDRESS_REGEX = re.compile(r'^(?:[0-9a-fA-F]{2}:){5}[0-9a-fA-F]{2}$')
IPV6_REGEX = re.compile(
'^(?:(?:[0-9A-Fa-f]{1,4}:){6}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|::(?:[0-9A-Fa-f]{1,4}:){5}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:[0-9A-Fa-f]{1,4})?::(?:[0-9A-Fa-f]{1,4}:){4}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4})?::(?:[0-9A-Fa-f]{1,4}:){3}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:(?:[0-9A-Fa-f]{1,4}:){,2}[0-9A-Fa-f]{1,4})?::(?:[0-9A-Fa-f]{1,4}:){2}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:(?:[0-9A-Fa-f]{1,4}:){,3}[0-9A-Fa-f]{1,4})?::[0-9A-Fa-f]{1,4}:(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:(?:[0-9A-Fa-f]{1,4}:){,4}[0-9A-Fa-f]{1,4})?::(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:(?:[0-9A-Fa-f]{1,4}:){,5}[0-9A-Fa-f]{1,4})?::[0-9A-Fa-f]{1,4}|(?:(?:[0-9A-Fa-f]{1,4}:){,6}[0-9A-Fa-f]{1,4})?::)(?:%25(?:[A-Za-z0-9\\-._~]|%[0-9A-Fa-f]{2})+)?$'
)
TIMEDELTA_REGEX = re.compile(r'((?P<days>\d+) days?, )?(?P<hours>\d+):'
r'(?P<minutes>\d+):(?P<seconds>\d+(\.\d+)?)')
MIME_TYPE_REGEX = re.compile(r"^multipart|[-\w.]+/[-\w.\+]+$")
# pylint: disable=W0613
## CORE
@disable_on_env
def uuid(value,
allow_empty = False,
**kwargs):
"""Validate that ``value`` is a valid :class:`UUID <python:uuid.UUID>`.
:param value: The value to validate.
:param allow_empty: If ``True``, returns :obj:`None <python:None>` if
``value`` is empty. If ``False``, raises a
:class:`EmptyValueError <validator_collection.errors.EmptyValueError>`
if ``value`` is empty. Defaults to ``False``.
:type allow_empty: :class:`bool <python:bool>`
:returns: ``value`` coerced to a :class:`UUID <python:uuid.UUID>` object /
:obj:`None <python:None>`
:rtype: :class:`UUID <python:uuid.UUID>` / :obj:`None <python:None>`
:raises EmptyValueError: if ``value`` is empty and ``allow_empty`` is ``False``
:raises CannotCoerceError: if ``value`` cannot be coerced to a
:class:`UUID <python:uuid.UUID>`
"""
if not value and not allow_empty:
raise errors.EmptyValueError('value (%s) was empty' % value)
elif not value:
return None
if isinstance(value, uuid_.UUID):
return value
try:
value = uuid_.UUID(value)
except ValueError:
raise errors.CannotCoerceError('value (%s) cannot be coerced to a valid UUID')
return value
| (value, allow_empty=False, **kwargs) |
725,901 | validator_collection.validators | domain | Validate that ``value`` is a valid domain name.
.. caution::
This validator does not verify that ``value`` **exists** as a domain. It
merely verifies that its contents *might* exist as a domain.
.. note::
This validator checks to validate that ``value`` resembles a valid
domain name. It is - generally - compliant with
`RFC 1035 <https://tools.ietf.org/html/rfc1035>`_ and
`RFC 6761 <https://tools.ietf.org/html/rfc6761>`_, however it diverges
in a number of key ways:
* Including authentication (e.g. ``username:[email protected]``) will
fail validation.
* Including a path (e.g. ``domain.dev/path/to/file``) will fail validation.
* Including a port (e.g. ``domain.dev:8080``) will fail validation.
If you are hoping to validate a more complete URL, we recommend that you
see :func:`url <validator_collection.validators.url>`.
.. hint::
Leading and trailing whitespace will be automatically stripped.
:param value: The value to validate.
:type value: :class:`str <python:str>` / :obj:`None <python:None>`
:param allow_empty: If ``True``, returns :obj:`None <python:None>` if
``value`` is empty. If ``False``, raises a
:class:`EmptyValueError <validator_collection.errors.EmptyValueError>`
if ``value`` is empty. Defaults to ``False``.
:type allow_empty: :class:`bool <python:bool>`
:param allow_ips: If ``True``, will succeed when validating IP addresses,
If ``False``, will raise a :class:`InvalidDomainError` if ``value`` is an IP
address. Defaults to ``False``.
:type allow_ips: :class:`bool <python:bool>`
:returns: ``value`` / :obj:`None <python:None>`
:rtype: :class:`str <python:str>` / :obj:`None <python:None>`
:raises EmptyValueError: if ``value`` is empty and ``allow_empty`` is ``False``
:raises CannotCoerceError: if ``value`` is not a :class:`str <python:str>` or
:obj:`None <python:None>`
:raises InvalidDomainError: if ``value`` is not a valid domain name or
empty with ``allow_empty`` set to ``True``
:raises SlashInDomainError: if ``value`` contains a slash or backslash
:raises AtInDomainError: if ``value`` contains an ``@`` symbol
:raises ColonInDomainError: if ``value`` contains a ``:`` symbol
:raises WhitespaceInDomainError: if ``value`` contains whitespace
| # -*- coding: utf-8 -*-
# The lack of a module docstring for this module is **INTENTIONAL**.
# The module is imported into the documentation using Sphinx's autodoc
# extension, and its member function documentation is automatically incorporated
# there as needed.
import decimal as decimal_
import fractions
import io
import math
import os
import uuid as uuid_
import datetime as datetime_
import string as string_
import sys
from ast import parse
import jsonschema
from validator_collection._compat import numeric_types, integer_types, datetime_types,\
date_types, time_types, timestamp_types, tzinfo_types, POSITIVE_INFINITY, \
NEGATIVE_INFINITY, TimeZone, json_, is_py2, is_py3, dict_, float_, basestring, re
from validator_collection._decorators import disable_on_env
from validator_collection import errors
URL_UNSAFE_CHARACTERS = ('[', ']', '{', '}', '|', '^', '%', '~')
URL_REGEX = re.compile(
r"^"
# protocol identifier
r"(?:(?:https?|ftp)://)"
# user:pass authentication
r"(?:\S+(?::\S*)?@)?"
r"(?:"
# IP address exclusion
# private & local networks
r"(?!(?:10|127)(?:\.\d{1,3}){3})"
r"(?!(?:169\.254|192\.168)(?:\.\d{1,3}){2})"
r"(?!172\.(?:1[6-9]|2\d|3[0-1])(?:\.\d{1,3}){2})"
# IP address dotted notation octets
# excludes loopback network 0.0.0.0
# excludes reserved space >= 224.0.0.0
# excludes network & broadcast addresses
# (first & last IP address of each class)
r"(?:[1-9]\d?|1\d\d|2[01]\d|22[0-3])"
r"(?:\.(?:1?\d{1,2}|2[0-4]\d|25[0-5])){2}"
r"(?:\.(?:[1-9]\d?|1\d\d|2[0-4]\d|25[0-4]))"
r"|"
r"(?:"
r"(?:localhost|invalid|test|example)|("
# host name
r"(?:(?:[A-z\u00a1-\uffff0-9]-*_*)*[A-z\u00a1-\uffff0-9]+)"
# domain name
r"(?:\.(?:[A-z\u00a1-\uffff0-9]-*)*[A-z\u00a1-\uffff0-9]+)*"
# TLD identifier
r"(?:\.(?:[A-z\u00a1-\uffff]{2,}))"
r")))"
# port number
r"(?::\d{2,5})?"
# resource path
r"(?:/\S*)?"
r"$"
, re.UNICODE)
URL_SPECIAL_IP_REGEX = re.compile(
r"^"
# protocol identifier
r"(?:(?:https?|ftp)://)"
# user:pass authentication
r"(?:\S+(?::\S*)?@)?"
r"(?:"
# IP address dotted notation octets
# excludes loopback network 0.0.0.0
# excludes reserved space >= 224.0.0.0
# excludes network & broadcast addresses
# (first & last IP address of each class)
r"(?:[1-9]\d?|1\d\d|2[01]\d|22[0-3])"
r"(?:\.(?:1?\d{1,2}|2[0-4]\d|25[0-5])){2}"
r"(?:\.(?:[1-9]\d?|1\d\d|2[0-4]\d|25[0-4]))"
r"|"
# host name
r"(?:(?:[a-z\u00a1-\uffff0-9]-*)*[a-z\u00a1-\uffff0-9]+)"
# domain name
r"(?:\.(?:[a-z\u00a1-\uffff0-9]-*)*[a-z\u00a1-\uffff0-9]+)*"
# TLD identifier
r"(?:\.(?:[a-z\u00a1-\uffff]{2,}))"
r")"
# port number
r"(?::\d{2,5})?"
# resource path
r"(?:/\S*)?"
r"$"
, re.UNICODE)
DOMAIN_REGEX = re.compile(
r"\b((?=[a-z\u00a1-\uffff0-9-]{1,63}\.)(xn--)?[a-z\u00a1-\uffff0-9]+"
r"(-[a-z\u00a1-\uffff0-9]+)*\.)+[a-z]{2,63}\b",
re.UNICODE|re.IGNORECASE
)
URL_PROTOCOLS = ('http://',
'https://',
'ftp://')
SPECIAL_USE_DOMAIN_NAMES = ('localhost',
'invalid',
'test',
'example')
EMAIL_REGEX = re.compile(
r"(?:[a-z0-9!#$%&'*+/=?^_`{|}~-]+(?:\.[a-z0-9!#$%&'*+/=?^_`{|}~-]+)*|\""
r"(?:[\x01-\x08\x0b\x0c\x0e-\x1f\x21\x23-\x5b\x5d-\x7f]|\\[\x01-\x09\x0b\x0c\x0e-\x7f])*\")"
r"@(?:(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\.)+[a-z0-9](?:[a-z0-9-]*[a-z0-9])"
r"?|\[(?:(?:(2(5[0-5]|[0-4][0-9])|1[0-9][0-9]|[1-9]?[0-9]))\.){3}"
r"(?:(2(5[0-5]|[0-4][0-9])|1[0-9][0-9]|[1-9]?[0-9])|[a-z0-9-]*[a-z0-9]:"
r"(?:[\x01-\x08\x0b\x0c\x0e-\x1f\x21-\x5a\x53-\x7f]|\\[\x01-\x09\x0b\x0c\x0e-\x7f])+)\])"
)
VARIABLE_NAME_REGEX = re.compile(
r"(^[a-zA-Z_])([a-zA-Z0-9_]*)"
)
MAC_ADDRESS_REGEX = re.compile(r'^(?:[0-9a-fA-F]{2}:){5}[0-9a-fA-F]{2}$')
IPV6_REGEX = re.compile(
'^(?:(?:[0-9A-Fa-f]{1,4}:){6}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|::(?:[0-9A-Fa-f]{1,4}:){5}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:[0-9A-Fa-f]{1,4})?::(?:[0-9A-Fa-f]{1,4}:){4}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4})?::(?:[0-9A-Fa-f]{1,4}:){3}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:(?:[0-9A-Fa-f]{1,4}:){,2}[0-9A-Fa-f]{1,4})?::(?:[0-9A-Fa-f]{1,4}:){2}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:(?:[0-9A-Fa-f]{1,4}:){,3}[0-9A-Fa-f]{1,4})?::[0-9A-Fa-f]{1,4}:(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:(?:[0-9A-Fa-f]{1,4}:){,4}[0-9A-Fa-f]{1,4})?::(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:(?:[0-9A-Fa-f]{1,4}:){,5}[0-9A-Fa-f]{1,4})?::[0-9A-Fa-f]{1,4}|(?:(?:[0-9A-Fa-f]{1,4}:){,6}[0-9A-Fa-f]{1,4})?::)(?:%25(?:[A-Za-z0-9\\-._~]|%[0-9A-Fa-f]{2})+)?$'
)
TIMEDELTA_REGEX = re.compile(r'((?P<days>\d+) days?, )?(?P<hours>\d+):'
r'(?P<minutes>\d+):(?P<seconds>\d+(\.\d+)?)')
MIME_TYPE_REGEX = re.compile(r"^multipart|[-\w.]+/[-\w.\+]+$")
# pylint: disable=W0613
## CORE
@disable_on_env
def uuid(value,
allow_empty = False,
**kwargs):
"""Validate that ``value`` is a valid :class:`UUID <python:uuid.UUID>`.
:param value: The value to validate.
:param allow_empty: If ``True``, returns :obj:`None <python:None>` if
``value`` is empty. If ``False``, raises a
:class:`EmptyValueError <validator_collection.errors.EmptyValueError>`
if ``value`` is empty. Defaults to ``False``.
:type allow_empty: :class:`bool <python:bool>`
:returns: ``value`` coerced to a :class:`UUID <python:uuid.UUID>` object /
:obj:`None <python:None>`
:rtype: :class:`UUID <python:uuid.UUID>` / :obj:`None <python:None>`
:raises EmptyValueError: if ``value`` is empty and ``allow_empty`` is ``False``
:raises CannotCoerceError: if ``value`` cannot be coerced to a
:class:`UUID <python:uuid.UUID>`
"""
if not value and not allow_empty:
raise errors.EmptyValueError('value (%s) was empty' % value)
elif not value:
return None
if isinstance(value, uuid_.UUID):
return value
try:
value = uuid_.UUID(value)
except ValueError:
raise errors.CannotCoerceError('value (%s) cannot be coerced to a valid UUID')
return value
| (value, allow_empty=False, allow_ips=False, **kwargs) |
725,902 | validator_collection.validators | email | Validate that ``value`` is a valid email address.
.. note::
Email address validation is...complicated. The methodology that we have
adopted here is *generally* compliant with
`RFC 5322 <https://tools.ietf.org/html/rfc5322>`_ and uses a combination of
string parsing and regular expressions.
String parsing in particular is used to validate certain *highly unusual*
but still valid email patterns, including the use of escaped text and
comments within an email address' local address (the user name part).
This approach ensures more complete coverage for unusual edge cases, while
still letting us use regular expressions that perform quickly.
:param value: The value to validate.
:type value: :class:`str <python:str>` / :obj:`None <python:None>`
:param allow_empty: If ``True``, returns :obj:`None <python:None>` if
``value`` is empty. If ``False``, raises a
:class:`EmptyValueError <validator_collection.errors.EmptyValueError>`
if ``value`` is empty. Defaults to ``False``.
:type allow_empty: :class:`bool <python:bool>`
:returns: ``value`` / :obj:`None <python:None>`
:rtype: :class:`str <python:str>` / :obj:`None <python:None>`
:raises EmptyValueError: if ``value`` is empty and ``allow_empty`` is ``False``
:raises CannotCoerceError: if ``value`` is not a :class:`str <python:str>` or
:obj:`None <python:None>`
:raises InvalidEmailError: if ``value`` is not a valid email address or
empty with ``allow_empty`` set to ``True``
| # -*- coding: utf-8 -*-
# The lack of a module docstring for this module is **INTENTIONAL**.
# The module is imported into the documentation using Sphinx's autodoc
# extension, and its member function documentation is automatically incorporated
# there as needed.
import decimal as decimal_
import fractions
import io
import math
import os
import uuid as uuid_
import datetime as datetime_
import string as string_
import sys
from ast import parse
import jsonschema
from validator_collection._compat import numeric_types, integer_types, datetime_types,\
date_types, time_types, timestamp_types, tzinfo_types, POSITIVE_INFINITY, \
NEGATIVE_INFINITY, TimeZone, json_, is_py2, is_py3, dict_, float_, basestring, re
from validator_collection._decorators import disable_on_env
from validator_collection import errors
URL_UNSAFE_CHARACTERS = ('[', ']', '{', '}', '|', '^', '%', '~')
URL_REGEX = re.compile(
r"^"
# protocol identifier
r"(?:(?:https?|ftp)://)"
# user:pass authentication
r"(?:\S+(?::\S*)?@)?"
r"(?:"
# IP address exclusion
# private & local networks
r"(?!(?:10|127)(?:\.\d{1,3}){3})"
r"(?!(?:169\.254|192\.168)(?:\.\d{1,3}){2})"
r"(?!172\.(?:1[6-9]|2\d|3[0-1])(?:\.\d{1,3}){2})"
# IP address dotted notation octets
# excludes loopback network 0.0.0.0
# excludes reserved space >= 224.0.0.0
# excludes network & broadcast addresses
# (first & last IP address of each class)
r"(?:[1-9]\d?|1\d\d|2[01]\d|22[0-3])"
r"(?:\.(?:1?\d{1,2}|2[0-4]\d|25[0-5])){2}"
r"(?:\.(?:[1-9]\d?|1\d\d|2[0-4]\d|25[0-4]))"
r"|"
r"(?:"
r"(?:localhost|invalid|test|example)|("
# host name
r"(?:(?:[A-z\u00a1-\uffff0-9]-*_*)*[A-z\u00a1-\uffff0-9]+)"
# domain name
r"(?:\.(?:[A-z\u00a1-\uffff0-9]-*)*[A-z\u00a1-\uffff0-9]+)*"
# TLD identifier
r"(?:\.(?:[A-z\u00a1-\uffff]{2,}))"
r")))"
# port number
r"(?::\d{2,5})?"
# resource path
r"(?:/\S*)?"
r"$"
, re.UNICODE)
URL_SPECIAL_IP_REGEX = re.compile(
r"^"
# protocol identifier
r"(?:(?:https?|ftp)://)"
# user:pass authentication
r"(?:\S+(?::\S*)?@)?"
r"(?:"
# IP address dotted notation octets
# excludes loopback network 0.0.0.0
# excludes reserved space >= 224.0.0.0
# excludes network & broadcast addresses
# (first & last IP address of each class)
r"(?:[1-9]\d?|1\d\d|2[01]\d|22[0-3])"
r"(?:\.(?:1?\d{1,2}|2[0-4]\d|25[0-5])){2}"
r"(?:\.(?:[1-9]\d?|1\d\d|2[0-4]\d|25[0-4]))"
r"|"
# host name
r"(?:(?:[a-z\u00a1-\uffff0-9]-*)*[a-z\u00a1-\uffff0-9]+)"
# domain name
r"(?:\.(?:[a-z\u00a1-\uffff0-9]-*)*[a-z\u00a1-\uffff0-9]+)*"
# TLD identifier
r"(?:\.(?:[a-z\u00a1-\uffff]{2,}))"
r")"
# port number
r"(?::\d{2,5})?"
# resource path
r"(?:/\S*)?"
r"$"
, re.UNICODE)
DOMAIN_REGEX = re.compile(
r"\b((?=[a-z\u00a1-\uffff0-9-]{1,63}\.)(xn--)?[a-z\u00a1-\uffff0-9]+"
r"(-[a-z\u00a1-\uffff0-9]+)*\.)+[a-z]{2,63}\b",
re.UNICODE|re.IGNORECASE
)
URL_PROTOCOLS = ('http://',
'https://',
'ftp://')
SPECIAL_USE_DOMAIN_NAMES = ('localhost',
'invalid',
'test',
'example')
EMAIL_REGEX = re.compile(
r"(?:[a-z0-9!#$%&'*+/=?^_`{|}~-]+(?:\.[a-z0-9!#$%&'*+/=?^_`{|}~-]+)*|\""
r"(?:[\x01-\x08\x0b\x0c\x0e-\x1f\x21\x23-\x5b\x5d-\x7f]|\\[\x01-\x09\x0b\x0c\x0e-\x7f])*\")"
r"@(?:(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\.)+[a-z0-9](?:[a-z0-9-]*[a-z0-9])"
r"?|\[(?:(?:(2(5[0-5]|[0-4][0-9])|1[0-9][0-9]|[1-9]?[0-9]))\.){3}"
r"(?:(2(5[0-5]|[0-4][0-9])|1[0-9][0-9]|[1-9]?[0-9])|[a-z0-9-]*[a-z0-9]:"
r"(?:[\x01-\x08\x0b\x0c\x0e-\x1f\x21-\x5a\x53-\x7f]|\\[\x01-\x09\x0b\x0c\x0e-\x7f])+)\])"
)
VARIABLE_NAME_REGEX = re.compile(
r"(^[a-zA-Z_])([a-zA-Z0-9_]*)"
)
MAC_ADDRESS_REGEX = re.compile(r'^(?:[0-9a-fA-F]{2}:){5}[0-9a-fA-F]{2}$')
IPV6_REGEX = re.compile(
'^(?:(?:[0-9A-Fa-f]{1,4}:){6}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|::(?:[0-9A-Fa-f]{1,4}:){5}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:[0-9A-Fa-f]{1,4})?::(?:[0-9A-Fa-f]{1,4}:){4}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4})?::(?:[0-9A-Fa-f]{1,4}:){3}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:(?:[0-9A-Fa-f]{1,4}:){,2}[0-9A-Fa-f]{1,4})?::(?:[0-9A-Fa-f]{1,4}:){2}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:(?:[0-9A-Fa-f]{1,4}:){,3}[0-9A-Fa-f]{1,4})?::[0-9A-Fa-f]{1,4}:(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:(?:[0-9A-Fa-f]{1,4}:){,4}[0-9A-Fa-f]{1,4})?::(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:(?:[0-9A-Fa-f]{1,4}:){,5}[0-9A-Fa-f]{1,4})?::[0-9A-Fa-f]{1,4}|(?:(?:[0-9A-Fa-f]{1,4}:){,6}[0-9A-Fa-f]{1,4})?::)(?:%25(?:[A-Za-z0-9\\-._~]|%[0-9A-Fa-f]{2})+)?$'
)
TIMEDELTA_REGEX = re.compile(r'((?P<days>\d+) days?, )?(?P<hours>\d+):'
r'(?P<minutes>\d+):(?P<seconds>\d+(\.\d+)?)')
MIME_TYPE_REGEX = re.compile(r"^multipart|[-\w.]+/[-\w.\+]+$")
# pylint: disable=W0613
## CORE
@disable_on_env
def uuid(value,
allow_empty = False,
**kwargs):
"""Validate that ``value`` is a valid :class:`UUID <python:uuid.UUID>`.
:param value: The value to validate.
:param allow_empty: If ``True``, returns :obj:`None <python:None>` if
``value`` is empty. If ``False``, raises a
:class:`EmptyValueError <validator_collection.errors.EmptyValueError>`
if ``value`` is empty. Defaults to ``False``.
:type allow_empty: :class:`bool <python:bool>`
:returns: ``value`` coerced to a :class:`UUID <python:uuid.UUID>` object /
:obj:`None <python:None>`
:rtype: :class:`UUID <python:uuid.UUID>` / :obj:`None <python:None>`
:raises EmptyValueError: if ``value`` is empty and ``allow_empty`` is ``False``
:raises CannotCoerceError: if ``value`` cannot be coerced to a
:class:`UUID <python:uuid.UUID>`
"""
if not value and not allow_empty:
raise errors.EmptyValueError('value (%s) was empty' % value)
elif not value:
return None
if isinstance(value, uuid_.UUID):
return value
try:
value = uuid_.UUID(value)
except ValueError:
raise errors.CannotCoerceError('value (%s) cannot be coerced to a valid UUID')
return value
| (value, allow_empty=False, **kwargs) |
725,904 | validator_collection.validators | file_exists | Validate that ``value`` is a valid file that exists on the local filesystem.
:param value: The value to validate.
:param allow_empty: If ``True``, returns :obj:`None <python:None>` if
``value`` is empty. If ``False``, raises a
:class:`EmptyValueError <validator_collection.errors.EmptyValueError>`
if ``value`` is empty. Defaults to ``False``.
:type allow_empty: :class:`bool <python:bool>`
:returns: The file name represented by ``value``.
:rtype: Path-like object / :obj:`None <python:None>`
:raises EmptyValueError: if ``allow_empty`` is ``False`` and ``value``
is empty
:raises NotPathlikeError: if ``value`` is not a path-like object
:raises PathExistsError: if ``value`` does not exist on the local filesystem
:raises NotAFileError: if ``value`` is not a valid file
| # -*- coding: utf-8 -*-
# The lack of a module docstring for this module is **INTENTIONAL**.
# The module is imported into the documentation using Sphinx's autodoc
# extension, and its member function documentation is automatically incorporated
# there as needed.
import decimal as decimal_
import fractions
import io
import math
import os
import uuid as uuid_
import datetime as datetime_
import string as string_
import sys
from ast import parse
import jsonschema
from validator_collection._compat import numeric_types, integer_types, datetime_types,\
date_types, time_types, timestamp_types, tzinfo_types, POSITIVE_INFINITY, \
NEGATIVE_INFINITY, TimeZone, json_, is_py2, is_py3, dict_, float_, basestring, re
from validator_collection._decorators import disable_on_env
from validator_collection import errors
URL_UNSAFE_CHARACTERS = ('[', ']', '{', '}', '|', '^', '%', '~')
URL_REGEX = re.compile(
r"^"
# protocol identifier
r"(?:(?:https?|ftp)://)"
# user:pass authentication
r"(?:\S+(?::\S*)?@)?"
r"(?:"
# IP address exclusion
# private & local networks
r"(?!(?:10|127)(?:\.\d{1,3}){3})"
r"(?!(?:169\.254|192\.168)(?:\.\d{1,3}){2})"
r"(?!172\.(?:1[6-9]|2\d|3[0-1])(?:\.\d{1,3}){2})"
# IP address dotted notation octets
# excludes loopback network 0.0.0.0
# excludes reserved space >= 224.0.0.0
# excludes network & broadcast addresses
# (first & last IP address of each class)
r"(?:[1-9]\d?|1\d\d|2[01]\d|22[0-3])"
r"(?:\.(?:1?\d{1,2}|2[0-4]\d|25[0-5])){2}"
r"(?:\.(?:[1-9]\d?|1\d\d|2[0-4]\d|25[0-4]))"
r"|"
r"(?:"
r"(?:localhost|invalid|test|example)|("
# host name
r"(?:(?:[A-z\u00a1-\uffff0-9]-*_*)*[A-z\u00a1-\uffff0-9]+)"
# domain name
r"(?:\.(?:[A-z\u00a1-\uffff0-9]-*)*[A-z\u00a1-\uffff0-9]+)*"
# TLD identifier
r"(?:\.(?:[A-z\u00a1-\uffff]{2,}))"
r")))"
# port number
r"(?::\d{2,5})?"
# resource path
r"(?:/\S*)?"
r"$"
, re.UNICODE)
URL_SPECIAL_IP_REGEX = re.compile(
r"^"
# protocol identifier
r"(?:(?:https?|ftp)://)"
# user:pass authentication
r"(?:\S+(?::\S*)?@)?"
r"(?:"
# IP address dotted notation octets
# excludes loopback network 0.0.0.0
# excludes reserved space >= 224.0.0.0
# excludes network & broadcast addresses
# (first & last IP address of each class)
r"(?:[1-9]\d?|1\d\d|2[01]\d|22[0-3])"
r"(?:\.(?:1?\d{1,2}|2[0-4]\d|25[0-5])){2}"
r"(?:\.(?:[1-9]\d?|1\d\d|2[0-4]\d|25[0-4]))"
r"|"
# host name
r"(?:(?:[a-z\u00a1-\uffff0-9]-*)*[a-z\u00a1-\uffff0-9]+)"
# domain name
r"(?:\.(?:[a-z\u00a1-\uffff0-9]-*)*[a-z\u00a1-\uffff0-9]+)*"
# TLD identifier
r"(?:\.(?:[a-z\u00a1-\uffff]{2,}))"
r")"
# port number
r"(?::\d{2,5})?"
# resource path
r"(?:/\S*)?"
r"$"
, re.UNICODE)
DOMAIN_REGEX = re.compile(
r"\b((?=[a-z\u00a1-\uffff0-9-]{1,63}\.)(xn--)?[a-z\u00a1-\uffff0-9]+"
r"(-[a-z\u00a1-\uffff0-9]+)*\.)+[a-z]{2,63}\b",
re.UNICODE|re.IGNORECASE
)
URL_PROTOCOLS = ('http://',
'https://',
'ftp://')
SPECIAL_USE_DOMAIN_NAMES = ('localhost',
'invalid',
'test',
'example')
EMAIL_REGEX = re.compile(
r"(?:[a-z0-9!#$%&'*+/=?^_`{|}~-]+(?:\.[a-z0-9!#$%&'*+/=?^_`{|}~-]+)*|\""
r"(?:[\x01-\x08\x0b\x0c\x0e-\x1f\x21\x23-\x5b\x5d-\x7f]|\\[\x01-\x09\x0b\x0c\x0e-\x7f])*\")"
r"@(?:(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\.)+[a-z0-9](?:[a-z0-9-]*[a-z0-9])"
r"?|\[(?:(?:(2(5[0-5]|[0-4][0-9])|1[0-9][0-9]|[1-9]?[0-9]))\.){3}"
r"(?:(2(5[0-5]|[0-4][0-9])|1[0-9][0-9]|[1-9]?[0-9])|[a-z0-9-]*[a-z0-9]:"
r"(?:[\x01-\x08\x0b\x0c\x0e-\x1f\x21-\x5a\x53-\x7f]|\\[\x01-\x09\x0b\x0c\x0e-\x7f])+)\])"
)
VARIABLE_NAME_REGEX = re.compile(
r"(^[a-zA-Z_])([a-zA-Z0-9_]*)"
)
MAC_ADDRESS_REGEX = re.compile(r'^(?:[0-9a-fA-F]{2}:){5}[0-9a-fA-F]{2}$')
IPV6_REGEX = re.compile(
'^(?:(?:[0-9A-Fa-f]{1,4}:){6}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|::(?:[0-9A-Fa-f]{1,4}:){5}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:[0-9A-Fa-f]{1,4})?::(?:[0-9A-Fa-f]{1,4}:){4}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4})?::(?:[0-9A-Fa-f]{1,4}:){3}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:(?:[0-9A-Fa-f]{1,4}:){,2}[0-9A-Fa-f]{1,4})?::(?:[0-9A-Fa-f]{1,4}:){2}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:(?:[0-9A-Fa-f]{1,4}:){,3}[0-9A-Fa-f]{1,4})?::[0-9A-Fa-f]{1,4}:(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:(?:[0-9A-Fa-f]{1,4}:){,4}[0-9A-Fa-f]{1,4})?::(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:(?:[0-9A-Fa-f]{1,4}:){,5}[0-9A-Fa-f]{1,4})?::[0-9A-Fa-f]{1,4}|(?:(?:[0-9A-Fa-f]{1,4}:){,6}[0-9A-Fa-f]{1,4})?::)(?:%25(?:[A-Za-z0-9\\-._~]|%[0-9A-Fa-f]{2})+)?$'
)
TIMEDELTA_REGEX = re.compile(r'((?P<days>\d+) days?, )?(?P<hours>\d+):'
r'(?P<minutes>\d+):(?P<seconds>\d+(\.\d+)?)')
MIME_TYPE_REGEX = re.compile(r"^multipart|[-\w.]+/[-\w.\+]+$")
# pylint: disable=W0613
## CORE
@disable_on_env
def uuid(value,
allow_empty = False,
**kwargs):
"""Validate that ``value`` is a valid :class:`UUID <python:uuid.UUID>`.
:param value: The value to validate.
:param allow_empty: If ``True``, returns :obj:`None <python:None>` if
``value`` is empty. If ``False``, raises a
:class:`EmptyValueError <validator_collection.errors.EmptyValueError>`
if ``value`` is empty. Defaults to ``False``.
:type allow_empty: :class:`bool <python:bool>`
:returns: ``value`` coerced to a :class:`UUID <python:uuid.UUID>` object /
:obj:`None <python:None>`
:rtype: :class:`UUID <python:uuid.UUID>` / :obj:`None <python:None>`
:raises EmptyValueError: if ``value`` is empty and ``allow_empty`` is ``False``
:raises CannotCoerceError: if ``value`` cannot be coerced to a
:class:`UUID <python:uuid.UUID>`
"""
if not value and not allow_empty:
raise errors.EmptyValueError('value (%s) was empty' % value)
elif not value:
return None
if isinstance(value, uuid_.UUID):
return value
try:
value = uuid_.UUID(value)
except ValueError:
raise errors.CannotCoerceError('value (%s) cannot be coerced to a valid UUID')
return value
| (value, allow_empty=False, **kwargs) |
725,905 | validator_collection.validators | float | Validate that ``value`` is a :class:`float <python:float>`.
:param value: The value to validate.
:param allow_empty: If ``True``, returns :obj:`None <python:None>` if
``value`` is :obj:`None <python:None>`. If ``False``, raises a
:class:`EmptyValueError <validator_collection.errors.EmptyValueError>` if
``value`` is :obj:`None <python:None>`. Defaults to ``False``.
:type allow_empty: :class:`bool <python:bool>`
:returns: ``value`` / :obj:`None <python:None>`
:rtype: :class:`float <python:float>` / :obj:`None <python:None>`
:raises EmptyValueError: if ``value`` is :obj:`None <python:None>` and
``allow_empty`` is ``False``
:raises MinimumValueError: if ``minimum`` is supplied and ``value`` is less
than the ``minimum``
:raises MaximumValueError: if ``maximum`` is supplied and ``value`` is more
than the ``maximum``
:raises CannotCoerceError: if unable to coerce ``value`` to a
:class:`float <python:float>`
| # -*- coding: utf-8 -*-
# The lack of a module docstring for this module is **INTENTIONAL**.
# The module is imported into the documentation using Sphinx's autodoc
# extension, and its member function documentation is automatically incorporated
# there as needed.
import decimal as decimal_
import fractions
import io
import math
import os
import uuid as uuid_
import datetime as datetime_
import string as string_
import sys
from ast import parse
import jsonschema
from validator_collection._compat import numeric_types, integer_types, datetime_types,\
date_types, time_types, timestamp_types, tzinfo_types, POSITIVE_INFINITY, \
NEGATIVE_INFINITY, TimeZone, json_, is_py2, is_py3, dict_, float_, basestring, re
from validator_collection._decorators import disable_on_env
from validator_collection import errors
URL_UNSAFE_CHARACTERS = ('[', ']', '{', '}', '|', '^', '%', '~')
URL_REGEX = re.compile(
r"^"
# protocol identifier
r"(?:(?:https?|ftp)://)"
# user:pass authentication
r"(?:\S+(?::\S*)?@)?"
r"(?:"
# IP address exclusion
# private & local networks
r"(?!(?:10|127)(?:\.\d{1,3}){3})"
r"(?!(?:169\.254|192\.168)(?:\.\d{1,3}){2})"
r"(?!172\.(?:1[6-9]|2\d|3[0-1])(?:\.\d{1,3}){2})"
# IP address dotted notation octets
# excludes loopback network 0.0.0.0
# excludes reserved space >= 224.0.0.0
# excludes network & broadcast addresses
# (first & last IP address of each class)
r"(?:[1-9]\d?|1\d\d|2[01]\d|22[0-3])"
r"(?:\.(?:1?\d{1,2}|2[0-4]\d|25[0-5])){2}"
r"(?:\.(?:[1-9]\d?|1\d\d|2[0-4]\d|25[0-4]))"
r"|"
r"(?:"
r"(?:localhost|invalid|test|example)|("
# host name
r"(?:(?:[A-z\u00a1-\uffff0-9]-*_*)*[A-z\u00a1-\uffff0-9]+)"
# domain name
r"(?:\.(?:[A-z\u00a1-\uffff0-9]-*)*[A-z\u00a1-\uffff0-9]+)*"
# TLD identifier
r"(?:\.(?:[A-z\u00a1-\uffff]{2,}))"
r")))"
# port number
r"(?::\d{2,5})?"
# resource path
r"(?:/\S*)?"
r"$"
, re.UNICODE)
URL_SPECIAL_IP_REGEX = re.compile(
r"^"
# protocol identifier
r"(?:(?:https?|ftp)://)"
# user:pass authentication
r"(?:\S+(?::\S*)?@)?"
r"(?:"
# IP address dotted notation octets
# excludes loopback network 0.0.0.0
# excludes reserved space >= 224.0.0.0
# excludes network & broadcast addresses
# (first & last IP address of each class)
r"(?:[1-9]\d?|1\d\d|2[01]\d|22[0-3])"
r"(?:\.(?:1?\d{1,2}|2[0-4]\d|25[0-5])){2}"
r"(?:\.(?:[1-9]\d?|1\d\d|2[0-4]\d|25[0-4]))"
r"|"
# host name
r"(?:(?:[a-z\u00a1-\uffff0-9]-*)*[a-z\u00a1-\uffff0-9]+)"
# domain name
r"(?:\.(?:[a-z\u00a1-\uffff0-9]-*)*[a-z\u00a1-\uffff0-9]+)*"
# TLD identifier
r"(?:\.(?:[a-z\u00a1-\uffff]{2,}))"
r")"
# port number
r"(?::\d{2,5})?"
# resource path
r"(?:/\S*)?"
r"$"
, re.UNICODE)
DOMAIN_REGEX = re.compile(
r"\b((?=[a-z\u00a1-\uffff0-9-]{1,63}\.)(xn--)?[a-z\u00a1-\uffff0-9]+"
r"(-[a-z\u00a1-\uffff0-9]+)*\.)+[a-z]{2,63}\b",
re.UNICODE|re.IGNORECASE
)
URL_PROTOCOLS = ('http://',
'https://',
'ftp://')
SPECIAL_USE_DOMAIN_NAMES = ('localhost',
'invalid',
'test',
'example')
EMAIL_REGEX = re.compile(
r"(?:[a-z0-9!#$%&'*+/=?^_`{|}~-]+(?:\.[a-z0-9!#$%&'*+/=?^_`{|}~-]+)*|\""
r"(?:[\x01-\x08\x0b\x0c\x0e-\x1f\x21\x23-\x5b\x5d-\x7f]|\\[\x01-\x09\x0b\x0c\x0e-\x7f])*\")"
r"@(?:(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\.)+[a-z0-9](?:[a-z0-9-]*[a-z0-9])"
r"?|\[(?:(?:(2(5[0-5]|[0-4][0-9])|1[0-9][0-9]|[1-9]?[0-9]))\.){3}"
r"(?:(2(5[0-5]|[0-4][0-9])|1[0-9][0-9]|[1-9]?[0-9])|[a-z0-9-]*[a-z0-9]:"
r"(?:[\x01-\x08\x0b\x0c\x0e-\x1f\x21-\x5a\x53-\x7f]|\\[\x01-\x09\x0b\x0c\x0e-\x7f])+)\])"
)
VARIABLE_NAME_REGEX = re.compile(
r"(^[a-zA-Z_])([a-zA-Z0-9_]*)"
)
MAC_ADDRESS_REGEX = re.compile(r'^(?:[0-9a-fA-F]{2}:){5}[0-9a-fA-F]{2}$')
IPV6_REGEX = re.compile(
'^(?:(?:[0-9A-Fa-f]{1,4}:){6}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|::(?:[0-9A-Fa-f]{1,4}:){5}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:[0-9A-Fa-f]{1,4})?::(?:[0-9A-Fa-f]{1,4}:){4}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4})?::(?:[0-9A-Fa-f]{1,4}:){3}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:(?:[0-9A-Fa-f]{1,4}:){,2}[0-9A-Fa-f]{1,4})?::(?:[0-9A-Fa-f]{1,4}:){2}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:(?:[0-9A-Fa-f]{1,4}:){,3}[0-9A-Fa-f]{1,4})?::[0-9A-Fa-f]{1,4}:(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:(?:[0-9A-Fa-f]{1,4}:){,4}[0-9A-Fa-f]{1,4})?::(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:(?:[0-9A-Fa-f]{1,4}:){,5}[0-9A-Fa-f]{1,4})?::[0-9A-Fa-f]{1,4}|(?:(?:[0-9A-Fa-f]{1,4}:){,6}[0-9A-Fa-f]{1,4})?::)(?:%25(?:[A-Za-z0-9\\-._~]|%[0-9A-Fa-f]{2})+)?$'
)
TIMEDELTA_REGEX = re.compile(r'((?P<days>\d+) days?, )?(?P<hours>\d+):'
r'(?P<minutes>\d+):(?P<seconds>\d+(\.\d+)?)')
MIME_TYPE_REGEX = re.compile(r"^multipart|[-\w.]+/[-\w.\+]+$")
# pylint: disable=W0613
## CORE
@disable_on_env
def uuid(value,
allow_empty = False,
**kwargs):
"""Validate that ``value`` is a valid :class:`UUID <python:uuid.UUID>`.
:param value: The value to validate.
:param allow_empty: If ``True``, returns :obj:`None <python:None>` if
``value`` is empty. If ``False``, raises a
:class:`EmptyValueError <validator_collection.errors.EmptyValueError>`
if ``value`` is empty. Defaults to ``False``.
:type allow_empty: :class:`bool <python:bool>`
:returns: ``value`` coerced to a :class:`UUID <python:uuid.UUID>` object /
:obj:`None <python:None>`
:rtype: :class:`UUID <python:uuid.UUID>` / :obj:`None <python:None>`
:raises EmptyValueError: if ``value`` is empty and ``allow_empty`` is ``False``
:raises CannotCoerceError: if ``value`` cannot be coerced to a
:class:`UUID <python:uuid.UUID>`
"""
if not value and not allow_empty:
raise errors.EmptyValueError('value (%s) was empty' % value)
elif not value:
return None
if isinstance(value, uuid_.UUID):
return value
try:
value = uuid_.UUID(value)
except ValueError:
raise errors.CannotCoerceError('value (%s) cannot be coerced to a valid UUID')
return value
| (value, allow_empty=False, minimum=None, maximum=None, **kwargs) |
725,906 | validator_collection.validators | fraction | Validate that ``value`` is a :class:`Fraction <python:fractions.Fraction>`.
:param value: The value to validate.
:param allow_empty: If ``True``, returns :obj:`None <python:None>` if ``value``
is :obj:`None <python:None>`. If ``False``, raises a
:class:`EmptyValueError <validator_collection.errors.EmptyValueError>` if
``value`` is :obj:`None <python:None>`. Defaults to ``False``.
:type allow_empty: :class:`bool <python:bool>`
:returns: ``value`` / :obj:`None <python:None>`
:rtype: :class:`Fraction <python:fractions.Fraction>` / :obj:`None <python:None>`
:raises EmptyValueError: if ``value`` is :obj:`None <python:None>` and
``allow_empty`` is ``False``
:raises MinimumValueError: if ``minimum`` is supplied and ``value`` is less
than the ``minimum``
:raises MaximumValueError: if ``maximum`` is supplied and ``value`` is more
than the ``maximum``
:raises CannotCoerceError: if unable to coerce ``value`` to a
:class:`Fraction <python:fractions.Fraction>`
| # -*- coding: utf-8 -*-
# The lack of a module docstring for this module is **INTENTIONAL**.
# The module is imported into the documentation using Sphinx's autodoc
# extension, and its member function documentation is automatically incorporated
# there as needed.
import decimal as decimal_
import fractions
import io
import math
import os
import uuid as uuid_
import datetime as datetime_
import string as string_
import sys
from ast import parse
import jsonschema
from validator_collection._compat import numeric_types, integer_types, datetime_types,\
date_types, time_types, timestamp_types, tzinfo_types, POSITIVE_INFINITY, \
NEGATIVE_INFINITY, TimeZone, json_, is_py2, is_py3, dict_, float_, basestring, re
from validator_collection._decorators import disable_on_env
from validator_collection import errors
URL_UNSAFE_CHARACTERS = ('[', ']', '{', '}', '|', '^', '%', '~')
URL_REGEX = re.compile(
r"^"
# protocol identifier
r"(?:(?:https?|ftp)://)"
# user:pass authentication
r"(?:\S+(?::\S*)?@)?"
r"(?:"
# IP address exclusion
# private & local networks
r"(?!(?:10|127)(?:\.\d{1,3}){3})"
r"(?!(?:169\.254|192\.168)(?:\.\d{1,3}){2})"
r"(?!172\.(?:1[6-9]|2\d|3[0-1])(?:\.\d{1,3}){2})"
# IP address dotted notation octets
# excludes loopback network 0.0.0.0
# excludes reserved space >= 224.0.0.0
# excludes network & broadcast addresses
# (first & last IP address of each class)
r"(?:[1-9]\d?|1\d\d|2[01]\d|22[0-3])"
r"(?:\.(?:1?\d{1,2}|2[0-4]\d|25[0-5])){2}"
r"(?:\.(?:[1-9]\d?|1\d\d|2[0-4]\d|25[0-4]))"
r"|"
r"(?:"
r"(?:localhost|invalid|test|example)|("
# host name
r"(?:(?:[A-z\u00a1-\uffff0-9]-*_*)*[A-z\u00a1-\uffff0-9]+)"
# domain name
r"(?:\.(?:[A-z\u00a1-\uffff0-9]-*)*[A-z\u00a1-\uffff0-9]+)*"
# TLD identifier
r"(?:\.(?:[A-z\u00a1-\uffff]{2,}))"
r")))"
# port number
r"(?::\d{2,5})?"
# resource path
r"(?:/\S*)?"
r"$"
, re.UNICODE)
URL_SPECIAL_IP_REGEX = re.compile(
r"^"
# protocol identifier
r"(?:(?:https?|ftp)://)"
# user:pass authentication
r"(?:\S+(?::\S*)?@)?"
r"(?:"
# IP address dotted notation octets
# excludes loopback network 0.0.0.0
# excludes reserved space >= 224.0.0.0
# excludes network & broadcast addresses
# (first & last IP address of each class)
r"(?:[1-9]\d?|1\d\d|2[01]\d|22[0-3])"
r"(?:\.(?:1?\d{1,2}|2[0-4]\d|25[0-5])){2}"
r"(?:\.(?:[1-9]\d?|1\d\d|2[0-4]\d|25[0-4]))"
r"|"
# host name
r"(?:(?:[a-z\u00a1-\uffff0-9]-*)*[a-z\u00a1-\uffff0-9]+)"
# domain name
r"(?:\.(?:[a-z\u00a1-\uffff0-9]-*)*[a-z\u00a1-\uffff0-9]+)*"
# TLD identifier
r"(?:\.(?:[a-z\u00a1-\uffff]{2,}))"
r")"
# port number
r"(?::\d{2,5})?"
# resource path
r"(?:/\S*)?"
r"$"
, re.UNICODE)
DOMAIN_REGEX = re.compile(
r"\b((?=[a-z\u00a1-\uffff0-9-]{1,63}\.)(xn--)?[a-z\u00a1-\uffff0-9]+"
r"(-[a-z\u00a1-\uffff0-9]+)*\.)+[a-z]{2,63}\b",
re.UNICODE|re.IGNORECASE
)
URL_PROTOCOLS = ('http://',
'https://',
'ftp://')
SPECIAL_USE_DOMAIN_NAMES = ('localhost',
'invalid',
'test',
'example')
EMAIL_REGEX = re.compile(
r"(?:[a-z0-9!#$%&'*+/=?^_`{|}~-]+(?:\.[a-z0-9!#$%&'*+/=?^_`{|}~-]+)*|\""
r"(?:[\x01-\x08\x0b\x0c\x0e-\x1f\x21\x23-\x5b\x5d-\x7f]|\\[\x01-\x09\x0b\x0c\x0e-\x7f])*\")"
r"@(?:(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\.)+[a-z0-9](?:[a-z0-9-]*[a-z0-9])"
r"?|\[(?:(?:(2(5[0-5]|[0-4][0-9])|1[0-9][0-9]|[1-9]?[0-9]))\.){3}"
r"(?:(2(5[0-5]|[0-4][0-9])|1[0-9][0-9]|[1-9]?[0-9])|[a-z0-9-]*[a-z0-9]:"
r"(?:[\x01-\x08\x0b\x0c\x0e-\x1f\x21-\x5a\x53-\x7f]|\\[\x01-\x09\x0b\x0c\x0e-\x7f])+)\])"
)
VARIABLE_NAME_REGEX = re.compile(
r"(^[a-zA-Z_])([a-zA-Z0-9_]*)"
)
MAC_ADDRESS_REGEX = re.compile(r'^(?:[0-9a-fA-F]{2}:){5}[0-9a-fA-F]{2}$')
IPV6_REGEX = re.compile(
'^(?:(?:[0-9A-Fa-f]{1,4}:){6}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|::(?:[0-9A-Fa-f]{1,4}:){5}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:[0-9A-Fa-f]{1,4})?::(?:[0-9A-Fa-f]{1,4}:){4}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4})?::(?:[0-9A-Fa-f]{1,4}:){3}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:(?:[0-9A-Fa-f]{1,4}:){,2}[0-9A-Fa-f]{1,4})?::(?:[0-9A-Fa-f]{1,4}:){2}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:(?:[0-9A-Fa-f]{1,4}:){,3}[0-9A-Fa-f]{1,4})?::[0-9A-Fa-f]{1,4}:(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:(?:[0-9A-Fa-f]{1,4}:){,4}[0-9A-Fa-f]{1,4})?::(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:(?:[0-9A-Fa-f]{1,4}:){,5}[0-9A-Fa-f]{1,4})?::[0-9A-Fa-f]{1,4}|(?:(?:[0-9A-Fa-f]{1,4}:){,6}[0-9A-Fa-f]{1,4})?::)(?:%25(?:[A-Za-z0-9\\-._~]|%[0-9A-Fa-f]{2})+)?$'
)
TIMEDELTA_REGEX = re.compile(r'((?P<days>\d+) days?, )?(?P<hours>\d+):'
r'(?P<minutes>\d+):(?P<seconds>\d+(\.\d+)?)')
MIME_TYPE_REGEX = re.compile(r"^multipart|[-\w.]+/[-\w.\+]+$")
# pylint: disable=W0613
## CORE
@disable_on_env
def uuid(value,
allow_empty = False,
**kwargs):
"""Validate that ``value`` is a valid :class:`UUID <python:uuid.UUID>`.
:param value: The value to validate.
:param allow_empty: If ``True``, returns :obj:`None <python:None>` if
``value`` is empty. If ``False``, raises a
:class:`EmptyValueError <validator_collection.errors.EmptyValueError>`
if ``value`` is empty. Defaults to ``False``.
:type allow_empty: :class:`bool <python:bool>`
:returns: ``value`` coerced to a :class:`UUID <python:uuid.UUID>` object /
:obj:`None <python:None>`
:rtype: :class:`UUID <python:uuid.UUID>` / :obj:`None <python:None>`
:raises EmptyValueError: if ``value`` is empty and ``allow_empty`` is ``False``
:raises CannotCoerceError: if ``value`` cannot be coerced to a
:class:`UUID <python:uuid.UUID>`
"""
if not value and not allow_empty:
raise errors.EmptyValueError('value (%s) was empty' % value)
elif not value:
return None
if isinstance(value, uuid_.UUID):
return value
try:
value = uuid_.UUID(value)
except ValueError:
raise errors.CannotCoerceError('value (%s) cannot be coerced to a valid UUID')
return value
| (value, allow_empty=False, minimum=None, maximum=None, **kwargs) |
725,907 | validator_collection.checkers | has_length | Indicate whether ``value`` has a length greater than or equal to a
supplied ``minimum`` and/or less than or equal to ``maximum``.
.. note::
This function works on any ``value`` that supports the
:func:`len() <python:len>` operation. This means that ``value`` must implement
the :func:`__len__ <python:__len__>` magic method.
If ``value`` does not support length evaluation, the checker will raise
:class:`NotImplemented <python:NotImplemented>`.
:param value: The ``value`` to check.
:type value: anything that supports length evaluation
:param minimum: If supplied, will return ``True`` if ``value`` is greater than or
equal to this value.
:type minimum: numeric
:param maximum: If supplied, will return ``True`` if ``value`` is less than or
equal to this value.
:type maximum: numeric
:returns: ``True`` if ``value`` has length greater than or equal to a
supplied ``minimum`` and less than or equal to a supplied ``maximum``.
Otherwise, returns ``False``.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
:raises TypeError: if ``value`` does not support length evaluation
:raises ValueError: if both ``minimum`` and ``maximum`` are
:obj:`None <python:None>`
| def is_type(obj,
type_,
**kwargs):
"""Indicate if ``obj`` is a type in ``type_``.
.. hint::
This checker is particularly useful when you want to evaluate whether
``obj`` is of a particular type, but importing that type directly to use
in :func:`isinstance() <python:isinstance>` would cause a circular import
error.
To use this checker in that kind of situation, you can instead pass the
*name* of the type you want to check as a string in ``type_``. The checker
will evaluate it and see whether ``obj`` is of a type or inherits from a
type whose name matches the string you passed.
:param obj: The object whose type should be checked.
:type obj: :class:`object <python:object>`
:param type_: The type(s) to check against.
:type type_: :class:`type <python:type>` / iterable of :class:`type <python:type>` /
:class:`str <python:str>` with type name / iterable of :class:`str <python:str>`
with type name
:returns: ``True`` if ``obj`` is a type in ``type_``. Otherwise, ``False``.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
"""
if not is_iterable(type_):
type_ = [type_]
return_value = False
for check_for_type in type_:
if isinstance(check_for_type, type):
return_value = isinstance(obj, check_for_type)
if not return_value:
try:
return_value = issubclass(obj, check_for_type)
except TypeError:
pass
elif obj.__class__.__name__ == check_for_type:
return_value = True
else:
return_value = _check_base_classes(obj.__class__.__bases__,
check_for_type)
if not return_value:
try:
return_value = issubclass(obj, check_for_type)
except TypeError:
pass
if return_value is True:
break
return return_value
| (value, minimum=None, maximum=None, **kwargs) |
725,908 | validator_collection.validators | integer | Validate that ``value`` is an :class:`int <python:int>`.
:param value: The value to validate.
:param allow_empty: If ``True``, returns :obj:`None <python:None>` if
``value`` is :obj:`None <python:None>`. If ``False``, raises a
:class:`EmptyValueError <validator_collection.errors.EmptyValueError>` if
``value`` is :obj:`None <python:None>`.
Defaults to ``False``.
:type allow_empty: :class:`bool <python:bool>`
:param coerce_value: If ``True``, will force any numeric ``value`` to an integer
(always rounding up). If ``False``, will raise an error if ``value`` is numeric
but not a whole number. Defaults to ``False``.
:type coerce_value: :class:`bool <python:bool>`
:param minimum: If supplied, will make sure that ``value`` is greater than or
equal to this value.
:type minimum: numeric
:param maximum: If supplied, will make sure that ``value`` is less than or
equal to this value.
:type maximum: numeric
:param base: Indicates the base that is used to determine the integer value.
The allowed values are 0 and 2–36. Base-2, -8, and -16 literals can be
optionally prefixed with ``0b/0B``, ``0o/0O/0``, or ``0x/0X``, as with
integer literals in code. Base 0 means to interpret the string exactly as
an integer literal, so that the actual base is 2, 8, 10, or 16. Defaults to
``10``.
:returns: ``value`` / :obj:`None <python:None>`
:raises EmptyValueError: if ``value`` is :obj:`None <python:None>` and
``allow_empty`` is ``False``
:raises MinimumValueError: if ``minimum`` is supplied and ``value`` is less
than the ``minimum``
:raises MaximumValueError: if ``maximum`` is supplied and ``value`` is more
than the ``maximum``
:raises NotAnIntegerError: if ``coerce_value`` is ``False``, and ``value``
is not an integer
:raises CannotCoerceError: if ``value`` cannot be coerced to an
:class:`int <python:int>`
| # -*- coding: utf-8 -*-
# The lack of a module docstring for this module is **INTENTIONAL**.
# The module is imported into the documentation using Sphinx's autodoc
# extension, and its member function documentation is automatically incorporated
# there as needed.
import decimal as decimal_
import fractions
import io
import math
import os
import uuid as uuid_
import datetime as datetime_
import string as string_
import sys
from ast import parse
import jsonschema
from validator_collection._compat import numeric_types, integer_types, datetime_types,\
date_types, time_types, timestamp_types, tzinfo_types, POSITIVE_INFINITY, \
NEGATIVE_INFINITY, TimeZone, json_, is_py2, is_py3, dict_, float_, basestring, re
from validator_collection._decorators import disable_on_env
from validator_collection import errors
URL_UNSAFE_CHARACTERS = ('[', ']', '{', '}', '|', '^', '%', '~')
URL_REGEX = re.compile(
r"^"
# protocol identifier
r"(?:(?:https?|ftp)://)"
# user:pass authentication
r"(?:\S+(?::\S*)?@)?"
r"(?:"
# IP address exclusion
# private & local networks
r"(?!(?:10|127)(?:\.\d{1,3}){3})"
r"(?!(?:169\.254|192\.168)(?:\.\d{1,3}){2})"
r"(?!172\.(?:1[6-9]|2\d|3[0-1])(?:\.\d{1,3}){2})"
# IP address dotted notation octets
# excludes loopback network 0.0.0.0
# excludes reserved space >= 224.0.0.0
# excludes network & broadcast addresses
# (first & last IP address of each class)
r"(?:[1-9]\d?|1\d\d|2[01]\d|22[0-3])"
r"(?:\.(?:1?\d{1,2}|2[0-4]\d|25[0-5])){2}"
r"(?:\.(?:[1-9]\d?|1\d\d|2[0-4]\d|25[0-4]))"
r"|"
r"(?:"
r"(?:localhost|invalid|test|example)|("
# host name
r"(?:(?:[A-z\u00a1-\uffff0-9]-*_*)*[A-z\u00a1-\uffff0-9]+)"
# domain name
r"(?:\.(?:[A-z\u00a1-\uffff0-9]-*)*[A-z\u00a1-\uffff0-9]+)*"
# TLD identifier
r"(?:\.(?:[A-z\u00a1-\uffff]{2,}))"
r")))"
# port number
r"(?::\d{2,5})?"
# resource path
r"(?:/\S*)?"
r"$"
, re.UNICODE)
URL_SPECIAL_IP_REGEX = re.compile(
r"^"
# protocol identifier
r"(?:(?:https?|ftp)://)"
# user:pass authentication
r"(?:\S+(?::\S*)?@)?"
r"(?:"
# IP address dotted notation octets
# excludes loopback network 0.0.0.0
# excludes reserved space >= 224.0.0.0
# excludes network & broadcast addresses
# (first & last IP address of each class)
r"(?:[1-9]\d?|1\d\d|2[01]\d|22[0-3])"
r"(?:\.(?:1?\d{1,2}|2[0-4]\d|25[0-5])){2}"
r"(?:\.(?:[1-9]\d?|1\d\d|2[0-4]\d|25[0-4]))"
r"|"
# host name
r"(?:(?:[a-z\u00a1-\uffff0-9]-*)*[a-z\u00a1-\uffff0-9]+)"
# domain name
r"(?:\.(?:[a-z\u00a1-\uffff0-9]-*)*[a-z\u00a1-\uffff0-9]+)*"
# TLD identifier
r"(?:\.(?:[a-z\u00a1-\uffff]{2,}))"
r")"
# port number
r"(?::\d{2,5})?"
# resource path
r"(?:/\S*)?"
r"$"
, re.UNICODE)
DOMAIN_REGEX = re.compile(
r"\b((?=[a-z\u00a1-\uffff0-9-]{1,63}\.)(xn--)?[a-z\u00a1-\uffff0-9]+"
r"(-[a-z\u00a1-\uffff0-9]+)*\.)+[a-z]{2,63}\b",
re.UNICODE|re.IGNORECASE
)
URL_PROTOCOLS = ('http://',
'https://',
'ftp://')
SPECIAL_USE_DOMAIN_NAMES = ('localhost',
'invalid',
'test',
'example')
EMAIL_REGEX = re.compile(
r"(?:[a-z0-9!#$%&'*+/=?^_`{|}~-]+(?:\.[a-z0-9!#$%&'*+/=?^_`{|}~-]+)*|\""
r"(?:[\x01-\x08\x0b\x0c\x0e-\x1f\x21\x23-\x5b\x5d-\x7f]|\\[\x01-\x09\x0b\x0c\x0e-\x7f])*\")"
r"@(?:(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\.)+[a-z0-9](?:[a-z0-9-]*[a-z0-9])"
r"?|\[(?:(?:(2(5[0-5]|[0-4][0-9])|1[0-9][0-9]|[1-9]?[0-9]))\.){3}"
r"(?:(2(5[0-5]|[0-4][0-9])|1[0-9][0-9]|[1-9]?[0-9])|[a-z0-9-]*[a-z0-9]:"
r"(?:[\x01-\x08\x0b\x0c\x0e-\x1f\x21-\x5a\x53-\x7f]|\\[\x01-\x09\x0b\x0c\x0e-\x7f])+)\])"
)
VARIABLE_NAME_REGEX = re.compile(
r"(^[a-zA-Z_])([a-zA-Z0-9_]*)"
)
MAC_ADDRESS_REGEX = re.compile(r'^(?:[0-9a-fA-F]{2}:){5}[0-9a-fA-F]{2}$')
IPV6_REGEX = re.compile(
'^(?:(?:[0-9A-Fa-f]{1,4}:){6}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|::(?:[0-9A-Fa-f]{1,4}:){5}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:[0-9A-Fa-f]{1,4})?::(?:[0-9A-Fa-f]{1,4}:){4}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4})?::(?:[0-9A-Fa-f]{1,4}:){3}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:(?:[0-9A-Fa-f]{1,4}:){,2}[0-9A-Fa-f]{1,4})?::(?:[0-9A-Fa-f]{1,4}:){2}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:(?:[0-9A-Fa-f]{1,4}:){,3}[0-9A-Fa-f]{1,4})?::[0-9A-Fa-f]{1,4}:(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:(?:[0-9A-Fa-f]{1,4}:){,4}[0-9A-Fa-f]{1,4})?::(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:(?:[0-9A-Fa-f]{1,4}:){,5}[0-9A-Fa-f]{1,4})?::[0-9A-Fa-f]{1,4}|(?:(?:[0-9A-Fa-f]{1,4}:){,6}[0-9A-Fa-f]{1,4})?::)(?:%25(?:[A-Za-z0-9\\-._~]|%[0-9A-Fa-f]{2})+)?$'
)
TIMEDELTA_REGEX = re.compile(r'((?P<days>\d+) days?, )?(?P<hours>\d+):'
r'(?P<minutes>\d+):(?P<seconds>\d+(\.\d+)?)')
MIME_TYPE_REGEX = re.compile(r"^multipart|[-\w.]+/[-\w.\+]+$")
# pylint: disable=W0613
## CORE
@disable_on_env
def uuid(value,
allow_empty = False,
**kwargs):
"""Validate that ``value`` is a valid :class:`UUID <python:uuid.UUID>`.
:param value: The value to validate.
:param allow_empty: If ``True``, returns :obj:`None <python:None>` if
``value`` is empty. If ``False``, raises a
:class:`EmptyValueError <validator_collection.errors.EmptyValueError>`
if ``value`` is empty. Defaults to ``False``.
:type allow_empty: :class:`bool <python:bool>`
:returns: ``value`` coerced to a :class:`UUID <python:uuid.UUID>` object /
:obj:`None <python:None>`
:rtype: :class:`UUID <python:uuid.UUID>` / :obj:`None <python:None>`
:raises EmptyValueError: if ``value`` is empty and ``allow_empty`` is ``False``
:raises CannotCoerceError: if ``value`` cannot be coerced to a
:class:`UUID <python:uuid.UUID>`
"""
if not value and not allow_empty:
raise errors.EmptyValueError('value (%s) was empty' % value)
elif not value:
return None
if isinstance(value, uuid_.UUID):
return value
try:
value = uuid_.UUID(value)
except ValueError:
raise errors.CannotCoerceError('value (%s) cannot be coerced to a valid UUID')
return value
| (value, allow_empty=False, coerce_value=False, minimum=None, maximum=None, base=10, **kwargs) |
725,909 | validator_collection.validators | ip_address | Validate that ``value`` is a valid IP address.
.. note::
First, the validator will check if the address is a valid IPv6 address.
If that doesn't work, the validator will check if the address is a valid
IPv4 address.
If neither works, the validator will raise an error (as always).
:param value: The value to validate.
:param allow_empty: If ``True``, returns :obj:`None <python:None>` if
``value`` is empty. If ``False``, raises a
:class:`EmptyValueError <validator_collection.errors.EmptyValueError>`
if ``value`` is empty. Defaults to ``False``.
:type allow_empty: :class:`bool <python:bool>`
:returns: ``value`` / :obj:`None <python:None>`
:raises EmptyValueError: if ``value`` is empty and ``allow_empty`` is ``False``
:raises InvalidIPAddressError: if ``value`` is not a valid IP address or empty with
``allow_empty`` set to ``True``
| # -*- coding: utf-8 -*-
# The lack of a module docstring for this module is **INTENTIONAL**.
# The module is imported into the documentation using Sphinx's autodoc
# extension, and its member function documentation is automatically incorporated
# there as needed.
import decimal as decimal_
import fractions
import io
import math
import os
import uuid as uuid_
import datetime as datetime_
import string as string_
import sys
from ast import parse
import jsonschema
from validator_collection._compat import numeric_types, integer_types, datetime_types,\
date_types, time_types, timestamp_types, tzinfo_types, POSITIVE_INFINITY, \
NEGATIVE_INFINITY, TimeZone, json_, is_py2, is_py3, dict_, float_, basestring, re
from validator_collection._decorators import disable_on_env
from validator_collection import errors
URL_UNSAFE_CHARACTERS = ('[', ']', '{', '}', '|', '^', '%', '~')
URL_REGEX = re.compile(
r"^"
# protocol identifier
r"(?:(?:https?|ftp)://)"
# user:pass authentication
r"(?:\S+(?::\S*)?@)?"
r"(?:"
# IP address exclusion
# private & local networks
r"(?!(?:10|127)(?:\.\d{1,3}){3})"
r"(?!(?:169\.254|192\.168)(?:\.\d{1,3}){2})"
r"(?!172\.(?:1[6-9]|2\d|3[0-1])(?:\.\d{1,3}){2})"
# IP address dotted notation octets
# excludes loopback network 0.0.0.0
# excludes reserved space >= 224.0.0.0
# excludes network & broadcast addresses
# (first & last IP address of each class)
r"(?:[1-9]\d?|1\d\d|2[01]\d|22[0-3])"
r"(?:\.(?:1?\d{1,2}|2[0-4]\d|25[0-5])){2}"
r"(?:\.(?:[1-9]\d?|1\d\d|2[0-4]\d|25[0-4]))"
r"|"
r"(?:"
r"(?:localhost|invalid|test|example)|("
# host name
r"(?:(?:[A-z\u00a1-\uffff0-9]-*_*)*[A-z\u00a1-\uffff0-9]+)"
# domain name
r"(?:\.(?:[A-z\u00a1-\uffff0-9]-*)*[A-z\u00a1-\uffff0-9]+)*"
# TLD identifier
r"(?:\.(?:[A-z\u00a1-\uffff]{2,}))"
r")))"
# port number
r"(?::\d{2,5})?"
# resource path
r"(?:/\S*)?"
r"$"
, re.UNICODE)
URL_SPECIAL_IP_REGEX = re.compile(
r"^"
# protocol identifier
r"(?:(?:https?|ftp)://)"
# user:pass authentication
r"(?:\S+(?::\S*)?@)?"
r"(?:"
# IP address dotted notation octets
# excludes loopback network 0.0.0.0
# excludes reserved space >= 224.0.0.0
# excludes network & broadcast addresses
# (first & last IP address of each class)
r"(?:[1-9]\d?|1\d\d|2[01]\d|22[0-3])"
r"(?:\.(?:1?\d{1,2}|2[0-4]\d|25[0-5])){2}"
r"(?:\.(?:[1-9]\d?|1\d\d|2[0-4]\d|25[0-4]))"
r"|"
# host name
r"(?:(?:[a-z\u00a1-\uffff0-9]-*)*[a-z\u00a1-\uffff0-9]+)"
# domain name
r"(?:\.(?:[a-z\u00a1-\uffff0-9]-*)*[a-z\u00a1-\uffff0-9]+)*"
# TLD identifier
r"(?:\.(?:[a-z\u00a1-\uffff]{2,}))"
r")"
# port number
r"(?::\d{2,5})?"
# resource path
r"(?:/\S*)?"
r"$"
, re.UNICODE)
DOMAIN_REGEX = re.compile(
r"\b((?=[a-z\u00a1-\uffff0-9-]{1,63}\.)(xn--)?[a-z\u00a1-\uffff0-9]+"
r"(-[a-z\u00a1-\uffff0-9]+)*\.)+[a-z]{2,63}\b",
re.UNICODE|re.IGNORECASE
)
URL_PROTOCOLS = ('http://',
'https://',
'ftp://')
SPECIAL_USE_DOMAIN_NAMES = ('localhost',
'invalid',
'test',
'example')
EMAIL_REGEX = re.compile(
r"(?:[a-z0-9!#$%&'*+/=?^_`{|}~-]+(?:\.[a-z0-9!#$%&'*+/=?^_`{|}~-]+)*|\""
r"(?:[\x01-\x08\x0b\x0c\x0e-\x1f\x21\x23-\x5b\x5d-\x7f]|\\[\x01-\x09\x0b\x0c\x0e-\x7f])*\")"
r"@(?:(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\.)+[a-z0-9](?:[a-z0-9-]*[a-z0-9])"
r"?|\[(?:(?:(2(5[0-5]|[0-4][0-9])|1[0-9][0-9]|[1-9]?[0-9]))\.){3}"
r"(?:(2(5[0-5]|[0-4][0-9])|1[0-9][0-9]|[1-9]?[0-9])|[a-z0-9-]*[a-z0-9]:"
r"(?:[\x01-\x08\x0b\x0c\x0e-\x1f\x21-\x5a\x53-\x7f]|\\[\x01-\x09\x0b\x0c\x0e-\x7f])+)\])"
)
VARIABLE_NAME_REGEX = re.compile(
r"(^[a-zA-Z_])([a-zA-Z0-9_]*)"
)
MAC_ADDRESS_REGEX = re.compile(r'^(?:[0-9a-fA-F]{2}:){5}[0-9a-fA-F]{2}$')
IPV6_REGEX = re.compile(
'^(?:(?:[0-9A-Fa-f]{1,4}:){6}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|::(?:[0-9A-Fa-f]{1,4}:){5}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:[0-9A-Fa-f]{1,4})?::(?:[0-9A-Fa-f]{1,4}:){4}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4})?::(?:[0-9A-Fa-f]{1,4}:){3}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:(?:[0-9A-Fa-f]{1,4}:){,2}[0-9A-Fa-f]{1,4})?::(?:[0-9A-Fa-f]{1,4}:){2}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:(?:[0-9A-Fa-f]{1,4}:){,3}[0-9A-Fa-f]{1,4})?::[0-9A-Fa-f]{1,4}:(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:(?:[0-9A-Fa-f]{1,4}:){,4}[0-9A-Fa-f]{1,4})?::(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:(?:[0-9A-Fa-f]{1,4}:){,5}[0-9A-Fa-f]{1,4})?::[0-9A-Fa-f]{1,4}|(?:(?:[0-9A-Fa-f]{1,4}:){,6}[0-9A-Fa-f]{1,4})?::)(?:%25(?:[A-Za-z0-9\\-._~]|%[0-9A-Fa-f]{2})+)?$'
)
TIMEDELTA_REGEX = re.compile(r'((?P<days>\d+) days?, )?(?P<hours>\d+):'
r'(?P<minutes>\d+):(?P<seconds>\d+(\.\d+)?)')
MIME_TYPE_REGEX = re.compile(r"^multipart|[-\w.]+/[-\w.\+]+$")
# pylint: disable=W0613
## CORE
@disable_on_env
def uuid(value,
allow_empty = False,
**kwargs):
"""Validate that ``value`` is a valid :class:`UUID <python:uuid.UUID>`.
:param value: The value to validate.
:param allow_empty: If ``True``, returns :obj:`None <python:None>` if
``value`` is empty. If ``False``, raises a
:class:`EmptyValueError <validator_collection.errors.EmptyValueError>`
if ``value`` is empty. Defaults to ``False``.
:type allow_empty: :class:`bool <python:bool>`
:returns: ``value`` coerced to a :class:`UUID <python:uuid.UUID>` object /
:obj:`None <python:None>`
:rtype: :class:`UUID <python:uuid.UUID>` / :obj:`None <python:None>`
:raises EmptyValueError: if ``value`` is empty and ``allow_empty`` is ``False``
:raises CannotCoerceError: if ``value`` cannot be coerced to a
:class:`UUID <python:uuid.UUID>`
"""
if not value and not allow_empty:
raise errors.EmptyValueError('value (%s) was empty' % value)
elif not value:
return None
if isinstance(value, uuid_.UUID):
return value
try:
value = uuid_.UUID(value)
except ValueError:
raise errors.CannotCoerceError('value (%s) cannot be coerced to a valid UUID')
return value
| (value, allow_empty=False, **kwargs) |
725,910 | validator_collection.validators | ipv4 | Validate that ``value`` is a valid IP version 4 address.
:param value: The value to validate.
:param allow_empty: If ``True``, returns :obj:`None <python:None>` if
``value`` is empty. If ``False``, raises a
:class:`EmptyValueError <validator_collection.errors.EmptyValueError>`
if ``value`` is empty. Defaults to ``False``.
:type allow_empty: :class:`bool <python:bool>`
:returns: ``value`` / :obj:`None <python:None>`
:raises EmptyValueError: if ``value`` is empty and ``allow_empty`` is ``False``
:raises InvalidIPAddressError: if ``value`` is not a valid IP version 4 address or
empty with ``allow_empty`` set to ``True``
| # -*- coding: utf-8 -*-
# The lack of a module docstring for this module is **INTENTIONAL**.
# The module is imported into the documentation using Sphinx's autodoc
# extension, and its member function documentation is automatically incorporated
# there as needed.
import decimal as decimal_
import fractions
import io
import math
import os
import uuid as uuid_
import datetime as datetime_
import string as string_
import sys
from ast import parse
import jsonschema
from validator_collection._compat import numeric_types, integer_types, datetime_types,\
date_types, time_types, timestamp_types, tzinfo_types, POSITIVE_INFINITY, \
NEGATIVE_INFINITY, TimeZone, json_, is_py2, is_py3, dict_, float_, basestring, re
from validator_collection._decorators import disable_on_env
from validator_collection import errors
URL_UNSAFE_CHARACTERS = ('[', ']', '{', '}', '|', '^', '%', '~')
URL_REGEX = re.compile(
r"^"
# protocol identifier
r"(?:(?:https?|ftp)://)"
# user:pass authentication
r"(?:\S+(?::\S*)?@)?"
r"(?:"
# IP address exclusion
# private & local networks
r"(?!(?:10|127)(?:\.\d{1,3}){3})"
r"(?!(?:169\.254|192\.168)(?:\.\d{1,3}){2})"
r"(?!172\.(?:1[6-9]|2\d|3[0-1])(?:\.\d{1,3}){2})"
# IP address dotted notation octets
# excludes loopback network 0.0.0.0
# excludes reserved space >= 224.0.0.0
# excludes network & broadcast addresses
# (first & last IP address of each class)
r"(?:[1-9]\d?|1\d\d|2[01]\d|22[0-3])"
r"(?:\.(?:1?\d{1,2}|2[0-4]\d|25[0-5])){2}"
r"(?:\.(?:[1-9]\d?|1\d\d|2[0-4]\d|25[0-4]))"
r"|"
r"(?:"
r"(?:localhost|invalid|test|example)|("
# host name
r"(?:(?:[A-z\u00a1-\uffff0-9]-*_*)*[A-z\u00a1-\uffff0-9]+)"
# domain name
r"(?:\.(?:[A-z\u00a1-\uffff0-9]-*)*[A-z\u00a1-\uffff0-9]+)*"
# TLD identifier
r"(?:\.(?:[A-z\u00a1-\uffff]{2,}))"
r")))"
# port number
r"(?::\d{2,5})?"
# resource path
r"(?:/\S*)?"
r"$"
, re.UNICODE)
URL_SPECIAL_IP_REGEX = re.compile(
r"^"
# protocol identifier
r"(?:(?:https?|ftp)://)"
# user:pass authentication
r"(?:\S+(?::\S*)?@)?"
r"(?:"
# IP address dotted notation octets
# excludes loopback network 0.0.0.0
# excludes reserved space >= 224.0.0.0
# excludes network & broadcast addresses
# (first & last IP address of each class)
r"(?:[1-9]\d?|1\d\d|2[01]\d|22[0-3])"
r"(?:\.(?:1?\d{1,2}|2[0-4]\d|25[0-5])){2}"
r"(?:\.(?:[1-9]\d?|1\d\d|2[0-4]\d|25[0-4]))"
r"|"
# host name
r"(?:(?:[a-z\u00a1-\uffff0-9]-*)*[a-z\u00a1-\uffff0-9]+)"
# domain name
r"(?:\.(?:[a-z\u00a1-\uffff0-9]-*)*[a-z\u00a1-\uffff0-9]+)*"
# TLD identifier
r"(?:\.(?:[a-z\u00a1-\uffff]{2,}))"
r")"
# port number
r"(?::\d{2,5})?"
# resource path
r"(?:/\S*)?"
r"$"
, re.UNICODE)
DOMAIN_REGEX = re.compile(
r"\b((?=[a-z\u00a1-\uffff0-9-]{1,63}\.)(xn--)?[a-z\u00a1-\uffff0-9]+"
r"(-[a-z\u00a1-\uffff0-9]+)*\.)+[a-z]{2,63}\b",
re.UNICODE|re.IGNORECASE
)
URL_PROTOCOLS = ('http://',
'https://',
'ftp://')
SPECIAL_USE_DOMAIN_NAMES = ('localhost',
'invalid',
'test',
'example')
EMAIL_REGEX = re.compile(
r"(?:[a-z0-9!#$%&'*+/=?^_`{|}~-]+(?:\.[a-z0-9!#$%&'*+/=?^_`{|}~-]+)*|\""
r"(?:[\x01-\x08\x0b\x0c\x0e-\x1f\x21\x23-\x5b\x5d-\x7f]|\\[\x01-\x09\x0b\x0c\x0e-\x7f])*\")"
r"@(?:(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\.)+[a-z0-9](?:[a-z0-9-]*[a-z0-9])"
r"?|\[(?:(?:(2(5[0-5]|[0-4][0-9])|1[0-9][0-9]|[1-9]?[0-9]))\.){3}"
r"(?:(2(5[0-5]|[0-4][0-9])|1[0-9][0-9]|[1-9]?[0-9])|[a-z0-9-]*[a-z0-9]:"
r"(?:[\x01-\x08\x0b\x0c\x0e-\x1f\x21-\x5a\x53-\x7f]|\\[\x01-\x09\x0b\x0c\x0e-\x7f])+)\])"
)
VARIABLE_NAME_REGEX = re.compile(
r"(^[a-zA-Z_])([a-zA-Z0-9_]*)"
)
MAC_ADDRESS_REGEX = re.compile(r'^(?:[0-9a-fA-F]{2}:){5}[0-9a-fA-F]{2}$')
IPV6_REGEX = re.compile(
'^(?:(?:[0-9A-Fa-f]{1,4}:){6}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|::(?:[0-9A-Fa-f]{1,4}:){5}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:[0-9A-Fa-f]{1,4})?::(?:[0-9A-Fa-f]{1,4}:){4}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4})?::(?:[0-9A-Fa-f]{1,4}:){3}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:(?:[0-9A-Fa-f]{1,4}:){,2}[0-9A-Fa-f]{1,4})?::(?:[0-9A-Fa-f]{1,4}:){2}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:(?:[0-9A-Fa-f]{1,4}:){,3}[0-9A-Fa-f]{1,4})?::[0-9A-Fa-f]{1,4}:(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:(?:[0-9A-Fa-f]{1,4}:){,4}[0-9A-Fa-f]{1,4})?::(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:(?:[0-9A-Fa-f]{1,4}:){,5}[0-9A-Fa-f]{1,4})?::[0-9A-Fa-f]{1,4}|(?:(?:[0-9A-Fa-f]{1,4}:){,6}[0-9A-Fa-f]{1,4})?::)(?:%25(?:[A-Za-z0-9\\-._~]|%[0-9A-Fa-f]{2})+)?$'
)
TIMEDELTA_REGEX = re.compile(r'((?P<days>\d+) days?, )?(?P<hours>\d+):'
r'(?P<minutes>\d+):(?P<seconds>\d+(\.\d+)?)')
MIME_TYPE_REGEX = re.compile(r"^multipart|[-\w.]+/[-\w.\+]+$")
# pylint: disable=W0613
## CORE
@disable_on_env
def uuid(value,
allow_empty = False,
**kwargs):
"""Validate that ``value`` is a valid :class:`UUID <python:uuid.UUID>`.
:param value: The value to validate.
:param allow_empty: If ``True``, returns :obj:`None <python:None>` if
``value`` is empty. If ``False``, raises a
:class:`EmptyValueError <validator_collection.errors.EmptyValueError>`
if ``value`` is empty. Defaults to ``False``.
:type allow_empty: :class:`bool <python:bool>`
:returns: ``value`` coerced to a :class:`UUID <python:uuid.UUID>` object /
:obj:`None <python:None>`
:rtype: :class:`UUID <python:uuid.UUID>` / :obj:`None <python:None>`
:raises EmptyValueError: if ``value`` is empty and ``allow_empty`` is ``False``
:raises CannotCoerceError: if ``value`` cannot be coerced to a
:class:`UUID <python:uuid.UUID>`
"""
if not value and not allow_empty:
raise errors.EmptyValueError('value (%s) was empty' % value)
elif not value:
return None
if isinstance(value, uuid_.UUID):
return value
try:
value = uuid_.UUID(value)
except ValueError:
raise errors.CannotCoerceError('value (%s) cannot be coerced to a valid UUID')
return value
| (value, allow_empty=False) |
725,911 | validator_collection.validators | ipv6 | Validate that ``value`` is a valid IP address version 6.
:param value: The value to validate.
:param allow_empty: If ``True``, returns :obj:`None <python:None>` if
``value`` is empty. If ``False``, raises a
:class:`EmptyValueError <validator_collection.errors.EmptyValueError>`
if ``value`` is empty. Defaults to ``False``.
:type allow_empty: :class:`bool <python:bool>`
:returns: ``value`` / :obj:`None <python:None>`
:raises EmptyValueError: if ``value`` is empty and ``allow_empty`` is ``False``
:raises InvalidIPAddressError: if ``value`` is not a valid IP version 6 address or
empty with ``allow_empty`` is not set to ``True``
| # -*- coding: utf-8 -*-
# The lack of a module docstring for this module is **INTENTIONAL**.
# The module is imported into the documentation using Sphinx's autodoc
# extension, and its member function documentation is automatically incorporated
# there as needed.
import decimal as decimal_
import fractions
import io
import math
import os
import uuid as uuid_
import datetime as datetime_
import string as string_
import sys
from ast import parse
import jsonschema
from validator_collection._compat import numeric_types, integer_types, datetime_types,\
date_types, time_types, timestamp_types, tzinfo_types, POSITIVE_INFINITY, \
NEGATIVE_INFINITY, TimeZone, json_, is_py2, is_py3, dict_, float_, basestring, re
from validator_collection._decorators import disable_on_env
from validator_collection import errors
URL_UNSAFE_CHARACTERS = ('[', ']', '{', '}', '|', '^', '%', '~')
URL_REGEX = re.compile(
r"^"
# protocol identifier
r"(?:(?:https?|ftp)://)"
# user:pass authentication
r"(?:\S+(?::\S*)?@)?"
r"(?:"
# IP address exclusion
# private & local networks
r"(?!(?:10|127)(?:\.\d{1,3}){3})"
r"(?!(?:169\.254|192\.168)(?:\.\d{1,3}){2})"
r"(?!172\.(?:1[6-9]|2\d|3[0-1])(?:\.\d{1,3}){2})"
# IP address dotted notation octets
# excludes loopback network 0.0.0.0
# excludes reserved space >= 224.0.0.0
# excludes network & broadcast addresses
# (first & last IP address of each class)
r"(?:[1-9]\d?|1\d\d|2[01]\d|22[0-3])"
r"(?:\.(?:1?\d{1,2}|2[0-4]\d|25[0-5])){2}"
r"(?:\.(?:[1-9]\d?|1\d\d|2[0-4]\d|25[0-4]))"
r"|"
r"(?:"
r"(?:localhost|invalid|test|example)|("
# host name
r"(?:(?:[A-z\u00a1-\uffff0-9]-*_*)*[A-z\u00a1-\uffff0-9]+)"
# domain name
r"(?:\.(?:[A-z\u00a1-\uffff0-9]-*)*[A-z\u00a1-\uffff0-9]+)*"
# TLD identifier
r"(?:\.(?:[A-z\u00a1-\uffff]{2,}))"
r")))"
# port number
r"(?::\d{2,5})?"
# resource path
r"(?:/\S*)?"
r"$"
, re.UNICODE)
URL_SPECIAL_IP_REGEX = re.compile(
r"^"
# protocol identifier
r"(?:(?:https?|ftp)://)"
# user:pass authentication
r"(?:\S+(?::\S*)?@)?"
r"(?:"
# IP address dotted notation octets
# excludes loopback network 0.0.0.0
# excludes reserved space >= 224.0.0.0
# excludes network & broadcast addresses
# (first & last IP address of each class)
r"(?:[1-9]\d?|1\d\d|2[01]\d|22[0-3])"
r"(?:\.(?:1?\d{1,2}|2[0-4]\d|25[0-5])){2}"
r"(?:\.(?:[1-9]\d?|1\d\d|2[0-4]\d|25[0-4]))"
r"|"
# host name
r"(?:(?:[a-z\u00a1-\uffff0-9]-*)*[a-z\u00a1-\uffff0-9]+)"
# domain name
r"(?:\.(?:[a-z\u00a1-\uffff0-9]-*)*[a-z\u00a1-\uffff0-9]+)*"
# TLD identifier
r"(?:\.(?:[a-z\u00a1-\uffff]{2,}))"
r")"
# port number
r"(?::\d{2,5})?"
# resource path
r"(?:/\S*)?"
r"$"
, re.UNICODE)
DOMAIN_REGEX = re.compile(
r"\b((?=[a-z\u00a1-\uffff0-9-]{1,63}\.)(xn--)?[a-z\u00a1-\uffff0-9]+"
r"(-[a-z\u00a1-\uffff0-9]+)*\.)+[a-z]{2,63}\b",
re.UNICODE|re.IGNORECASE
)
URL_PROTOCOLS = ('http://',
'https://',
'ftp://')
SPECIAL_USE_DOMAIN_NAMES = ('localhost',
'invalid',
'test',
'example')
EMAIL_REGEX = re.compile(
r"(?:[a-z0-9!#$%&'*+/=?^_`{|}~-]+(?:\.[a-z0-9!#$%&'*+/=?^_`{|}~-]+)*|\""
r"(?:[\x01-\x08\x0b\x0c\x0e-\x1f\x21\x23-\x5b\x5d-\x7f]|\\[\x01-\x09\x0b\x0c\x0e-\x7f])*\")"
r"@(?:(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\.)+[a-z0-9](?:[a-z0-9-]*[a-z0-9])"
r"?|\[(?:(?:(2(5[0-5]|[0-4][0-9])|1[0-9][0-9]|[1-9]?[0-9]))\.){3}"
r"(?:(2(5[0-5]|[0-4][0-9])|1[0-9][0-9]|[1-9]?[0-9])|[a-z0-9-]*[a-z0-9]:"
r"(?:[\x01-\x08\x0b\x0c\x0e-\x1f\x21-\x5a\x53-\x7f]|\\[\x01-\x09\x0b\x0c\x0e-\x7f])+)\])"
)
VARIABLE_NAME_REGEX = re.compile(
r"(^[a-zA-Z_])([a-zA-Z0-9_]*)"
)
MAC_ADDRESS_REGEX = re.compile(r'^(?:[0-9a-fA-F]{2}:){5}[0-9a-fA-F]{2}$')
IPV6_REGEX = re.compile(
'^(?:(?:[0-9A-Fa-f]{1,4}:){6}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|::(?:[0-9A-Fa-f]{1,4}:){5}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:[0-9A-Fa-f]{1,4})?::(?:[0-9A-Fa-f]{1,4}:){4}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4})?::(?:[0-9A-Fa-f]{1,4}:){3}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:(?:[0-9A-Fa-f]{1,4}:){,2}[0-9A-Fa-f]{1,4})?::(?:[0-9A-Fa-f]{1,4}:){2}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:(?:[0-9A-Fa-f]{1,4}:){,3}[0-9A-Fa-f]{1,4})?::[0-9A-Fa-f]{1,4}:(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:(?:[0-9A-Fa-f]{1,4}:){,4}[0-9A-Fa-f]{1,4})?::(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:(?:[0-9A-Fa-f]{1,4}:){,5}[0-9A-Fa-f]{1,4})?::[0-9A-Fa-f]{1,4}|(?:(?:[0-9A-Fa-f]{1,4}:){,6}[0-9A-Fa-f]{1,4})?::)(?:%25(?:[A-Za-z0-9\\-._~]|%[0-9A-Fa-f]{2})+)?$'
)
TIMEDELTA_REGEX = re.compile(r'((?P<days>\d+) days?, )?(?P<hours>\d+):'
r'(?P<minutes>\d+):(?P<seconds>\d+(\.\d+)?)')
MIME_TYPE_REGEX = re.compile(r"^multipart|[-\w.]+/[-\w.\+]+$")
# pylint: disable=W0613
## CORE
@disable_on_env
def uuid(value,
allow_empty = False,
**kwargs):
"""Validate that ``value`` is a valid :class:`UUID <python:uuid.UUID>`.
:param value: The value to validate.
:param allow_empty: If ``True``, returns :obj:`None <python:None>` if
``value`` is empty. If ``False``, raises a
:class:`EmptyValueError <validator_collection.errors.EmptyValueError>`
if ``value`` is empty. Defaults to ``False``.
:type allow_empty: :class:`bool <python:bool>`
:returns: ``value`` coerced to a :class:`UUID <python:uuid.UUID>` object /
:obj:`None <python:None>`
:rtype: :class:`UUID <python:uuid.UUID>` / :obj:`None <python:None>`
:raises EmptyValueError: if ``value`` is empty and ``allow_empty`` is ``False``
:raises CannotCoerceError: if ``value`` cannot be coerced to a
:class:`UUID <python:uuid.UUID>`
"""
if not value and not allow_empty:
raise errors.EmptyValueError('value (%s) was empty' % value)
elif not value:
return None
if isinstance(value, uuid_.UUID):
return value
try:
value = uuid_.UUID(value)
except ValueError:
raise errors.CannotCoerceError('value (%s) cannot be coerced to a valid UUID')
return value
| (value, allow_empty=False, **kwargs) |
725,912 | validator_collection.checkers | is_between | Indicate whether ``value`` is greater than or equal to a supplied ``minimum``
and/or less than or equal to ``maximum``.
.. note::
This function works on any ``value`` that support comparison operators,
whether they are numbers or not. Technically, this means that ``value``,
``minimum``, or ``maximum`` need to implement the Python magic methods
:func:`__lte__ <python:object.__lte__>` and :func:`__gte__ <python:object.__gte__>`.
If ``value``, ``minimum``, or ``maximum`` do not support comparison
operators, they will raise :class:`NotImplemented <python:NotImplemented>`.
:param value: The ``value`` to check.
:type value: anything that supports comparison operators
:param minimum: If supplied, will return ``True`` if ``value`` is greater than or
equal to this value.
:type minimum: anything that supports comparison operators /
:obj:`None <python:None>`
:param maximum: If supplied, will return ``True`` if ``value`` is less than or
equal to this value.
:type maximum: anything that supports comparison operators /
:obj:`None <python:None>`
:returns: ``True`` if ``value`` is greater than or equal to a supplied ``minimum``
and less than or equal to a supplied ``maximum``. Otherwise, returns ``False``.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
:raises NotImplemented: if ``value``, ``minimum``, or ``maximum`` do not
support comparison operators
:raises ValueError: if both ``minimum`` and ``maximum`` are
:obj:`None <python:None>`
| def is_type(obj,
type_,
**kwargs):
"""Indicate if ``obj`` is a type in ``type_``.
.. hint::
This checker is particularly useful when you want to evaluate whether
``obj`` is of a particular type, but importing that type directly to use
in :func:`isinstance() <python:isinstance>` would cause a circular import
error.
To use this checker in that kind of situation, you can instead pass the
*name* of the type you want to check as a string in ``type_``. The checker
will evaluate it and see whether ``obj`` is of a type or inherits from a
type whose name matches the string you passed.
:param obj: The object whose type should be checked.
:type obj: :class:`object <python:object>`
:param type_: The type(s) to check against.
:type type_: :class:`type <python:type>` / iterable of :class:`type <python:type>` /
:class:`str <python:str>` with type name / iterable of :class:`str <python:str>`
with type name
:returns: ``True`` if ``obj`` is a type in ``type_``. Otherwise, ``False``.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
"""
if not is_iterable(type_):
type_ = [type_]
return_value = False
for check_for_type in type_:
if isinstance(check_for_type, type):
return_value = isinstance(obj, check_for_type)
if not return_value:
try:
return_value = issubclass(obj, check_for_type)
except TypeError:
pass
elif obj.__class__.__name__ == check_for_type:
return_value = True
else:
return_value = _check_base_classes(obj.__class__.__bases__,
check_for_type)
if not return_value:
try:
return_value = issubclass(obj, check_for_type)
except TypeError:
pass
if return_value is True:
break
return return_value
| (value, minimum=None, maximum=None, **kwargs) |
725,913 | validator_collection.checkers | is_bytesIO | Indicate whether ``value`` is a :class:`BytesIO <python:io.BytesIO>` object.
.. note::
This checker will return ``True`` even if ``value`` is empty, so long as
its type is a :class:`BytesIO <python:io.BytesIO>`.
:param value: The value to evaluate.
:returns: ``True`` if ``value`` is valid, ``False`` if it is not.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
| def is_type(obj,
type_,
**kwargs):
"""Indicate if ``obj`` is a type in ``type_``.
.. hint::
This checker is particularly useful when you want to evaluate whether
``obj`` is of a particular type, but importing that type directly to use
in :func:`isinstance() <python:isinstance>` would cause a circular import
error.
To use this checker in that kind of situation, you can instead pass the
*name* of the type you want to check as a string in ``type_``. The checker
will evaluate it and see whether ``obj`` is of a type or inherits from a
type whose name matches the string you passed.
:param obj: The object whose type should be checked.
:type obj: :class:`object <python:object>`
:param type_: The type(s) to check against.
:type type_: :class:`type <python:type>` / iterable of :class:`type <python:type>` /
:class:`str <python:str>` with type name / iterable of :class:`str <python:str>`
with type name
:returns: ``True`` if ``obj`` is a type in ``type_``. Otherwise, ``False``.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
"""
if not is_iterable(type_):
type_ = [type_]
return_value = False
for check_for_type in type_:
if isinstance(check_for_type, type):
return_value = isinstance(obj, check_for_type)
if not return_value:
try:
return_value = issubclass(obj, check_for_type)
except TypeError:
pass
elif obj.__class__.__name__ == check_for_type:
return_value = True
else:
return_value = _check_base_classes(obj.__class__.__bases__,
check_for_type)
if not return_value:
try:
return_value = issubclass(obj, check_for_type)
except TypeError:
pass
if return_value is True:
break
return return_value
| (value, **kwargs) |
725,914 | validator_collection.checkers | is_date | Indicate whether ``value`` is a :class:`date <python:datetime.date>`.
:param value: The value to evaluate.
:param minimum: If supplied, will make sure that ``value`` is on or after
this value.
:type minimum: :class:`datetime <python:datetime.datetime>` /
:class:`date <python:datetime.date>` / compliant :class:`str <python:str>`
/ :obj:`None <python:None>`
:param maximum: If supplied, will make sure that ``value`` is on or before this
value.
:type maximum: :class:`datetime <python:datetime.datetime>` /
:class:`date <python:datetime.date>` / compliant :class:`str <python:str>`
/ :obj:`None <python:None>`
:param coerce_value: If ``True``, will return ``True`` if ``value`` can be
coerced to a :class:`date <python:datetime.date>`. If ``False``,
will only return ``True`` if ``value`` is a date value only. Defaults to
``False``.
:type coerce_value: :class:`bool <python:bool>`
:returns: ``True`` if ``value`` is valid, ``False`` if it is not.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
| def is_type(obj,
type_,
**kwargs):
"""Indicate if ``obj`` is a type in ``type_``.
.. hint::
This checker is particularly useful when you want to evaluate whether
``obj`` is of a particular type, but importing that type directly to use
in :func:`isinstance() <python:isinstance>` would cause a circular import
error.
To use this checker in that kind of situation, you can instead pass the
*name* of the type you want to check as a string in ``type_``. The checker
will evaluate it and see whether ``obj`` is of a type or inherits from a
type whose name matches the string you passed.
:param obj: The object whose type should be checked.
:type obj: :class:`object <python:object>`
:param type_: The type(s) to check against.
:type type_: :class:`type <python:type>` / iterable of :class:`type <python:type>` /
:class:`str <python:str>` with type name / iterable of :class:`str <python:str>`
with type name
:returns: ``True`` if ``obj`` is a type in ``type_``. Otherwise, ``False``.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
"""
if not is_iterable(type_):
type_ = [type_]
return_value = False
for check_for_type in type_:
if isinstance(check_for_type, type):
return_value = isinstance(obj, check_for_type)
if not return_value:
try:
return_value = issubclass(obj, check_for_type)
except TypeError:
pass
elif obj.__class__.__name__ == check_for_type:
return_value = True
else:
return_value = _check_base_classes(obj.__class__.__bases__,
check_for_type)
if not return_value:
try:
return_value = issubclass(obj, check_for_type)
except TypeError:
pass
if return_value is True:
break
return return_value
| (value, minimum=None, maximum=None, coerce_value=False, **kwargs) |
725,915 | validator_collection.checkers | is_datetime | Indicate whether ``value`` is a :class:`datetime <python:datetime.datetime>`.
:param value: The value to evaluate.
:param minimum: If supplied, will make sure that ``value`` is on or after
this value.
:type minimum: :class:`datetime <python:datetime.datetime>` /
:class:`date <python:datetime.date>` / compliant :class:`str <python:str>`
/ :obj:`None <python:None>`
:param maximum: If supplied, will make sure that ``value`` is on or before this
value.
:type maximum: :class:`datetime <python:datetime.datetime>` /
:class:`date <python:datetime.date>` / compliant :class:`str <python:str>`
/ :obj:`None <python:None>`
:param coerce_value: If ``True``, will return ``True`` if ``value`` can be
coerced to a :class:`datetime <python:datetime.datetime>`. If ``False``,
will only return ``True`` if ``value`` is a complete timestamp. Defaults to
``False``.
:type coerce_value: :class:`bool <python:bool>`
:returns: ``True`` if ``value`` is valid, ``False`` if it is not.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
| def is_type(obj,
type_,
**kwargs):
"""Indicate if ``obj`` is a type in ``type_``.
.. hint::
This checker is particularly useful when you want to evaluate whether
``obj`` is of a particular type, but importing that type directly to use
in :func:`isinstance() <python:isinstance>` would cause a circular import
error.
To use this checker in that kind of situation, you can instead pass the
*name* of the type you want to check as a string in ``type_``. The checker
will evaluate it and see whether ``obj`` is of a type or inherits from a
type whose name matches the string you passed.
:param obj: The object whose type should be checked.
:type obj: :class:`object <python:object>`
:param type_: The type(s) to check against.
:type type_: :class:`type <python:type>` / iterable of :class:`type <python:type>` /
:class:`str <python:str>` with type name / iterable of :class:`str <python:str>`
with type name
:returns: ``True`` if ``obj`` is a type in ``type_``. Otherwise, ``False``.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
"""
if not is_iterable(type_):
type_ = [type_]
return_value = False
for check_for_type in type_:
if isinstance(check_for_type, type):
return_value = isinstance(obj, check_for_type)
if not return_value:
try:
return_value = issubclass(obj, check_for_type)
except TypeError:
pass
elif obj.__class__.__name__ == check_for_type:
return_value = True
else:
return_value = _check_base_classes(obj.__class__.__bases__,
check_for_type)
if not return_value:
try:
return_value = issubclass(obj, check_for_type)
except TypeError:
pass
if return_value is True:
break
return return_value
| (value, minimum=None, maximum=None, coerce_value=False, **kwargs) |
725,916 | validator_collection.checkers | is_decimal | Indicate whether ``value`` contains a :class:`Decimal <python:decimal.Decimal>`.
:param value: The value to evaluate.
:param minimum: If supplied, will make sure that ``value`` is greater than or
equal to this value.
:type minimum: numeric
:param maximum: If supplied, will make sure that ``value`` is less than or
equal to this value.
:type maximum: numeric
:returns: ``True`` if ``value`` is valid, ``False`` if it is not.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
| def is_type(obj,
type_,
**kwargs):
"""Indicate if ``obj`` is a type in ``type_``.
.. hint::
This checker is particularly useful when you want to evaluate whether
``obj`` is of a particular type, but importing that type directly to use
in :func:`isinstance() <python:isinstance>` would cause a circular import
error.
To use this checker in that kind of situation, you can instead pass the
*name* of the type you want to check as a string in ``type_``. The checker
will evaluate it and see whether ``obj`` is of a type or inherits from a
type whose name matches the string you passed.
:param obj: The object whose type should be checked.
:type obj: :class:`object <python:object>`
:param type_: The type(s) to check against.
:type type_: :class:`type <python:type>` / iterable of :class:`type <python:type>` /
:class:`str <python:str>` with type name / iterable of :class:`str <python:str>`
with type name
:returns: ``True`` if ``obj`` is a type in ``type_``. Otherwise, ``False``.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
"""
if not is_iterable(type_):
type_ = [type_]
return_value = False
for check_for_type in type_:
if isinstance(check_for_type, type):
return_value = isinstance(obj, check_for_type)
if not return_value:
try:
return_value = issubclass(obj, check_for_type)
except TypeError:
pass
elif obj.__class__.__name__ == check_for_type:
return_value = True
else:
return_value = _check_base_classes(obj.__class__.__bases__,
check_for_type)
if not return_value:
try:
return_value = issubclass(obj, check_for_type)
except TypeError:
pass
if return_value is True:
break
return return_value
| (value, minimum=None, maximum=None, **kwargs) |
725,917 | validator_collection.checkers | is_dict | Indicate whether ``value`` is a valid :class:`dict <python:dict>`
.. note::
This will return ``True`` even if ``value`` is an empty
:class:`dict <python:dict>`.
:param value: The value to evaluate.
:returns: ``True`` if ``value`` is valid, ``False`` if it is not.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
| def is_type(obj,
type_,
**kwargs):
"""Indicate if ``obj`` is a type in ``type_``.
.. hint::
This checker is particularly useful when you want to evaluate whether
``obj`` is of a particular type, but importing that type directly to use
in :func:`isinstance() <python:isinstance>` would cause a circular import
error.
To use this checker in that kind of situation, you can instead pass the
*name* of the type you want to check as a string in ``type_``. The checker
will evaluate it and see whether ``obj`` is of a type or inherits from a
type whose name matches the string you passed.
:param obj: The object whose type should be checked.
:type obj: :class:`object <python:object>`
:param type_: The type(s) to check against.
:type type_: :class:`type <python:type>` / iterable of :class:`type <python:type>` /
:class:`str <python:str>` with type name / iterable of :class:`str <python:str>`
with type name
:returns: ``True`` if ``obj`` is a type in ``type_``. Otherwise, ``False``.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
"""
if not is_iterable(type_):
type_ = [type_]
return_value = False
for check_for_type in type_:
if isinstance(check_for_type, type):
return_value = isinstance(obj, check_for_type)
if not return_value:
try:
return_value = issubclass(obj, check_for_type)
except TypeError:
pass
elif obj.__class__.__name__ == check_for_type:
return_value = True
else:
return_value = _check_base_classes(obj.__class__.__bases__,
check_for_type)
if not return_value:
try:
return_value = issubclass(obj, check_for_type)
except TypeError:
pass
if return_value is True:
break
return return_value
| (value, **kwargs) |
725,918 | validator_collection.checkers | is_directory | Indicate whether ``value`` is a directory that exists on the local filesystem.
:param value: The value to evaluate.
:returns: ``True`` if ``value`` is valid, ``False`` if it is not.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
| def is_type(obj,
type_,
**kwargs):
"""Indicate if ``obj`` is a type in ``type_``.
.. hint::
This checker is particularly useful when you want to evaluate whether
``obj`` is of a particular type, but importing that type directly to use
in :func:`isinstance() <python:isinstance>` would cause a circular import
error.
To use this checker in that kind of situation, you can instead pass the
*name* of the type you want to check as a string in ``type_``. The checker
will evaluate it and see whether ``obj`` is of a type or inherits from a
type whose name matches the string you passed.
:param obj: The object whose type should be checked.
:type obj: :class:`object <python:object>`
:param type_: The type(s) to check against.
:type type_: :class:`type <python:type>` / iterable of :class:`type <python:type>` /
:class:`str <python:str>` with type name / iterable of :class:`str <python:str>`
with type name
:returns: ``True`` if ``obj`` is a type in ``type_``. Otherwise, ``False``.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
"""
if not is_iterable(type_):
type_ = [type_]
return_value = False
for check_for_type in type_:
if isinstance(check_for_type, type):
return_value = isinstance(obj, check_for_type)
if not return_value:
try:
return_value = issubclass(obj, check_for_type)
except TypeError:
pass
elif obj.__class__.__name__ == check_for_type:
return_value = True
else:
return_value = _check_base_classes(obj.__class__.__bases__,
check_for_type)
if not return_value:
try:
return_value = issubclass(obj, check_for_type)
except TypeError:
pass
if return_value is True:
break
return return_value
| (value, **kwargs) |
725,919 | validator_collection.checkers | is_domain | Indicate whether ``value`` is a valid domain.
.. caution::
This validator does not verify that ``value`` **exists** as a domain. It
merely verifies that its contents *might* exist as a domain.
.. note::
This validator checks to validate that ``value`` resembles a valid
domain name. It is - generally - compliant with
`RFC 1035 <https://tools.ietf.org/html/rfc1035>`_ and
`RFC 6761 <https://tools.ietf.org/html/rfc6761>`_, however it diverges
in a number of key ways:
* Including authentication (e.g. ``username:[email protected]``) will
fail validation.
* Including a path (e.g. ``domain.dev/path/to/file``) will fail validation.
* Including a port (e.g. ``domain.dev:8080``) will fail validation.
If you are hoping to validate a more complete URL, we recommend that you
see :func:`url <validator_collection.validators.url>`.
:param value: The value to evaluate.
:param allow_ips: If ``True``, will succeed when validating IP addresses,
If ``False``, will fail if ``value`` is an IP address. Defaults to ``False``.
:type allow_ips: :class:`bool <python:bool>`
:returns: ``True`` if ``value`` is valid, ``False`` if it is not.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
| def is_type(obj,
type_,
**kwargs):
"""Indicate if ``obj`` is a type in ``type_``.
.. hint::
This checker is particularly useful when you want to evaluate whether
``obj`` is of a particular type, but importing that type directly to use
in :func:`isinstance() <python:isinstance>` would cause a circular import
error.
To use this checker in that kind of situation, you can instead pass the
*name* of the type you want to check as a string in ``type_``. The checker
will evaluate it and see whether ``obj`` is of a type or inherits from a
type whose name matches the string you passed.
:param obj: The object whose type should be checked.
:type obj: :class:`object <python:object>`
:param type_: The type(s) to check against.
:type type_: :class:`type <python:type>` / iterable of :class:`type <python:type>` /
:class:`str <python:str>` with type name / iterable of :class:`str <python:str>`
with type name
:returns: ``True`` if ``obj`` is a type in ``type_``. Otherwise, ``False``.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
"""
if not is_iterable(type_):
type_ = [type_]
return_value = False
for check_for_type in type_:
if isinstance(check_for_type, type):
return_value = isinstance(obj, check_for_type)
if not return_value:
try:
return_value = issubclass(obj, check_for_type)
except TypeError:
pass
elif obj.__class__.__name__ == check_for_type:
return_value = True
else:
return_value = _check_base_classes(obj.__class__.__bases__,
check_for_type)
if not return_value:
try:
return_value = issubclass(obj, check_for_type)
except TypeError:
pass
if return_value is True:
break
return return_value
| (value, **kwargs) |
725,920 | validator_collection.checkers | is_email | Indicate whether ``value`` is an email address.
.. note::
Email address validation is...complicated. The methodology that we have
adopted here is *generally* compliant with
`RFC 5322 <https://tools.ietf.org/html/rfc5322>`_ and uses a combination of
string parsing and regular expressions.
String parsing in particular is used to validate certain *highly unusual*
but still valid email patterns, including the use of escaped text and
comments within an email address' local address (the user name part).
This approach ensures more complete coverage for unusual edge cases, while
still letting us use regular expressions that perform quickly.
:param value: The value to evaluate.
:returns: ``True`` if ``value`` is valid, ``False`` if it is not.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
| def is_type(obj,
type_,
**kwargs):
"""Indicate if ``obj`` is a type in ``type_``.
.. hint::
This checker is particularly useful when you want to evaluate whether
``obj`` is of a particular type, but importing that type directly to use
in :func:`isinstance() <python:isinstance>` would cause a circular import
error.
To use this checker in that kind of situation, you can instead pass the
*name* of the type you want to check as a string in ``type_``. The checker
will evaluate it and see whether ``obj`` is of a type or inherits from a
type whose name matches the string you passed.
:param obj: The object whose type should be checked.
:type obj: :class:`object <python:object>`
:param type_: The type(s) to check against.
:type type_: :class:`type <python:type>` / iterable of :class:`type <python:type>` /
:class:`str <python:str>` with type name / iterable of :class:`str <python:str>`
with type name
:returns: ``True`` if ``obj`` is a type in ``type_``. Otherwise, ``False``.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
"""
if not is_iterable(type_):
type_ = [type_]
return_value = False
for check_for_type in type_:
if isinstance(check_for_type, type):
return_value = isinstance(obj, check_for_type)
if not return_value:
try:
return_value = issubclass(obj, check_for_type)
except TypeError:
pass
elif obj.__class__.__name__ == check_for_type:
return_value = True
else:
return_value = _check_base_classes(obj.__class__.__bases__,
check_for_type)
if not return_value:
try:
return_value = issubclass(obj, check_for_type)
except TypeError:
pass
if return_value is True:
break
return return_value
| (value, **kwargs) |
725,921 | validator_collection.checkers | is_file | Indicate whether ``value`` is a file that exists on the local filesystem.
:param value: The value to evaluate.
:returns: ``True`` if ``value`` is valid, ``False`` if it is not.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
| def is_type(obj,
type_,
**kwargs):
"""Indicate if ``obj`` is a type in ``type_``.
.. hint::
This checker is particularly useful when you want to evaluate whether
``obj`` is of a particular type, but importing that type directly to use
in :func:`isinstance() <python:isinstance>` would cause a circular import
error.
To use this checker in that kind of situation, you can instead pass the
*name* of the type you want to check as a string in ``type_``. The checker
will evaluate it and see whether ``obj`` is of a type or inherits from a
type whose name matches the string you passed.
:param obj: The object whose type should be checked.
:type obj: :class:`object <python:object>`
:param type_: The type(s) to check against.
:type type_: :class:`type <python:type>` / iterable of :class:`type <python:type>` /
:class:`str <python:str>` with type name / iterable of :class:`str <python:str>`
with type name
:returns: ``True`` if ``obj`` is a type in ``type_``. Otherwise, ``False``.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
"""
if not is_iterable(type_):
type_ = [type_]
return_value = False
for check_for_type in type_:
if isinstance(check_for_type, type):
return_value = isinstance(obj, check_for_type)
if not return_value:
try:
return_value = issubclass(obj, check_for_type)
except TypeError:
pass
elif obj.__class__.__name__ == check_for_type:
return_value = True
else:
return_value = _check_base_classes(obj.__class__.__bases__,
check_for_type)
if not return_value:
try:
return_value = issubclass(obj, check_for_type)
except TypeError:
pass
if return_value is True:
break
return return_value
| (value, **kwargs) |
725,922 | validator_collection.checkers | is_float | Indicate whether ``value`` is a :class:`float <python:float>`.
:param value: The value to evaluate.
:param minimum: If supplied, will make sure that ``value`` is greater than or
equal to this value.
:type minimum: numeric
:param maximum: If supplied, will make sure that ``value`` is less than or
equal to this value.
:type maximum: numeric
:returns: ``True`` if ``value`` is valid, ``False`` if it is not.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
| def is_type(obj,
type_,
**kwargs):
"""Indicate if ``obj`` is a type in ``type_``.
.. hint::
This checker is particularly useful when you want to evaluate whether
``obj`` is of a particular type, but importing that type directly to use
in :func:`isinstance() <python:isinstance>` would cause a circular import
error.
To use this checker in that kind of situation, you can instead pass the
*name* of the type you want to check as a string in ``type_``. The checker
will evaluate it and see whether ``obj`` is of a type or inherits from a
type whose name matches the string you passed.
:param obj: The object whose type should be checked.
:type obj: :class:`object <python:object>`
:param type_: The type(s) to check against.
:type type_: :class:`type <python:type>` / iterable of :class:`type <python:type>` /
:class:`str <python:str>` with type name / iterable of :class:`str <python:str>`
with type name
:returns: ``True`` if ``obj`` is a type in ``type_``. Otherwise, ``False``.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
"""
if not is_iterable(type_):
type_ = [type_]
return_value = False
for check_for_type in type_:
if isinstance(check_for_type, type):
return_value = isinstance(obj, check_for_type)
if not return_value:
try:
return_value = issubclass(obj, check_for_type)
except TypeError:
pass
elif obj.__class__.__name__ == check_for_type:
return_value = True
else:
return_value = _check_base_classes(obj.__class__.__bases__,
check_for_type)
if not return_value:
try:
return_value = issubclass(obj, check_for_type)
except TypeError:
pass
if return_value is True:
break
return return_value
| (value, minimum=None, maximum=None, **kwargs) |
725,923 | validator_collection.checkers | is_fraction | Indicate whether ``value`` is a :class:`Fraction <python:fractions.Fraction>`.
:param value: The value to evaluate.
:param minimum: If supplied, will make sure that ``value`` is greater than or
equal to this value.
:type minimum: numeric
:param maximum: If supplied, will make sure that ``value`` is less than or
equal to this value.
:type maximum: numeric
:returns: ``True`` if ``value`` is valid, ``False`` if it is not.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
| def is_type(obj,
type_,
**kwargs):
"""Indicate if ``obj`` is a type in ``type_``.
.. hint::
This checker is particularly useful when you want to evaluate whether
``obj`` is of a particular type, but importing that type directly to use
in :func:`isinstance() <python:isinstance>` would cause a circular import
error.
To use this checker in that kind of situation, you can instead pass the
*name* of the type you want to check as a string in ``type_``. The checker
will evaluate it and see whether ``obj`` is of a type or inherits from a
type whose name matches the string you passed.
:param obj: The object whose type should be checked.
:type obj: :class:`object <python:object>`
:param type_: The type(s) to check against.
:type type_: :class:`type <python:type>` / iterable of :class:`type <python:type>` /
:class:`str <python:str>` with type name / iterable of :class:`str <python:str>`
with type name
:returns: ``True`` if ``obj`` is a type in ``type_``. Otherwise, ``False``.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
"""
if not is_iterable(type_):
type_ = [type_]
return_value = False
for check_for_type in type_:
if isinstance(check_for_type, type):
return_value = isinstance(obj, check_for_type)
if not return_value:
try:
return_value = issubclass(obj, check_for_type)
except TypeError:
pass
elif obj.__class__.__name__ == check_for_type:
return_value = True
else:
return_value = _check_base_classes(obj.__class__.__bases__,
check_for_type)
if not return_value:
try:
return_value = issubclass(obj, check_for_type)
except TypeError:
pass
if return_value is True:
break
return return_value
| (value, minimum=None, maximum=None, **kwargs) |
725,924 | validator_collection.checkers | is_integer | Indicate whether ``value`` contains a whole number.
:param value: The value to evaluate.
:param coerce_value: If ``True``, will return ``True`` if ``value`` can be coerced
to whole number. If ``False``, will only return ``True`` if ``value`` is already
a whole number (regardless of type). Defaults to ``False``.
:type coerce_value: :class:`bool <python:bool>`
:param minimum: If supplied, will make sure that ``value`` is greater than or
equal to this value.
:type minimum: numeric
:param maximum: If supplied, will make sure that ``value`` is less than or
equal to this value.
:type maximum: numeric
:param base: Indicates the base that is used to determine the integer value.
The allowed values are 0 and 2–36. Base-2, -8, and -16 literals can be
optionally prefixed with ``0b/0B``, ``0o/0O/0``, or ``0x/0X``, as with
integer literals in code. Base 0 means to interpret the string exactly as
an integer literal, so that the actual base is 2, 8, 10, or 16. Defaults to
``10``.
:type base: :class:`int <python:int>`
:returns: ``True`` if ``value`` is valid, ``False`` if it is not.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
| def is_type(obj,
type_,
**kwargs):
"""Indicate if ``obj`` is a type in ``type_``.
.. hint::
This checker is particularly useful when you want to evaluate whether
``obj`` is of a particular type, but importing that type directly to use
in :func:`isinstance() <python:isinstance>` would cause a circular import
error.
To use this checker in that kind of situation, you can instead pass the
*name* of the type you want to check as a string in ``type_``. The checker
will evaluate it and see whether ``obj`` is of a type or inherits from a
type whose name matches the string you passed.
:param obj: The object whose type should be checked.
:type obj: :class:`object <python:object>`
:param type_: The type(s) to check against.
:type type_: :class:`type <python:type>` / iterable of :class:`type <python:type>` /
:class:`str <python:str>` with type name / iterable of :class:`str <python:str>`
with type name
:returns: ``True`` if ``obj`` is a type in ``type_``. Otherwise, ``False``.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
"""
if not is_iterable(type_):
type_ = [type_]
return_value = False
for check_for_type in type_:
if isinstance(check_for_type, type):
return_value = isinstance(obj, check_for_type)
if not return_value:
try:
return_value = issubclass(obj, check_for_type)
except TypeError:
pass
elif obj.__class__.__name__ == check_for_type:
return_value = True
else:
return_value = _check_base_classes(obj.__class__.__bases__,
check_for_type)
if not return_value:
try:
return_value = issubclass(obj, check_for_type)
except TypeError:
pass
if return_value is True:
break
return return_value
| (value, coerce_value=False, minimum=None, maximum=None, base=10, **kwargs) |
725,925 | validator_collection.checkers | is_ip_address | Indicate whether ``value`` is a valid IP address (version 4 or version 6).
:param value: The value to evaluate.
:returns: ``True`` if ``value`` is valid, ``False`` if it is not.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
| def is_type(obj,
type_,
**kwargs):
"""Indicate if ``obj`` is a type in ``type_``.
.. hint::
This checker is particularly useful when you want to evaluate whether
``obj`` is of a particular type, but importing that type directly to use
in :func:`isinstance() <python:isinstance>` would cause a circular import
error.
To use this checker in that kind of situation, you can instead pass the
*name* of the type you want to check as a string in ``type_``. The checker
will evaluate it and see whether ``obj`` is of a type or inherits from a
type whose name matches the string you passed.
:param obj: The object whose type should be checked.
:type obj: :class:`object <python:object>`
:param type_: The type(s) to check against.
:type type_: :class:`type <python:type>` / iterable of :class:`type <python:type>` /
:class:`str <python:str>` with type name / iterable of :class:`str <python:str>`
with type name
:returns: ``True`` if ``obj`` is a type in ``type_``. Otherwise, ``False``.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
"""
if not is_iterable(type_):
type_ = [type_]
return_value = False
for check_for_type in type_:
if isinstance(check_for_type, type):
return_value = isinstance(obj, check_for_type)
if not return_value:
try:
return_value = issubclass(obj, check_for_type)
except TypeError:
pass
elif obj.__class__.__name__ == check_for_type:
return_value = True
else:
return_value = _check_base_classes(obj.__class__.__bases__,
check_for_type)
if not return_value:
try:
return_value = issubclass(obj, check_for_type)
except TypeError:
pass
if return_value is True:
break
return return_value
| (value, **kwargs) |
725,926 | validator_collection.checkers | is_ipv4 | Indicate whether ``value`` is a valid IP version 4 address.
:param value: The value to evaluate.
:returns: ``True`` if ``value`` is valid, ``False`` if it is not.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
| def is_type(obj,
type_,
**kwargs):
"""Indicate if ``obj`` is a type in ``type_``.
.. hint::
This checker is particularly useful when you want to evaluate whether
``obj`` is of a particular type, but importing that type directly to use
in :func:`isinstance() <python:isinstance>` would cause a circular import
error.
To use this checker in that kind of situation, you can instead pass the
*name* of the type you want to check as a string in ``type_``. The checker
will evaluate it and see whether ``obj`` is of a type or inherits from a
type whose name matches the string you passed.
:param obj: The object whose type should be checked.
:type obj: :class:`object <python:object>`
:param type_: The type(s) to check against.
:type type_: :class:`type <python:type>` / iterable of :class:`type <python:type>` /
:class:`str <python:str>` with type name / iterable of :class:`str <python:str>`
with type name
:returns: ``True`` if ``obj`` is a type in ``type_``. Otherwise, ``False``.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
"""
if not is_iterable(type_):
type_ = [type_]
return_value = False
for check_for_type in type_:
if isinstance(check_for_type, type):
return_value = isinstance(obj, check_for_type)
if not return_value:
try:
return_value = issubclass(obj, check_for_type)
except TypeError:
pass
elif obj.__class__.__name__ == check_for_type:
return_value = True
else:
return_value = _check_base_classes(obj.__class__.__bases__,
check_for_type)
if not return_value:
try:
return_value = issubclass(obj, check_for_type)
except TypeError:
pass
if return_value is True:
break
return return_value
| (value, **kwargs) |
725,927 | validator_collection.checkers | is_ipv6 | Indicate whether ``value`` is a valid IP version 6 address.
:param value: The value to evaluate.
:returns: ``True`` if ``value`` is valid, ``False`` if it is not.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
| def is_type(obj,
type_,
**kwargs):
"""Indicate if ``obj`` is a type in ``type_``.
.. hint::
This checker is particularly useful when you want to evaluate whether
``obj`` is of a particular type, but importing that type directly to use
in :func:`isinstance() <python:isinstance>` would cause a circular import
error.
To use this checker in that kind of situation, you can instead pass the
*name* of the type you want to check as a string in ``type_``. The checker
will evaluate it and see whether ``obj`` is of a type or inherits from a
type whose name matches the string you passed.
:param obj: The object whose type should be checked.
:type obj: :class:`object <python:object>`
:param type_: The type(s) to check against.
:type type_: :class:`type <python:type>` / iterable of :class:`type <python:type>` /
:class:`str <python:str>` with type name / iterable of :class:`str <python:str>`
with type name
:returns: ``True`` if ``obj`` is a type in ``type_``. Otherwise, ``False``.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
"""
if not is_iterable(type_):
type_ = [type_]
return_value = False
for check_for_type in type_:
if isinstance(check_for_type, type):
return_value = isinstance(obj, check_for_type)
if not return_value:
try:
return_value = issubclass(obj, check_for_type)
except TypeError:
pass
elif obj.__class__.__name__ == check_for_type:
return_value = True
else:
return_value = _check_base_classes(obj.__class__.__bases__,
check_for_type)
if not return_value:
try:
return_value = issubclass(obj, check_for_type)
except TypeError:
pass
if return_value is True:
break
return return_value
| (value, **kwargs) |
725,928 | validator_collection.checkers | is_iterable | Indicate whether ``obj`` is iterable.
:param forbid_literals: A collection of literals that will be considered invalid
even if they are (actually) iterable. Defaults to a :class:`tuple <python:tuple>`
containing :class:`str <python:str>` and :class:`bytes <python:bytes>`.
:type forbid_literals: iterable
:param minimum_length: If supplied, indicates the minimum number of members
needed to be valid.
:type minimum_length: :class:`int <python:int>`
:param maximum_length: If supplied, indicates the minimum number of members
needed to be valid.
:type maximum_length: :class:`int <python:int>`
:returns: ``True`` if ``obj`` is a valid iterable, ``False`` if not.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
| def is_type(obj,
type_,
**kwargs):
"""Indicate if ``obj`` is a type in ``type_``.
.. hint::
This checker is particularly useful when you want to evaluate whether
``obj`` is of a particular type, but importing that type directly to use
in :func:`isinstance() <python:isinstance>` would cause a circular import
error.
To use this checker in that kind of situation, you can instead pass the
*name* of the type you want to check as a string in ``type_``. The checker
will evaluate it and see whether ``obj`` is of a type or inherits from a
type whose name matches the string you passed.
:param obj: The object whose type should be checked.
:type obj: :class:`object <python:object>`
:param type_: The type(s) to check against.
:type type_: :class:`type <python:type>` / iterable of :class:`type <python:type>` /
:class:`str <python:str>` with type name / iterable of :class:`str <python:str>`
with type name
:returns: ``True`` if ``obj`` is a type in ``type_``. Otherwise, ``False``.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
"""
if not is_iterable(type_):
type_ = [type_]
return_value = False
for check_for_type in type_:
if isinstance(check_for_type, type):
return_value = isinstance(obj, check_for_type)
if not return_value:
try:
return_value = issubclass(obj, check_for_type)
except TypeError:
pass
elif obj.__class__.__name__ == check_for_type:
return_value = True
else:
return_value = _check_base_classes(obj.__class__.__bases__,
check_for_type)
if not return_value:
try:
return_value = issubclass(obj, check_for_type)
except TypeError:
pass
if return_value is True:
break
return return_value
| (obj, forbid_literals=(<class 'str'>, <class 'bytes'>), minimum_length=None, maximum_length=None, **kwargs) |
725,929 | validator_collection.checkers | is_mac_address | Indicate whether ``value`` is a valid MAC address.
:param value: The value to evaluate.
:returns: ``True`` if ``value`` is valid, ``False`` if it is not.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
| def is_type(obj,
type_,
**kwargs):
"""Indicate if ``obj`` is a type in ``type_``.
.. hint::
This checker is particularly useful when you want to evaluate whether
``obj`` is of a particular type, but importing that type directly to use
in :func:`isinstance() <python:isinstance>` would cause a circular import
error.
To use this checker in that kind of situation, you can instead pass the
*name* of the type you want to check as a string in ``type_``. The checker
will evaluate it and see whether ``obj`` is of a type or inherits from a
type whose name matches the string you passed.
:param obj: The object whose type should be checked.
:type obj: :class:`object <python:object>`
:param type_: The type(s) to check against.
:type type_: :class:`type <python:type>` / iterable of :class:`type <python:type>` /
:class:`str <python:str>` with type name / iterable of :class:`str <python:str>`
with type name
:returns: ``True`` if ``obj`` is a type in ``type_``. Otherwise, ``False``.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
"""
if not is_iterable(type_):
type_ = [type_]
return_value = False
for check_for_type in type_:
if isinstance(check_for_type, type):
return_value = isinstance(obj, check_for_type)
if not return_value:
try:
return_value = issubclass(obj, check_for_type)
except TypeError:
pass
elif obj.__class__.__name__ == check_for_type:
return_value = True
else:
return_value = _check_base_classes(obj.__class__.__bases__,
check_for_type)
if not return_value:
try:
return_value = issubclass(obj, check_for_type)
except TypeError:
pass
if return_value is True:
break
return return_value
| (value, **kwargs) |
725,930 | validator_collection.checkers | is_none | Indicate whether ``value`` is :obj:`None <python:None>`.
:param value: The value to evaluate.
:param allow_empty: If ``True``, accepts falsey values as equivalent to
:obj:`None <python:None>`. Defaults to ``False``.
:type allow_empty: :class:`bool <python:bool>`
:returns: ``True`` if ``value`` is :obj:`None <python:None>`, ``False``
if it is not.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
| def is_type(obj,
type_,
**kwargs):
"""Indicate if ``obj`` is a type in ``type_``.
.. hint::
This checker is particularly useful when you want to evaluate whether
``obj`` is of a particular type, but importing that type directly to use
in :func:`isinstance() <python:isinstance>` would cause a circular import
error.
To use this checker in that kind of situation, you can instead pass the
*name* of the type you want to check as a string in ``type_``. The checker
will evaluate it and see whether ``obj`` is of a type or inherits from a
type whose name matches the string you passed.
:param obj: The object whose type should be checked.
:type obj: :class:`object <python:object>`
:param type_: The type(s) to check against.
:type type_: :class:`type <python:type>` / iterable of :class:`type <python:type>` /
:class:`str <python:str>` with type name / iterable of :class:`str <python:str>`
with type name
:returns: ``True`` if ``obj`` is a type in ``type_``. Otherwise, ``False``.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
"""
if not is_iterable(type_):
type_ = [type_]
return_value = False
for check_for_type in type_:
if isinstance(check_for_type, type):
return_value = isinstance(obj, check_for_type)
if not return_value:
try:
return_value = issubclass(obj, check_for_type)
except TypeError:
pass
elif obj.__class__.__name__ == check_for_type:
return_value = True
else:
return_value = _check_base_classes(obj.__class__.__bases__,
check_for_type)
if not return_value:
try:
return_value = issubclass(obj, check_for_type)
except TypeError:
pass
if return_value is True:
break
return return_value
| (value, allow_empty=False, **kwargs) |
725,931 | validator_collection.checkers | is_not_empty | Indicate whether ``value`` is empty.
:param value: The value to evaluate.
:returns: ``True`` if ``value`` is empty, ``False`` if it is not.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
| def is_type(obj,
type_,
**kwargs):
"""Indicate if ``obj`` is a type in ``type_``.
.. hint::
This checker is particularly useful when you want to evaluate whether
``obj`` is of a particular type, but importing that type directly to use
in :func:`isinstance() <python:isinstance>` would cause a circular import
error.
To use this checker in that kind of situation, you can instead pass the
*name* of the type you want to check as a string in ``type_``. The checker
will evaluate it and see whether ``obj`` is of a type or inherits from a
type whose name matches the string you passed.
:param obj: The object whose type should be checked.
:type obj: :class:`object <python:object>`
:param type_: The type(s) to check against.
:type type_: :class:`type <python:type>` / iterable of :class:`type <python:type>` /
:class:`str <python:str>` with type name / iterable of :class:`str <python:str>`
with type name
:returns: ``True`` if ``obj`` is a type in ``type_``. Otherwise, ``False``.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
"""
if not is_iterable(type_):
type_ = [type_]
return_value = False
for check_for_type in type_:
if isinstance(check_for_type, type):
return_value = isinstance(obj, check_for_type)
if not return_value:
try:
return_value = issubclass(obj, check_for_type)
except TypeError:
pass
elif obj.__class__.__name__ == check_for_type:
return_value = True
else:
return_value = _check_base_classes(obj.__class__.__bases__,
check_for_type)
if not return_value:
try:
return_value = issubclass(obj, check_for_type)
except TypeError:
pass
if return_value is True:
break
return return_value
| (value, **kwargs) |
725,932 | validator_collection.checkers | is_numeric | Indicate whether ``value`` is a numeric value.
:param value: The value to evaluate.
:param minimum: If supplied, will make sure that ``value`` is greater than or
equal to this value.
:type minimum: numeric
:param maximum: If supplied, will make sure that ``value`` is less than or
equal to this value.
:type maximum: numeric
:returns: ``True`` if ``value`` is valid, ``False`` if it is not.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
| def is_type(obj,
type_,
**kwargs):
"""Indicate if ``obj`` is a type in ``type_``.
.. hint::
This checker is particularly useful when you want to evaluate whether
``obj`` is of a particular type, but importing that type directly to use
in :func:`isinstance() <python:isinstance>` would cause a circular import
error.
To use this checker in that kind of situation, you can instead pass the
*name* of the type you want to check as a string in ``type_``. The checker
will evaluate it and see whether ``obj`` is of a type or inherits from a
type whose name matches the string you passed.
:param obj: The object whose type should be checked.
:type obj: :class:`object <python:object>`
:param type_: The type(s) to check against.
:type type_: :class:`type <python:type>` / iterable of :class:`type <python:type>` /
:class:`str <python:str>` with type name / iterable of :class:`str <python:str>`
with type name
:returns: ``True`` if ``obj`` is a type in ``type_``. Otherwise, ``False``.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
"""
if not is_iterable(type_):
type_ = [type_]
return_value = False
for check_for_type in type_:
if isinstance(check_for_type, type):
return_value = isinstance(obj, check_for_type)
if not return_value:
try:
return_value = issubclass(obj, check_for_type)
except TypeError:
pass
elif obj.__class__.__name__ == check_for_type:
return_value = True
else:
return_value = _check_base_classes(obj.__class__.__bases__,
check_for_type)
if not return_value:
try:
return_value = issubclass(obj, check_for_type)
except TypeError:
pass
if return_value is True:
break
return return_value
| (value, minimum=None, maximum=None, **kwargs) |
725,933 | validator_collection.checkers | is_on_filesystem | Indicate whether ``value`` is a file or directory that exists on the local
filesystem.
:param value: The value to evaluate.
:returns: ``True`` if ``value`` is valid, ``False`` if it is not.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
| def is_type(obj,
type_,
**kwargs):
"""Indicate if ``obj`` is a type in ``type_``.
.. hint::
This checker is particularly useful when you want to evaluate whether
``obj`` is of a particular type, but importing that type directly to use
in :func:`isinstance() <python:isinstance>` would cause a circular import
error.
To use this checker in that kind of situation, you can instead pass the
*name* of the type you want to check as a string in ``type_``. The checker
will evaluate it and see whether ``obj`` is of a type or inherits from a
type whose name matches the string you passed.
:param obj: The object whose type should be checked.
:type obj: :class:`object <python:object>`
:param type_: The type(s) to check against.
:type type_: :class:`type <python:type>` / iterable of :class:`type <python:type>` /
:class:`str <python:str>` with type name / iterable of :class:`str <python:str>`
with type name
:returns: ``True`` if ``obj`` is a type in ``type_``. Otherwise, ``False``.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
"""
if not is_iterable(type_):
type_ = [type_]
return_value = False
for check_for_type in type_:
if isinstance(check_for_type, type):
return_value = isinstance(obj, check_for_type)
if not return_value:
try:
return_value = issubclass(obj, check_for_type)
except TypeError:
pass
elif obj.__class__.__name__ == check_for_type:
return_value = True
else:
return_value = _check_base_classes(obj.__class__.__bases__,
check_for_type)
if not return_value:
try:
return_value = issubclass(obj, check_for_type)
except TypeError:
pass
if return_value is True:
break
return return_value
| (value, **kwargs) |
725,934 | validator_collection.checkers | is_pathlike | Indicate whether ``value`` is a path-like object.
:param value: The value to evaluate.
:returns: ``True`` if ``value`` is valid, ``False`` if it is not.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
| def is_type(obj,
type_,
**kwargs):
"""Indicate if ``obj`` is a type in ``type_``.
.. hint::
This checker is particularly useful when you want to evaluate whether
``obj`` is of a particular type, but importing that type directly to use
in :func:`isinstance() <python:isinstance>` would cause a circular import
error.
To use this checker in that kind of situation, you can instead pass the
*name* of the type you want to check as a string in ``type_``. The checker
will evaluate it and see whether ``obj`` is of a type or inherits from a
type whose name matches the string you passed.
:param obj: The object whose type should be checked.
:type obj: :class:`object <python:object>`
:param type_: The type(s) to check against.
:type type_: :class:`type <python:type>` / iterable of :class:`type <python:type>` /
:class:`str <python:str>` with type name / iterable of :class:`str <python:str>`
with type name
:returns: ``True`` if ``obj`` is a type in ``type_``. Otherwise, ``False``.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
"""
if not is_iterable(type_):
type_ = [type_]
return_value = False
for check_for_type in type_:
if isinstance(check_for_type, type):
return_value = isinstance(obj, check_for_type)
if not return_value:
try:
return_value = issubclass(obj, check_for_type)
except TypeError:
pass
elif obj.__class__.__name__ == check_for_type:
return_value = True
else:
return_value = _check_base_classes(obj.__class__.__bases__,
check_for_type)
if not return_value:
try:
return_value = issubclass(obj, check_for_type)
except TypeError:
pass
if return_value is True:
break
return return_value
| (value, **kwargs) |
725,935 | validator_collection.checkers | is_string | Indicate whether ``value`` is a string.
:param value: The value to evaluate.
:param coerce_value: If ``True``, will check whether ``value`` can be coerced
to a string if it is not already. Defaults to ``False``.
:type coerce_value: :class:`bool <python:bool>`
:param minimum_length: If supplied, indicates the minimum number of characters
needed to be valid.
:type minimum_length: :class:`int <python:int>`
:param maximum_length: If supplied, indicates the minimum number of characters
needed to be valid.
:type maximum_length: :class:`int <python:int>`
:param whitespace_padding: If ``True`` and the value is below the
``minimum_length``, pad the value with spaces. Defaults to ``False``.
:type whitespace_padding: :class:`bool <python:bool>`
:returns: ``True`` if ``value`` is valid, ``False`` if it is not.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
| def is_type(obj,
type_,
**kwargs):
"""Indicate if ``obj`` is a type in ``type_``.
.. hint::
This checker is particularly useful when you want to evaluate whether
``obj`` is of a particular type, but importing that type directly to use
in :func:`isinstance() <python:isinstance>` would cause a circular import
error.
To use this checker in that kind of situation, you can instead pass the
*name* of the type you want to check as a string in ``type_``. The checker
will evaluate it and see whether ``obj`` is of a type or inherits from a
type whose name matches the string you passed.
:param obj: The object whose type should be checked.
:type obj: :class:`object <python:object>`
:param type_: The type(s) to check against.
:type type_: :class:`type <python:type>` / iterable of :class:`type <python:type>` /
:class:`str <python:str>` with type name / iterable of :class:`str <python:str>`
with type name
:returns: ``True`` if ``obj`` is a type in ``type_``. Otherwise, ``False``.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
"""
if not is_iterable(type_):
type_ = [type_]
return_value = False
for check_for_type in type_:
if isinstance(check_for_type, type):
return_value = isinstance(obj, check_for_type)
if not return_value:
try:
return_value = issubclass(obj, check_for_type)
except TypeError:
pass
elif obj.__class__.__name__ == check_for_type:
return_value = True
else:
return_value = _check_base_classes(obj.__class__.__bases__,
check_for_type)
if not return_value:
try:
return_value = issubclass(obj, check_for_type)
except TypeError:
pass
if return_value is True:
break
return return_value
| (value, coerce_value=False, minimum_length=None, maximum_length=None, whitespace_padding=False, **kwargs) |
725,936 | validator_collection.checkers | is_stringIO | Indicate whether ``value`` is a :class:`StringIO <python:io.StringIO>` object.
.. note::
This checker will return ``True`` even if ``value`` is empty, so long as
its type is a :class:`String <python:io.StringIO>`.
:param value: The value to evaluate.
:returns: ``True`` if ``value`` is valid, ``False`` if it is not.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
| def is_type(obj,
type_,
**kwargs):
"""Indicate if ``obj`` is a type in ``type_``.
.. hint::
This checker is particularly useful when you want to evaluate whether
``obj`` is of a particular type, but importing that type directly to use
in :func:`isinstance() <python:isinstance>` would cause a circular import
error.
To use this checker in that kind of situation, you can instead pass the
*name* of the type you want to check as a string in ``type_``. The checker
will evaluate it and see whether ``obj`` is of a type or inherits from a
type whose name matches the string you passed.
:param obj: The object whose type should be checked.
:type obj: :class:`object <python:object>`
:param type_: The type(s) to check against.
:type type_: :class:`type <python:type>` / iterable of :class:`type <python:type>` /
:class:`str <python:str>` with type name / iterable of :class:`str <python:str>`
with type name
:returns: ``True`` if ``obj`` is a type in ``type_``. Otherwise, ``False``.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
"""
if not is_iterable(type_):
type_ = [type_]
return_value = False
for check_for_type in type_:
if isinstance(check_for_type, type):
return_value = isinstance(obj, check_for_type)
if not return_value:
try:
return_value = issubclass(obj, check_for_type)
except TypeError:
pass
elif obj.__class__.__name__ == check_for_type:
return_value = True
else:
return_value = _check_base_classes(obj.__class__.__bases__,
check_for_type)
if not return_value:
try:
return_value = issubclass(obj, check_for_type)
except TypeError:
pass
if return_value is True:
break
return return_value
| (value, **kwargs) |
725,937 | validator_collection.checkers | is_time | Indicate whether ``value`` is a :class:`time <python:datetime.time>`.
:param value: The value to evaluate.
:param minimum: If supplied, will make sure that ``value`` is on or after this value.
:type minimum: :func:`datetime <validator_collection.validators.datetime>` or
:func:`time <validator_collection.validators.time>`-compliant
:class:`str <python:str>` / :class:`datetime <python:datetime.datetime>` /
:class:`time <python:datetime.time> / numeric / :obj:`None <python:None>`
:param maximum: If supplied, will make sure that ``value`` is on or before this
value.
:type maximum: :func:`datetime <validator_collection.validators.datetime>` or
:func:`time <validator_collection.validators.time>`-compliant
:class:`str <python:str>` / :class:`datetime <python:datetime.datetime>` /
:class:`time <python:datetime.time> / numeric / :obj:`None <python:None>`
:param coerce_value: If ``True``, will return ``True`` if ``value`` can be
coerced to a :class:`time <python:datetime.time>`. If ``False``,
will only return ``True`` if ``value`` is a valid time. Defaults to
``False``.
:type coerce_value: :class:`bool <python:bool>`
:returns: ``True`` if ``value`` is valid, ``False`` if it is not.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
| def is_type(obj,
type_,
**kwargs):
"""Indicate if ``obj`` is a type in ``type_``.
.. hint::
This checker is particularly useful when you want to evaluate whether
``obj`` is of a particular type, but importing that type directly to use
in :func:`isinstance() <python:isinstance>` would cause a circular import
error.
To use this checker in that kind of situation, you can instead pass the
*name* of the type you want to check as a string in ``type_``. The checker
will evaluate it and see whether ``obj`` is of a type or inherits from a
type whose name matches the string you passed.
:param obj: The object whose type should be checked.
:type obj: :class:`object <python:object>`
:param type_: The type(s) to check against.
:type type_: :class:`type <python:type>` / iterable of :class:`type <python:type>` /
:class:`str <python:str>` with type name / iterable of :class:`str <python:str>`
with type name
:returns: ``True`` if ``obj`` is a type in ``type_``. Otherwise, ``False``.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
"""
if not is_iterable(type_):
type_ = [type_]
return_value = False
for check_for_type in type_:
if isinstance(check_for_type, type):
return_value = isinstance(obj, check_for_type)
if not return_value:
try:
return_value = issubclass(obj, check_for_type)
except TypeError:
pass
elif obj.__class__.__name__ == check_for_type:
return_value = True
else:
return_value = _check_base_classes(obj.__class__.__bases__,
check_for_type)
if not return_value:
try:
return_value = issubclass(obj, check_for_type)
except TypeError:
pass
if return_value is True:
break
return return_value
| (value, minimum=None, maximum=None, coerce_value=False, **kwargs) |
725,938 | validator_collection.checkers | is_timezone | Indicate whether ``value`` is a :class:`tzinfo <python:datetime.tzinfo>`.
.. caution::
This does **not** validate whether the value is a timezone that actually
exists, nor can it resolve timzone names (e.g. ``'Eastern'`` or ``'CET'``).
For that kind of functionality, we recommend you utilize:
`pytz <https://pypi.python.org/pypi/pytz>`_
:param value: The value to evaluate.
:param positive: Indicates whether the ``value`` is positive or negative
(only has meaning if ``value`` is a string). Defaults to ``True``.
:type positive: :class:`bool <python:bool>`
:returns: ``True`` if ``value`` is valid, ``False`` if it is not.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
| def is_type(obj,
type_,
**kwargs):
"""Indicate if ``obj`` is a type in ``type_``.
.. hint::
This checker is particularly useful when you want to evaluate whether
``obj`` is of a particular type, but importing that type directly to use
in :func:`isinstance() <python:isinstance>` would cause a circular import
error.
To use this checker in that kind of situation, you can instead pass the
*name* of the type you want to check as a string in ``type_``. The checker
will evaluate it and see whether ``obj`` is of a type or inherits from a
type whose name matches the string you passed.
:param obj: The object whose type should be checked.
:type obj: :class:`object <python:object>`
:param type_: The type(s) to check against.
:type type_: :class:`type <python:type>` / iterable of :class:`type <python:type>` /
:class:`str <python:str>` with type name / iterable of :class:`str <python:str>`
with type name
:returns: ``True`` if ``obj`` is a type in ``type_``. Otherwise, ``False``.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
"""
if not is_iterable(type_):
type_ = [type_]
return_value = False
for check_for_type in type_:
if isinstance(check_for_type, type):
return_value = isinstance(obj, check_for_type)
if not return_value:
try:
return_value = issubclass(obj, check_for_type)
except TypeError:
pass
elif obj.__class__.__name__ == check_for_type:
return_value = True
else:
return_value = _check_base_classes(obj.__class__.__bases__,
check_for_type)
if not return_value:
try:
return_value = issubclass(obj, check_for_type)
except TypeError:
pass
if return_value is True:
break
return return_value
| (value, positive=True, **kwargs) |
725,939 | validator_collection.checkers | is_type | Indicate if ``obj`` is a type in ``type_``.
.. hint::
This checker is particularly useful when you want to evaluate whether
``obj`` is of a particular type, but importing that type directly to use
in :func:`isinstance() <python:isinstance>` would cause a circular import
error.
To use this checker in that kind of situation, you can instead pass the
*name* of the type you want to check as a string in ``type_``. The checker
will evaluate it and see whether ``obj`` is of a type or inherits from a
type whose name matches the string you passed.
:param obj: The object whose type should be checked.
:type obj: :class:`object <python:object>`
:param type_: The type(s) to check against.
:type type_: :class:`type <python:type>` / iterable of :class:`type <python:type>` /
:class:`str <python:str>` with type name / iterable of :class:`str <python:str>`
with type name
:returns: ``True`` if ``obj`` is a type in ``type_``. Otherwise, ``False``.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
| def is_type(obj,
type_,
**kwargs):
"""Indicate if ``obj`` is a type in ``type_``.
.. hint::
This checker is particularly useful when you want to evaluate whether
``obj`` is of a particular type, but importing that type directly to use
in :func:`isinstance() <python:isinstance>` would cause a circular import
error.
To use this checker in that kind of situation, you can instead pass the
*name* of the type you want to check as a string in ``type_``. The checker
will evaluate it and see whether ``obj`` is of a type or inherits from a
type whose name matches the string you passed.
:param obj: The object whose type should be checked.
:type obj: :class:`object <python:object>`
:param type_: The type(s) to check against.
:type type_: :class:`type <python:type>` / iterable of :class:`type <python:type>` /
:class:`str <python:str>` with type name / iterable of :class:`str <python:str>`
with type name
:returns: ``True`` if ``obj`` is a type in ``type_``. Otherwise, ``False``.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the underlying validator
"""
if not is_iterable(type_):
type_ = [type_]
return_value = False
for check_for_type in type_:
if isinstance(check_for_type, type):
return_value = isinstance(obj, check_for_type)
if not return_value:
try:
return_value = issubclass(obj, check_for_type)
except TypeError:
pass
elif obj.__class__.__name__ == check_for_type:
return_value = True
else:
return_value = _check_base_classes(obj.__class__.__bases__,
check_for_type)
if not return_value:
try:
return_value = issubclass(obj, check_for_type)
except TypeError:
pass
if return_value is True:
break
return return_value
| (obj, type_, **kwargs) |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.