index (int64, 0 to 731k) | package (string, 2 to 98 chars, nullable) | name (string, 1 to 76 chars) | docstring (string, 0 to 281k chars, nullable) | code (string, 4 to 1.07M chars, nullable) | signature (string, 2 to 42.8k chars, nullable)
---|---|---|---|---|---|
37,483 | flask_debugtoolbar.utils | gzip_compress | null | def gzip_compress(data, compresslevel=6):
buff = io.BytesIO()
with gzip.GzipFile(fileobj=buff, mode='wb', compresslevel=compresslevel) as f:
f.write(data)
return buff.getvalue()
| (data, compresslevel=6) |
37,484 | flask_debugtoolbar.utils | gzip_decompress | null | def gzip_decompress(data):
with gzip.GzipFile(fileobj=io.BytesIO(data), mode='rb') as f:
return f.read()
| (data) |
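The two gzip helpers above are inverses of each other. A minimal round-trip sketch, assuming the `io` and `gzip` imports the snippets rely on (on Python 3.2+ the stdlib `gzip.compress`/`gzip.decompress` behave equivalently):

```python
import gzip
import io


def gzip_compress(data, compresslevel=6):
    buff = io.BytesIO()
    with gzip.GzipFile(fileobj=buff, mode='wb', compresslevel=compresslevel) as f:
        f.write(data)
    return buff.getvalue()


def gzip_decompress(data):
    with gzip.GzipFile(fileobj=io.BytesIO(data), mode='rb') as f:
        return f.read()


# Round trip: decompressing the compressed payload returns the original bytes.
payload = b"hello " * 1000
assert gzip_decompress(gzip_compress(payload)) == payload
```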
37,485 | flask_debugtoolbar.compat | <lambda> | null | iteritems = lambda d: iter(d.items())
| (d) |
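This `iteritems` lambda is a Python 2/3 compatibility shim: on Python 3 it mimics the old `dict.iteritems()` by returning a lazy iterator over `d.items()`. A quick sketch of the behaviour:

```python
iteritems = lambda d: iter(d.items())

d = {'a': 1, 'b': 2}
it = iteritems(d)            # an iterator, not a materialised list
assert next(it) == ('a', 1)  # dicts preserve insertion order on Python 3.7+
assert dict(iteritems(d)) == d
```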
37,487 | flask_debugtoolbar | replace_insensitive | Similar to string.replace() but is case insensitive
Code borrowed from:
http://forums.devshed.com/python-programming-11/case-insensitive-string-replace-490921.html
| def replace_insensitive(string, target, replacement):
"""Similar to string.replace() but is case insensitive
Code borrowed from:
http://forums.devshed.com/python-programming-11/case-insensitive-string-replace-490921.html
"""
no_case = string.lower()
index = no_case.rfind(target.lower())
if index >= 0:
return string[:index] + replacement + string[index + len(target):]
else: # no results so return the original string
return string
| (string, target, replacement) |
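Because the lookup uses `rfind`, only the *last* case-insensitive occurrence of *target* is replaced, which suits injecting markup just before a closing tag. A small usage sketch, assuming the function is defined exactly as above:

```python
def replace_insensitive(string, target, replacement):
    """Case-insensitive replacement of the last occurrence of *target*."""
    no_case = string.lower()
    index = no_case.rfind(target.lower())
    if index >= 0:
        return string[:index] + replacement + string[index + len(target):]
    return string


html = '<BODY>first</BODY><body>second</body>'
# Only the final match is replaced, regardless of case.
assert replace_insensitive(html, '</body>', '<!-- toolbar --></body>') == \
    '<BODY>first</BODY><body>second<!-- toolbar --></body>'
```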
37,488 | flask.helpers | send_from_directory | Send a file from within a directory using :func:`send_file`.
.. code-block:: python
@app.route("/uploads/<path:name>")
def download_file(name):
return send_from_directory(
app.config['UPLOAD_FOLDER'], name, as_attachment=True
)
This is a secure way to serve files from a folder, such as static
files or uploads. Uses :func:`~werkzeug.security.safe_join` to
ensure the path coming from the client is not maliciously crafted to
point outside the specified directory.
If the final path does not point to an existing regular file,
raises a 404 :exc:`~werkzeug.exceptions.NotFound` error.
:param directory: The directory that ``path`` must be located under,
relative to the current application's root path.
:param path: The path to the file to send, relative to
``directory``.
:param kwargs: Arguments to pass to :func:`send_file`.
.. versionchanged:: 2.0
``path`` replaces the ``filename`` parameter.
.. versionadded:: 2.0
Moved the implementation to Werkzeug. This is now a wrapper to
pass some Flask-specific arguments.
.. versionadded:: 0.5
| def send_from_directory(
directory: os.PathLike[str] | str,
path: os.PathLike[str] | str,
**kwargs: t.Any,
) -> Response:
"""Send a file from within a directory using :func:`send_file`.
.. code-block:: python
@app.route("/uploads/<path:name>")
def download_file(name):
return send_from_directory(
app.config['UPLOAD_FOLDER'], name, as_attachment=True
)
This is a secure way to serve files from a folder, such as static
files or uploads. Uses :func:`~werkzeug.security.safe_join` to
ensure the path coming from the client is not maliciously crafted to
point outside the specified directory.
If the final path does not point to an existing regular file,
raises a 404 :exc:`~werkzeug.exceptions.NotFound` error.
:param directory: The directory that ``path`` must be located under,
relative to the current application's root path.
:param path: The path to the file to send, relative to
``directory``.
:param kwargs: Arguments to pass to :func:`send_file`.
.. versionchanged:: 2.0
``path`` replaces the ``filename`` parameter.
.. versionadded:: 2.0
Moved the implementation to Werkzeug. This is now a wrapper to
pass some Flask-specific arguments.
.. versionadded:: 0.5
"""
return werkzeug.utils.send_from_directory( # type: ignore[return-value]
directory, path, **_prepare_send_file_kwargs(**kwargs)
)
| (directory: 'os.PathLike[str] | str', path: 'os.PathLike[str] | str', **kwargs: 't.Any') -> 'Response' |
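A self-contained sketch of the pattern shown in the docstring above; the upload folder name and route are illustrative only:

```python
from flask import Flask, send_from_directory

app = Flask(__name__)
app.config['UPLOAD_FOLDER'] = 'uploads'   # hypothetical folder, relative to app.root_path


@app.route('/uploads/<path:name>')
def download_file(name):
    # safe_join() rejects traversal attempts such as '../secret.txt'; a missing
    # or maliciously crafted path results in a 404 NotFound response.
    return send_from_directory(app.config['UPLOAD_FOLDER'], name, as_attachment=True)
```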
37,563 | requests.exceptions | RequestException | There was an ambiguous exception that occurred while handling your
request.
| class RequestException(IOError):
"""There was an ambiguous exception that occurred while handling your
request.
"""
def __init__(self, *args, **kwargs):
"""Initialize RequestException with `request` and `response` objects."""
response = kwargs.pop("response", None)
self.response = response
self.request = kwargs.pop("request", None)
if response is not None and not self.request and hasattr(response, "request"):
self.request = self.response.request
super().__init__(*args, **kwargs)
| (*args, **kwargs) |
37,564 | requests.exceptions | __init__ | Initialize RequestException with `request` and `response` objects. | def __init__(self, *args, **kwargs):
"""Initialize RequestException with `request` and `response` objects."""
response = kwargs.pop("response", None)
self.response = response
self.request = kwargs.pop("request", None)
if response is not None and not self.request and hasattr(response, "request"):
self.request = self.response.request
super().__init__(*args, **kwargs)
| (self, *args, **kwargs) |
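Because the constructor stores the `request` and `response` keyword arguments on the exception, handlers can inspect what was sent and received. A hedged usage sketch (the URL is a placeholder):

```python
import requests
from requests.exceptions import RequestException

try:
    resp = requests.get('https://example.invalid/api', timeout=5)
    resp.raise_for_status()
except RequestException as exc:
    # Either attribute may be None, e.g. for connection errors with no response.
    if exc.request is not None:
        print('failed request:', exc.request.method, exc.request.url)
    if exc.response is not None:
        print('status code:', exc.response.status_code)
```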
37,565 | sphinx.application | Sphinx | The main application class and extensibility interface.
:ivar srcdir: Directory containing source.
:ivar confdir: Directory containing ``conf.py``.
:ivar doctreedir: Directory for storing pickled doctrees.
:ivar outdir: Directory for storing build documents.
| class Sphinx:
"""The main application class and extensibility interface.
:ivar srcdir: Directory containing source.
:ivar confdir: Directory containing ``conf.py``.
:ivar doctreedir: Directory for storing pickled doctrees.
:ivar outdir: Directory for storing build documents.
"""
warningiserror: bool
_warncount: int
def __init__(self, srcdir: str | os.PathLike[str], confdir: str | os.PathLike[str] | None,
outdir: str | os.PathLike[str], doctreedir: str | os.PathLike[str],
buildername: str, confoverrides: dict | None = None,
status: IO | None = sys.stdout, warning: IO | None = sys.stderr,
freshenv: bool = False, warningiserror: bool = False,
tags: list[str] | None = None,
verbosity: int = 0, parallel: int = 0, keep_going: bool = False,
pdb: bool = False) -> None:
self.phase = BuildPhase.INITIALIZATION
self.verbosity = verbosity
self.extensions: dict[str, Extension] = {}
self.registry = SphinxComponentRegistry()
# validate provided directories
self.srcdir = _StrPath(srcdir).resolve()
self.outdir = _StrPath(outdir).resolve()
self.doctreedir = _StrPath(doctreedir).resolve()
if not path.isdir(self.srcdir):
raise ApplicationError(__('Cannot find source directory (%s)') %
self.srcdir)
if path.exists(self.outdir) and not path.isdir(self.outdir):
raise ApplicationError(__('Output directory (%s) is not a directory') %
self.outdir)
if self.srcdir == self.outdir:
raise ApplicationError(__('Source directory and destination '
'directory cannot be identical'))
self.parallel = parallel
if status is None:
self._status: IO = StringIO()
self.quiet: bool = True
else:
self._status = status
self.quiet = False
if warning is None:
self._warning: IO = StringIO()
else:
self._warning = warning
self._warncount = 0
self.keep_going = warningiserror and keep_going
if self.keep_going:
self.warningiserror = False
else:
self.warningiserror = warningiserror
self.pdb = pdb
logging.setup(self, self._status, self._warning)
self.events = EventManager(self)
# keep last few messages for traceback
# This will be filled by sphinx.util.logging.LastMessagesWriter
self.messagelog: deque = deque(maxlen=10)
# say hello to the world
logger.info(bold(__('Running Sphinx v%s') % sphinx.__display_version__))
# status code for command-line application
self.statuscode = 0
# read config
self.tags = Tags(tags)
if confdir is None:
# set confdir to srcdir if -C given (!= no confdir); a few pieces
# of code expect a confdir to be set
self.confdir = self.srcdir
self.config = Config({}, confoverrides or {})
else:
self.confdir = _StrPath(confdir).resolve()
self.config = Config.read(self.confdir, confoverrides or {}, self.tags)
# set up translation infrastructure
self._init_i18n()
# check the Sphinx version if requested
if self.config.needs_sphinx and self.config.needs_sphinx > sphinx.__display_version__:
raise VersionRequirementError(
__('This project needs at least Sphinx v%s and therefore cannot '
'be built with this version.') % self.config.needs_sphinx)
# load all built-in extension modules, first-party extension modules,
# and first-party themes
for extension in builtin_extensions:
self.setup_extension(extension)
# load all user-given extension modules
for extension in self.config.extensions:
self.setup_extension(extension)
# preload builder module (before init config values)
self.preload_builder(buildername)
if not path.isdir(outdir):
with progress_message(__('making output directory')):
ensuredir(outdir)
# the config file itself can be an extension
if self.config.setup:
prefix = __('while setting up extension %s:') % "conf.py"
with prefixed_warnings(prefix):
if callable(self.config.setup):
self.config.setup(self)
else:
raise ConfigError(
__("'setup' as currently defined in conf.py isn't a Python callable. "
"Please modify its definition to make it a callable function. "
"This is needed for conf.py to behave as a Sphinx extension."),
)
# Report any warnings for overrides.
self.config._report_override_warnings()
self.events.emit('config-inited', self.config)
# create the project
self.project = Project(self.srcdir, self.config.source_suffix)
# set up the build environment
self.env = self._init_env(freshenv)
# create the builder
self.builder = self.create_builder(buildername)
# build environment post-initialisation, after creating the builder
self._post_init_env()
# set up the builder
self._init_builder()
def _init_i18n(self) -> None:
"""Load translated strings from the configured localedirs if enabled in
the configuration.
"""
if self.config.language == 'en':
self.translator, _ = locale.init([], None)
else:
logger.info(bold(__('loading translations [%s]... ') % self.config.language),
nonl=True)
# compile mo files if sphinx.po file in user locale directories are updated
repo = CatalogRepository(self.srcdir, self.config.locale_dirs,
self.config.language, self.config.source_encoding)
for catalog in repo.catalogs:
if catalog.domain == 'sphinx' and catalog.is_outdated():
catalog.write_mo(self.config.language,
self.config.gettext_allow_fuzzy_translations)
locale_dirs: list[str | None] = list(repo.locale_dirs)
locale_dirs += [None]
locale_dirs += [path.join(package_dir, 'locale')]
self.translator, has_translation = locale.init(locale_dirs, self.config.language)
if has_translation:
logger.info(__('done'))
else:
logger.info(__('not available for built-in messages'))
def _init_env(self, freshenv: bool) -> BuildEnvironment:
filename = path.join(self.doctreedir, ENV_PICKLE_FILENAME)
if freshenv or not os.path.exists(filename):
return self._create_fresh_env()
else:
return self._load_existing_env(filename)
def _create_fresh_env(self) -> BuildEnvironment:
env = BuildEnvironment(self)
self._fresh_env_used = True
return env
@progress_message(__('loading pickled environment'))
def _load_existing_env(self, filename: str) -> BuildEnvironment:
try:
with open(filename, 'rb') as f:
env = pickle.load(f)
env.setup(self)
self._fresh_env_used = False
except Exception as err:
logger.info(__('failed: %s'), err)
env = self._create_fresh_env()
return env
def _post_init_env(self) -> None:
if self._fresh_env_used:
self.env.find_files(self.config, self.builder)
del self._fresh_env_used
def preload_builder(self, name: str) -> None:
self.registry.preload_builder(self, name)
def create_builder(self, name: str) -> Builder:
if name is None:
logger.info(__('No builder selected, using default: html'))
name = 'html'
return self.registry.create_builder(self, name, self.env)
def _init_builder(self) -> None:
self.builder.init()
self.events.emit('builder-inited')
# ---- main "build" method -------------------------------------------------
def build(self, force_all: bool = False, filenames: list[str] | None = None) -> None:
self.phase = BuildPhase.READING
try:
if force_all:
self.builder.build_all()
elif filenames:
self.builder.build_specific(filenames)
else:
self.builder.build_update()
self.events.emit('build-finished', None)
except Exception as err:
# delete the saved env to force a fresh build next time
envfile = path.join(self.doctreedir, ENV_PICKLE_FILENAME)
if path.isfile(envfile):
os.unlink(envfile)
self.events.emit('build-finished', err)
raise
if self._warncount and self.keep_going:
self.statuscode = 1
status = (__('succeeded') if self.statuscode == 0
else __('finished with problems'))
if self._warncount:
if self.warningiserror:
if self._warncount == 1:
msg = __('build %s, %s warning (with warnings treated as errors).')
else:
msg = __('build %s, %s warnings (with warnings treated as errors).')
else:
if self._warncount == 1:
msg = __('build %s, %s warning.')
else:
msg = __('build %s, %s warnings.')
logger.info(bold(msg % (status, self._warncount)))
else:
logger.info(bold(__('build %s.') % status))
if self.statuscode == 0 and self.builder.epilog:
logger.info('')
logger.info(self.builder.epilog % {
'outdir': relpath(self.outdir),
'project': self.config.project,
})
self.builder.cleanup()
# ---- general extensibility interface -------------------------------------
def setup_extension(self, extname: str) -> None:
"""Import and setup a Sphinx extension module.
Load the extension given by the module *name*. Use this if your
extension needs the features provided by another extension. No-op if
called twice.
"""
logger.debug('[app] setting up extension: %r', extname)
self.registry.load_extension(self, extname)
@staticmethod
def require_sphinx(version: tuple[int, int] | str) -> None:
"""Check the Sphinx version if requested.
Compare *version* with the version of the running Sphinx, and abort the
build when it is too old.
:param version: The required version in the form of ``major.minor`` or
``(major, minor)``.
.. versionadded:: 1.0
.. versionchanged:: 7.1
Type of *version* now allows ``(major, minor)`` form.
"""
if isinstance(version, tuple):
major, minor = version
else:
major, minor = map(int, version.split('.')[:2])
if (major, minor) > sphinx.version_info[:2]:
req = f'{major}.{minor}'
raise VersionRequirementError(req)
# event interface
def connect(self, event: str, callback: Callable, priority: int = 500) -> int:
"""Register *callback* to be called when *event* is emitted.
For details on available core events and the arguments of callback
functions, please see :ref:`events`.
:param event: The name of target event
:param callback: Callback function for the event
:param priority: The priority of the callback. The callbacks will be invoked
in order of *priority* (ascending).
:return: A listener ID. It can be used for :meth:`disconnect`.
.. versionchanged:: 3.0
Support *priority*
"""
listener_id = self.events.connect(event, callback, priority)
logger.debug('[app] connecting event %r (%d): %r [id=%s]',
event, priority, callback, listener_id)
return listener_id
def disconnect(self, listener_id: int) -> None:
"""Unregister callback by *listener_id*.
:param listener_id: A listener_id that :meth:`connect` returns
"""
logger.debug('[app] disconnecting event: [id=%s]', listener_id)
self.events.disconnect(listener_id)
def emit(self, event: str, *args: Any,
allowed_exceptions: tuple[type[Exception], ...] = ()) -> list:
"""Emit *event* and pass *arguments* to the callback functions.
Return the return values of all callbacks as a list. Do not emit core
Sphinx events in extensions!
:param event: The name of event that will be emitted
:param args: The arguments for the event
:param allowed_exceptions: The list of exceptions that are allowed in the callbacks
.. versionchanged:: 3.1
Added *allowed_exceptions* to specify path-through exceptions
"""
return self.events.emit(event, *args, allowed_exceptions=allowed_exceptions)
def emit_firstresult(self, event: str, *args: Any,
allowed_exceptions: tuple[type[Exception], ...] = ()) -> Any:
"""Emit *event* and pass *arguments* to the callback functions.
Return the result of the first callback that doesn't return ``None``.
:param event: The name of event that will be emitted
:param args: The arguments for the event
:param allowed_exceptions: The list of exceptions that are allowed in the callbacks
.. versionadded:: 0.5
.. versionchanged:: 3.1
Added *allowed_exceptions* to specify path-through exceptions
"""
return self.events.emit_firstresult(event, *args,
allowed_exceptions=allowed_exceptions)
# registering addon parts
def add_builder(self, builder: type[Builder], override: bool = False) -> None:
"""Register a new builder.
:param builder: A builder class
:param override: If true, install the builder forcedly even if another builder
is already installed as the same name
.. versionchanged:: 1.8
Add *override* keyword.
"""
self.registry.add_builder(builder, override=override)
# TODO(stephenfin): Describe 'types' parameter
def add_config_value(self, name: str, default: Any, rebuild: _ConfigRebuild,
types: type | Collection[type] | ENUM = ()) -> None:
"""Register a configuration value.
This is necessary for Sphinx to recognize new values and set default
values accordingly.
:param name: The name of the configuration value. It is recommended to be prefixed
with the extension name (ex. ``html_logo``, ``epub_title``)
:param default: The default value of the configuration.
:param rebuild: The condition of rebuild. It must be one of those values:
* ``'env'`` if a change in the setting only takes effect when a
document is parsed -- this means that the whole environment must be
rebuilt.
* ``'html'`` if a change in the setting needs a full rebuild of HTML
documents.
* ``''`` if a change in the setting will not need any special rebuild.
:param types: The type of configuration value. A list of types can be specified. For
example, ``[str]`` is used to describe a configuration that takes string
value.
.. versionchanged:: 0.4
If the *default* value is a callable, it will be called with the
config object as its argument in order to get the default value.
This can be used to implement config values whose default depends on
other values.
.. versionchanged:: 0.6
Changed *rebuild* from a simple boolean (equivalent to ``''`` or
``'env'``) to a string. However, booleans are still accepted and
converted internally.
"""
logger.debug('[app] adding config value: %r', (name, default, rebuild, types))
self.config.add(name, default, rebuild, types)
def add_event(self, name: str) -> None:
"""Register an event called *name*.
This is needed to be able to emit it.
:param name: The name of the event
"""
logger.debug('[app] adding event: %r', name)
self.events.add(name)
def set_translator(self, name: str, translator_class: type[nodes.NodeVisitor],
override: bool = False) -> None:
"""Register or override a Docutils translator class.
This is used to register a custom output translator or to replace a
builtin translator. This allows extensions to use a custom translator
and define custom nodes for the translator (see :meth:`add_node`).
:param name: The name of the builder for the translator
:param translator_class: A translator class
:param override: If true, install the translator forcedly even if another translator
is already installed as the same name
.. versionadded:: 1.3
.. versionchanged:: 1.8
Add *override* keyword.
"""
self.registry.add_translator(name, translator_class, override=override)
def add_node(self, node: type[Element], override: bool = False,
**kwargs: tuple[Callable, Callable | None]) -> None:
"""Register a Docutils node class.
This is necessary for Docutils internals. It may also be used in the
future to validate nodes in the parsed documents.
:param node: A node class
:param kwargs: Visitor functions for each builder (see below)
:param override: If true, install the node forcedly even if another node is already
installed as the same name
Node visitor functions for the Sphinx HTML, LaTeX, text and manpage
writers can be given as keyword arguments: the keyword should be one or
more of ``'html'``, ``'latex'``, ``'text'``, ``'man'``, ``'texinfo'``
or any other supported translators, the value a 2-tuple of ``(visit,
depart)`` methods. ``depart`` can be ``None`` if the ``visit``
function raises :exc:`docutils.nodes.SkipNode`. Example:
.. code-block:: python
class math(docutils.nodes.Element): pass
def visit_math_html(self, node):
self.body.append(self.starttag(node, 'math'))
def depart_math_html(self, node):
self.body.append('</math>')
app.add_node(math, html=(visit_math_html, depart_math_html))
Obviously, translators for which you don't specify visitor methods will
choke on the node when encountered in a document to translate.
.. versionchanged:: 0.5
Added the support for keyword arguments giving visit functions.
"""
logger.debug('[app] adding node: %r', (node, kwargs))
if not override and docutils.is_node_registered(node):
logger.warning(__('node class %r is already registered, '
'its visitors will be overridden'),
node.__name__, type='app', subtype='add_node')
docutils.register_node(node)
self.registry.add_translation_handlers(node, **kwargs)
def add_enumerable_node(self, node: type[Element], figtype: str,
title_getter: TitleGetter | None = None, override: bool = False,
**kwargs: tuple[Callable, Callable]) -> None:
"""Register a Docutils node class as a numfig target.
Sphinx numbers the node automatically. And then the users can refer it
using :rst:role:`numref`.
:param node: A node class
:param figtype: The type of enumerable nodes. Each figtype has individual numbering
sequences. As system figtypes, ``figure``, ``table`` and
``code-block`` are defined. It is possible to add custom nodes to
these default figtypes. It is also possible to define new custom
figtype if a new figtype is given.
:param title_getter: A getter function to obtain the title of node. It takes an
instance of the enumerable node, and it must return its title as
string. The title is used to the default title of references for
:rst:role:`ref`. By default, Sphinx searches
``docutils.nodes.caption`` or ``docutils.nodes.title`` from the
node as a title.
:param kwargs: Visitor functions for each builder (same as :meth:`add_node`)
:param override: If true, install the node forcedly even if another node is already
installed as the same name
.. versionadded:: 1.4
"""
self.registry.add_enumerable_node(node, figtype, title_getter, override=override)
self.add_node(node, override=override, **kwargs)
def add_directive(self, name: str, cls: type[Directive], override: bool = False) -> None:
"""Register a Docutils directive.
:param name: The name of the directive
:param cls: A directive class
:param override: If false, do not install it if another directive
is already installed as the same name
If true, unconditionally install the directive.
For example, a custom directive named ``my-directive`` would be added
like this:
.. code-block:: python
from docutils.parsers.rst import Directive, directives
class MyDirective(Directive):
has_content = True
required_arguments = 1
optional_arguments = 0
final_argument_whitespace = True
option_spec = {
'class': directives.class_option,
'name': directives.unchanged,
}
def run(self):
...
def setup(app):
app.add_directive('my-directive', MyDirective)
For more details, see `the Docutils docs
<https://docutils.sourceforge.io/docs/howto/rst-directives.html>`__ .
.. versionchanged:: 0.6
Docutils 0.5-style directive classes are now supported.
.. deprecated:: 1.8
Docutils 0.4-style (function based) directives support is deprecated.
.. versionchanged:: 1.8
Add *override* keyword.
"""
logger.debug('[app] adding directive: %r', (name, cls))
if not override and docutils.is_directive_registered(name):
logger.warning(__('directive %r is already registered, it will be overridden'),
name, type='app', subtype='add_directive')
docutils.register_directive(name, cls)
def add_role(self, name: str, role: Any, override: bool = False) -> None:
"""Register a Docutils role.
:param name: The name of role
:param role: A role function
:param override: If false, do not install it if another role
is already installed as the same name
If true, unconditionally install the role.
For more details about role functions, see `the Docutils docs
<https://docutils.sourceforge.io/docs/howto/rst-roles.html>`__ .
.. versionchanged:: 1.8
Add *override* keyword.
"""
logger.debug('[app] adding role: %r', (name, role))
if not override and docutils.is_role_registered(name):
logger.warning(__('role %r is already registered, it will be overridden'),
name, type='app', subtype='add_role')
docutils.register_role(name, role)
def add_generic_role(self, name: str, nodeclass: Any, override: bool = False) -> None:
"""Register a generic Docutils role.
Register a Docutils role that does nothing but wrap its contents in the
node given by *nodeclass*.
:param override: If false, do not install it if another role
is already installed as the same name
If true, unconditionally install the role.
.. versionadded:: 0.6
.. versionchanged:: 1.8
Add *override* keyword.
"""
# Don't use ``roles.register_generic_role`` because it uses
# ``register_canonical_role``.
logger.debug('[app] adding generic role: %r', (name, nodeclass))
if not override and docutils.is_role_registered(name):
logger.warning(__('role %r is already registered, it will be overridden'),
name, type='app', subtype='add_generic_role')
role = roles.GenericRole(name, nodeclass)
docutils.register_role(name, role) # type: ignore[arg-type]
def add_domain(self, domain: type[Domain], override: bool = False) -> None:
"""Register a domain.
:param domain: A domain class
:param override: If false, do not install it if another domain
is already installed as the same name
If true, unconditionally install the domain.
.. versionadded:: 1.0
.. versionchanged:: 1.8
Add *override* keyword.
"""
self.registry.add_domain(domain, override=override)
def add_directive_to_domain(self, domain: str, name: str,
cls: type[Directive], override: bool = False) -> None:
"""Register a Docutils directive in a domain.
Like :meth:`add_directive`, but the directive is added to the domain
named *domain*.
:param domain: The name of target domain
:param name: A name of directive
:param cls: A directive class
:param override: If false, do not install it if another directive
is already installed as the same name
If true, unconditionally install the directive.
.. versionadded:: 1.0
.. versionchanged:: 1.8
Add *override* keyword.
"""
self.registry.add_directive_to_domain(domain, name, cls, override=override)
def add_role_to_domain(self, domain: str, name: str, role: RoleFunction | XRefRole,
override: bool = False) -> None:
"""Register a Docutils role in a domain.
Like :meth:`add_role`, but the role is added to the domain named
*domain*.
:param domain: The name of the target domain
:param name: The name of the role
:param role: The role function
:param override: If false, do not install it if another role
is already installed as the same name
If true, unconditionally install the role.
.. versionadded:: 1.0
.. versionchanged:: 1.8
Add *override* keyword.
"""
self.registry.add_role_to_domain(domain, name, role, override=override)
def add_index_to_domain(self, domain: str, index: type[Index], override: bool = False,
) -> None:
"""Register a custom index for a domain.
Add a custom *index* class to the domain named *domain*.
:param domain: The name of the target domain
:param index: The index class
:param override: If false, do not install it if another index
is already installed as the same name
If true, unconditionally install the index.
.. versionadded:: 1.0
.. versionchanged:: 1.8
Add *override* keyword.
"""
self.registry.add_index_to_domain(domain, index)
def add_object_type(self, directivename: str, rolename: str, indextemplate: str = '',
parse_node: Callable | None = None,
ref_nodeclass: type[TextElement] | None = None,
objname: str = '', doc_field_types: Sequence = (),
override: bool = False,
) -> None:
"""Register a new object type.
This method is a very convenient way to add a new :term:`object` type
that can be cross-referenced. It will do this:
- Create a new directive (called *directivename*) for documenting an
object. It will automatically add index entries if *indextemplate*
is nonempty; if given, it must contain exactly one instance of
``%s``. See the example below for how the template will be
interpreted.
- Create a new role (called *rolename*) to cross-reference to these
object descriptions.
- If you provide *parse_node*, it must be a function that takes a
string and a docutils node, and it must populate the node with
children parsed from the string. It must then return the name of the
item to be used in cross-referencing and index entries. See the
:file:`conf.py` file in the source for this documentation for an
example.
- The *objname* (if not given, will default to *directivename*) names
the type of object. It is used when listing objects, e.g. in search
results.
For example, if you have this call in a custom Sphinx extension::
app.add_object_type('directive', 'dir', 'pair: %s; directive')
you can use this markup in your documents::
.. rst:directive:: function
Document a function.
<...>
See also the :rst:dir:`function` directive.
For the directive, an index entry will be generated as if you had prepended ::
.. index:: pair: function; directive
The reference node will be of class ``literal`` (so it will be rendered
in a proportional font, as appropriate for code) unless you give the
*ref_nodeclass* argument, which must be a docutils node class. Most
useful are ``docutils.nodes.emphasis`` or ``docutils.nodes.strong`` --
you can also use ``docutils.nodes.generated`` if you want no further
text decoration. If the text should be treated as literal (e.g. no
smart quote replacement), but not have typewriter styling, use
``sphinx.addnodes.literal_emphasis`` or
``sphinx.addnodes.literal_strong``.
For the role content, you have the same syntactical possibilities as
for standard Sphinx roles (see :ref:`xref-syntax`).
If *override* is True, the given object_type is forcedly installed even if
an object_type having the same name is already installed.
.. versionchanged:: 1.8
Add *override* keyword.
"""
self.registry.add_object_type(directivename, rolename, indextemplate, parse_node,
ref_nodeclass, objname, doc_field_types,
override=override)
def add_crossref_type(self, directivename: str, rolename: str, indextemplate: str = '',
ref_nodeclass: type[TextElement] | None = None, objname: str = '',
override: bool = False) -> None:
"""Register a new crossref object type.
This method is very similar to :meth:`~Sphinx.add_object_type` except that the
directive it generates must be empty, and will produce no output.
That means that you can add semantic targets to your sources, and refer
to them using custom roles instead of generic ones (like
:rst:role:`ref`). Example call::
app.add_crossref_type('topic', 'topic', 'single: %s',
docutils.nodes.emphasis)
Example usage::
.. topic:: application API
The application API
-------------------
Some random text here.
See also :topic:`this section <application API>`.
(Of course, the element following the ``topic`` directive needn't be a
section.)
:param override: If false, do not install it if another cross-reference type
is already installed as the same name
If true, unconditionally install the cross-reference type.
.. versionchanged:: 1.8
Add *override* keyword.
"""
self.registry.add_crossref_type(directivename, rolename,
indextemplate, ref_nodeclass, objname,
override=override)
def add_transform(self, transform: type[Transform]) -> None:
"""Register a Docutils transform to be applied after parsing.
Add the standard docutils :class:`~docutils.transforms.Transform`
subclass *transform* to the list of transforms that are applied after
Sphinx parses a reST document.
:param transform: A transform class
.. list-table:: priority range categories for Sphinx transforms
:widths: 20,80
* - Priority
- Main purpose in Sphinx
* - 0-99
- Fix invalid nodes by docutils. Translate a doctree.
* - 100-299
- Preparation
* - 300-399
- early
* - 400-699
- main
* - 700-799
- Post processing. Deadline to modify text and referencing.
* - 800-899
- Collect referencing and referenced nodes. Domain processing.
* - 900-999
- Finalize and clean up.
refs: `Transform Priority Range Categories`__
__ https://docutils.sourceforge.io/docs/ref/transforms.html#transform-priority-range-categories
""" # NoQA: E501,RUF100 # Flake8 thinks the URL is too long, Ruff special cases URLs.
self.registry.add_transform(transform)
def add_post_transform(self, transform: type[Transform]) -> None:
"""Register a Docutils transform to be applied before writing.
Add the standard docutils :class:`~docutils.transforms.Transform`
subclass *transform* to the list of transforms that are applied before
Sphinx writes a document.
:param transform: A transform class
"""
self.registry.add_post_transform(transform)
def add_js_file(self, filename: str | None, priority: int = 500,
loading_method: str | None = None, **kwargs: Any) -> None:
"""Register a JavaScript file to include in the HTML output.
:param filename: The name of a JavaScript file that the default HTML
template will include. It must be relative to the HTML
static path, or a full URI with scheme, or ``None`` .
The ``None`` value is used to create an inline
``<script>`` tag. See the description of *kwargs*
below.
:param priority: Files are included in ascending order of priority. If
multiple JavaScript files have the same priority,
those files will be included in order of registration.
See list of "priority range for JavaScript files" below.
:param loading_method: The loading method for the JavaScript file.
Either ``'async'`` or ``'defer'`` are allowed.
:param kwargs: Extra keyword arguments are included as attributes of the
``<script>`` tag. If the special keyword argument
``body`` is given, its value will be added as the content
of the ``<script>`` tag.
Example::
app.add_js_file('example.js')
# => <script src="_static/example.js"></script>
app.add_js_file('example.js', loading_method="async")
# => <script src="_static/example.js" async="async"></script>
app.add_js_file(None, body="var myVariable = 'foo';")
# => <script>var myVariable = 'foo';</script>
.. list-table:: priority range for JavaScript files
:widths: 20,80
* - Priority
- Main purpose in Sphinx
* - 200
- default priority for built-in JavaScript files
* - 500
- default priority for extensions
* - 800
- default priority for :confval:`html_js_files`
A JavaScript file can be added to the specific HTML page when an extension
calls this method on :event:`html-page-context` event.
.. versionadded:: 0.5
.. versionchanged:: 1.8
Renamed from ``app.add_javascript()``.
And it allows keyword arguments as attributes of script tag.
.. versionchanged:: 3.5
Take priority argument. Allow to add a JavaScript file to the specific page.
.. versionchanged:: 4.4
Take loading_method argument. Allow to change the loading method of the
JavaScript file.
"""
if loading_method == 'async':
kwargs['async'] = 'async'
elif loading_method == 'defer':
kwargs['defer'] = 'defer'
self.registry.add_js_file(filename, priority=priority, **kwargs)
with contextlib.suppress(AttributeError):
self.builder.add_js_file( # type: ignore[attr-defined]
filename, priority=priority, **kwargs,
)
def add_css_file(self, filename: str, priority: int = 500, **kwargs: Any) -> None:
"""Register a stylesheet to include in the HTML output.
:param filename: The name of a CSS file that the default HTML
template will include. It must be relative to the HTML
static path, or a full URI with scheme.
:param priority: Files are included in ascending order of priority. If
multiple CSS files have the same priority,
those files will be included in order of registration.
See list of "priority range for CSS files" below.
:param kwargs: Extra keyword arguments are included as attributes of the
``<link>`` tag.
Example::
app.add_css_file('custom.css')
# => <link rel="stylesheet" href="_static/custom.css" type="text/css" />
app.add_css_file('print.css', media='print')
# => <link rel="stylesheet" href="_static/print.css"
# type="text/css" media="print" />
app.add_css_file('fancy.css', rel='alternate stylesheet', title='fancy')
# => <link rel="alternate stylesheet" href="_static/fancy.css"
# type="text/css" title="fancy" />
.. list-table:: priority range for CSS files
:widths: 20,80
* - Priority
- Main purpose in Sphinx
* - 200
- default priority for built-in CSS files
* - 500
- default priority for extensions
* - 800
- default priority for :confval:`html_css_files`
A CSS file can be added to the specific HTML page when an extension calls
this method on :event:`html-page-context` event.
.. versionadded:: 1.0
.. versionchanged:: 1.6
Optional ``alternate`` and/or ``title`` attributes can be supplied
with the arguments *alternate* (a Boolean) and *title* (a string).
The default is no title and *alternate* = ``False``. For
more information, refer to the `documentation
<https://mdn.io/Web/CSS/Alternative_style_sheets>`__.
.. versionchanged:: 1.8
Renamed from ``app.add_stylesheet()``.
And it allows keyword arguments as attributes of link tag.
.. versionchanged:: 3.5
Take priority argument. Allow to add a CSS file to the specific page.
"""
logger.debug('[app] adding stylesheet: %r', filename)
self.registry.add_css_files(filename, priority=priority, **kwargs)
with contextlib.suppress(AttributeError):
self.builder.add_css_file( # type: ignore[attr-defined]
filename, priority=priority, **kwargs,
)
def add_latex_package(self, packagename: str, options: str | None = None,
after_hyperref: bool = False) -> None:
r"""Register a package to include in the LaTeX source code.
Add *packagename* to the list of packages that LaTeX source code will
include. If you provide *options*, it will be taken to the `\usepackage`
declaration. If you set *after_hyperref* truthy, the package will be
loaded after ``hyperref`` package.
.. code-block:: python
app.add_latex_package('mypackage')
# => \usepackage{mypackage}
app.add_latex_package('mypackage', 'foo,bar')
# => \usepackage[foo,bar]{mypackage}
.. versionadded:: 1.3
.. versionadded:: 3.1
*after_hyperref* option.
"""
self.registry.add_latex_package(packagename, options, after_hyperref)
def add_lexer(self, alias: str, lexer: type[Lexer]) -> None:
"""Register a new lexer for source code.
Use *lexer* to highlight code blocks with the given language *alias*.
.. versionadded:: 0.6
.. versionchanged:: 2.1
Take a lexer class as an argument.
.. versionchanged:: 4.0
Removed support for lexer instances as an argument.
"""
logger.debug('[app] adding lexer: %r', (alias, lexer))
lexer_classes[alias] = lexer
def add_autodocumenter(self, cls: Any, override: bool = False) -> None:
"""Register a new documenter class for the autodoc extension.
Add *cls* as a new documenter class for the :mod:`sphinx.ext.autodoc`
extension. It must be a subclass of
:class:`sphinx.ext.autodoc.Documenter`. This allows auto-documenting
new types of objects. See the source of the autodoc module for
examples on how to subclass :class:`~sphinx.ext.autodoc.Documenter`.
If *override* is True, the given *cls* is forcedly installed even if
a documenter having the same name is already installed.
See :ref:`autodoc_ext_tutorial`.
.. versionadded:: 0.6
.. versionchanged:: 2.2
Add *override* keyword.
"""
logger.debug('[app] adding autodocumenter: %r', cls)
from sphinx.ext.autodoc.directive import AutodocDirective
self.registry.add_documenter(cls.objtype, cls)
self.add_directive('auto' + cls.objtype, AutodocDirective, override=override)
def add_autodoc_attrgetter(self, typ: type, getter: Callable[[Any, str, Any], Any],
) -> None:
"""Register a new ``getattr``-like function for the autodoc extension.
Add *getter*, which must be a function with an interface compatible to
the :func:`getattr` builtin, as the autodoc attribute getter for
objects that are instances of *typ*. All cases where autodoc needs to
get an attribute of a type are then handled by this function instead of
:func:`getattr`.
.. versionadded:: 0.6
"""
logger.debug('[app] adding autodoc attrgetter: %r', (typ, getter))
self.registry.add_autodoc_attrgetter(typ, getter)
def add_search_language(self, cls: Any) -> None:
"""Register a new language for the HTML search index.
Add *cls*, which must be a subclass of
:class:`sphinx.search.SearchLanguage`, as a support language for
building the HTML full-text search index. The class must have a *lang*
attribute that indicates the language it should be used for. See
:confval:`html_search_language`.
.. versionadded:: 1.1
"""
logger.debug('[app] adding search language: %r', cls)
from sphinx.search import SearchLanguage, languages
assert issubclass(cls, SearchLanguage)
languages[cls.lang] = cls
def add_source_suffix(self, suffix: str, filetype: str, override: bool = False) -> None:
"""Register a suffix of source files.
Same as :confval:`source_suffix`. The users can override this
using the config setting.
:param override: If false, do not install it the same suffix
is already installed.
If true, unconditionally install the suffix.
.. versionadded:: 1.8
"""
self.registry.add_source_suffix(suffix, filetype, override=override)
def add_source_parser(self, parser: type[Parser], override: bool = False) -> None:
"""Register a parser class.
:param override: If false, do not install it if another parser
is already installed for the same suffix.
If true, unconditionally install the parser.
.. versionadded:: 1.4
.. versionchanged:: 1.8
*suffix* argument is deprecated. It only accepts *parser* argument.
Use :meth:`add_source_suffix` API to register suffix instead.
.. versionchanged:: 1.8
Add *override* keyword.
"""
self.registry.add_source_parser(parser, override=override)
def add_env_collector(self, collector: type[EnvironmentCollector]) -> None:
"""Register an environment collector class.
Refer to :ref:`collector-api`.
.. versionadded:: 1.6
"""
logger.debug('[app] adding environment collector: %r', collector)
collector().enable(self)
def add_html_theme(self, name: str, theme_path: str) -> None:
"""Register a HTML Theme.
The *name* is a name of theme, and *theme_path* is a full path to the
theme (refs: :ref:`distribute-your-theme`).
.. versionadded:: 1.6
"""
logger.debug('[app] adding HTML theme: %r, %r', name, theme_path)
self.registry.add_html_theme(name, theme_path)
def add_html_math_renderer(
self,
name: str,
inline_renderers: tuple[Callable, Callable | None] | None = None,
block_renderers: tuple[Callable, Callable | None] | None = None,
) -> None:
"""Register a math renderer for HTML.
The *name* is a name of math renderer. Both *inline_renderers* and
*block_renderers* are used as visitor functions for the HTML writer:
the former for inline math node (``nodes.math``), the latter for
block math node (``nodes.math_block``). Regarding visitor functions,
see :meth:`add_node` for details.
.. versionadded:: 1.8
"""
self.registry.add_html_math_renderer(name, inline_renderers, block_renderers)
def add_message_catalog(self, catalog: str, locale_dir: str) -> None:
"""Register a message catalog.
:param catalog: The name of the catalog
:param locale_dir: The base path of the message catalog
For more details, see :func:`sphinx.locale.get_translation()`.
.. versionadded:: 1.8
"""
locale.init([locale_dir], self.config.language, catalog)
locale.init_console(locale_dir, catalog)
# ---- other methods -------------------------------------------------
def is_parallel_allowed(self, typ: str) -> bool:
"""Check whether parallel processing is allowed or not.
:param typ: A type of processing; ``'read'`` or ``'write'``.
"""
if typ == 'read':
attrname = 'parallel_read_safe'
message_not_declared = __("the %s extension does not declare if it "
"is safe for parallel reading, assuming "
"it isn't - please ask the extension author "
"to check and make it explicit")
message_not_safe = __("the %s extension is not safe for parallel reading")
elif typ == 'write':
attrname = 'parallel_write_safe'
message_not_declared = __("the %s extension does not declare if it "
"is safe for parallel writing, assuming "
"it isn't - please ask the extension author "
"to check and make it explicit")
message_not_safe = __("the %s extension is not safe for parallel writing")
else:
raise ValueError('parallel type %s is not supported' % typ)
for ext in self.extensions.values():
allowed = getattr(ext, attrname, None)
if allowed is None:
logger.warning(message_not_declared, ext.name)
logger.warning(__('doing serial %s'), typ)
return False
elif not allowed:
logger.warning(message_not_safe, ext.name)
logger.warning(__('doing serial %s'), typ)
return False
return True
def set_html_assets_policy(self, policy: Literal['always', 'per_page']) -> None:
"""Set the policy to include assets in HTML pages.
- always: include the assets in all the pages
- per_page: include the assets only in pages where they are used
.. versionadded: 4.1
"""
if policy not in ('always', 'per_page'):
raise ValueError('policy %s is not supported' % policy)
self.registry.html_assets_policy = policy
| (srcdir: 'str | os.PathLike[str]', confdir: 'str | os.PathLike[str] | None', outdir: 'str | os.PathLike[str]', doctreedir: 'str | os.PathLike[str]', buildername: 'str', confoverrides: 'dict | None' = None, status: 'IO | None' = <_io.TextIOWrapper name='<stdout>' mode='w' encoding='utf-8'>, warning: 'IO | None' = <_io.TextIOWrapper name='<stderr>' mode='w' encoding='utf-8'>, freshenv: 'bool' = False, warningiserror: bool = False, tags: 'list[str] | None' = None, verbosity: 'int' = 0, parallel: 'int' = 0, keep_going: 'bool' = False, pdb: 'bool' = False) -> 'None' |
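The constructor signature above is also the entry point for driving Sphinx programmatically, which is essentially what `sphinx-build` does. A minimal sketch, assuming an existing project with a `conf.py` in the source directory (all paths are placeholders):

```python
from sphinx.application import Sphinx

app = Sphinx(
    srcdir='docs',                  # contains conf.py and the document sources
    confdir='docs',                 # directory with conf.py (None means "no conf.py", as with -C)
    outdir='docs/_build/html',      # must differ from srcdir
    doctreedir='docs/_build/doctrees',
    buildername='html',
)
app.build()                         # incremental build; build(force_all=True) rebuilds everything
print('exit status:', app.statuscode)
```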
37,566 | sphinx.application | __init__ | null | def __init__(self, srcdir: str | os.PathLike[str], confdir: str | os.PathLike[str] | None,
outdir: str | os.PathLike[str], doctreedir: str | os.PathLike[str],
buildername: str, confoverrides: dict | None = None,
status: IO | None = sys.stdout, warning: IO | None = sys.stderr,
freshenv: bool = False, warningiserror: bool = False,
tags: list[str] | None = None,
verbosity: int = 0, parallel: int = 0, keep_going: bool = False,
pdb: bool = False) -> None:
self.phase = BuildPhase.INITIALIZATION
self.verbosity = verbosity
self.extensions: dict[str, Extension] = {}
self.registry = SphinxComponentRegistry()
# validate provided directories
self.srcdir = _StrPath(srcdir).resolve()
self.outdir = _StrPath(outdir).resolve()
self.doctreedir = _StrPath(doctreedir).resolve()
if not path.isdir(self.srcdir):
raise ApplicationError(__('Cannot find source directory (%s)') %
self.srcdir)
if path.exists(self.outdir) and not path.isdir(self.outdir):
raise ApplicationError(__('Output directory (%s) is not a directory') %
self.outdir)
if self.srcdir == self.outdir:
raise ApplicationError(__('Source directory and destination '
'directory cannot be identical'))
self.parallel = parallel
if status is None:
self._status: IO = StringIO()
self.quiet: bool = True
else:
self._status = status
self.quiet = False
if warning is None:
self._warning: IO = StringIO()
else:
self._warning = warning
self._warncount = 0
self.keep_going = warningiserror and keep_going
if self.keep_going:
self.warningiserror = False
else:
self.warningiserror = warningiserror
self.pdb = pdb
logging.setup(self, self._status, self._warning)
self.events = EventManager(self)
# keep last few messages for traceback
# This will be filled by sphinx.util.logging.LastMessagesWriter
self.messagelog: deque = deque(maxlen=10)
# say hello to the world
logger.info(bold(__('Running Sphinx v%s') % sphinx.__display_version__))
# status code for command-line application
self.statuscode = 0
# read config
self.tags = Tags(tags)
if confdir is None:
# set confdir to srcdir if -C given (!= no confdir); a few pieces
# of code expect a confdir to be set
self.confdir = self.srcdir
self.config = Config({}, confoverrides or {})
else:
self.confdir = _StrPath(confdir).resolve()
self.config = Config.read(self.confdir, confoverrides or {}, self.tags)
# set up translation infrastructure
self._init_i18n()
# check the Sphinx version if requested
if self.config.needs_sphinx and self.config.needs_sphinx > sphinx.__display_version__:
raise VersionRequirementError(
__('This project needs at least Sphinx v%s and therefore cannot '
'be built with this version.') % self.config.needs_sphinx)
# load all built-in extension modules, first-party extension modules,
# and first-party themes
for extension in builtin_extensions:
self.setup_extension(extension)
# load all user-given extension modules
for extension in self.config.extensions:
self.setup_extension(extension)
# preload builder module (before init config values)
self.preload_builder(buildername)
if not path.isdir(outdir):
with progress_message(__('making output directory')):
ensuredir(outdir)
# the config file itself can be an extension
if self.config.setup:
prefix = __('while setting up extension %s:') % "conf.py"
with prefixed_warnings(prefix):
if callable(self.config.setup):
self.config.setup(self)
else:
raise ConfigError(
__("'setup' as currently defined in conf.py isn't a Python callable. "
"Please modify its definition to make it a callable function. "
"This is needed for conf.py to behave as a Sphinx extension."),
)
# Report any warnings for overrides.
self.config._report_override_warnings()
self.events.emit('config-inited', self.config)
# create the project
self.project = Project(self.srcdir, self.config.source_suffix)
# set up the build environment
self.env = self._init_env(freshenv)
# create the builder
self.builder = self.create_builder(buildername)
# build environment post-initialisation, after creating the builder
self._post_init_env()
# set up the builder
self._init_builder()
| (self, srcdir: str | os.PathLike[str], confdir: str | os.PathLike[str] | None, outdir: str | os.PathLike[str], doctreedir: str | os.PathLike[str], buildername: str, confoverrides: Optional[dict] = None, status: typing.IO | None = <_io.TextIOWrapper name='<stdout>' mode='w' encoding='utf-8'>, warning: typing.IO | None = <_io.TextIOWrapper name='<stderr>' mode='w' encoding='utf-8'>, freshenv: bool = False, warningiserror: bool = False, tags: Optional[list[str]] = None, verbosity: int = 0, parallel: int = 0, keep_going: bool = False, pdb: bool = False) -> NoneType |
37,567 | sphinx.application | _create_fresh_env | null | def _create_fresh_env(self) -> BuildEnvironment:
env = BuildEnvironment(self)
self._fresh_env_used = True
return env
| (self) -> sphinx.environment.BuildEnvironment |
37,568 | sphinx.application | _init_builder | null | def _init_builder(self) -> None:
self.builder.init()
self.events.emit('builder-inited')
| (self) -> NoneType |
37,569 | sphinx.application | _init_env | null | def _init_env(self, freshenv: bool) -> BuildEnvironment:
filename = path.join(self.doctreedir, ENV_PICKLE_FILENAME)
if freshenv or not os.path.exists(filename):
return self._create_fresh_env()
else:
return self._load_existing_env(filename)
| (self, freshenv: bool) -> sphinx.environment.BuildEnvironment |
37,570 | sphinx.application | _init_i18n | Load translated strings from the configured localedirs if enabled in
the configuration.
| def _init_i18n(self) -> None:
"""Load translated strings from the configured localedirs if enabled in
the configuration.
"""
if self.config.language == 'en':
self.translator, _ = locale.init([], None)
else:
logger.info(bold(__('loading translations [%s]... ') % self.config.language),
nonl=True)
# compile mo files if sphinx.po file in user locale directories are updated
repo = CatalogRepository(self.srcdir, self.config.locale_dirs,
self.config.language, self.config.source_encoding)
for catalog in repo.catalogs:
if catalog.domain == 'sphinx' and catalog.is_outdated():
catalog.write_mo(self.config.language,
self.config.gettext_allow_fuzzy_translations)
locale_dirs: list[str | None] = list(repo.locale_dirs)
locale_dirs += [None]
locale_dirs += [path.join(package_dir, 'locale')]
self.translator, has_translation = locale.init(locale_dirs, self.config.language)
if has_translation:
logger.info(__('done'))
else:
logger.info(__('not available for built-in messages'))
| (self) -> NoneType |
37,571 | sphinx.application | _load_existing_env | null | """Sphinx application class and extensibility interface.
Gracefully adapted from the TextPress system by Armin.
"""
from __future__ import annotations
import contextlib
import os
import pickle
import sys
from collections import deque
from collections.abc import Collection, Sequence # NoQA: TCH003
from io import StringIO
from os import path
from typing import IO, TYPE_CHECKING, Any, Callable, Literal
from docutils.nodes import TextElement # NoQA: TCH002
from docutils.parsers.rst import Directive, roles
from docutils.transforms import Transform # NoQA: TCH002
from pygments.lexer import Lexer # NoQA: TCH002
import sphinx
from sphinx import locale, package_dir
from sphinx.config import ENUM, Config, _ConfigRebuild
from sphinx.environment import BuildEnvironment
from sphinx.errors import ApplicationError, ConfigError, VersionRequirementError
from sphinx.events import EventManager
from sphinx.highlighting import lexer_classes
from sphinx.locale import __
from sphinx.project import Project
from sphinx.registry import SphinxComponentRegistry
from sphinx.util import docutils, logging
from sphinx.util._pathlib import _StrPath
from sphinx.util.build_phase import BuildPhase
from sphinx.util.console import bold
from sphinx.util.display import progress_message
from sphinx.util.i18n import CatalogRepository
from sphinx.util.logging import prefixed_warnings
from sphinx.util.osutil import ensuredir, relpath
from sphinx.util.tags import Tags
if TYPE_CHECKING:
from docutils import nodes
from docutils.nodes import Element
from docutils.parsers import Parser
from sphinx.builders import Builder
from sphinx.domains import Domain, Index
from sphinx.environment.collectors import EnvironmentCollector
from sphinx.extension import Extension
from sphinx.roles import XRefRole
from sphinx.theming import Theme
from sphinx.util.typing import RoleFunction, TitleGetter
builtin_extensions: tuple[str, ...] = (
'sphinx.addnodes',
'sphinx.builders.changes',
'sphinx.builders.epub3',
'sphinx.builders.dirhtml',
'sphinx.builders.dummy',
'sphinx.builders.gettext',
'sphinx.builders.html',
'sphinx.builders.latex',
'sphinx.builders.linkcheck',
'sphinx.builders.manpage',
'sphinx.builders.singlehtml',
'sphinx.builders.texinfo',
'sphinx.builders.text',
'sphinx.builders.xml',
'sphinx.config',
'sphinx.domains.c',
'sphinx.domains.changeset',
'sphinx.domains.citation',
'sphinx.domains.cpp',
'sphinx.domains.index',
'sphinx.domains.javascript',
'sphinx.domains.math',
'sphinx.domains.python',
'sphinx.domains.rst',
'sphinx.domains.std',
'sphinx.directives',
'sphinx.directives.code',
'sphinx.directives.other',
'sphinx.directives.patches',
'sphinx.extension',
'sphinx.parsers',
'sphinx.registry',
'sphinx.roles',
'sphinx.transforms',
'sphinx.transforms.compact_bullet_list',
'sphinx.transforms.i18n',
'sphinx.transforms.references',
'sphinx.transforms.post_transforms',
'sphinx.transforms.post_transforms.code',
'sphinx.transforms.post_transforms.images',
'sphinx.versioning',
# collectors should be loaded by specific order
'sphinx.environment.collectors.dependencies',
'sphinx.environment.collectors.asset',
'sphinx.environment.collectors.metadata',
'sphinx.environment.collectors.title',
'sphinx.environment.collectors.toctree',
)
_first_party_extensions = (
# 1st party extensions
'sphinxcontrib.applehelp',
'sphinxcontrib.devhelp',
'sphinxcontrib.htmlhelp',
'sphinxcontrib.serializinghtml',
'sphinxcontrib.qthelp',
)
_first_party_themes = (
# Alabaster is loaded automatically to be used as the default theme
'alabaster',
)
builtin_extensions += _first_party_themes
builtin_extensions += _first_party_extensions
ENV_PICKLE_FILENAME = 'environment.pickle'
logger = logging.getLogger(__name__)
class Sphinx:
"""The main application class and extensibility interface.
:ivar srcdir: Directory containing source.
:ivar confdir: Directory containing ``conf.py``.
:ivar doctreedir: Directory for storing pickled doctrees.
:ivar outdir: Directory for storing build documents.
"""
warningiserror: bool
_warncount: int
def __init__(self, srcdir: str | os.PathLike[str], confdir: str | os.PathLike[str] | None,
outdir: str | os.PathLike[str], doctreedir: str | os.PathLike[str],
buildername: str, confoverrides: dict | None = None,
status: IO | None = sys.stdout, warning: IO | None = sys.stderr,
freshenv: bool = False, warningiserror: bool = False,
tags: list[str] | None = None,
verbosity: int = 0, parallel: int = 0, keep_going: bool = False,
pdb: bool = False) -> None:
self.phase = BuildPhase.INITIALIZATION
self.verbosity = verbosity
self.extensions: dict[str, Extension] = {}
self.registry = SphinxComponentRegistry()
# validate provided directories
self.srcdir = _StrPath(srcdir).resolve()
self.outdir = _StrPath(outdir).resolve()
self.doctreedir = _StrPath(doctreedir).resolve()
if not path.isdir(self.srcdir):
raise ApplicationError(__('Cannot find source directory (%s)') %
self.srcdir)
if path.exists(self.outdir) and not path.isdir(self.outdir):
raise ApplicationError(__('Output directory (%s) is not a directory') %
self.outdir)
if self.srcdir == self.outdir:
raise ApplicationError(__('Source directory and destination '
'directory cannot be identical'))
self.parallel = parallel
if status is None:
self._status: IO = StringIO()
self.quiet: bool = True
else:
self._status = status
self.quiet = False
if warning is None:
self._warning: IO = StringIO()
else:
self._warning = warning
self._warncount = 0
self.keep_going = warningiserror and keep_going
if self.keep_going:
self.warningiserror = False
else:
self.warningiserror = warningiserror
self.pdb = pdb
logging.setup(self, self._status, self._warning)
self.events = EventManager(self)
# keep last few messages for traceback
# This will be filled by sphinx.util.logging.LastMessagesWriter
self.messagelog: deque = deque(maxlen=10)
# say hello to the world
logger.info(bold(__('Running Sphinx v%s') % sphinx.__display_version__))
# status code for command-line application
self.statuscode = 0
# read config
self.tags = Tags(tags)
if confdir is None:
# set confdir to srcdir if -C given (!= no confdir); a few pieces
# of code expect a confdir to be set
self.confdir = self.srcdir
self.config = Config({}, confoverrides or {})
else:
self.confdir = _StrPath(confdir).resolve()
self.config = Config.read(self.confdir, confoverrides or {}, self.tags)
# set up translation infrastructure
self._init_i18n()
# check the Sphinx version if requested
if self.config.needs_sphinx and self.config.needs_sphinx > sphinx.__display_version__:
raise VersionRequirementError(
__('This project needs at least Sphinx v%s and therefore cannot '
'be built with this version.') % self.config.needs_sphinx)
# load all built-in extension modules, first-party extension modules,
# and first-party themes
for extension in builtin_extensions:
self.setup_extension(extension)
# load all user-given extension modules
for extension in self.config.extensions:
self.setup_extension(extension)
# preload builder module (before init config values)
self.preload_builder(buildername)
if not path.isdir(outdir):
with progress_message(__('making output directory')):
ensuredir(outdir)
# the config file itself can be an extension
if self.config.setup:
prefix = __('while setting up extension %s:') % "conf.py"
with prefixed_warnings(prefix):
if callable(self.config.setup):
self.config.setup(self)
else:
raise ConfigError(
__("'setup' as currently defined in conf.py isn't a Python callable. "
"Please modify its definition to make it a callable function. "
"This is needed for conf.py to behave as a Sphinx extension."),
)
# Report any warnings for overrides.
self.config._report_override_warnings()
self.events.emit('config-inited', self.config)
# create the project
self.project = Project(self.srcdir, self.config.source_suffix)
# set up the build environment
self.env = self._init_env(freshenv)
# create the builder
self.builder = self.create_builder(buildername)
# build environment post-initialisation, after creating the builder
self._post_init_env()
# set up the builder
self._init_builder()
def _init_i18n(self) -> None:
"""Load translated strings from the configured localedirs if enabled in
the configuration.
"""
if self.config.language == 'en':
self.translator, _ = locale.init([], None)
else:
logger.info(bold(__('loading translations [%s]... ') % self.config.language),
nonl=True)
# compile mo files if sphinx.po files in user locale directories are updated
repo = CatalogRepository(self.srcdir, self.config.locale_dirs,
self.config.language, self.config.source_encoding)
for catalog in repo.catalogs:
if catalog.domain == 'sphinx' and catalog.is_outdated():
catalog.write_mo(self.config.language,
self.config.gettext_allow_fuzzy_translations)
locale_dirs: list[str | None] = list(repo.locale_dirs)
locale_dirs += [None]
locale_dirs += [path.join(package_dir, 'locale')]
self.translator, has_translation = locale.init(locale_dirs, self.config.language)
if has_translation:
logger.info(__('done'))
else:
logger.info(__('not available for built-in messages'))
def _init_env(self, freshenv: bool) -> BuildEnvironment:
filename = path.join(self.doctreedir, ENV_PICKLE_FILENAME)
if freshenv or not os.path.exists(filename):
return self._create_fresh_env()
else:
return self._load_existing_env(filename)
def _create_fresh_env(self) -> BuildEnvironment:
env = BuildEnvironment(self)
self._fresh_env_used = True
return env
@progress_message(__('loading pickled environment'))
def _load_existing_env(self, filename: str) -> BuildEnvironment:
try:
with open(filename, 'rb') as f:
env = pickle.load(f)
env.setup(self)
self._fresh_env_used = False
except Exception as err:
logger.info(__('failed: %s'), err)
env = self._create_fresh_env()
return env
def _post_init_env(self) -> None:
if self._fresh_env_used:
self.env.find_files(self.config, self.builder)
del self._fresh_env_used
def preload_builder(self, name: str) -> None:
self.registry.preload_builder(self, name)
def create_builder(self, name: str) -> Builder:
if name is None:
logger.info(__('No builder selected, using default: html'))
name = 'html'
return self.registry.create_builder(self, name, self.env)
def _init_builder(self) -> None:
self.builder.init()
self.events.emit('builder-inited')
# ---- main "build" method -------------------------------------------------
def build(self, force_all: bool = False, filenames: list[str] | None = None) -> None:
self.phase = BuildPhase.READING
try:
if force_all:
self.builder.build_all()
elif filenames:
self.builder.build_specific(filenames)
else:
self.builder.build_update()
self.events.emit('build-finished', None)
except Exception as err:
# delete the saved env to force a fresh build next time
envfile = path.join(self.doctreedir, ENV_PICKLE_FILENAME)
if path.isfile(envfile):
os.unlink(envfile)
self.events.emit('build-finished', err)
raise
if self._warncount and self.keep_going:
self.statuscode = 1
status = (__('succeeded') if self.statuscode == 0
else __('finished with problems'))
if self._warncount:
if self.warningiserror:
if self._warncount == 1:
msg = __('build %s, %s warning (with warnings treated as errors).')
else:
msg = __('build %s, %s warnings (with warnings treated as errors).')
else:
if self._warncount == 1:
msg = __('build %s, %s warning.')
else:
msg = __('build %s, %s warnings.')
logger.info(bold(msg % (status, self._warncount)))
else:
logger.info(bold(__('build %s.') % status))
if self.statuscode == 0 and self.builder.epilog:
logger.info('')
logger.info(self.builder.epilog % {
'outdir': relpath(self.outdir),
'project': self.config.project,
})
self.builder.cleanup()
# ---- general extensibility interface -------------------------------------
def setup_extension(self, extname: str) -> None:
"""Import and setup a Sphinx extension module.
Load the extension given by the module *name*. Use this if your
extension needs the features provided by another extension. No-op if
called twice.
"""
logger.debug('[app] setting up extension: %r', extname)
self.registry.load_extension(self, extname)
@staticmethod
def require_sphinx(version: tuple[int, int] | str) -> None:
"""Check the Sphinx version if requested.
Compare *version* with the version of the running Sphinx, and abort the
build when it is too old.
:param version: The required version in the form of ``major.minor`` or
``(major, minor)``.
.. versionadded:: 1.0
.. versionchanged:: 7.1
Type of *version* now allows ``(major, minor)`` form.
"""
if isinstance(version, tuple):
major, minor = version
else:
major, minor = map(int, version.split('.')[:2])
if (major, minor) > sphinx.version_info[:2]:
req = f'{major}.{minor}'
raise VersionRequirementError(req)
# event interface
def connect(self, event: str, callback: Callable, priority: int = 500) -> int:
"""Register *callback* to be called when *event* is emitted.
For details on available core events and the arguments of callback
functions, please see :ref:`events`.
:param event: The name of target event
:param callback: Callback function for the event
:param priority: The priority of the callback. The callbacks will be invoked
in order of *priority* (ascending).
:return: A listener ID. It can be used for :meth:`disconnect`.
.. versionchanged:: 3.0
Support *priority*
"""
listener_id = self.events.connect(event, callback, priority)
logger.debug('[app] connecting event %r (%d): %r [id=%s]',
event, priority, callback, listener_id)
return listener_id
def disconnect(self, listener_id: int) -> None:
"""Unregister callback by *listener_id*.
:param listener_id: A listener_id that :meth:`connect` returns
"""
logger.debug('[app] disconnecting event: [id=%s]', listener_id)
self.events.disconnect(listener_id)
def emit(self, event: str, *args: Any,
allowed_exceptions: tuple[type[Exception], ...] = ()) -> list:
"""Emit *event* and pass *arguments* to the callback functions.
Return the return values of all callbacks as a list. Do not emit core
Sphinx events in extensions!
:param event: The name of event that will be emitted
:param args: The arguments for the event
:param allowed_exceptions: The list of exceptions that are allowed in the callbacks
.. versionchanged:: 3.1
Added *allowed_exceptions* to specify pass-through exceptions
"""
return self.events.emit(event, *args, allowed_exceptions=allowed_exceptions)
def emit_firstresult(self, event: str, *args: Any,
allowed_exceptions: tuple[type[Exception], ...] = ()) -> Any:
"""Emit *event* and pass *arguments* to the callback functions.
Return the result of the first callback that doesn't return ``None``.
:param event: The name of event that will be emitted
:param args: The arguments for the event
:param allowed_exceptions: The list of exceptions that are allowed in the callbacks
.. versionadded:: 0.5
.. versionchanged:: 3.1
Added *allowed_exceptions* to specify pass-through exceptions
"""
return self.events.emit_firstresult(event, *args,
allowed_exceptions=allowed_exceptions)
# registering addon parts
def add_builder(self, builder: type[Builder], override: bool = False) -> None:
"""Register a new builder.
:param builder: A builder class
:param override: If true, install the builder forcedly even if another builder
is already installed as the same name
.. versionchanged:: 1.8
Add *override* keyword.
"""
self.registry.add_builder(builder, override=override)
# TODO(stephenfin): Describe 'types' parameter
def add_config_value(self, name: str, default: Any, rebuild: _ConfigRebuild,
types: type | Collection[type] | ENUM = ()) -> None:
"""Register a configuration value.
This is necessary for Sphinx to recognize new values and set default
values accordingly.
:param name: The name of the configuration value. It is recommended to be prefixed
with the extension name (ex. ``html_logo``, ``epub_title``)
:param default: The default value of the configuration.
:param rebuild: The condition of rebuild. It must be one of those values:
* ``'env'`` if a change in the setting only takes effect when a
document is parsed -- this means that the whole environment must be
rebuilt.
* ``'html'`` if a change in the setting needs a full rebuild of HTML
documents.
* ``''`` if a change in the setting will not need any special rebuild.
:param types: The type of configuration value. A list of types can be specified. For
example, ``[str]`` is used to describe a configuration that takes string
value.
.. versionchanged:: 0.4
If the *default* value is a callable, it will be called with the
config object as its argument in order to get the default value.
This can be used to implement config values whose default depends on
other values.
.. versionchanged:: 0.6
Changed *rebuild* from a simple boolean (equivalent to ``''`` or
``'env'``) to a string. However, booleans are still accepted and
converted internally.
"""
logger.debug('[app] adding config value: %r', (name, default, rebuild, types))
self.config.add(name, default, rebuild, types)
def add_event(self, name: str) -> None:
"""Register an event called *name*.
This is needed to be able to emit it.
:param name: The name of the event
"""
logger.debug('[app] adding event: %r', name)
self.events.add(name)
def set_translator(self, name: str, translator_class: type[nodes.NodeVisitor],
override: bool = False) -> None:
"""Register or override a Docutils translator class.
This is used to register a custom output translator or to replace a
builtin translator. This allows extensions to use a custom translator
and define custom nodes for the translator (see :meth:`add_node`).
:param name: The name of the builder for the translator
:param translator_class: A translator class
:param override: If true, install the translator forcedly even if another translator
is already installed as the same name
.. versionadded:: 1.3
.. versionchanged:: 1.8
Add *override* keyword.
"""
self.registry.add_translator(name, translator_class, override=override)
def add_node(self, node: type[Element], override: bool = False,
**kwargs: tuple[Callable, Callable | None]) -> None:
"""Register a Docutils node class.
This is necessary for Docutils internals. It may also be used in the
future to validate nodes in the parsed documents.
:param node: A node class
:param kwargs: Visitor functions for each builder (see below)
:param override: If true, install the node forcedly even if another node is already
installed as the same name
Node visitor functions for the Sphinx HTML, LaTeX, text and manpage
writers can be given as keyword arguments: the keyword should be one or
more of ``'html'``, ``'latex'``, ``'text'``, ``'man'``, ``'texinfo'``
or any other supported translators, the value a 2-tuple of ``(visit,
depart)`` methods. ``depart`` can be ``None`` if the ``visit``
function raises :exc:`docutils.nodes.SkipNode`. Example:
.. code-block:: python
class math(docutils.nodes.Element): pass
def visit_math_html(self, node):
self.body.append(self.starttag(node, 'math'))
def depart_math_html(self, node):
self.body.append('</math>')
app.add_node(math, html=(visit_math_html, depart_math_html))
Obviously, translators for which you don't specify visitor methods will
choke on the node when encountered in a document to translate.
.. versionchanged:: 0.5
Added the support for keyword arguments giving visit functions.
"""
logger.debug('[app] adding node: %r', (node, kwargs))
if not override and docutils.is_node_registered(node):
logger.warning(__('node class %r is already registered, '
'its visitors will be overridden'),
node.__name__, type='app', subtype='add_node')
docutils.register_node(node)
self.registry.add_translation_handlers(node, **kwargs)
def add_enumerable_node(self, node: type[Element], figtype: str,
title_getter: TitleGetter | None = None, override: bool = False,
**kwargs: tuple[Callable, Callable]) -> None:
"""Register a Docutils node class as a numfig target.
Sphinx numbers the node automatically, and users can then refer to it
using :rst:role:`numref`.
:param node: A node class
:param figtype: The type of enumerable nodes. Each figtype has individual numbering
sequences. As system figtypes, ``figure``, ``table`` and
``code-block`` are defined. It is possible to add custom nodes to
these default figtypes. It is also possible to define new custom
figtype if a new figtype is given.
:param title_getter: A getter function to obtain the title of node. It takes an
instance of the enumerable node, and it must return its title as
string. The title is used to the default title of references for
:rst:role:`ref`. By default, Sphinx searches
``docutils.nodes.caption`` or ``docutils.nodes.title`` from the
node as a title.
:param kwargs: Visitor functions for each builder (same as :meth:`add_node`)
:param override: If true, install the node forcedly even if another node is already
installed as the same name
.. versionadded:: 1.4
"""
self.registry.add_enumerable_node(node, figtype, title_getter, override=override)
self.add_node(node, override=override, **kwargs)
def add_directive(self, name: str, cls: type[Directive], override: bool = False) -> None:
"""Register a Docutils directive.
:param name: The name of the directive
:param cls: A directive class
:param override: If false, do not install it if another directive
is already installed as the same name
If true, unconditionally install the directive.
For example, a custom directive named ``my-directive`` would be added
like this:
.. code-block:: python
from docutils.parsers.rst import Directive, directives
class MyDirective(Directive):
has_content = True
required_arguments = 1
optional_arguments = 0
final_argument_whitespace = True
option_spec = {
'class': directives.class_option,
'name': directives.unchanged,
}
def run(self):
...
def setup(app):
app.add_directive('my-directive', MyDirective)
For more details, see `the Docutils docs
<https://docutils.sourceforge.io/docs/howto/rst-directives.html>`__ .
.. versionchanged:: 0.6
Docutils 0.5-style directive classes are now supported.
.. deprecated:: 1.8
Docutils 0.4-style (function based) directives support is deprecated.
.. versionchanged:: 1.8
Add *override* keyword.
"""
logger.debug('[app] adding directive: %r', (name, cls))
if not override and docutils.is_directive_registered(name):
logger.warning(__('directive %r is already registered, it will be overridden'),
name, type='app', subtype='add_directive')
docutils.register_directive(name, cls)
def add_role(self, name: str, role: Any, override: bool = False) -> None:
"""Register a Docutils role.
:param name: The name of role
:param role: A role function
:param override: If false, do not install it if another role
is already installed as the same name
If true, unconditionally install the role.
For more details about role functions, see `the Docutils docs
<https://docutils.sourceforge.io/docs/howto/rst-roles.html>`__ .
.. versionchanged:: 1.8
Add *override* keyword.
"""
logger.debug('[app] adding role: %r', (name, role))
if not override and docutils.is_role_registered(name):
logger.warning(__('role %r is already registered, it will be overridden'),
name, type='app', subtype='add_role')
docutils.register_role(name, role)
def add_generic_role(self, name: str, nodeclass: Any, override: bool = False) -> None:
"""Register a generic Docutils role.
Register a Docutils role that does nothing but wrap its contents in the
node given by *nodeclass*.
:param override: If false, do not install it if another role
is already installed as the same name
If true, unconditionally install the role.
.. versionadded:: 0.6
.. versionchanged:: 1.8
Add *override* keyword.
"""
# Don't use ``roles.register_generic_role`` because it uses
# ``register_canonical_role``.
logger.debug('[app] adding generic role: %r', (name, nodeclass))
if not override and docutils.is_role_registered(name):
logger.warning(__('role %r is already registered, it will be overridden'),
name, type='app', subtype='add_generic_role')
role = roles.GenericRole(name, nodeclass)
docutils.register_role(name, role) # type: ignore[arg-type]
def add_domain(self, domain: type[Domain], override: bool = False) -> None:
"""Register a domain.
:param domain: A domain class
:param override: If false, do not install it if another domain
is already installed as the same name
If true, unconditionally install the domain.
.. versionadded:: 1.0
.. versionchanged:: 1.8
Add *override* keyword.
"""
self.registry.add_domain(domain, override=override)
def add_directive_to_domain(self, domain: str, name: str,
cls: type[Directive], override: bool = False) -> None:
"""Register a Docutils directive in a domain.
Like :meth:`add_directive`, but the directive is added to the domain
named *domain*.
:param domain: The name of target domain
:param name: A name of directive
:param cls: A directive class
:param override: If false, do not install it if another directive
is already installed as the same name
If true, unconditionally install the directive.
.. versionadded:: 1.0
.. versionchanged:: 1.8
Add *override* keyword.
"""
self.registry.add_directive_to_domain(domain, name, cls, override=override)
def add_role_to_domain(self, domain: str, name: str, role: RoleFunction | XRefRole,
override: bool = False) -> None:
"""Register a Docutils role in a domain.
Like :meth:`add_role`, but the role is added to the domain named
*domain*.
:param domain: The name of the target domain
:param name: The name of the role
:param role: The role function
:param override: If false, do not install it if another role
is already installed as the same name
If true, unconditionally install the role.
.. versionadded:: 1.0
.. versionchanged:: 1.8
Add *override* keyword.
"""
self.registry.add_role_to_domain(domain, name, role, override=override)
def add_index_to_domain(self, domain: str, index: type[Index], override: bool = False,
) -> None:
"""Register a custom index for a domain.
Add a custom *index* class to the domain named *domain*.
:param domain: The name of the target domain
:param index: The index class
:param override: If false, do not install it if another index
is already installed as the same name
If true, unconditionally install the index.
.. versionadded:: 1.0
.. versionchanged:: 1.8
Add *override* keyword.
"""
self.registry.add_index_to_domain(domain, index)
def add_object_type(self, directivename: str, rolename: str, indextemplate: str = '',
parse_node: Callable | None = None,
ref_nodeclass: type[TextElement] | None = None,
objname: str = '', doc_field_types: Sequence = (),
override: bool = False,
) -> None:
"""Register a new object type.
This method is a very convenient way to add a new :term:`object` type
that can be cross-referenced. It will do this:
- Create a new directive (called *directivename*) for documenting an
object. It will automatically add index entries if *indextemplate*
is nonempty; if given, it must contain exactly one instance of
``%s``. See the example below for how the template will be
interpreted.
- Create a new role (called *rolename*) to cross-reference to these
object descriptions.
- If you provide *parse_node*, it must be a function that takes a
string and a docutils node, and it must populate the node with
children parsed from the string. It must then return the name of the
item to be used in cross-referencing and index entries. See the
:file:`conf.py` file in the source for this documentation for an
example.
- The *objname* (if not given, will default to *directivename*) names
the type of object. It is used when listing objects, e.g. in search
results.
For example, if you have this call in a custom Sphinx extension::
app.add_object_type('directive', 'dir', 'pair: %s; directive')
you can use this markup in your documents::
.. rst:directive:: function
Document a function.
<...>
See also the :rst:dir:`function` directive.
For the directive, an index entry will be generated as if you had prepended ::
.. index:: pair: function; directive
The reference node will be of class ``literal`` (so it will be rendered
in a proportional font, as appropriate for code) unless you give the
*ref_nodeclass* argument, which must be a docutils node class. Most
useful are ``docutils.nodes.emphasis`` or ``docutils.nodes.strong`` --
you can also use ``docutils.nodes.generated`` if you want no further
text decoration. If the text should be treated as literal (e.g. no
smart quote replacement), but not have typewriter styling, use
``sphinx.addnodes.literal_emphasis`` or
``sphinx.addnodes.literal_strong``.
For the role content, you have the same syntactical possibilities as
for standard Sphinx roles (see :ref:`xref-syntax`).
If *override* is True, the given object_type is forcedly installed even if
an object_type having the same name is already installed.
.. versionchanged:: 1.8
Add *override* keyword.
"""
self.registry.add_object_type(directivename, rolename, indextemplate, parse_node,
ref_nodeclass, objname, doc_field_types,
override=override)
def add_crossref_type(self, directivename: str, rolename: str, indextemplate: str = '',
ref_nodeclass: type[TextElement] | None = None, objname: str = '',
override: bool = False) -> None:
"""Register a new crossref object type.
This method is very similar to :meth:`~Sphinx.add_object_type` except that the
directive it generates must be empty, and will produce no output.
That means that you can add semantic targets to your sources, and refer
to them using custom roles instead of generic ones (like
:rst:role:`ref`). Example call::
app.add_crossref_type('topic', 'topic', 'single: %s',
docutils.nodes.emphasis)
Example usage::
.. topic:: application API
The application API
-------------------
Some random text here.
See also :topic:`this section <application API>`.
(Of course, the element following the ``topic`` directive needn't be a
section.)
:param override: If false, do not install it if another cross-reference type
is already installed as the same name
If true, unconditionally install the cross-reference type.
.. versionchanged:: 1.8
Add *override* keyword.
"""
self.registry.add_crossref_type(directivename, rolename,
indextemplate, ref_nodeclass, objname,
override=override)
def add_transform(self, transform: type[Transform]) -> None:
"""Register a Docutils transform to be applied after parsing.
Add the standard docutils :class:`~docutils.transforms.Transform`
subclass *transform* to the list of transforms that are applied after
Sphinx parses a reST document.
:param transform: A transform class
.. list-table:: priority range categories for Sphinx transforms
:widths: 20,80
* - Priority
- Main purpose in Sphinx
* - 0-99
- Fix invalid nodes by docutils. Translate a doctree.
* - 100-299
- Preparation
* - 300-399
- early
* - 400-699
- main
* - 700-799
- Post processing. Deadline to modify text and referencing.
* - 800-899
- Collect referencing and referenced nodes. Domain processing.
* - 900-999
- Finalize and clean up.
refs: `Transform Priority Range Categories`__
__ https://docutils.sourceforge.io/docs/ref/transforms.html#transform-priority-range-categories
""" # NoQA: E501,RUF100 # Flake8 thinks the URL is too long, Ruff special cases URLs.
self.registry.add_transform(transform)
def add_post_transform(self, transform: type[Transform]) -> None:
"""Register a Docutils transform to be applied before writing.
Add the standard docutils :class:`~docutils.transforms.Transform`
subclass *transform* to the list of transforms that are applied before
Sphinx writes a document.
:param transform: A transform class
"""
self.registry.add_post_transform(transform)
def add_js_file(self, filename: str | None, priority: int = 500,
loading_method: str | None = None, **kwargs: Any) -> None:
"""Register a JavaScript file to include in the HTML output.
:param filename: The name of a JavaScript file that the default HTML
template will include. It must be relative to the HTML
static path, or a full URI with scheme, or ``None``.
The ``None`` value is used to create an inline
``<script>`` tag. See the description of *kwargs*
below.
:param priority: Files are included in ascending order of priority. If
multiple JavaScript files have the same priority,
those files will be included in order of registration.
See list of "priority range for JavaScript files" below.
:param loading_method: The loading method for the JavaScript file.
Either ``'async'`` or ``'defer'`` are allowed.
:param kwargs: Extra keyword arguments are included as attributes of the
``<script>`` tag. If the special keyword argument
``body`` is given, its value will be added as the content
of the ``<script>`` tag.
Example::
app.add_js_file('example.js')
# => <script src="_static/example.js"></script>
app.add_js_file('example.js', loading_method="async")
# => <script src="_static/example.js" async="async"></script>
app.add_js_file(None, body="var myVariable = 'foo';")
# => <script>var myVariable = 'foo';</script>
.. list-table:: priority range for JavaScript files
:widths: 20,80
* - Priority
- Main purpose in Sphinx
* - 200
- default priority for built-in JavaScript files
* - 500
- default priority for extensions
* - 800
- default priority for :confval:`html_js_files`
A JavaScript file can be added to the specific HTML page when an extension
calls this method on :event:`html-page-context` event.
.. versionadded:: 0.5
.. versionchanged:: 1.8
Renamed from ``app.add_javascript()``.
And it allows keyword arguments as attributes of script tag.
.. versionchanged:: 3.5
Added the *priority* argument. A JavaScript file can now be added to a specific page.
.. versionchanged:: 4.4
Added the *loading_method* argument. The loading method of the
JavaScript file can now be changed.
"""
if loading_method == 'async':
kwargs['async'] = 'async'
elif loading_method == 'defer':
kwargs['defer'] = 'defer'
self.registry.add_js_file(filename, priority=priority, **kwargs)
with contextlib.suppress(AttributeError):
self.builder.add_js_file( # type: ignore[attr-defined]
filename, priority=priority, **kwargs,
)
def add_css_file(self, filename: str, priority: int = 500, **kwargs: Any) -> None:
"""Register a stylesheet to include in the HTML output.
:param filename: The name of a CSS file that the default HTML
template will include. It must be relative to the HTML
static path, or a full URI with scheme.
:param priority: Files are included in ascending order of priority. If
multiple CSS files have the same priority,
those files will be included in order of registration.
See list of "priority range for CSS files" below.
:param kwargs: Extra keyword arguments are included as attributes of the
``<link>`` tag.
Example::
app.add_css_file('custom.css')
# => <link rel="stylesheet" href="_static/custom.css" type="text/css" />
app.add_css_file('print.css', media='print')
# => <link rel="stylesheet" href="_static/print.css"
# type="text/css" media="print" />
app.add_css_file('fancy.css', rel='alternate stylesheet', title='fancy')
# => <link rel="alternate stylesheet" href="_static/fancy.css"
# type="text/css" title="fancy" />
.. list-table:: priority range for CSS files
:widths: 20,80
* - Priority
- Main purpose in Sphinx
* - 200
- default priority for built-in CSS files
* - 500
- default priority for extensions
* - 800
- default priority for :confval:`html_css_files`
A CSS file can be added to the specific HTML page when an extension calls
this method on :event:`html-page-context` event.
.. versionadded:: 1.0
.. versionchanged:: 1.6
Optional ``alternate`` and/or ``title`` attributes can be supplied
with the arguments *alternate* (a Boolean) and *title* (a string).
The default is no title and *alternate* = ``False``. For
more information, refer to the `documentation
<https://mdn.io/Web/CSS/Alternative_style_sheets>`__.
.. versionchanged:: 1.8
Renamed from ``app.add_stylesheet()``.
And it allows keyword arguments as attributes of link tag.
.. versionchanged:: 3.5
Added the *priority* argument. A CSS file can now be added to a specific page.
"""
logger.debug('[app] adding stylesheet: %r', filename)
self.registry.add_css_files(filename, priority=priority, **kwargs)
with contextlib.suppress(AttributeError):
self.builder.add_css_file( # type: ignore[attr-defined]
filename, priority=priority, **kwargs,
)
def add_latex_package(self, packagename: str, options: str | None = None,
after_hyperref: bool = False) -> None:
r"""Register a package to include in the LaTeX source code.
Add *packagename* to the list of packages that LaTeX source code will
include. If you provide *options*, it will be taken to the `\usepackage`
declaration. If you set *after_hyperref* truthy, the package will be
loaded after ``hyperref`` package.
.. code-block:: python
app.add_latex_package('mypackage')
# => \usepackage{mypackage}
app.add_latex_package('mypackage', 'foo,bar')
# => \usepackage[foo,bar]{mypackage}
.. versionadded:: 1.3
.. versionadded:: 3.1
*after_hyperref* option.
"""
self.registry.add_latex_package(packagename, options, after_hyperref)
def add_lexer(self, alias: str, lexer: type[Lexer]) -> None:
"""Register a new lexer for source code.
Use *lexer* to highlight code blocks with the given language *alias*.
.. versionadded:: 0.6
.. versionchanged:: 2.1
Take a lexer class as an argument.
.. versionchanged:: 4.0
Removed support for lexer instances as an argument.
"""
logger.debug('[app] adding lexer: %r', (alias, lexer))
lexer_classes[alias] = lexer
def add_autodocumenter(self, cls: Any, override: bool = False) -> None:
"""Register a new documenter class for the autodoc extension.
Add *cls* as a new documenter class for the :mod:`sphinx.ext.autodoc`
extension. It must be a subclass of
:class:`sphinx.ext.autodoc.Documenter`. This allows auto-documenting
new types of objects. See the source of the autodoc module for
examples on how to subclass :class:`~sphinx.ext.autodoc.Documenter`.
If *override* is True, the given *cls* is forcedly installed even if
a documenter having the same name is already installed.
See :ref:`autodoc_ext_tutorial`.
.. versionadded:: 0.6
.. versionchanged:: 2.2
Add *override* keyword.
"""
logger.debug('[app] adding autodocumenter: %r', cls)
from sphinx.ext.autodoc.directive import AutodocDirective
self.registry.add_documenter(cls.objtype, cls)
self.add_directive('auto' + cls.objtype, AutodocDirective, override=override)
def add_autodoc_attrgetter(self, typ: type, getter: Callable[[Any, str, Any], Any],
) -> None:
"""Register a new ``getattr``-like function for the autodoc extension.
Add *getter*, which must be a function with an interface compatible to
the :func:`getattr` builtin, as the autodoc attribute getter for
objects that are instances of *typ*. All cases where autodoc needs to
get an attribute of a type are then handled by this function instead of
:func:`getattr`.
.. versionadded:: 0.6
"""
logger.debug('[app] adding autodoc attrgetter: %r', (typ, getter))
self.registry.add_autodoc_attrgetter(typ, getter)
def add_search_language(self, cls: Any) -> None:
"""Register a new language for the HTML search index.
Add *cls*, which must be a subclass of
:class:`sphinx.search.SearchLanguage`, as a support language for
building the HTML full-text search index. The class must have a *lang*
attribute that indicates the language it should be used for. See
:confval:`html_search_language`.
.. versionadded:: 1.1
"""
logger.debug('[app] adding search language: %r', cls)
from sphinx.search import SearchLanguage, languages
assert issubclass(cls, SearchLanguage)
languages[cls.lang] = cls
def add_source_suffix(self, suffix: str, filetype: str, override: bool = False) -> None:
"""Register a suffix of source files.
Same as :confval:`source_suffix`. The users can override this
using the config setting.
:param override: If false, do not install it if the same suffix
is already installed.
If true, unconditionally install the suffix.
.. versionadded:: 1.8
"""
self.registry.add_source_suffix(suffix, filetype, override=override)
def add_source_parser(self, parser: type[Parser], override: bool = False) -> None:
"""Register a parser class.
:param override: If false, do not install it if another parser
is already installed for the same suffix.
If true, unconditionally install the parser.
.. versionadded:: 1.4
.. versionchanged:: 1.8
*suffix* argument is deprecated. It only accepts *parser* argument.
Use :meth:`add_source_suffix` API to register suffix instead.
.. versionchanged:: 1.8
Add *override* keyword.
"""
self.registry.add_source_parser(parser, override=override)
def add_env_collector(self, collector: type[EnvironmentCollector]) -> None:
"""Register an environment collector class.
Refer to :ref:`collector-api`.
.. versionadded:: 1.6
"""
logger.debug('[app] adding environment collector: %r', collector)
collector().enable(self)
def add_html_theme(self, name: str, theme_path: str) -> None:
"""Register a HTML Theme.
The *name* is a name of theme, and *theme_path* is a full path to the
theme (refs: :ref:`distribute-your-theme`).
.. versionadded:: 1.6
"""
logger.debug('[app] adding HTML theme: %r, %r', name, theme_path)
self.registry.add_html_theme(name, theme_path)
def add_html_math_renderer(
self,
name: str,
inline_renderers: tuple[Callable, Callable | None] | None = None,
block_renderers: tuple[Callable, Callable | None] | None = None,
) -> None:
"""Register a math renderer for HTML.
The *name* is a name of math renderer. Both *inline_renderers* and
*block_renderers* are used as visitor functions for the HTML writer:
the former for inline math node (``nodes.math``), the latter for
block math node (``nodes.math_block``). Regarding visitor functions,
see :meth:`add_node` for details.
.. versionadded:: 1.8
"""
self.registry.add_html_math_renderer(name, inline_renderers, block_renderers)
def add_message_catalog(self, catalog: str, locale_dir: str) -> None:
"""Register a message catalog.
:param catalog: The name of the catalog
:param locale_dir: The base path of the message catalog
For more details, see :func:`sphinx.locale.get_translation()`.
.. versionadded:: 1.8
"""
locale.init([locale_dir], self.config.language, catalog)
locale.init_console(locale_dir, catalog)
# ---- other methods -------------------------------------------------
def is_parallel_allowed(self, typ: str) -> bool:
"""Check whether parallel processing is allowed or not.
:param typ: A type of processing; ``'read'`` or ``'write'``.
"""
if typ == 'read':
attrname = 'parallel_read_safe'
message_not_declared = __("the %s extension does not declare if it "
"is safe for parallel reading, assuming "
"it isn't - please ask the extension author "
"to check and make it explicit")
message_not_safe = __("the %s extension is not safe for parallel reading")
elif typ == 'write':
attrname = 'parallel_write_safe'
message_not_declared = __("the %s extension does not declare if it "
"is safe for parallel writing, assuming "
"it isn't - please ask the extension author "
"to check and make it explicit")
message_not_safe = __("the %s extension is not safe for parallel writing")
else:
raise ValueError('parallel type %s is not supported' % typ)
for ext in self.extensions.values():
allowed = getattr(ext, attrname, None)
if allowed is None:
logger.warning(message_not_declared, ext.name)
logger.warning(__('doing serial %s'), typ)
return False
elif not allowed:
logger.warning(message_not_safe, ext.name)
logger.warning(__('doing serial %s'), typ)
return False
return True
def set_html_assets_policy(self, policy: Literal['always', 'per_page']) -> None:
"""Set the policy to include assets in HTML pages.
- always: include the assets in all the pages
- per_page: include the assets only in pages where they are used
.. versionadded:: 4.1
"""
if policy not in ('always', 'per_page'):
raise ValueError('policy %s is not supported' % policy)
self.registry.html_assets_policy = policy
| (self, filename: str) -> sphinx.environment.BuildEnvironment |
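The ``Sphinx`` application class above can also be driven programmatically, which is roughly what the ``sphinx-build`` command does internally (the real command additionally patches docutils settings). A minimal sketch; the ``docs``/``_build`` paths are illustrative only, not taken from the entries above::

    from sphinx.application import Sphinx
    from sphinx.util.docutils import docutils_namespace

    # Build HTML for a project whose sources and conf.py live in ./docs.
    with docutils_namespace():
        app = Sphinx(srcdir='docs', confdir='docs',
                     outdir='docs/_build/html',
                     doctreedir='docs/_build/doctrees',
                     buildername='html', warningiserror=True)
        app.build()
    print('exit status:', app.statuscode)   # 0 on success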
37,572 | sphinx.application | _post_init_env | null | def _post_init_env(self) -> None:
if self._fresh_env_used:
self.env.find_files(self.config, self.builder)
del self._fresh_env_used
| (self) -> NoneType |
37,573 | sphinx.application | add_autodoc_attrgetter | Register a new ``getattr``-like function for the autodoc extension.
Add *getter*, which must be a function with an interface compatible to
the :func:`getattr` builtin, as the autodoc attribute getter for
objects that are instances of *typ*. All cases where autodoc needs to
get an attribute of a type are then handled by this function instead of
:func:`getattr`.
.. versionadded:: 0.6
| def add_autodoc_attrgetter(self, typ: type, getter: Callable[[Any, str, Any], Any],
) -> None:
"""Register a new ``getattr``-like function for the autodoc extension.
Add *getter*, which must be a function with an interface compatible to
the :func:`getattr` builtin, as the autodoc attribute getter for
objects that are instances of *typ*. All cases where autodoc needs to
get an attribute of a type are then handled by this function instead of
:func:`getattr`.
.. versionadded:: 0.6
"""
logger.debug('[app] adding autodoc attrgetter: %r', (typ, getter))
self.registry.add_autodoc_attrgetter(typ, getter)
| (self, typ: type, getter: Callable[[Any, str, Any], Any]) -> NoneType |
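A hedged sketch of how an extension might use ``add_autodoc_attrgetter`` so that autodoc can see through a wrapper type; ``LazyProxy`` and ``proxy_getattr`` are hypothetical names, not part of Sphinx::

    class LazyProxy:
        """Hypothetical lazy-loading wrapper used by the documented project."""
        def __init__(self, target):
            self.__dict__['_target'] = target

    def proxy_getattr(obj, name, *defargs):
        # Let autodoc inspect the wrapped object instead of the proxy itself.
        return getattr(obj.__dict__['_target'], name, *defargs)

    def setup(app):
        app.setup_extension('sphinx.ext.autodoc')   # ensure autodoc is loaded
        app.add_autodoc_attrgetter(LazyProxy, proxy_getattr)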
37,574 | sphinx.application | add_autodocumenter | Register a new documenter class for the autodoc extension.
Add *cls* as a new documenter class for the :mod:`sphinx.ext.autodoc`
extension. It must be a subclass of
:class:`sphinx.ext.autodoc.Documenter`. This allows auto-documenting
new types of objects. See the source of the autodoc module for
examples on how to subclass :class:`~sphinx.ext.autodoc.Documenter`.
If *override* is True, the given *cls* is forcedly installed even if
a documenter having the same name is already installed.
See :ref:`autodoc_ext_tutorial`.
.. versionadded:: 0.6
.. versionchanged:: 2.2
Add *override* keyword.
| def add_autodocumenter(self, cls: Any, override: bool = False) -> None:
"""Register a new documenter class for the autodoc extension.
Add *cls* as a new documenter class for the :mod:`sphinx.ext.autodoc`
extension. It must be a subclass of
:class:`sphinx.ext.autodoc.Documenter`. This allows auto-documenting
new types of objects. See the source of the autodoc module for
examples on how to subclass :class:`~sphinx.ext.autodoc.Documenter`.
If *override* is True, the given *cls* is forcedly installed even if
a documenter having the same name is already installed.
See :ref:`autodoc_ext_tutorial`.
.. versionadded:: 0.6
.. versionchanged:: 2.2
Add *override* keyword.
"""
logger.debug('[app] adding autodocumenter: %r', cls)
from sphinx.ext.autodoc.directive import AutodocDirective
self.registry.add_documenter(cls.objtype, cls)
self.add_directive('auto' + cls.objtype, AutodocDirective, override=override)
| (self, cls: Any, override: bool = False) -> NoneType |
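As a sketch (all names are hypothetical), a new documenter is usually derived from one of the existing autodoc documenters and then registered in ``setup()``; the autodoc tutorial referenced in the docstring shows a complete version of this pattern::

    from sphinx.ext.autodoc import ClassDocumenter

    class ConfigClassDocumenter(ClassDocumenter):
        """Hypothetical documenter exposed as the ``autoconfigclass`` directive."""
        objtype = 'configclass'                    # directive name is 'auto' + objtype
        directivetype = ClassDocumenter.objtype    # render with the normal py:class output
        priority = ClassDocumenter.priority - 1    # do not shadow the stock autoclass
        # a real documenter would normally also override can_document_member()

    def setup(app):
        app.setup_extension('sphinx.ext.autodoc')  # autodoc must be active first
        app.add_autodocumenter(ConfigClassDocumenter)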
37,575 | sphinx.application | add_builder | Register a new builder.
:param builder: A builder class
:param override: If true, install the builder forcedly even if another builder
is already installed as the same name
.. versionchanged:: 1.8
Add *override* keyword.
| def add_builder(self, builder: type[Builder], override: bool = False) -> None:
"""Register a new builder.
:param builder: A builder class
:param override: If true, install the builder forcedly even if another builder
is already installed as the same name
.. versionchanged:: 1.8
Add *override* keyword.
"""
self.registry.add_builder(builder, override=override)
| (self, builder: 'type[Builder]', override: 'bool' = False) -> 'None' |
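A minimal, hypothetical builder registered through ``add_builder``; the class name, builder name and output file are illustrative only::

    from sphinx.builders import Builder

    class ListDocsBuilder(Builder):
        """Hypothetical builder that writes the list of documents to one file."""
        name = 'listdocs'
        epilog = 'The document list is in %(outdir)s/docs.txt.'

        def get_outdated_docs(self):
            return self.env.found_docs          # rebuild everything; keep it simple

        def get_target_uri(self, docname, typ=None):
            return ''

        def prepare_writing(self, docnames):
            self.collected = sorted(docnames)

        def write_doc(self, docname, doctree):
            pass                                 # nothing to emit per document

        def finish(self):
            (self.outdir / 'docs.txt').write_text(
                '\n'.join(self.collected) + '\n', encoding='utf-8')

    def setup(app):
        app.add_builder(ListDocsBuilder)
        return {'version': '0.1', 'parallel_read_safe': True}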
37,576 | sphinx.application | add_config_value | Register a configuration value.
This is necessary for Sphinx to recognize new values and set default
values accordingly.
:param name: The name of the configuration value. It is recommended to be prefixed
with the extension name (ex. ``html_logo``, ``epub_title``)
:param default: The default value of the configuration.
:param rebuild: The condition of rebuild. It must be one of those values:
* ``'env'`` if a change in the setting only takes effect when a
document is parsed -- this means that the whole environment must be
rebuilt.
* ``'html'`` if a change in the setting needs a full rebuild of HTML
documents.
* ``''`` if a change in the setting will not need any special rebuild.
:param types: The type of configuration value. A list of types can be specified. For
example, ``[str]`` is used to describe a configuration that takes string
value.
.. versionchanged:: 0.4
If the *default* value is a callable, it will be called with the
config object as its argument in order to get the default value.
This can be used to implement config values whose default depends on
other values.
.. versionchanged:: 0.6
Changed *rebuild* from a simple boolean (equivalent to ``''`` or
``'env'``) to a string. However, booleans are still accepted and
converted internally.
| def add_config_value(self, name: str, default: Any, rebuild: _ConfigRebuild,
types: type | Collection[type] | ENUM = ()) -> None:
"""Register a configuration value.
This is necessary for Sphinx to recognize new values and set default
values accordingly.
:param name: The name of the configuration value. It is recommended to be prefixed
with the extension name (ex. ``html_logo``, ``epub_title``)
:param default: The default value of the configuration.
:param rebuild: The condition of rebuild. It must be one of those values:
* ``'env'`` if a change in the setting only takes effect when a
document is parsed -- this means that the whole environment must be
rebuilt.
* ``'html'`` if a change in the setting needs a full rebuild of HTML
documents.
* ``''`` if a change in the setting will not need any special rebuild.
:param types: The type of configuration value. A list of types can be specified. For
example, ``[str]`` is used to describe a configuration that takes string
value.
.. versionchanged:: 0.4
If the *default* value is a callable, it will be called with the
config object as its argument in order to get the default value.
This can be used to implement config values whose default depends on
other values.
.. versionchanged:: 0.6
Changed *rebuild* from a simple boolean (equivalent to ``''`` or
``'env'``) to a string. However, booleans are still accepted and
converted internally.
"""
logger.debug('[app] adding config value: %r', (name, default, rebuild, types))
self.config.add(name, default, rebuild, types)
| (self, name: str, default: Any, rebuild: Literal['', 'env', 'epub', 'gettext', 'html', 'applehelp', 'devhelp'], types: type | collections.abc.Collection[type] | sphinx.config.ENUM = ()) -> NoneType |
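For instance, an extension might register a value and read it back once the configuration is initialised; ``greeting_name`` is a made-up setting, not a real Sphinx option::

    from sphinx.util import logging

    logger = logging.getLogger(__name__)

    def on_config_inited(app, config):
        # registered values become attributes of the Config object
        logger.info('greeting_name is %s', config.greeting_name)

    def setup(app):
        # default 'world'; changing it triggers a full HTML rebuild ('html')
        app.add_config_value('greeting_name', 'world', 'html', types=[str])
        app.connect('config-inited', on_config_inited)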
37,577 | sphinx.application | add_crossref_type | Register a new crossref object type.
This method is very similar to :meth:`~Sphinx.add_object_type` except that the
directive it generates must be empty, and will produce no output.
That means that you can add semantic targets to your sources, and refer
to them using custom roles instead of generic ones (like
:rst:role:`ref`). Example call::
app.add_crossref_type('topic', 'topic', 'single: %s',
docutils.nodes.emphasis)
Example usage::
.. topic:: application API
The application API
-------------------
Some random text here.
See also :topic:`this section <application API>`.
(Of course, the element following the ``topic`` directive needn't be a
section.)
:param override: If false, do not install it if another cross-reference type
is already installed as the same name
If true, unconditionally install the cross-reference type.
.. versionchanged:: 1.8
Add *override* keyword.
| def add_crossref_type(self, directivename: str, rolename: str, indextemplate: str = '',
ref_nodeclass: type[TextElement] | None = None, objname: str = '',
override: bool = False) -> None:
"""Register a new crossref object type.
This method is very similar to :meth:`~Sphinx.add_object_type` except that the
directive it generates must be empty, and will produce no output.
That means that you can add semantic targets to your sources, and refer
to them using custom roles instead of generic ones (like
:rst:role:`ref`). Example call::
app.add_crossref_type('topic', 'topic', 'single: %s',
docutils.nodes.emphasis)
Example usage::
.. topic:: application API
The application API
-------------------
Some random text here.
See also :topic:`this section <application API>`.
(Of course, the element following the ``topic`` directive needn't be a
section.)
:param override: If false, do not install it if another cross-reference type
is already installed as the same name
If true, unconditionally install the cross-reference type.
.. versionchanged:: 1.8
Add *override* keyword.
"""
self.registry.add_crossref_type(directivename, rolename,
indextemplate, ref_nodeclass, objname,
override=override)
| (self, directivename: str, rolename: str, indextemplate: str = '', ref_nodeclass: Optional[type[docutils.nodes.TextElement]] = None, objname: str = '', override: bool = False) -> NoneType |
37,578 | sphinx.application | add_css_file | Register a stylesheet to include in the HTML output.
:param filename: The name of a CSS file that the default HTML
template will include. It must be relative to the HTML
static path, or a full URI with scheme.
:param priority: Files are included in ascending order of priority. If
multiple CSS files have the same priority,
those files will be included in order of registration.
See list of "priority range for CSS files" below.
:param kwargs: Extra keyword arguments are included as attributes of the
``<link>`` tag.
Example::
app.add_css_file('custom.css')
# => <link rel="stylesheet" href="_static/custom.css" type="text/css" />
app.add_css_file('print.css', media='print')
# => <link rel="stylesheet" href="_static/print.css"
# type="text/css" media="print" />
app.add_css_file('fancy.css', rel='alternate stylesheet', title='fancy')
# => <link rel="alternate stylesheet" href="_static/fancy.css"
# type="text/css" title="fancy" />
.. list-table:: priority range for CSS files
:widths: 20,80
* - Priority
- Main purpose in Sphinx
* - 200
- default priority for built-in CSS files
* - 500
- default priority for extensions
* - 800
- default priority for :confval:`html_css_files`
A CSS file can be added to the specific HTML page when an extension calls
this method on :event:`html-page-context` event.
.. versionadded:: 1.0
.. versionchanged:: 1.6
Optional ``alternate`` and/or ``title`` attributes can be supplied
with the arguments *alternate* (a Boolean) and *title* (a string).
The default is no title and *alternate* = ``False``. For
more information, refer to the `documentation
<https://mdn.io/Web/CSS/Alternative_style_sheets>`__.
.. versionchanged:: 1.8
Renamed from ``app.add_stylesheet()``.
And it allows keyword arguments as attributes of link tag.
.. versionchanged:: 3.5
Added the *priority* argument. A CSS file can now be added to a specific page.
| def add_css_file(self, filename: str, priority: int = 500, **kwargs: Any) -> None:
"""Register a stylesheet to include in the HTML output.
:param filename: The name of a CSS file that the default HTML
template will include. It must be relative to the HTML
static path, or a full URI with scheme.
:param priority: Files are included in ascending order of priority. If
multiple CSS files have the same priority,
those files will be included in order of registration.
See list of "priority range for CSS files" below.
:param kwargs: Extra keyword arguments are included as attributes of the
``<link>`` tag.
Example::
app.add_css_file('custom.css')
# => <link rel="stylesheet" href="_static/custom.css" type="text/css" />
app.add_css_file('print.css', media='print')
# => <link rel="stylesheet" href="_static/print.css"
# type="text/css" media="print" />
app.add_css_file('fancy.css', rel='alternate stylesheet', title='fancy')
# => <link rel="alternate stylesheet" href="_static/fancy.css"
# type="text/css" title="fancy" />
.. list-table:: priority range for CSS files
:widths: 20,80
* - Priority
- Main purpose in Sphinx
* - 200
- default priority for built-in CSS files
* - 500
- default priority for extensions
* - 800
- default priority for :confval:`html_css_files`
A CSS file can be added to the specific HTML page when an extension calls
this method on :event:`html-page-context` event.
.. versionadded:: 1.0
.. versionchanged:: 1.6
Optional ``alternate`` and/or ``title`` attributes can be supplied
with the arguments *alternate* (a Boolean) and *title* (a string).
The default is no title and *alternate* = ``False``. For
more information, refer to the `documentation
<https://mdn.io/Web/CSS/Alternative_style_sheets>`__.
.. versionchanged:: 1.8
Renamed from ``app.add_stylesheet()``.
And it allows keyword arguments as attributes of link tag.
.. versionchanged:: 3.5
Added the *priority* argument. A CSS file can now be added to a specific page.
"""
logger.debug('[app] adding stylesheet: %r', filename)
self.registry.add_css_files(filename, priority=priority, **kwargs)
with contextlib.suppress(AttributeError):
self.builder.add_css_file( # type: ignore[attr-defined]
filename, priority=priority, **kwargs,
)
| (self, filename: str, priority: int = 500, **kwargs: Any) -> NoneType |
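Beyond the calls shown in the docstring, a stylesheet can be attached to a single page from a :event:`html-page-context` handler; the page and file names below are hypothetical::

    def add_changelog_css(app, pagename, templatename, context, doctree):
        if pagename == 'changelog':
            app.add_css_file('changelog.css')    # only linked from that page

    def setup(app):
        app.connect('html-page-context', add_changelog_css)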
37,579 | sphinx.application | add_directive | Register a Docutils directive.
:param name: The name of the directive
:param cls: A directive class
:param override: If false, do not install it if another directive
is already installed as the same name
If true, unconditionally install the directive.
For example, a custom directive named ``my-directive`` would be added
like this:
.. code-block:: python
from docutils.parsers.rst import Directive, directives
class MyDirective(Directive):
has_content = True
required_arguments = 1
optional_arguments = 0
final_argument_whitespace = True
option_spec = {
'class': directives.class_option,
'name': directives.unchanged,
}
def run(self):
...
def setup(app):
app.add_directive('my-directive', MyDirective)
For more details, see `the Docutils docs
<https://docutils.sourceforge.io/docs/howto/rst-directives.html>`__ .
.. versionchanged:: 0.6
Docutils 0.5-style directive classes are now supported.
.. deprecated:: 1.8
Docutils 0.4-style (function based) directives support is deprecated.
.. versionchanged:: 1.8
Add *override* keyword.
| def add_directive(self, name: str, cls: type[Directive], override: bool = False) -> None:
"""Register a Docutils directive.
:param name: The name of the directive
:param cls: A directive class
:param override: If false, do not install it if another directive
is already installed as the same name
If true, unconditionally install the directive.
For example, a custom directive named ``my-directive`` would be added
like this:
.. code-block:: python
from docutils.parsers.rst import Directive, directives
class MyDirective(Directive):
has_content = True
required_arguments = 1
optional_arguments = 0
final_argument_whitespace = True
option_spec = {
'class': directives.class_option,
'name': directives.unchanged,
}
def run(self):
...
def setup(app):
app.add_directive('my-directive', MyDirective)
For more details, see `the Docutils docs
<https://docutils.sourceforge.io/docs/howto/rst-directives.html>`__ .
.. versionchanged:: 0.6
Docutils 0.5-style directive classes are now supported.
.. deprecated:: 1.8
Docutils 0.4-style (function based) directives support is deprecated.
.. versionchanged:: 1.8
Add *override* keyword.
"""
logger.debug('[app] adding directive: %r', (name, cls))
if not override and docutils.is_directive_registered(name):
logger.warning(__('directive %r is already registered, it will be overridden'),
name, type='app', subtype='add_directive')
docutils.register_directive(name, cls)
| (self, name: str, cls: type[docutils.parsers.rst.Directive], override: bool = False) -> NoneType |
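In addition to the plain docutils directive shown in the docstring, extensions commonly derive from ``sphinx.util.docutils.SphinxDirective`` to get access to the build environment and configuration; a small sketch with hypothetical names::

    from docutils import nodes
    from sphinx.util.docutils import SphinxDirective

    class ProjectName(SphinxDirective):
        """Hypothetical directive that inserts the configured project name."""
        def run(self):
            # SphinxDirective exposes self.env and self.config
            return [nodes.paragraph(text=self.config.project)]

    def setup(app):
        app.add_directive('project-name', ProjectName)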
37,580 | sphinx.application | add_directive_to_domain | Register a Docutils directive in a domain.
Like :meth:`add_directive`, but the directive is added to the domain
named *domain*.
:param domain: The name of target domain
:param name: A name of directive
:param cls: A directive class
:param override: If false, do not install it if another directive
is already installed as the same name
If true, unconditionally install the directive.
.. versionadded:: 1.0
.. versionchanged:: 1.8
Add *override* keyword.
| def add_directive_to_domain(self, domain: str, name: str,
cls: type[Directive], override: bool = False) -> None:
"""Register a Docutils directive in a domain.
Like :meth:`add_directive`, but the directive is added to the domain
named *domain*.
:param domain: The name of target domain
:param name: A name of directive
:param cls: A directive class
:param override: If false, do not install it if another directive
is already installed as the same name
If true, unconditionally install the directive.
.. versionadded:: 1.0
.. versionchanged:: 1.8
Add *override* keyword.
"""
self.registry.add_directive_to_domain(domain, name, cls, override=override)
| (self, domain: str, name: str, cls: type[docutils.parsers.rst.Directive], override: bool = False) -> NoneType |
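A sketch of registering a directive under an existing domain; the directive name and class are hypothetical::

    from docutils import nodes
    from sphinx.util.docutils import SphinxDirective

    class PyDeprecatedAPI(SphinxDirective):
        """Hypothetical directive available as ``.. py:deprecated-api::``."""
        has_content = True

        def run(self):
            text = ' '.join(self.content)
            return [nodes.warning('', nodes.paragraph(text=text))]

    def setup(app):
        app.add_directive_to_domain('py', 'deprecated-api', PyDeprecatedAPI)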
37,581 | sphinx.application | add_domain | Register a domain.
:param domain: A domain class
:param override: If false, do not install it if another domain
is already installed as the same name
If true, unconditionally install the domain.
.. versionadded:: 1.0
.. versionchanged:: 1.8
Add *override* keyword.
| def add_domain(self, domain: type[Domain], override: bool = False) -> None:
"""Register a domain.
:param domain: A domain class
:param override: If false, do not install it if another domain
is already installed as the same name
If true, unconditionally install the domain.
.. versionadded:: 1.0
.. versionchanged:: 1.8
Add *override* keyword.
"""
self.registry.add_domain(domain, override=override)
| (self, domain: 'type[Domain]', override: 'bool' = False) -> 'None' |
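A domain is a larger undertaking; the skeleton below only shows the hooks a custom domain class typically fills in, and all names are hypothetical::

    from sphinx.domains import Domain

    class RecipeDomain(Domain):
        """Hypothetical, mostly empty domain used for namespacing objects."""
        name = 'recipe'
        label = 'Recipe'
        initial_data = {'recipes': {}}   # copied into env.domaindata['recipe']

        def get_objects(self):
            yield from ()                # nothing in the object inventory yet

        def merge_domaindata(self, docnames, otherdata):
            pass                         # needed for parallel builds

        def resolve_xref(self, env, fromdocname, builder, typ, target, node, contnode):
            return None                  # no cross-references resolved yet

    def setup(app):
        app.add_domain(RecipeDomain)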
37,582 | sphinx.application | add_enumerable_node | Register a Docutils node class as a numfig target.
Sphinx numbers the node automatically, and users can then refer to it
using :rst:role:`numref`.
:param node: A node class
:param figtype: The type of enumerable nodes. Each figtype has individual numbering
sequences. As system figtypes, ``figure``, ``table`` and
``code-block`` are defined. It is possible to add custom nodes to
these default figtypes. It is also possible to define new custom
figtype if a new figtype is given.
:param title_getter: A getter function to obtain the title of node. It takes an
instance of the enumerable node, and it must return its title as
string. The title is used to the default title of references for
:rst:role:`ref`. By default, Sphinx searches
``docutils.nodes.caption`` or ``docutils.nodes.title`` from the
node as a title.
:param kwargs: Visitor functions for each builder (same as :meth:`add_node`)
:param override: If true, install the node forcedly even if another node is already
installed as the same name
.. versionadded:: 1.4
| def add_enumerable_node(self, node: type[Element], figtype: str,
title_getter: TitleGetter | None = None, override: bool = False,
**kwargs: tuple[Callable, Callable]) -> None:
"""Register a Docutils node class as a numfig target.
Sphinx numbers the node automatically. And then the users can refer it
using :rst:role:`numref`.
:param node: A node class
:param figtype: The type of enumerable nodes. Each figtype has individual numbering
sequences. As system figtypes, ``figure``, ``table`` and
``code-block`` are defined. It is possible to add custom nodes to
these default figtypes. It is also possible to define new custom
figtype if a new figtype is given.
:param title_getter: A getter function to obtain the title of node. It takes an
instance of the enumerable node, and it must return its title as
string. The title is used to the default title of references for
:rst:role:`ref`. By default, Sphinx searches
``docutils.nodes.caption`` or ``docutils.nodes.title`` from the
node as a title.
:param kwargs: Visitor functions for each builder (same as :meth:`add_node`)
:param override: If true, install the node forcedly even if another node is already
installed as the same name
.. versionadded:: 1.4
"""
self.registry.add_enumerable_node(node, figtype, title_getter, override=override)
self.add_node(node, override=override, **kwargs)
| (self, node: 'type[Element]', figtype: 'str', title_getter: 'TitleGetter | None' = None, override: 'bool' = False, **kwargs: 'tuple[Callable, Callable]') -> 'None' |
37,583 | sphinx.application | add_env_collector | Register an environment collector class.
Refer to :ref:`collector-api`.
.. versionadded:: 1.6
| def add_env_collector(self, collector: type[EnvironmentCollector]) -> None:
"""Register an environment collector class.
Refer to :ref:`collector-api`.
.. versionadded:: 1.6
"""
logger.debug('[app] adding environment collector: %r', collector)
collector().enable(self)
| (self, collector: 'type[EnvironmentCollector]') -> 'None' |
37,584 | sphinx.application | add_event | Register an event called *name*.
This is needed to be able to emit it.
:param name: The name of the event
| def add_event(self, name: str) -> None:
"""Register an event called *name*.
This is needed to be able to emit it.
:param name: The name of the event
"""
logger.debug('[app] adding event: %r', name)
self.events.add(name)
| (self, name: str) -> NoneType |
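A short sketch of declaring and using a custom event; the event name and payload here are hypothetical.

```python
def setup(app):
    # declare the event so it can be emitted later
    app.add_event('my-data-updated')

    # any extension may now listen for it ...
    app.connect('my-data-updated', lambda app, payload: print('updated:', payload))

    # ... and the defining extension can emit it at the appropriate time, e.g.:
    # app.emit('my-data-updated', {'pages': 3})
```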
37,585 | sphinx.application | add_generic_role | Register a generic Docutils role.
Register a Docutils role that does nothing but wrap its contents in the
node given by *nodeclass*.
:param override: If false, do not install it if another role
is already installed as the same name
If true, unconditionally install the role.
.. versionadded:: 0.6
.. versionchanged:: 1.8
Add *override* keyword.
| def add_generic_role(self, name: str, nodeclass: Any, override: bool = False) -> None:
"""Register a generic Docutils role.
Register a Docutils role that does nothing but wrap its contents in the
node given by *nodeclass*.
:param override: If false, do not install it if another role
is already installed as the same name
If true, unconditionally install the role.
.. versionadded:: 0.6
.. versionchanged:: 1.8
Add *override* keyword.
"""
# Don't use ``roles.register_generic_role`` because it uses
# ``register_canonical_role``.
logger.debug('[app] adding generic role: %r', (name, nodeclass))
if not override and docutils.is_role_registered(name):
logger.warning(__('role %r is already registered, it will be overridden'),
name, type='app', subtype='add_generic_role')
role = roles.GenericRole(name, nodeclass)
docutils.register_role(name, role) # type: ignore[arg-type]
| (self, name: str, nodeclass: Any, override: bool = False) -> NoneType |
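A minimal usage sketch; the ``keystroke`` role name is hypothetical, and the role simply wraps its content in a ``nodes.literal``.

```python
from docutils import nodes

def setup(app):
    # ":keystroke:`Ctrl+C`" now renders its text inside a literal node
    app.add_generic_role('keystroke', nodes.literal)
```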
37,586 | sphinx.application | add_html_math_renderer | Register a math renderer for HTML.
The *name* is a name of math renderer. Both *inline_renderers* and
*block_renderers* are used as visitor functions for the HTML writer:
the former for inline math node (``nodes.math``), the latter for
block math node (``nodes.math_block``). Regarding visitor functions,
see :meth:`add_node` for details.
.. versionadded:: 1.8
| def add_html_math_renderer(
self,
name: str,
inline_renderers: tuple[Callable, Callable | None] | None = None,
block_renderers: tuple[Callable, Callable | None] | None = None,
) -> None:
"""Register a math renderer for HTML.
The *name* is a name of math renderer. Both *inline_renderers* and
*block_renderers* are used as visitor functions for the HTML writer:
the former for inline math node (``nodes.math``), the latter for
block math node (``nodes.math_block``). Regarding visitor functions,
see :meth:`add_node` for details.
.. versionadded:: 1.8
"""
self.registry.add_html_math_renderer(name, inline_renderers, block_renderers)
| (self, name: str, inline_renderers: Optional[tuple[Callable, Optional[Callable]]] = None, block_renderers: Optional[tuple[Callable, Optional[Callable]]] = None) -> NoneType |
37,587 | sphinx.application | add_html_theme | Register a HTML Theme.
The *name* is a name of theme, and *theme_path* is a full path to the
theme (refs: :ref:`distribute-your-theme`).
.. versionadded:: 1.6
| def add_html_theme(self, name: str, theme_path: str) -> None:
"""Register a HTML Theme.
The *name* is a name of theme, and *theme_path* is a full path to the
theme (refs: :ref:`distribute-your-theme`).
.. versionadded:: 1.6
"""
logger.debug('[app] adding HTML theme: %r, %r', name, theme_path)
self.registry.add_html_theme(name, theme_path)
| (self, name: str, theme_path: str) -> NoneType |
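A sketch of how an extension that ships its own theme might register it; the ``mytheme`` name and the directory layout are assumptions for illustration.

```python
from os import path

def setup(app):
    # theme files are assumed to live next to this extension module
    theme_dir = path.join(path.dirname(__file__), 'themes', 'mytheme')
    app.add_html_theme('mytheme', theme_dir)
```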
37,588 | sphinx.application | add_index_to_domain | Register a custom index for a domain.
Add a custom *index* class to the domain named *domain*.
:param domain: The name of the target domain
:param index: The index class
:param override: If false, do not install it if another index
is already installed as the same name
If true, unconditionally install the index.
.. versionadded:: 1.0
.. versionchanged:: 1.8
Add *override* keyword.
| def add_index_to_domain(self, domain: str, index: type[Index], override: bool = False,
) -> None:
"""Register a custom index for a domain.
Add a custom *index* class to the domain named *domain*.
:param domain: The name of the target domain
:param index: The index class
:param override: If false, do not install it if another index
is already installed as the same name
If true, unconditionally install the index.
.. versionadded:: 1.0
.. versionchanged:: 1.8
Add *override* keyword.
"""
self.registry.add_index_to_domain(domain, index)
| (self, domain: 'str', index: 'type[Index]', override: 'bool' = False) -> 'None' |
37,589 | sphinx.application | add_js_file | Register a JavaScript file to include in the HTML output.
:param filename: The name of a JavaScript file that the default HTML
template will include. It must be relative to the HTML
static path, or a full URI with scheme, or ``None`` .
The ``None`` value is used to create an inline
``<script>`` tag. See the description of *kwargs*
below.
:param priority: Files are included in ascending order of priority. If
multiple JavaScript files have the same priority,
those files will be included in order of registration.
See list of "priority range for JavaScript files" below.
:param loading_method: The loading method for the JavaScript file.
Either ``'async'`` or ``'defer'`` are allowed.
:param kwargs: Extra keyword arguments are included as attributes of the
``<script>`` tag. If the special keyword argument
``body`` is given, its value will be added as the content
of the ``<script>`` tag.
Example::
app.add_js_file('example.js')
# => <script src="_static/example.js"></script>
app.add_js_file('example.js', loading_method="async")
# => <script src="_static/example.js" async="async"></script>
app.add_js_file(None, body="var myVariable = 'foo';")
# => <script>var myVariable = 'foo';</script>
.. list-table:: priority range for JavaScript files
:widths: 20,80
* - Priority
- Main purpose in Sphinx
* - 200
- default priority for built-in JavaScript files
* - 500
- default priority for extensions
* - 800
- default priority for :confval:`html_js_files`
A JavaScript file can be added to the specific HTML page when an extension
calls this method on :event:`html-page-context` event.
.. versionadded:: 0.5
.. versionchanged:: 1.8
Renamed from ``app.add_javascript()``.
And it allows keyword arguments as attributes of script tag.
.. versionchanged:: 3.5
Take priority argument. Allow to add a JavaScript file to the specific page.
.. versionchanged:: 4.4
Take loading_method argument. Allow to change the loading method of the
JavaScript file.
| def add_js_file(self, filename: str | None, priority: int = 500,
loading_method: str | None = None, **kwargs: Any) -> None:
"""Register a JavaScript file to include in the HTML output.
:param filename: The name of a JavaScript file that the default HTML
template will include. It must be relative to the HTML
static path, or a full URI with scheme, or ``None`` .
The ``None`` value is used to create an inline
``<script>`` tag. See the description of *kwargs*
below.
:param priority: Files are included in ascending order of priority. If
multiple JavaScript files have the same priority,
those files will be included in order of registration.
See list of "priority range for JavaScript files" below.
:param loading_method: The loading method for the JavaScript file.
Either ``'async'`` or ``'defer'`` are allowed.
:param kwargs: Extra keyword arguments are included as attributes of the
``<script>`` tag. If the special keyword argument
``body`` is given, its value will be added as the content
of the ``<script>`` tag.
Example::
app.add_js_file('example.js')
# => <script src="_static/example.js"></script>
app.add_js_file('example.js', loading_method="async")
# => <script src="_static/example.js" async="async"></script>
app.add_js_file(None, body="var myVariable = 'foo';")
# => <script>var myVariable = 'foo';</script>
.. list-table:: priority range for JavaScript files
:widths: 20,80
* - Priority
- Main purpose in Sphinx
* - 200
- default priority for built-in JavaScript files
* - 500
- default priority for extensions
* - 800
- default priority for :confval:`html_js_files`
A JavaScript file can be added to the specific HTML page when an extension
calls this method on :event:`html-page-context` event.
.. versionadded:: 0.5
.. versionchanged:: 1.8
Renamed from ``app.add_javascript()``.
And it allows keyword arguments as attributes of script tag.
.. versionchanged:: 3.5
Take priority argument. Allow to add a JavaScript file to the specific page.
.. versionchanged:: 4.4
Take loading_method argument. Allow to change the loading method of the
JavaScript file.
"""
if loading_method == 'async':
kwargs['async'] = 'async'
elif loading_method == 'defer':
kwargs['defer'] = 'defer'
self.registry.add_js_file(filename, priority=priority, **kwargs)
with contextlib.suppress(AttributeError):
self.builder.add_js_file( # type: ignore[attr-defined]
filename, priority=priority, **kwargs,
)
| (self, filename: str | None, priority: int = 500, loading_method: Optional[str] = None, **kwargs: Any) -> NoneType |
37,590 | sphinx.application | add_latex_package | Register a package to include in the LaTeX source code.
Add *packagename* to the list of packages that LaTeX source code will
include. If you provide *options*, it will be taken to the `\usepackage`
declaration. If you set *after_hyperref* truthy, the package will be
loaded after ``hyperref`` package.
.. code-block:: python
app.add_latex_package('mypackage')
# => \usepackage{mypackage}
app.add_latex_package('mypackage', 'foo,bar')
# => \usepackage[foo,bar]{mypackage}
.. versionadded:: 1.3
.. versionadded:: 3.1
*after_hyperref* option.
| def add_latex_package(self, packagename: str, options: str | None = None,
after_hyperref: bool = False) -> None:
r"""Register a package to include in the LaTeX source code.
Add *packagename* to the list of packages that LaTeX source code will
include. If you provide *options*, it will be taken to the `\usepackage`
declaration. If you set *after_hyperref* truthy, the package will be
loaded after ``hyperref`` package.
.. code-block:: python
app.add_latex_package('mypackage')
# => \usepackage{mypackage}
app.add_latex_package('mypackage', 'foo,bar')
# => \usepackage[foo,bar]{mypackage}
.. versionadded:: 1.3
.. versionadded:: 3.1
*after_hyperref* option.
"""
self.registry.add_latex_package(packagename, options, after_hyperref)
| (self, packagename: str, options: Optional[str] = None, after_hyperref: bool = False) -> NoneType |
37,591 | sphinx.application | add_lexer | Register a new lexer for source code.
Use *lexer* to highlight code blocks with the given language *alias*.
.. versionadded:: 0.6
.. versionchanged:: 2.1
Take a lexer class as an argument.
.. versionchanged:: 4.0
Removed support for lexer instances as an argument.
| def add_lexer(self, alias: str, lexer: type[Lexer]) -> None:
"""Register a new lexer for source code.
Use *lexer* to highlight code blocks with the given language *alias*.
.. versionadded:: 0.6
.. versionchanged:: 2.1
Take a lexer class as an argument.
.. versionchanged:: 4.0
Removed support for lexer instances as an argument.
"""
logger.debug('[app] adding lexer: %r', (alias, lexer))
lexer_classes[alias] = lexer
| (self, alias: str, lexer: type[pygments.lexer.Lexer]) -> NoneType |
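A minimal sketch; normally you would pass your own Pygments ``Lexer`` subclass, but here the stock ``PythonLexer`` stands in, and the ``mydsl`` alias is hypothetical.

```python
from pygments.lexers import PythonLexer

def setup(app):
    # ".. code-block:: mydsl" blocks will now be highlighted with this lexer
    app.add_lexer('mydsl', PythonLexer)
```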
37,592 | sphinx.application | add_message_catalog | Register a message catalog.
:param catalog: The name of the catalog
:param locale_dir: The base path of the message catalog
For more details, see :func:`sphinx.locale.get_translation()`.
.. versionadded:: 1.8
| def add_message_catalog(self, catalog: str, locale_dir: str) -> None:
"""Register a message catalog.
:param catalog: The name of the catalog
:param locale_dir: The base path of the message catalog
For more details, see :func:`sphinx.locale.get_translation()`.
.. versionadded:: 1.8
"""
locale.init([locale_dir], self.config.language, catalog)
locale.init_console(locale_dir, catalog)
| (self, catalog: str, locale_dir: str) -> NoneType |
37,593 | sphinx.application | add_node | Register a Docutils node class.
This is necessary for Docutils internals. It may also be used in the
future to validate nodes in the parsed documents.
:param node: A node class
:param kwargs: Visitor functions for each builder (see below)
:param override: If true, install the node forcedly even if another node is already
installed as the same name
Node visitor functions for the Sphinx HTML, LaTeX, text and manpage
writers can be given as keyword arguments: the keyword should be one or
more of ``'html'``, ``'latex'``, ``'text'``, ``'man'``, ``'texinfo'``
or any other supported translators, the value a 2-tuple of ``(visit,
depart)`` methods. ``depart`` can be ``None`` if the ``visit``
function raises :exc:`docutils.nodes.SkipNode`. Example:
.. code-block:: python
class math(docutils.nodes.Element): pass
def visit_math_html(self, node):
self.body.append(self.starttag(node, 'math'))
def depart_math_html(self, node):
self.body.append('</math>')
app.add_node(math, html=(visit_math_html, depart_math_html))
Obviously, translators for which you don't specify visitor methods will
choke on the node when encountered in a document to translate.
.. versionchanged:: 0.5
Added the support for keyword arguments giving visit functions.
| def add_node(self, node: type[Element], override: bool = False,
**kwargs: tuple[Callable, Callable | None]) -> None:
"""Register a Docutils node class.
This is necessary for Docutils internals. It may also be used in the
future to validate nodes in the parsed documents.
:param node: A node class
:param kwargs: Visitor functions for each builder (see below)
:param override: If true, install the node forcedly even if another node is already
installed as the same name
Node visitor functions for the Sphinx HTML, LaTeX, text and manpage
writers can be given as keyword arguments: the keyword should be one or
more of ``'html'``, ``'latex'``, ``'text'``, ``'man'``, ``'texinfo'``
or any other supported translators, the value a 2-tuple of ``(visit,
depart)`` methods. ``depart`` can be ``None`` if the ``visit``
function raises :exc:`docutils.nodes.SkipNode`. Example:
.. code-block:: python
class math(docutils.nodes.Element): pass
def visit_math_html(self, node):
self.body.append(self.starttag(node, 'math'))
def depart_math_html(self, node):
self.body.append('</math>')
app.add_node(math, html=(visit_math_html, depart_math_html))
Obviously, translators for which you don't specify visitor methods will
choke on the node when encountered in a document to translate.
.. versionchanged:: 0.5
Added the support for keyword arguments giving visit functions.
"""
logger.debug('[app] adding node: %r', (node, kwargs))
if not override and docutils.is_node_registered(node):
logger.warning(__('node class %r is already registered, '
'its visitors will be overridden'),
node.__name__, type='app', subtype='add_node')
docutils.register_node(node)
self.registry.add_translation_handlers(node, **kwargs)
| (self, node: 'type[Element]', override: 'bool' = False, **kwargs: 'tuple[Callable, Callable | None]') -> 'None' |
37,594 | sphinx.application | add_object_type | Register a new object type.
This method is a very convenient way to add a new :term:`object` type
that can be cross-referenced. It will do this:
- Create a new directive (called *directivename*) for documenting an
object. It will automatically add index entries if *indextemplate*
is nonempty; if given, it must contain exactly one instance of
``%s``. See the example below for how the template will be
interpreted.
- Create a new role (called *rolename*) to cross-reference to these
object descriptions.
- If you provide *parse_node*, it must be a function that takes a
string and a docutils node, and it must populate the node with
children parsed from the string. It must then return the name of the
item to be used in cross-referencing and index entries. See the
:file:`conf.py` file in the source for this documentation for an
example.
- The *objname* (if not given, will default to *directivename*) names
the type of object. It is used when listing objects, e.g. in search
results.
For example, if you have this call in a custom Sphinx extension::
app.add_object_type('directive', 'dir', 'pair: %s; directive')
you can use this markup in your documents::
.. rst:directive:: function
Document a function.
<...>
See also the :rst:dir:`function` directive.
For the directive, an index entry will be generated as if you had prepended ::
.. index:: pair: function; directive
The reference node will be of class ``literal`` (so it will be rendered
in a proportional font, as appropriate for code) unless you give the
*ref_nodeclass* argument, which must be a docutils node class. Most
useful are ``docutils.nodes.emphasis`` or ``docutils.nodes.strong`` --
you can also use ``docutils.nodes.generated`` if you want no further
text decoration. If the text should be treated as literal (e.g. no
smart quote replacement), but not have typewriter styling, use
``sphinx.addnodes.literal_emphasis`` or
``sphinx.addnodes.literal_strong``.
For the role content, you have the same syntactical possibilities as
for standard Sphinx roles (see :ref:`xref-syntax`).
If *override* is True, the given object_type is forcedly installed even if
an object_type having the same name is already installed.
.. versionchanged:: 1.8
Add *override* keyword.
| def add_object_type(self, directivename: str, rolename: str, indextemplate: str = '',
parse_node: Callable | None = None,
ref_nodeclass: type[TextElement] | None = None,
objname: str = '', doc_field_types: Sequence = (),
override: bool = False,
) -> None:
"""Register a new object type.
This method is a very convenient way to add a new :term:`object` type
that can be cross-referenced. It will do this:
- Create a new directive (called *directivename*) for documenting an
object. It will automatically add index entries if *indextemplate*
is nonempty; if given, it must contain exactly one instance of
``%s``. See the example below for how the template will be
interpreted.
- Create a new role (called *rolename*) to cross-reference to these
object descriptions.
- If you provide *parse_node*, it must be a function that takes a
string and a docutils node, and it must populate the node with
children parsed from the string. It must then return the name of the
item to be used in cross-referencing and index entries. See the
:file:`conf.py` file in the source for this documentation for an
example.
- The *objname* (if not given, will default to *directivename*) names
the type of object. It is used when listing objects, e.g. in search
results.
For example, if you have this call in a custom Sphinx extension::
app.add_object_type('directive', 'dir', 'pair: %s; directive')
you can use this markup in your documents::
.. rst:directive:: function
Document a function.
<...>
See also the :rst:dir:`function` directive.
For the directive, an index entry will be generated as if you had prepended ::
.. index:: pair: function; directive
The reference node will be of class ``literal`` (so it will be rendered
in a proportional font, as appropriate for code) unless you give the
*ref_nodeclass* argument, which must be a docutils node class. Most
useful are ``docutils.nodes.emphasis`` or ``docutils.nodes.strong`` --
you can also use ``docutils.nodes.generated`` if you want no further
text decoration. If the text should be treated as literal (e.g. no
smart quote replacement), but not have typewriter styling, use
``sphinx.addnodes.literal_emphasis`` or
``sphinx.addnodes.literal_strong``.
For the role content, you have the same syntactical possibilities as
for standard Sphinx roles (see :ref:`xref-syntax`).
If *override* is True, the given object_type is forcedly installed even if
an object_type having the same name is already installed.
.. versionchanged:: 1.8
Add *override* keyword.
"""
self.registry.add_object_type(directivename, rolename, indextemplate, parse_node,
ref_nodeclass, objname, doc_field_types,
override=override)
| (self, directivename: str, rolename: str, indextemplate: str = '', parse_node: Optional[Callable] = None, ref_nodeclass: Optional[type[docutils.nodes.TextElement]] = None, objname: str = '', doc_field_types: collections.abc.Sequence = (), override: bool = False) -> NoneType |
37,595 | sphinx.application | add_post_transform | Register a Docutils transform to be applied before writing.
Add the standard docutils :class:`~docutils.transforms.Transform`
subclass *transform* to the list of transforms that are applied before
Sphinx writes a document.
:param transform: A transform class
| def add_post_transform(self, transform: type[Transform]) -> None:
"""Register a Docutils transform to be applied before writing.
Add the standard docutils :class:`~docutils.transforms.Transform`
subclass *transform* to the list of transforms that are applied before
Sphinx writes a document.
:param transform: A transform class
"""
self.registry.add_post_transform(transform)
| (self, transform: type[docutils.transforms.Transform]) -> NoneType |
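A minimal sketch of registering a post-transform; ``MarkExternalLinks`` is a hypothetical class and its body is left as a stub.

```python
from docutils.transforms import Transform

class MarkExternalLinks(Transform):  # hypothetical post-transform
    default_priority = 900

    def apply(self):
        pass  # walk self.document here and adjust reference nodes before writing

def setup(app):
    app.add_post_transform(MarkExternalLinks)
```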
37,596 | sphinx.application | add_role | Register a Docutils role.
:param name: The name of role
:param role: A role function
:param override: If false, do not install it if another role
is already installed as the same name
If true, unconditionally install the role.
For more details about role functions, see `the Docutils docs
<https://docutils.sourceforge.io/docs/howto/rst-roles.html>`__ .
.. versionchanged:: 1.8
Add *override* keyword.
| def add_role(self, name: str, role: Any, override: bool = False) -> None:
"""Register a Docutils role.
:param name: The name of role
:param role: A role function
:param override: If false, do not install it if another role
is already installed as the same name
If true, unconditionally install the role.
For more details about role functions, see `the Docutils docs
<https://docutils.sourceforge.io/docs/howto/rst-roles.html>`__ .
.. versionchanged:: 1.8
Add *override* keyword.
"""
logger.debug('[app] adding role: %r', (name, role))
if not override and docutils.is_role_registered(name):
logger.warning(__('role %r is already registered, it will be overridden'),
name, type='app', subtype='add_role')
docutils.register_role(name, role)
| (self, name: str, role: Any, override: bool = False) -> NoneType |
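A sketch of a classic Docutils role function; the ``issue`` role name and the tracker URL are hypothetical.

```python
from docutils import nodes

def issue_role(name, rawtext, text, lineno, inliner, options=None, content=None):
    # turn ":issue:`123`" into a link to a (hypothetical) tracker
    url = 'https://example.com/issues/' + text
    return [nodes.reference(rawtext, '#' + text, refuri=url)], []

def setup(app):
    app.add_role('issue', issue_role)
```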
37,597 | sphinx.application | add_role_to_domain | Register a Docutils role in a domain.
Like :meth:`add_role`, but the role is added to the domain named
*domain*.
:param domain: The name of the target domain
:param name: The name of the role
:param role: The role function
:param override: If false, do not install it if another role
is already installed as the same name
If true, unconditionally install the role.
.. versionadded:: 1.0
.. versionchanged:: 1.8
Add *override* keyword.
| def add_role_to_domain(self, domain: str, name: str, role: RoleFunction | XRefRole,
override: bool = False) -> None:
"""Register a Docutils role in a domain.
Like :meth:`add_role`, but the role is added to the domain named
*domain*.
:param domain: The name of the target domain
:param name: The name of the role
:param role: The role function
:param override: If false, do not install it if another role
is already installed as the same name
If true, unconditionally install the role.
.. versionadded:: 1.0
.. versionchanged:: 1.8
Add *override* keyword.
"""
self.registry.add_role_to_domain(domain, name, role, override=override)
| (self, domain: 'str', name: 'str', role: 'RoleFunction | XRefRole', override: 'bool' = False) -> 'None' |
37,598 | sphinx.application | add_search_language | Register a new language for the HTML search index.
Add *cls*, which must be a subclass of
:class:`sphinx.search.SearchLanguage`, as a support language for
building the HTML full-text search index. The class must have a *lang*
attribute that indicates the language it should be used for. See
:confval:`html_search_language`.
.. versionadded:: 1.1
| def add_search_language(self, cls: Any) -> None:
"""Register a new language for the HTML search index.
Add *cls*, which must be a subclass of
:class:`sphinx.search.SearchLanguage`, as a support language for
building the HTML full-text search index. The class must have a *lang*
attribute that indicates the language it should be used for. See
:confval:`html_search_language`.
.. versionadded:: 1.1
"""
logger.debug('[app] adding search language: %r', cls)
from sphinx.search import SearchLanguage, languages
assert issubclass(cls, SearchLanguage)
languages[cls.lang] = cls
| (self, cls: Any) -> NoneType |
37,599 | sphinx.application | add_source_parser | Register a parser class.
:param override: If false, do not install it if another parser
is already installed for the same suffix.
If true, unconditionally install the parser.
.. versionadded:: 1.4
.. versionchanged:: 1.8
*suffix* argument is deprecated. It only accepts *parser* argument.
Use :meth:`add_source_suffix` API to register suffix instead.
.. versionchanged:: 1.8
Add *override* keyword.
| def add_source_parser(self, parser: type[Parser], override: bool = False) -> None:
"""Register a parser class.
:param override: If false, do not install it if another parser
is already installed for the same suffix.
If true, unconditionally install the parser.
.. versionadded:: 1.4
.. versionchanged:: 1.8
*suffix* argument is deprecated. It only accepts *parser* argument.
Use :meth:`add_source_suffix` API to register suffix instead.
.. versionchanged:: 1.8
Add *override* keyword.
"""
self.registry.add_source_parser(parser, override=override)
| (self, parser: 'type[Parser]', override: 'bool' = False) -> 'None' |
37,600 | sphinx.application | add_source_suffix | Register a suffix of source files.
Same as :confval:`source_suffix`. The users can override this
using the config setting.
:param override: If false, do not install it if the same suffix
is already installed.
If true, unconditionally install the suffix.
.. versionadded:: 1.8
| def add_source_suffix(self, suffix: str, filetype: str, override: bool = False) -> None:
"""Register a suffix of source files.
Same as :confval:`source_suffix`. The users can override this
using the config setting.
:param override: If false, do not install it if the same suffix
is already installed.
If true, unconditionally install the suffix.
.. versionadded:: 1.8
"""
self.registry.add_source_suffix(suffix, filetype, override=override)
| (self, suffix: str, filetype: str, override: bool = False) -> NoneType |
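The two registration calls above usually go together: first map a new suffix to a file type, then register a parser that declares support for that file type. A minimal sketch, with a hypothetical ``.txt2`` suffix, ``customtxt`` file type and stub parser:

```python
from docutils import nodes
from sphinx.parsers import Parser

class CustomTxtParser(Parser):  # hypothetical stub parser
    supported = ('customtxt',)

    def parse(self, inputstring, document):
        # a real parser would build a proper doctree here
        document += nodes.paragraph(text=inputstring)

def setup(app):
    app.add_source_suffix('.txt2', 'customtxt')
    app.add_source_parser(CustomTxtParser)
```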
37,601 | sphinx.application | add_transform | Register a Docutils transform to be applied after parsing.
Add the standard docutils :class:`~docutils.transforms.Transform`
subclass *transform* to the list of transforms that are applied after
Sphinx parses a reST document.
:param transform: A transform class
.. list-table:: priority range categories for Sphinx transforms
:widths: 20,80
* - Priority
- Main purpose in Sphinx
* - 0-99
- Fix invalid nodes by docutils. Translate a doctree.
* - 100-299
- Preparation
* - 300-399
- early
* - 400-699
- main
* - 700-799
- Post processing. Deadline to modify text and referencing.
* - 800-899
- Collect referencing and referenced nodes. Domain processing.
* - 900-999
- Finalize and clean up.
refs: `Transform Priority Range Categories`__
__ https://docutils.sourceforge.io/docs/ref/transforms.html#transform-priority-range-categories
| def add_transform(self, transform: type[Transform]) -> None:
"""Register a Docutils transform to be applied after parsing.
Add the standard docutils :class:`~docutils.transforms.Transform`
subclass *transform* to the list of transforms that are applied after
Sphinx parses a reST document.
:param transform: A transform class
.. list-table:: priority range categories for Sphinx transforms
:widths: 20,80
* - Priority
- Main purpose in Sphinx
* - 0-99
- Fix invalid nodes by docutils. Translate a doctree.
* - 100-299
- Preparation
* - 300-399
- early
* - 400-699
- main
* - 700-799
- Post processing. Deadline to modify text and referencing.
* - 800-899
- Collect referencing and referenced nodes. Domain processing.
* - 900-999
- Finalize and clean up.
refs: `Transform Priority Range Categories`__
__ https://docutils.sourceforge.io/docs/ref/transforms.html#transform-priority-range-categories
""" # NoQA: E501,RUF100 # Flake8 thinks the URL is too long, Ruff special cases URLs.
self.registry.add_transform(transform)
| (self, transform: type[docutils.transforms.Transform]) -> NoneType |
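A minimal sketch of a transform in the "main" priority range; ``UppercaseTitles`` is a hypothetical example and assumes a Docutils version that provides ``document.findall``.

```python
from docutils import nodes
from sphinx.transforms import SphinxTransform

class UppercaseTitles(SphinxTransform):  # hypothetical example transform
    default_priority = 400  # "main" range in the table above

    def apply(self):
        for title in self.document.findall(nodes.title):
            title[:] = [nodes.Text(title.astext().upper())]

def setup(app):
    app.add_transform(UppercaseTitles)
```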
37,602 | sphinx.application | build | null | def build(self, force_all: bool = False, filenames: list[str] | None = None) -> None:
self.phase = BuildPhase.READING
try:
if force_all:
self.builder.build_all()
elif filenames:
self.builder.build_specific(filenames)
else:
self.builder.build_update()
self.events.emit('build-finished', None)
except Exception as err:
# delete the saved env to force a fresh build next time
envfile = path.join(self.doctreedir, ENV_PICKLE_FILENAME)
if path.isfile(envfile):
os.unlink(envfile)
self.events.emit('build-finished', err)
raise
if self._warncount and self.keep_going:
self.statuscode = 1
status = (__('succeeded') if self.statuscode == 0
else __('finished with problems'))
if self._warncount:
if self.warningiserror:
if self._warncount == 1:
msg = __('build %s, %s warning (with warnings treated as errors).')
else:
msg = __('build %s, %s warnings (with warnings treated as errors).')
else:
if self._warncount == 1:
msg = __('build %s, %s warning.')
else:
msg = __('build %s, %s warnings.')
logger.info(bold(msg % (status, self._warncount)))
else:
logger.info(bold(__('build %s.') % status))
if self.statuscode == 0 and self.builder.epilog:
logger.info('')
logger.info(self.builder.epilog % {
'outdir': relpath(self.outdir),
'project': self.config.project,
})
self.builder.cleanup()
| (self, force_all: bool = False, filenames: Optional[list[str]] = None) -> NoneType |
37,603 | sphinx.application | connect | Register *callback* to be called when *event* is emitted.
For details on available core events and the arguments of callback
functions, please see :ref:`events`.
:param event: The name of target event
:param callback: Callback function for the event
:param priority: The priority of the callback. The callbacks will be invoked
in order of *priority* (ascending).
:return: A listener ID. It can be used for :meth:`disconnect`.
.. versionchanged:: 3.0
Support *priority*
| def connect(self, event: str, callback: Callable, priority: int = 500) -> int:
"""Register *callback* to be called when *event* is emitted.
For details on available core events and the arguments of callback
functions, please see :ref:`events`.
:param event: The name of target event
:param callback: Callback function for the event
:param priority: The priority of the callback. The callbacks will be invoked
in order of *priority* (ascending).
:return: A listener ID. It can be used for :meth:`disconnect`.
.. versionchanged:: 3.0
Support *priority*
"""
listener_id = self.events.connect(event, callback, priority)
logger.debug('[app] connecting event %r (%d): %r [id=%s]',
event, priority, callback, listener_id)
return listener_id
| (self, event: str, callback: Callable, priority: int = 500) -> int |
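A short sketch of connecting (and later disconnecting) a listener; the callback name is hypothetical, and ``build-finished`` callbacks receive the app plus the exception (or ``None`` on success).

```python
def on_build_finished(app, exception):
    if exception is None:
        print('output written to', app.outdir)

def setup(app):
    # priority 400 runs before the default-500 listeners
    listener_id = app.connect('build-finished', on_build_finished, priority=400)
    # keep listener_id around if you may need app.disconnect(listener_id) later
```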
37,604 | sphinx.application | create_builder | null | def create_builder(self, name: str) -> Builder:
if name is None:
logger.info(__('No builder selected, using default: html'))
name = 'html'
return self.registry.create_builder(self, name, self.env)
| (self, name: 'str') -> 'Builder' |
37,605 | sphinx.application | disconnect | Unregister callback by *listener_id*.
:param listener_id: A listener_id that :meth:`connect` returns
| def disconnect(self, listener_id: int) -> None:
"""Unregister callback by *listener_id*.
:param listener_id: A listener_id that :meth:`connect` returns
"""
logger.debug('[app] disconnecting event: [id=%s]', listener_id)
self.events.disconnect(listener_id)
| (self, listener_id: int) -> NoneType |
37,606 | sphinx.application | emit | Emit *event* and pass *arguments* to the callback functions.
Return the return values of all callbacks as a list. Do not emit core
Sphinx events in extensions!
:param event: The name of event that will be emitted
:param args: The arguments for the event
:param allowed_exceptions: The list of exceptions that are allowed in the callbacks
.. versionchanged:: 3.1
Added *allowed_exceptions* to specify path-through exceptions
| def emit(self, event: str, *args: Any,
allowed_exceptions: tuple[type[Exception], ...] = ()) -> list:
"""Emit *event* and pass *arguments* to the callback functions.
Return the return values of all callbacks as a list. Do not emit core
Sphinx events in extensions!
:param event: The name of event that will be emitted
:param args: The arguments for the event
:param allowed_exceptions: The list of exceptions that are allowed in the callbacks
.. versionchanged:: 3.1
Added *allowed_exceptions* to specify path-through exceptions
"""
return self.events.emit(event, *args, allowed_exceptions=allowed_exceptions)
| (self, event: str, *args: Any, allowed_exceptions: tuple[type[Exception], ...] = ()) -> list |
37,607 | sphinx.application | emit_firstresult | Emit *event* and pass *arguments* to the callback functions.
Return the result of the first callback that doesn't return ``None``.
:param event: The name of event that will be emitted
:param args: The arguments for the event
:param allowed_exceptions: The list of exceptions that are allowed in the callbacks
.. versionadded:: 0.5
.. versionchanged:: 3.1
Added *allowed_exceptions* to specify path-through exceptions
| def emit_firstresult(self, event: str, *args: Any,
allowed_exceptions: tuple[type[Exception], ...] = ()) -> Any:
"""Emit *event* and pass *arguments* to the callback functions.
Return the result of the first callback that doesn't return ``None``.
:param event: The name of event that will be emitted
:param args: The arguments for the event
:param allowed_exceptions: The list of exceptions that are allowed in the callbacks
.. versionadded:: 0.5
.. versionchanged:: 3.1
Added *allowed_exceptions* to specify path-through exceptions
"""
return self.events.emit_firstresult(event, *args,
allowed_exceptions=allowed_exceptions)
| (self, event: str, *args: Any, allowed_exceptions: tuple[type[Exception], ...] = ()) -> Any |
37,608 | sphinx.application | is_parallel_allowed | Check whether parallel processing is allowed or not.
:param typ: A type of processing; ``'read'`` or ``'write'``.
| def is_parallel_allowed(self, typ: str) -> bool:
"""Check whether parallel processing is allowed or not.
:param typ: A type of processing; ``'read'`` or ``'write'``.
"""
if typ == 'read':
attrname = 'parallel_read_safe'
message_not_declared = __("the %s extension does not declare if it "
"is safe for parallel reading, assuming "
"it isn't - please ask the extension author "
"to check and make it explicit")
message_not_safe = __("the %s extension is not safe for parallel reading")
elif typ == 'write':
attrname = 'parallel_write_safe'
message_not_declared = __("the %s extension does not declare if it "
"is safe for parallel writing, assuming "
"it isn't - please ask the extension author "
"to check and make it explicit")
message_not_safe = __("the %s extension is not safe for parallel writing")
else:
raise ValueError('parallel type %s is not supported' % typ)
for ext in self.extensions.values():
allowed = getattr(ext, attrname, None)
if allowed is None:
logger.warning(message_not_declared, ext.name)
logger.warning(__('doing serial %s'), typ)
return False
elif not allowed:
logger.warning(message_not_safe, ext.name)
logger.warning(__('doing serial %s'), typ)
return False
return True
| (self, typ: str) -> bool |
37,609 | sphinx.application | preload_builder | null | def preload_builder(self, name: str) -> None:
self.registry.preload_builder(self, name)
| (self, name: str) -> NoneType |
37,610 | sphinx.application | require_sphinx | Check the Sphinx version if requested.
Compare *version* with the version of the running Sphinx, and abort the
build when it is too old.
:param version: The required version in the form of ``major.minor`` or
``(major, minor)``.
.. versionadded:: 1.0
.. versionchanged:: 7.1
Type of *version* now allows ``(major, minor)`` form.
| @staticmethod
def require_sphinx(version: tuple[int, int] | str) -> None:
"""Check the Sphinx version if requested.
Compare *version* with the version of the running Sphinx, and abort the
build when it is too old.
:param version: The required version in the form of ``major.minor`` or
``(major, minor)``.
.. versionadded:: 1.0
.. versionchanged:: 7.1
Type of *version* now allows ``(major, minor)`` form.
"""
if isinstance(version, tuple):
major, minor = version
else:
major, minor = map(int, version.split('.')[:2])
if (major, minor) > sphinx.version_info[:2]:
req = f'{major}.{minor}'
raise VersionRequirementError(req)
| (version: tuple[int, int] | str) -> NoneType |
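Both accepted forms, as a tiny sketch:

```python
def setup(app):
    app.require_sphinx((7, 1))   # tuple form
    app.require_sphinx('7.1')    # equivalent string form
```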
37,611 | sphinx.application | set_html_assets_policy | Set the policy to include assets in HTML pages.
- always: include the assets in all the pages
- per_page: include the assets only in pages where they are used
.. versionadded:: 4.1
| def set_html_assets_policy(self, policy: Literal['always', 'per_page']) -> None:
"""Set the policy to include assets in HTML pages.
- always: include the assets in all the pages
- per_page: include the assets only in pages where they are used
.. versionadded:: 4.1
"""
if policy not in ('always', 'per_page'):
raise ValueError('policy %s is not supported' % policy)
self.registry.html_assets_policy = policy
| (self, policy: Literal['always', 'per_page']) -> NoneType |
37,612 | sphinx.application | set_translator | Register or override a Docutils translator class.
This is used to register a custom output translator or to replace a
builtin translator. This allows extensions to use a custom translator
and define custom nodes for the translator (see :meth:`add_node`).
:param name: The name of the builder for the translator
:param translator_class: A translator class
:param override: If true, install the translator forcedly even if another translator
is already installed as the same name
.. versionadded:: 1.3
.. versionchanged:: 1.8
Add *override* keyword.
| def set_translator(self, name: str, translator_class: type[nodes.NodeVisitor],
override: bool = False) -> None:
"""Register or override a Docutils translator class.
This is used to register a custom output translator or to replace a
builtin translator. This allows extensions to use a custom translator
and define custom nodes for the translator (see :meth:`add_node`).
:param name: The name of the builder for the translator
:param translator_class: A translator class
:param override: If true, install the translator forcedly even if another translator
is already installed as the same name
.. versionadded:: 1.3
.. versionchanged:: 1.8
Add *override* keyword.
"""
self.registry.add_translator(name, translator_class, override=override)
| (self, name: 'str', translator_class: 'type[nodes.NodeVisitor]', override: 'bool' = False) -> 'None' |
37,613 | sphinx.application | setup_extension | Import and setup a Sphinx extension module.
Load the extension given by the module *name*. Use this if your
extension needs the features provided by another extension. No-op if
called twice.
| def setup_extension(self, extname: str) -> None:
"""Import and setup a Sphinx extension module.
Load the extension given by the module *name*. Use this if your
extension needs the features provided by another extension. No-op if
called twice.
"""
logger.debug('[app] setting up extension: %r', extname)
self.registry.load_extension(self, extname)
| (self, extname: str) -> NoneType |
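A typical sketch: an extension that builds on autodoc makes sure it is loaded first.

```python
def setup(app):
    # ensure sphinx.ext.autodoc is set up before registering things that rely on it
    app.setup_extension('sphinx.ext.autodoc')
```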
37,614 | sphinx_favicon | _sizes | Compute the size of the favicon if its size is not explicitly defined.
If the file extension is one of the SUPPORTED_SIZE_TYPES, the size is computed on the fly and added
to the favicon attributes. Don't do anything if the favicon is not a link tag.
Args:
favicon: The favicon description as set in the conf.py file
static_path: The static_path registered in the application
confdir: The source directory of the documentation
Returns:
The favicon with a fully qualified size
| def _sizes(
favicon: Dict[str, str], static_path: List[str], confdir: str
) -> Dict[str, str]:
"""Compute the size of the favicon if its size is not explicitly defined.
If the file extension is one of the SUPPORTED_SIZE_TYPES, the size is computed on the fly and added
to the favicon attributes. Don't do anything if the favicon is not a link tag.
Args:
favicon: The favicon description as set in the conf.py file
static_path: The static_path registered in the application
confdir: The source directory of the documentation
Returns:
The favicon with a fully qualified size
"""
# exit if the favicon tag has no href (like meta)
if not (FILE_FIELD in favicon or "href" in favicon):
return favicon
# init the parameters
link: Optional[str] = favicon.get("href") or favicon.get(FILE_FIELD)
extension: Optional[str] = link.split(".")[-1] if link else None
sizes: Optional[str] = favicon.get("sizes")
# get the size automatically if not supplied
if link and sizes is None and extension in SUPPORTED_SIZE_TYPES:
file: Optional[Union[BytesIO, Path]] = None
if bool(urlparse(link).netloc):
try:
response = requests.get(link)
except RequestException:
response = requests.Response()
response.status_code = -1
if response.status_code == 200:
file = BytesIO(response.content)
else:
logger.warning(
f"The provided link ({link}) cannot be read. "
"Size will not be computed."
)
else:
for folder in static_path:
path = Path(confdir) / folder / link
if path.is_file():
file = path
break
if file is None:
logger.warning(
f"The provided path ({link}) is not part of any of the static path. "
"Size will not be computed."
)
# compute the image size if image file is found
if file is not None:
w, h = imagesize.get(file)
size = f"{int(w)}x{int(h)}"
favicon["sizes"] = size
return favicon
| (favicon: Dict[str, str], static_path: List[str], confdir: str) -> Dict[str, str] |
37,615 | sphinx_favicon | _static_to_href | Replace static ref to fully qualified href.
If the ``href`` is a relative path, it is replaced with the correct fully qualified ``href``. We keep checking for ``static-file`` for legacy reasons.
If both ``static-file`` and ``href`` are provided, ``href`` will be ignored.
If the favicon has no ``href`` nor ``static-file`` then do nothing.
Args:
pathto: Sphinx helper_ function to handle relative URLs
init_favicon: The favicon description as set in the conf.py file
Returns:
The favicon with a fully qualified href
| def _static_to_href(pathto: Callable, init_favicon: Dict[str, str]) -> Dict[str, str]:
"""Replace static ref to fully qualified href.
If the ``href`` is a relative path, it is replaced with the correct fully qualified ``href``. We keep checking for ``static-file`` for legacy reasons.
If both ``static-file`` and ``href`` are provided, ``href`` will be ignored.
If the favicon has no ``href`` nor ``static-file`` then do nothing.
Args:
pathto: Sphinx helper_ function to handle relative URLs
init_favicon: The favicon description as set in the conf.py file
Returns:
The favicon with a fully qualified href
"""
# work on a copy of the favicon (mutable issue)
favicon = init_favicon.copy()
# exit if the favicon tag has no href (like meta)
if not (FILE_FIELD in favicon or "href" in favicon):
return favicon
# legacy check for "static-file"
if FILE_FIELD in favicon:
favicon["href"] = favicon.pop(FILE_FIELD)
# check if link is absolute
link = favicon["href"]
is_absolute = bool(urlparse(link).netloc) or link.startswith("/")
# if the link is absolute do nothing, else replace it with a full one
if not is_absolute:
favicon["href"] = pathto(f"{OUTPUT_STATIC_DIR}/{link}", resource=True)
return favicon
| (pathto: Callable, init_favicon: Dict[str, str]) -> Dict[str, str] |
37,616 | sphinx_favicon | create_favicons_meta | Create ``<link>`` elements for favicons defined in configuration.
Args:
pathto: Sphinx helper_ function to handle relative URLs
favicons: Favicon data from configuration. Can be a single dict or a list of dicts.
static_path: the static_path registered in the application
confdir: the source directory of the documentation
Returns:
``<link>`` elements for all favicons.
See Also:
https://www.sphinx-doc.org/en/master/templating.html#path
| def create_favicons_meta(
pathto: Callable, favicons: FaviconsDef, static_path: List[str], confdir: str
) -> Optional[str]:
"""Create ``<link>`` elements for favicons defined in configuration.
Args:
pathto: Sphinx helper_ function to handle relative URLs
favicons: Favicon data from configuration. Can be a single dict or a list of dicts.
static_path: the static_path registered in the application
confdir: the source directory of the documentation
Returns:
``<link>`` elements for all favicons.
See Also:
https://www.sphinx-doc.org/en/master/templating.html#path
"""
# force cast the favicon config as a list
if isinstance(favicons, dict):
favicons = [favicons]
# read this list and create the links for each item
meta_favicons = []
for favicon in favicons:
if isinstance(favicon, str):
favicon = {"href": favicon}
if not isinstance(favicon, dict):
logger.warning(
f"Invalid config value for favicon extension: {favicon}."
"Custom favicons will not be included in build."
)
continue
favicon = _sizes(favicon, static_path, confdir)
tag = generate_meta(_static_to_href(pathto, favicon))
meta_favicons.append(tag)
return "\n".join(meta_favicons)
| (pathto: Callable, favicons: Union[Dict[str, str], List[Dict[str, str]]], static_path: List[str], confdir: str) -> Optional[str] |
37,617 | sphinx_favicon | generate_meta | Generate metatag based on favicon data.
Default behavior:
- If favicon data contains no ``rel`` attribute, sets ``rel="icon"``
- If no ``sizes`` attribute is provided, ``sizes`` will be computed from the file
- If no favicon MIME type is provided, the value for ``type`` will be
based on the favicon's file name extension (for BMP, GIF, ICO, JPG, JPEG,
SVG, or PNG files)
Args:
favicon: Favicon data
Returns:
Favicon link or meta tag
| def generate_meta(favicon: Dict[str, str]) -> str:
"""Generate metatag based on favicon data.
Default behavior:
- If favicon data contains no ``rel`` attribute, sets ``rel="icon"``
- If no ``sizes`` attribute is provided, ``sizes`` will be computed from the file
- If no favicon MIME type is provided, the value for ``type`` will be
based on the favicon's file name extension (for BMP, GIF, ICO, JPG, JPEG,
SVG, or PNG files)
Args:
favicon: Favicon data
Returns:
Favicon link or meta tag
"""
# get the tag of the output
tag = "meta" if "name" in favicon else "link"
# default to "icon" for link elements
if tag == "link":
favicon.setdefault("rel", "icon")
favicon["href"] # to raise an error if not set
extension = favicon["href"].split(".")[-1]
# set the type for link elements.
# if type is not set, try to guess it from the file extension
type_ = favicon.get("type")
if not type_ and tag == "link" and extension in SUPPORTED_MIME_TYPES:
type_ = SUPPORTED_MIME_TYPES[extension]
favicon["type"] = type_
# build the html element
parameters = [f'{k}="{v}"' for k, v in favicon.items() if v is not None]
html_element = f" <{tag} {' '.join(parameters)}>"
return html_element
| (favicon: Dict[str, str]) -> str |
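A sketch of the expected output, assuming the ``png`` extension maps to ``image/png`` in ``SUPPORTED_MIME_TYPES``; attribute order follows dict insertion order, with ``rel`` and ``type`` appended by the defaults described above.

```python
from sphinx_favicon import generate_meta

tag = generate_meta({"href": "favicon-32x32.png", "sizes": "32x32"})
print(tag)
# expected (given those assumptions):
#  <link href="favicon-32x32.png" sizes="32x32" rel="icon" type="image/png">
```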
37,618 | sphinx_favicon | html_page_context | Update the html page context by adding the favicons.
Args:
app: The sphinx application
pagename: the name of the page as string
templatename: the name of the template as string
context: the html context dictionary
doctree: the docutils document tree
| def html_page_context(
app: Sphinx,
pagename: str,
templatename: str,
context: Dict[str, Any],
doctree: nodes.document,
) -> None:
"""Update the html page context by adding the favicons.
Args:
app: The sphinx application
pagename: the name of the page as string
templatename: the name of the template as string
context: the html context dictionary
doctree: the docutils document tree
"""
# extract parameters from app
favicons: Optional[Dict[str, str]] = app.config["favicons"]
pathto: Callable = context["pathto"]
static_path: List[str] = app.config["html_static_path"]
confdir: str = app.confdir
if not (doctree and favicons):
return
favicons_meta = create_favicons_meta(pathto, favicons, static_path, confdir)
context["metatags"] += favicons_meta
| (app: sphinx.application.Sphinx, pagename: str, templatename: str, context: Dict[str, Any], doctree: docutils.nodes.document) -> NoneType |
37,623 | sphinx_favicon | setup | Add custom configuration to sphinx app.
Args:
app: the Sphinx application
Returns:
the two parallel-safety parameters set to ``True``
| def setup(app: Sphinx) -> Dict[str, Any]:
"""Add custom configuration to sphinx app.
Args:
app: the Sphinx application
Returns:
the two parallel-safety parameters set to ``True``
"""
app.add_config_value("favicons", None, "html")
app.connect("html-page-context", html_page_context)
return {
"parallel_read_safe": True,
"parallel_write_safe": True,
}
| (app: sphinx.application.Sphinx) -> Dict[str, Any] |
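From the user side, the extension is driven entirely from ``conf.py``; a minimal sketch follows (the file names are placeholders).

```python
# conf.py
extensions = ["sphinx_favicon"]

favicons = [
    "favicon-16x16.png",                            # plain string becomes {"href": ...}
    {"href": "favicon-32x32.png"},                  # sizes/type are filled in automatically
    {"rel": "apple-touch-icon", "href": "apple-touch-icon.png"},
]
```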
37,625 | healpy.sphtfunc | Alm | This class provides some static methods for alm index computation.
Methods
-------
getlm
getidx
getsize
getlmax
| class Alm(object):
"""This class provides some static methods for alm index computation.
Methods
-------
getlm
getidx
getsize
getlmax
"""
def __init__(self):
pass
@staticmethod
def getlm(lmax, i=None):
"""Get the l and m from index and lmax.
Parameters
----------
lmax : int
The maximum l defining the alm layout
i : int or None
The index for which to compute the l and m.
If None, the function returns l and m for i=0..Alm.getsize(lmax)
"""
szalm = Alm.getsize(lmax, lmax)
if i is None:
i = np.arange(szalm)
assert (
np.max(i) < szalm
), "Invalid index, it should less than the max alm array length of {}".format(
szalm
)
with np.errstate(all="raise"):
m = (
np.ceil(
((2 * lmax + 1) - np.sqrt((2 * lmax + 1) ** 2 - 8 * (i - lmax))) / 2
)
).astype(int)
l = i - m * (2 * lmax + 1 - m) // 2
return (l, m)
@staticmethod
def getidx(lmax, l, m):
r"""Returns index corresponding to (l,m) in an array describing alm up to lmax.
In HEALPix C++ and healpy, :math:`a_{lm}` coefficients are stored ordered by
:math:`m`. I.e. if :math:`\ell_{max}` is 16, the first 17 elements are
:math:`m=0, \ell=0-16`, then the following 16 elements are :math:`m=1, \ell=1-16`,
then :math:`m=2, \ell=2-16` and so on until the last element, the 153rd, is
:math:`m=16, \ell=16`.
Parameters
----------
lmax : int
The maximum l, defines the alm layout
l : int
The l for which to get the index
m : int
The m for which to get the index
Returns
-------
idx : int
The index corresponding to (l,m)
"""
return m * (2 * lmax + 1 - m) // 2 + l
@staticmethod
def getsize(lmax, mmax=None):
"""Returns the size of the array needed to store alm up to *lmax* and *mmax*
Parameters
----------
lmax : int
The maximum l, defines the alm layout
mmax : int, optional
The maximum m, defines the alm layout. Default: lmax.
Returns
-------
size : int
The size of the array needed to store alm up to lmax, mmax.
"""
if mmax is None or mmax < 0 or mmax > lmax:
mmax = lmax
return mmax * (2 * lmax + 1 - mmax) // 2 + lmax + 1
@staticmethod
def getlmax(s, mmax=None):
"""Returns the lmax corresponding to a given array size.
Parameters
----------
s : int
Size of the array
mmax : None or int, optional
The maximum m, defines the alm layout. Default: lmax.
Returns
-------
lmax : int
The maximum l of the array, or -1 if it is not a valid size.
"""
if mmax is not None and mmax >= 0:
x = (2 * s + mmax ** 2 - mmax - 2) / (2 * mmax + 2)
else:
x = (-3 + np.sqrt(1 + 8 * s)) / 2
if x != np.floor(x):
return -1
else:
return int(x)
| () |
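A small round-trip sketch using the static helpers (``hp`` is the conventional ``healpy`` import alias):

```python
import healpy as hp

lmax = 16
size = hp.Alm.getsize(lmax)            # 153 coefficients for lmax = mmax = 16
idx = hp.Alm.getidx(lmax, l=5, m=2)    # flat index of the (l=5, m=2) coefficient
l, m = hp.Alm.getlm(lmax, idx)         # recovers (5, 2)
assert (l, m) == (5, 2)
assert hp.Alm.getlmax(size) == lmax    # size -> lmax round trip
```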
37,627 | healpy.sphtfunc | getidx | Returns index corresponding to (l,m) in an array describing alm up to lmax.
In HEALPix C++ and healpy, :math:`a_{lm}` coefficients are stored ordered by
:math:`m`. I.e. if :math:`\ell_{max}` is 16, the first 17 elements are
:math:`m=0, \ell=0-16`, then the following 16 elements are :math:`m=1, \ell=1-16`,
then :math:`m=2, \ell=2-16` and so on until the last element, the 153rd, is
:math:`m=16, \ell=16`.
Parameters
----------
lmax : int
The maximum l, defines the alm layout
l : int
The l for which to get the index
m : int
The m for which to get the index
Returns
-------
idx : int
The index corresponding to (l,m)
| @staticmethod
def getidx(lmax, l, m):
r"""Returns index corresponding to (l,m) in an array describing alm up to lmax.
In HEALPix C++ and healpy, :math:`a_{lm}` coefficients are stored ordered by
:math:`m`. I.e. if :math:`\ell_{max}` is 16, the first 17 elements are
:math:`m=0, \ell=0-16`, then the following 16 elements are :math:`m=1, \ell=1-16`,
then :math:`m=2, \ell=2-16` and so on until the last element, the 153rd, is
:math:`m=16, \ell=16`.
Parameters
----------
lmax : int
The maximum l, defines the alm layout
l : int
The l for which to get the index
m : int
The m for which to get the index
Returns
-------
idx : int
The index corresponding to (l,m)
"""
return m * (2 * lmax + 1 - m) // 2 + l
| (lmax, l, m) |
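Two quick checks of the layout described above: for :math:`\ell_{max}=16` the :math:`m=0` block holds 17 coefficients, and the full array holds 153.

```python
import healpy as hp

lmax = 16
assert hp.Alm.getidx(lmax, l=1, m=1) == 17     # first index after the m=0 block
assert hp.Alm.getidx(lmax, l=16, m=16) == 152  # last coefficient, the 153rd element
```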
37,628 | healpy.sphtfunc | getlm | Get the l and m from index and lmax.
Parameters
----------
lmax : int
The maximum l defining the alm layout
i : int or None
The index for which to compute the l and m.
If None, the function returns l and m for i=0..Alm.getsize(lmax)
| @staticmethod
def getlm(lmax, i=None):
"""Get the l and m from index and lmax.
Parameters
----------
lmax : int
The maximum l defining the alm layout
i : int or None
The index for which to compute the l and m.
If None, the function returns l and m for i=0..Alm.getsize(lmax)
"""
szalm = Alm.getsize(lmax, lmax)
if i is None:
i = np.arange(szalm)
assert (
np.max(i) < szalm
), "Invalid index, it should less than the max alm array length of {}".format(
szalm
)
with np.errstate(all="raise"):
m = (
np.ceil(
((2 * lmax + 1) - np.sqrt((2 * lmax + 1) ** 2 - 8 * (i - lmax))) / 2
)
).astype(int)
l = i - m * (2 * lmax + 1 - m) // 2
return (l, m)
| (lmax, i=None) |
37,629 | healpy.sphtfunc | getlmax | Returns the lmax corresponding to a given array size.
Parameters
----------
s : int
Size of the array
mmax : None or int, optional
The maximum m, defines the alm layout. Default: lmax.
Returns
-------
lmax : int
The maximum l of the array, or -1 if it is not a valid size.
| @staticmethod
def getlmax(s, mmax=None):
"""Returns the lmax corresponding to a given array size.
Parameters
----------
s : int
Size of the array
mmax : None or int, optional
The maximum m, defines the alm layout. Default: lmax.
Returns
-------
lmax : int
The maximum l of the array, or -1 if it is not a valid size.
"""
if mmax is not None and mmax >= 0:
x = (2 * s + mmax ** 2 - mmax - 2) / (2 * mmax + 2)
else:
x = (-3 + np.sqrt(1 + 8 * s)) / 2
if x != np.floor(x):
return -1
else:
return int(x)
| (s, mmax=None) |
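A small sketch (illustrative, not from the source) of `Alm.getlmax`, which inverts `Alm.getsize`:

```python
import healpy as hp

print(hp.Alm.getlmax(10))   # 3, since getsize(3) == 10 when mmax == lmax
print(hp.Alm.getlmax(11))   # -1, no (lmax, mmax=lmax) layout has 11 coefficients
```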
37,630 | healpy.sphtfunc | getsize | Returns the size of the array needed to store alm up to *lmax* and *mmax*
Parameters
----------
lmax : int
The maximum l, defines the alm layout
mmax : int, optional
The maximum m, defines the alm layout. Default: lmax.
Returns
-------
size : int
The size of the array needed to store alm up to lmax, mmax.
| @staticmethod
def getsize(lmax, mmax=None):
"""Returns the size of the array needed to store alm up to *lmax* and *mmax*
Parameters
----------
lmax : int
The maximum l, defines the alm layout
mmax : int, optional
The maximum m, defines the alm layout. Default: lmax.
Returns
-------
size : int
The size of the array needed to store alm up to lmax, mmax.
"""
if mmax is None or mmax < 0 or mmax > lmax:
mmax = lmax
return mmax * (2 * lmax + 1 - mmax) // 2 + lmax + 1
| (lmax, mmax=None) |
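An illustrative sketch (not from the source); the values follow from the formula `mmax * (2 * lmax + 1 - mmax) // 2 + lmax + 1`.

```python
import healpy as hp

print(hp.Alm.getsize(3))          # 10, i.e. (lmax + 1) * (lmax + 2) / 2 when mmax == lmax
print(hp.Alm.getsize(95))         # 4656
print(hp.Alm.getsize(5, mmax=2))  # 15, i.e. 2 * (2 * 5 + 1 - 2) // 2 + 5 + 1
```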
37,631 | healpy.rotator | Rotator | Rotation operator, including astronomical coordinate systems.
This class provides tools for spherical rotations. It is meant to be used
in the healpy library for plotting, and for this reason reflects the
convention used in the Healpix IDL library.
Parameters
----------
rot : None or sequence
Describe the rotation by its euler angle. See :func:`euler_matrix_new`.
coord : None or sequence of str
Describe the coordinate system transform. If *rot* is also given, the
coordinate transform is applied first, and then the rotation.
inv : bool
If True, the inverse rotation is defined. (Default: False)
deg : bool
If True, angles are assumed to be in degree. (Default: True)
eulertype : str
The Euler angle convention used. See :func:`euler_matrix_new`.
Attributes
----------
mat
coordin
coordout
coordinstr
coordoutstr
rots
coords
Examples
--------
>>> r = Rotator(coord=['G','E']) # Transforms galactic to ecliptic coordinates
>>> theta_gal, phi_gal = np.pi/2., 0.
>>> theta_ecl, phi_ecl = r(theta_gal, phi_gal) # Apply the conversion
>>> print(theta_ecl)
1.66742347999
>>> print(phi_ecl)
-1.6259571125
>>> theta_ecl, phi_ecl = Rotator(coord='ge')(theta_gal, phi_gal) # In one line
>>> print(theta_ecl)
1.66742347999
>>> print(phi_ecl)
-1.6259571125
>>> vec_gal = np.array([1, 0, 0]) #Using vectors
>>> vec_ecl = r(vec_gal)
>>> print(vec_ecl)
[-0.05487563 -0.99382135 -0.09647686]
| class Rotator(object):
"""Rotation operator, including astronomical coordinate systems.
This class provides tools for spherical rotations. It is meant to be used
in the healpy library for plotting, and for this reason reflects the
convention used in the Healpix IDL library.
Parameters
----------
rot : None or sequence
Describe the rotation by its euler angle. See :func:`euler_matrix_new`.
coord : None or sequence of str
Describe the coordinate system transform. If *rot* is also given, the
coordinate transform is applied first, and then the rotation.
inv : bool
If True, the inverse rotation is defined. (Default: False)
deg : bool
If True, angles are assumed to be in degree. (Default: True)
eulertype : str
The Euler angle convention used. See :func:`euler_matrix_new`.
Attributes
----------
mat
coordin
coordout
coordinstr
coordoutstr
rots
coords
Examples
--------
>>> r = Rotator(coord=['G','E']) # Transforms galactic to ecliptic coordinates
>>> theta_gal, phi_gal = np.pi/2., 0.
>>> theta_ecl, phi_ecl = r(theta_gal, phi_gal) # Apply the conversion
>>> print(theta_ecl)
1.66742347999
>>> print(phi_ecl)
-1.6259571125
>>> theta_ecl, phi_ecl = Rotator(coord='ge')(theta_gal, phi_gal) # In one line
>>> print(theta_ecl)
1.66742347999
>>> print(phi_ecl)
-1.6259571125
>>> vec_gal = np.array([1, 0, 0]) #Using vectors
>>> vec_ecl = r(vec_gal)
>>> print(vec_ecl)
[-0.05487563 -0.99382135 -0.09647686]
"""
ErrMessWrongPar = (
"rot and coord must be single elements or " "sequence of same size."
)
def __init__(self, rot=None, coord=None, inv=None, deg=True, eulertype="ZYX"):
"""Create a rotator with given parameters.
- rot: a float, a tuple of 1,2 or 3 floats or a sequence of tuples.
If it is a sequence of tuple, it must have the same length as coord.
- coord: a string or a tuple of 1 or 2 strings or a sequence of tuple
If it is a sequence of tuple, it must have same length as rot.
- inv: whether to use inverse rotation or not
- deg: if True, angles in rot are assumed in degree (default: True)
- eulertype: the convention for Euler angles in rot.
Note: the coord system conversion is applied first, then the rotation.
"""
rot_is_seq = hasattr(rot, "__len__") and hasattr(rot[0], "__len__")
coord_is_seq = (
hasattr(coord, "__len__")
and hasattr(coord[0], "__len__")
and type(coord[0]) is not str
)
if rot_is_seq and coord_is_seq:
if len(rot) != len(coord):
raise ValueError(Rotator.ErrMessWrongPar)
else:
rots = rot
coords = coord
elif (rot_is_seq or coord_is_seq) and (rot is not None and coord is not None):
raise ValueError(Rotator.ErrMessWrongPar)
else:
rots = [rot]
coords = [coord]
inv_is_seq = hasattr(inv, "__len__")
if inv_is_seq:
if len(inv) != len(rots):
raise ValueError("inv must have same length as rot and/or coord")
invs = inv
else:
invs = [inv] * len(rots)
# check the argument and normalize them
if eulertype in ["ZYX", "X", "Y"]:
self._eultype = eulertype
else:
self._eultype = "ZYX"
self._rots = []
self._coords = []
self._invs = []
for r, c, i in zip(rots, coords, invs):
rn = normalise_rot(r, deg=deg)
# if self._eultype in ['X','Y']:
# rn[1] = -rn[1]
cn = normalise_coord(c)
self._rots.append(rn) # append(rn) or insert(0, rn) ?
self._coords.append(cn) # append(cn) or insert(0, cn) ?
self._invs.append(bool(i))
if not self.consistent:
log.warning("The chain of coord system rotations is not consistent")
self._update_matrix()
def _update_matrix(self):
self._matrix = np.identity(3)
self._do_rotation = False
for r, c, i in zip(self._rots, self._coords, self._invs):
rotmat, do_rot, rotnorm = get_rotation_matrix(r, eulertype=self._eultype)
convmat, do_conv, coordnorm = get_coordconv_matrix(c)
r = np.dot(rotmat, convmat)
if i:
r = r.T
self._matrix = np.dot(self._matrix, r)
self._do_rotation = self._do_rotation or (do_rot or do_conv)
def _is_coords_consistent(self):
for c, i in zip(self._coords, self._invs):
break
for cnext, inext in zip(self._coords[1:], self._invs[1:]):
if c[i] != cnext[not inext]:
return False
c, i = cnext, inext
return True
consistent = property(
_is_coords_consistent, doc="consistency of the coords transform chain"
)
def __eq__(self, a):
if type(a) is not type(self):
return False
# compare the _rots
v = [np.allclose(x, y, rtol=0, atol=1e-15) for x, y in zip(self._rots, a._rots)]
return (
np.array(v).all()
and (self._coords == a._coords)
and (self._invs == a._invs)
)
def __call__(self, *args, **kwds):
"""Use the rotator to rotate either spherical coordinates (theta, phi)
or a vector (x,y,z). You can use the lonlat keyword to use longitude, latitude
(in degree) instead of theta, phi (in radian). In this case, returns
longitude, latitude in degree.
Accepted forms:
r(x,y,z) # x,y,z either scalars or arrays
r(theta,phi) # theta, phi scalars or arrays
r(lon,lat,lonlat=True) # lon, lat scalars or arrays
r(vec) # vec 1-D array with 3 elements, or 2-D array 3xN
r(direction) # direction 1-D array with 2 elements, or 2xN array
Parameters
----------
vec_or_dir : array or multiple arrays
The direction to rotate. See above for accepted formats.
lonlat : bool, optional
If True, assumes the input direction is longitude/latitude in degrees.
Otherwise, assumes co-latitude/longitude in radians. Default: False
inv : bool, optional
If True, applies the inverse rotation. Default: False.
"""
if kwds.pop("inv", False):
m = self._matrix.T
else:
m = self._matrix
lonlat = kwds.pop("lonlat", False)
if len(args) == 1:
arg = args[0]
if not hasattr(arg, "__len__") or len(arg) < 2 or len(arg) > 3:
raise TypeError("Argument must be a sequence of 2 or 3 " "elements")
if len(arg) == 2:
return rotateDirection(
m, arg[0], arg[1], self._do_rotation, lonlat=lonlat
)
else:
return rotateVector(m, arg[0], arg[1], arg[2], self._do_rotation)
elif len(args) == 2:
return rotateDirection(
m, args[0], args[1], self._do_rotation, lonlat=lonlat
)
elif len(args) == 3:
return rotateVector(m, args[0], args[1], args[2], self._do_rotation)
else:
raise TypeError("Either 1, 2 or 3 arguments accepted")
def __mul__(self, a):
"""Composition of rotation."""
if not isinstance(a, Rotator):
raise TypeError(
"A Rotator can only multiply another Rotator "
"(composition of rotations)"
)
rots = self._rots + a._rots
coords = self._coords + a._coords
invs = self._invs + a._invs
return Rotator(rot=rots, coord=coords, inv=invs, deg=False)
def __rmul__(self, b):
if not isinstance(b, Rotator):
raise TypeError(
"A Rotator can only be multiplied by another Rotator "
"(composition of rotations)"
)
rots = b._rots + self._rots
coords = b._coords + self._coords
invs = self._invs + b._invs
return Rotator(rot=rots, coord=coords, inv=invs, deg=False)
def __nonzero__(self):
return self._do_rotation
def get_inverse(self):
rots = self._rots[::-1]
coords = self._coords[::-1]
invs = [not i for i in self._invs[::-1]]
return Rotator(rot=rots, coord=coords, inv=invs, deg=False)
# I = property(get_inverse,doc='Return a new rotator representing the '
# 'inverse rotation')
def I(self, *args, **kwds):
"""Rotate the given vector or direction using the inverse matrix.
rot.I(vec) <==> rot(vec,inv=True)
"""
kwds["inv"] = True
return self.__call__(*args, **kwds)
@property
def mat(self):
"""The matrix representing the rotation."""
return np.asarray(self._matrix)
@property
def coordin(self):
"""The input coordinate system."""
if not self.consistent:
return None
for c, i in zip(self._coords, self._invs):
pass
return c[i]
@property
def coordout(self):
"""The output coordinate system."""
if not self.consistent:
return None
for c, i in zip(self._coords, self._invs):
pass
return c[not i]
@property
def coordinstr(self):
"""The input coordinate system in str."""
return coordname.get(self.coordin, "")
@property
def coordoutstr(self):
"""The output coordinate system in str."""
return coordname.get(self.coordout, "")
@property
def rots(self):
"""The sequence of rots defining the rotation."""
return self._rots
@property
def coords(self):
"""The sequence of coords defining the rotation."""
return self._coords
def do_rot(self, i):
"""Returns True if rotation is not (close to) identity."""
return not np.allclose(self.rots[i], np.zeros(3), rtol=0.0, atol=1.0e-15)
def angle_ref(self, *args, **kwds):
"""Compute the angle between transverse reference direction of initial and final frames
For example, if angle of polarisation is psi in initial frame, it will be psi+angle_ref in final
frame.
Parameters
----------
dir_or_vec : array
Direction or vector (see Rotator.__call__)
lonlat: bool, optional
If True, assume input is longitude,latitude in degrees. Otherwise,
theta,phi in radian. Default: False
inv : bool, optional
If True, use the inverse transforms. Default: False
Returns
-------
angle : float, scalar or array
Angle in radian (a scalar or an array if input is a sequence of direction/vector)
"""
R = self
lonlat = kwds.get("lonlat", False)
inv = kwds.get("inv", False)
if len(args) == 1:
arg = args[0]
if not hasattr(arg, "__len__") or len(arg) < 2 or len(arg) > 3:
raise TypeError("Argument must be a sequence of 2 or 3 " "elements")
if len(arg) == 2:
v = dir2vec(arg[0], arg[1], lonlat=lonlat)
else:
v = arg
elif len(args) == 2:
v = dir2vec(args[0], args[1], lonlat=lonlat)
elif len(args) == 3:
v = args
else:
raise TypeError("Either 1, 2 or 3 arguments accepted")
vp = R(v, inv=inv)
north_pole = R([0.0, 0.0, 1.0], inv=inv)
sinalpha = north_pole[0] * vp[1] - north_pole[1] * vp[0]
cosalpha = north_pole[2] - vp[2] * np.dot(north_pole, vp)
return np.arctan2(sinalpha, cosalpha)
def rotate_alm(self, alm, lmax=None, mmax=None, inplace=False):
"""Rotate Alms with the transform defined in the Rotator object
see the docstring of the rotate_alm function defined
in the healpy package, this function **returns** the rotated alms,
does not rotate in place"""
if not inplace:
rotated_alm = alm.copy() # rotate_alm works inplace
else:
rotated_alm = alm
rotate_alm(rotated_alm, matrix=self.mat, lmax=lmax, mmax=mmax)
if not inplace:
return rotated_alm
@deprecated_renamed_argument("verbose", None, "1.15.0")
def rotate_map_alms(
self,
m,
use_pixel_weights=True,
lmax=None,
mmax=None,
datapath=None,
verbose=None,
):
"""Rotate a HEALPix map to a new reference frame in spherical harmonics space
This is generally the best strategy to rotate/change reference frame of maps.
If the input map is band-limited, i.e. it can be represented exactly by
a spherical harmonics transform under a specific lmax, the map rotation
will be invertible.
Parameters
----------
m : np.ndarray
Input map, single array is considered I, array with 3 rows:[I,Q,U]
other arguments : see map2alm
Returns
-------
m_rotated : np.ndarray
Map in the new reference frame
"""
alm = sphtfunc.map2alm(
m,
use_pixel_weights=use_pixel_weights,
lmax=lmax,
mmax=mmax,
datapath=datapath,
)
rotated_alm = self.rotate_alm(alm, lmax=lmax, mmax=mmax)
return sphtfunc.alm2map(
rotated_alm,
lmax=lmax,
mmax=mmax,
nside=pixelfunc.get_nside(m),
)
def rotate_map_pixel(self, m):
"""Rotate a HEALPix map to a new reference frame in pixel space
It is generally better to rotate in spherical harmonics space, see
the rotate_map_alms method. A case where pixel space rotation is
better is for heavily masked maps where the spherical harmonics
transform is not well defined.
This function first rotates the pixels centers of the new reference
frame to the original reference frame, then uses hp.get_interp_val
to interpolate bilinearly the pixel values, finally fixes Q and U
polarization by the modification to the psi angle caused by
the Rotator using Rotator.angle_ref.
Due to interpolation, this function generally suppresses the signal at
high angular scales.
Parameters
----------
m : np.ndarray
Input map, 1 map is considered I, 2 maps:[Q,U], 3 maps:[I,Q,U]
Returns
-------
m_rotated : np.ndarray
Map in the new reference frame
"""
if pixelfunc.maptype(m) == 0: # a single map is converted to a list
m = [m]
npix = len(m[0])
nside = pixelfunc.npix2nside(npix)
theta_pix_center, phi_pix_center = pixelfunc.pix2ang(
nside=nside, ipix=np.arange(npix)
)
# Rotate the pixels center of the new reference frame to the original frame
theta_pix_center_rot, phi_pix_center_rot = self.I(
theta_pix_center, phi_pix_center
)
# Interpolate the original map to the pixels centers in the new ref frame
m_rotated = [
pixelfunc.get_interp_val(each, theta_pix_center_rot, phi_pix_center_rot)
for each in m
]
# Rotate polarization
if len(m_rotated) > 1:
# Create a complex map from QU and apply the rotation in psi due to the rotation
# Slice from the end of the array so that it works both for QU and IQU
L_map = (m_rotated[-2] + m_rotated[-1] * 1j) * np.exp(
1j * 2 * self.angle_ref(theta_pix_center_rot, phi_pix_center_rot)
)
# Overwrite the Q and U maps with the correct values
m_rotated[-2] = np.real(L_map)
m_rotated[-1] = np.imag(L_map)
else:
m_rotated = m_rotated[0]
return m_rotated
def __repr__(self):
return (
"[ "
+ ", ".join([str(self._coords), str(self._rots), str(self._invs)])
+ " ]"
)
__str__ = __repr__
| (rot=None, coord=None, inv=None, deg=True, eulertype='ZYX') |
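A minimal usage sketch (not part of the source; assumes `healpy` as `hp` and `numpy` as `np`) showing the two common ways to build a Rotator, from a coordinate-system pair and from explicit Euler angles, plus the round trip through the inverse call:

```python
import numpy as np
import healpy as hp

# Coordinate transform: Galactic -> Ecliptic
r_ge = hp.Rotator(coord=["G", "E"])
theta_ecl, phi_ecl = r_ge(np.pi / 2, 0.0)    # rotate a single (theta, phi) direction
vec_ecl = r_ge(np.array([1.0, 0.0, 0.0]))    # or a 3-vector

# Explicit Euler angles (degrees by default)
r_rot = hp.Rotator(rot=(30.0, 45.0, 0.0))
vec = hp.ang2vec(np.pi / 3, np.pi / 4)
# Applying the rotation and then its inverse recovers the input vector
assert np.allclose(r_rot.I(r_rot(vec)), vec)
```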
37,632 | healpy.rotator | I | Rotate the given vector or direction using the inverse matrix.
rot.I(vec) <==> rot(vec,inv=True)
| def I(self, *args, **kwds):
"""Rotate the given vector or direction using the inverse matrix.
rot.I(vec) <==> rot(vec,inv=True)
"""
kwds["inv"] = True
return self.__call__(*args, **kwds)
| (self, *args, **kwds) |
37,633 | healpy.rotator | __call__ | Use the rotator to rotate either spherical coordinates (theta, phi)
or a vector (x,y,z). You can use the lonlat keyword to use longitude, latitude
(in degree) instead of theta, phi (in radian). In this case, returns
longitude, latitude in degree.
Accepted forms:
r(x,y,z) # x,y,z either scalars or arrays
r(theta,phi) # theta, phi scalars or arrays
r(lon,lat,lonlat=True) # lon, lat scalars or arrays
r(vec) # vec 1-D array with 3 elements, or 2-D array 3xN
r(direction) # direction 1-D array with 2 elements, or 2xN array
Parameters
----------
vec_or_dir : array or multiple arrays
The direction to rotate. See above for accepted formats.
lonlat : bool, optional
If True, assumes the input direction is longitude/latitude in degrees.
Otherwise, assumes co-latitude/longitude in radians. Default: False
inv : bool, optional
If True, applies the inverse rotation. Default: False.
| def __call__(self, *args, **kwds):
"""Use the rotator to rotate either spherical coordinates (theta, phi)
or a vector (x,y,z). You can use the lonlat keyword to use longitude, latitude
(in degree) instead of theta, phi (in radian). In this case, returns
longitude, latitude in degree.
Accepted forms:
r(x,y,z) # x,y,z either scalars or arrays
r(theta,phi) # theta, phi scalars or arrays
r(lon,lat,lonlat=True) # lon, lat scalars or arrays
r(vec) # vec 1-D array with 3 elements, or 2-D array 3xN
r(direction) # direction 1-D array with 2 elements, or 2xN array
Parameters
----------
vec_or_dir : array or multiple arrays
The direction to rotate. See above for accepted formats.
lonlat : bool, optional
If True, assumes the input direction is longitude/latitude in degrees.
Otherwise, assumes co-latitude/longitude in radians. Default: False
inv : bool, optional
If True, applies the inverse rotation. Default: False.
"""
if kwds.pop("inv", False):
m = self._matrix.T
else:
m = self._matrix
lonlat = kwds.pop("lonlat", False)
if len(args) == 1:
arg = args[0]
if not hasattr(arg, "__len__") or len(arg) < 2 or len(arg) > 3:
raise TypeError("Argument must be a sequence of 2 or 3 " "elements")
if len(arg) == 2:
return rotateDirection(
m, arg[0], arg[1], self._do_rotation, lonlat=lonlat
)
else:
return rotateVector(m, arg[0], arg[1], arg[2], self._do_rotation)
elif len(args) == 2:
return rotateDirection(
m, args[0], args[1], self._do_rotation, lonlat=lonlat
)
elif len(args) == 3:
return rotateVector(m, args[0], args[1], args[2], self._do_rotation)
else:
raise TypeError("Either 1, 2 or 3 arguments accepted")
| (self, *args, **kwds) |
37,634 | healpy.rotator | __eq__ | null | def __eq__(self, a):
if type(a) is not type(self):
return False
# compare the _rots
v = [np.allclose(x, y, rtol=0, atol=1e-15) for x, y in zip(self._rots, a._rots)]
return (
np.array(v).all()
and (self._coords == a._coords)
and (self._invs == a._invs)
)
| (self, a) |
37,635 | healpy.rotator | __init__ | Create a rotator with given parameters.
- rot: a float, a tuple of 1,2 or 3 floats or a sequence of tuples.
If it is a sequence of tuple, it must have the same length as coord.
- coord: a string or a tuple of 1 or 2 strings or a sequence of tuple
If it is a sequence of tuple, it must have same length as rot.
- inv: whether to use inverse rotation or not
- deg: if True, angles in rot are assumed in degree (default: True)
- eulertype: the convention for Euler angles in rot.
Note: the coord system conversion is applied first, then the rotation.
| def __init__(self, rot=None, coord=None, inv=None, deg=True, eulertype="ZYX"):
"""Create a rotator with given parameters.
- rot: a float, a tuple of 1,2 or 3 floats or a sequence of tuples.
If it is a sequence of tuple, it must have the same length as coord.
- coord: a string or a tuple of 1 or 2 strings or a sequence of tuple
If it is a sequence of tuple, it must have same length as rot.
- inv: whether to use inverse rotation or not
- deg: if True, angles in rot are assumed in degree (default: True)
- eulertype: the convention for Euler angles in rot.
Note: the coord system conversion is applied first, then the rotation.
"""
rot_is_seq = hasattr(rot, "__len__") and hasattr(rot[0], "__len__")
coord_is_seq = (
hasattr(coord, "__len__")
and hasattr(coord[0], "__len__")
and type(coord[0]) is not str
)
if rot_is_seq and coord_is_seq:
if len(rot) != len(coord):
raise ValueError(Rotator.ErrMessWrongPar)
else:
rots = rot
coords = coord
elif (rot_is_seq or coord_is_seq) and (rot is not None and coord is not None):
raise ValueError(Rotator.ErrMessWrongPar)
else:
rots = [rot]
coords = [coord]
inv_is_seq = hasattr(inv, "__len__")
if inv_is_seq:
if len(inv) != len(rots):
raise ValueError("inv must have same length as rot and/or coord")
invs = inv
else:
invs = [inv] * len(rots)
# check the argument and normalize them
if eulertype in ["ZYX", "X", "Y"]:
self._eultype = eulertype
else:
self._eultype = "ZYX"
self._rots = []
self._coords = []
self._invs = []
for r, c, i in zip(rots, coords, invs):
rn = normalise_rot(r, deg=deg)
# if self._eultype in ['X','Y']:
# rn[1] = -rn[1]
cn = normalise_coord(c)
self._rots.append(rn) # append(rn) or insert(0, rn) ?
self._coords.append(cn) # append(cn) or insert(0, cn) ?
self._invs.append(bool(i))
if not self.consistent:
log.warning("The chain of coord system rotations is not consistent")
self._update_matrix()
| (self, rot=None, coord=None, inv=None, deg=True, eulertype='ZYX') |
37,636 | healpy.rotator | __mul__ | Composition of rotation. | def __mul__(self, a):
"""Composition of rotation."""
if not isinstance(a, Rotator):
raise TypeError(
"A Rotator can only multiply another Rotator "
"(composition of rotations)"
)
rots = self._rots + a._rots
coords = self._coords + a._coords
invs = self._invs + a._invs
return Rotator(rot=rots, coord=coords, inv=invs, deg=False)
| (self, a) |
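An illustrative sketch (not from the source) of the composition semantics: since the combined matrix is the product of the two matrices, `(r1 * r2)(v)` applies `r2` first and then `r1`.

```python
import numpy as np
import healpy as hp

r1 = hp.Rotator(coord=["G", "E"])
r2 = hp.Rotator(rot=(30.0, 0.0, 0.0))
v = hp.ang2vec(1.0, 2.0)

# Composition behaves like matrix multiplication: r2 is applied first, then r1
assert np.allclose((r1 * r2)(v), r1(r2(v)))
assert np.allclose((r1 * r2).mat, np.dot(r1.mat, r2.mat))
```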
37,637 | healpy.rotator | __nonzero__ | null | def __nonzero__(self):
return self._do_rotation
| (self) |
37,638 | healpy.rotator | __repr__ | null | def __repr__(self):
return (
"[ "
+ ", ".join([str(self._coords), str(self._rots), str(self._invs)])
+ " ]"
)
| (self) |
37,639 | healpy.rotator | __rmul__ | null | def __rmul__(self, b):
if not isinstance(b, Rotator):
raise TypeError(
"A Rotator can only be multiplied by another Rotator "
"(composition of rotations)"
)
rots = b._rots + self._rots
coords = b._coords + self._coords
invs = self._invs + b._invs
return Rotator(rot=rots, coord=coords, inv=invs, deg=False)
| (self, b) |
37,641 | healpy.rotator | _is_coords_consistent | null | def _is_coords_consistent(self):
for c, i in zip(self._coords, self._invs):
break
for cnext, inext in zip(self._coords[1:], self._invs[1:]):
if c[i] != cnext[not inext]:
return False
c, i = cnext, inext
return True
| (self) |
37,642 | healpy.rotator | _update_matrix | null | def _update_matrix(self):
self._matrix = np.identity(3)
self._do_rotation = False
for r, c, i in zip(self._rots, self._coords, self._invs):
rotmat, do_rot, rotnorm = get_rotation_matrix(r, eulertype=self._eultype)
convmat, do_conv, coordnorm = get_coordconv_matrix(c)
r = np.dot(rotmat, convmat)
if i:
r = r.T
self._matrix = np.dot(self._matrix, r)
self._do_rotation = self._do_rotation or (do_rot or do_conv)
| (self) |
37,643 | healpy.rotator | angle_ref | Compute the angle between transverse reference direction of initial and final frames
For example, if angle of polarisation is psi in initial frame, it will be psi+angle_ref in final
frame.
Parameters
----------
dir_or_vec : array
Direction or vector (see Rotator.__call__)
lonlat: bool, optional
If True, assume input is longitude,latitude in degrees. Otherwise,
theta,phi in radian. Default: False
inv : bool, optional
If True, use the inverse transforms. Default: False
Returns
-------
angle : float, scalar or array
Angle in radian (a scalar or an array if input is a sequence of direction/vector)
| def angle_ref(self, *args, **kwds):
"""Compute the angle between transverse reference direction of initial and final frames
For example, if angle of polarisation is psi in initial frame, it will be psi+angle_ref in final
frame.
Parameters
----------
dir_or_vec : array
Direction or vector (see Rotator.__call__)
lonlat: bool, optional
If True, assume input is longitude,latitude in degrees. Otherwise,
theta,phi in radian. Default: False
inv : bool, optional
If True, use the inverse transforms. Default: False
Returns
-------
angle : float, scalar or array
Angle in radian (a scalar or an array if input is a sequence of direction/vector)
"""
R = self
lonlat = kwds.get("lonlat", False)
inv = kwds.get("inv", False)
if len(args) == 1:
arg = args[0]
if not hasattr(arg, "__len__") or len(arg) < 2 or len(arg) > 3:
raise TypeError("Argument must be a sequence of 2 or 3 " "elements")
if len(arg) == 2:
v = dir2vec(arg[0], arg[1], lonlat=lonlat)
else:
v = arg
elif len(args) == 2:
v = dir2vec(args[0], args[1], lonlat=lonlat)
elif len(args) == 3:
v = args
else:
raise TypeError("Either 1, 2 or 3 arguments accepted")
vp = R(v, inv=inv)
north_pole = R([0.0, 0.0, 1.0], inv=inv)
sinalpha = north_pole[0] * vp[1] - north_pole[1] * vp[0]
cosalpha = north_pole[2] - vp[2] * np.dot(north_pole, vp)
return np.arctan2(sinalpha, cosalpha)
| (self, *args, **kwds) |
37,644 | healpy.rotator | do_rot | Returns True if rotation is not (close to) identity. | def do_rot(self, i):
"""Returns True if rotation is not (close to) identity."""
return not np.allclose(self.rots[i], np.zeros(3), rtol=0.0, atol=1.0e-15)
| (self, i) |
37,645 | healpy.rotator | get_inverse | null | def get_inverse(self):
rots = self._rots[::-1]
coords = self._coords[::-1]
invs = [not i for i in self._invs[::-1]]
return Rotator(rot=rots, coord=coords, inv=invs, deg=False)
| (self) |
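A short sketch (not from the source); because the underlying matrices are orthogonal, the inverse rotator and the `inv=True` call are equivalent and undo the forward rotation.

```python
import numpy as np
import healpy as hp

r = hp.Rotator(coord=["C", "G"], rot=(10.0, 20.0, 30.0))
r_inv = r.get_inverse()

vec = hp.ang2vec(0.7, 1.3)
assert np.allclose(r_inv(r(vec)), vec)            # get_inverse undoes the rotation
assert np.allclose(r_inv(vec), r(vec, inv=True))  # same as calling with inv=True
```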
37,646 | healpy.rotator | rotate_alm | Rotate Alms with the transform defined in the Rotator object
see the docstring of the rotate_alm function defined
in the healpy package, this function **returns** the rotated alms,
does not rotate in place | def rotate_alm(self, alm, lmax=None, mmax=None, inplace=False):
"""Rotate Alms with the transform defined in the Rotator object
see the docstring of the rotate_alm function defined
in the healpy package, this function **returns** the rotated alms,
does not rotate in place"""
if not inplace:
rotated_alm = alm.copy() # rotate_alm works inplace
else:
rotated_alm = alm
rotate_alm(rotated_alm, matrix=self.mat, lmax=lmax, mmax=mmax)
if not inplace:
return rotated_alm
| (self, alm, lmax=None, mmax=None, inplace=False) |
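A usage sketch (illustrative only): the input alm are fabricated here with `hp.synalm` from a flat toy spectrum, which is an assumption of the example rather than anything prescribed by `rotate_alm`.

```python
import numpy as np
import healpy as hp

lmax = 64
cl = np.ones(lmax + 1)                        # flat toy spectrum, only to fabricate some alm
alm = hp.synalm(cl, lmax=lmax)

rot = hp.Rotator(coord=["G", "E"])            # Galactic -> Ecliptic
alm_ecl = rot.rotate_alm(alm, lmax=lmax)      # returns a rotated copy
rot.rotate_alm(alm, lmax=lmax, inplace=True)  # or overwrite the input array in place
```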
37,647 | healpy.rotator | rotate_map_alms | Rotate a HEALPix map to a new reference frame in spherical harmonics space
This is generally the best strategy to rotate/change reference frame of maps.
If the input map is band-limited, i.e. it can be represented exactly by
a spherical harmonics transform under a specific lmax, the map rotation
will be invertible.
Parameters
----------
m : np.ndarray
Input map, single array is considered I, array with 3 rows:[I,Q,U]
other arguments : see map2alm
Returns
-------
m_rotated : np.ndarray
Map in the new reference frame
| @deprecated_renamed_argument("verbose", None, "1.15.0")
def rotate_map_alms(
self,
m,
use_pixel_weights=True,
lmax=None,
mmax=None,
datapath=None,
verbose=None,
):
"""Rotate a HEALPix map to a new reference frame in spherical harmonics space
This is generally the best strategy to rotate/change reference frame of maps.
If the input map is band-limited, i.e. it can be represented exactly by
a spherical harmonics transform under a specific lmax, the map rotation
will be invertible.
Parameters
----------
m : np.ndarray
Input map, single array is considered I, array with 3 rows:[I,Q,U]
other arguments : see map2alm
Returns
-------
m_rotated : np.ndarray
Map in the new reference frame
"""
alm = sphtfunc.map2alm(
m,
use_pixel_weights=use_pixel_weights,
lmax=lmax,
mmax=mmax,
datapath=datapath,
)
rotated_alm = self.rotate_alm(alm, lmax=lmax, mmax=mmax)
return sphtfunc.alm2map(
rotated_alm,
lmax=lmax,
mmax=mmax,
nside=pixelfunc.get_nside(m),
)
| (self, m, use_pixel_weights=True, lmax=None, mmax=None, datapath=None, verbose=None) |
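A usage sketch (illustrative; the toy map from `hp.synfast` and the choice `use_pixel_weights=False` are assumptions of the example, the latter only to avoid fetching the pixel-weight files):

```python
import numpy as np
import healpy as hp

nside, lmax = 64, 128
cl = 1.0 / (np.arange(lmax + 1) + 1.0) ** 2
m_gal = hp.synfast(cl, nside=nside, lmax=lmax)   # toy map, say in Galactic coordinates

rot = hp.Rotator(coord=["G", "E"])               # Galactic -> Ecliptic
m_ecl = rot.rotate_map_alms(m_gal, lmax=lmax, use_pixel_weights=False)
```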
37,648 | healpy.rotator | rotate_map_pixel | Rotate a HEALPix map to a new reference frame in pixel space
It is generally better to rotate in spherical harmonics space, see
the rotate_map_alms method. A case where pixel space rotation is
better is for heavily masked maps where the spherical harmonics
transform is not well defined.
This function first rotates the pixel centers of the new reference
frame to the original reference frame, then uses hp.get_interp_val
to bilinearly interpolate the pixel values, and finally corrects the Q and U
polarization for the change in the psi angle introduced by
the Rotator, using Rotator.angle_ref.
Due to interpolation, this function generally suppresses the signal at
high angular scales.
Parameters
----------
m : np.ndarray
Input map, 1 map is considered I, 2 maps:[Q,U], 3 maps:[I,Q,U]
Returns
-------
m_rotated : np.ndarray
Map in the new reference frame
| def rotate_map_pixel(self, m):
"""Rotate a HEALPix map to a new reference frame in pixel space
It is generally better to rotate in spherical harmonics space, see
the rotate_map_alms method. A case where pixel space rotation is
better is for heavily masked maps where the spherical harmonics
transform is not well defined.
This function first rotates the pixel centers of the new reference
frame to the original reference frame, then uses hp.get_interp_val
to bilinearly interpolate the pixel values, and finally corrects the Q and U
polarization for the change in the psi angle introduced by
the Rotator, using Rotator.angle_ref.
Due to interpolation, this function generally suppresses the signal at
high angular scales.
Parameters
----------
m : np.ndarray
Input map, 1 map is considered I, 2 maps:[Q,U], 3 maps:[I,Q,U]
Returns
-------
m_rotated : np.ndarray
Map in the new reference frame
"""
if pixelfunc.maptype(m) == 0: # a single map is converted to a list
m = [m]
npix = len(m[0])
nside = pixelfunc.npix2nside(npix)
theta_pix_center, phi_pix_center = pixelfunc.pix2ang(
nside=nside, ipix=np.arange(npix)
)
# Rotate the pixels center of the new reference frame to the original frame
theta_pix_center_rot, phi_pix_center_rot = self.I(
theta_pix_center, phi_pix_center
)
# Interpolate the original map to the pixels centers in the new ref frame
m_rotated = [
pixelfunc.get_interp_val(each, theta_pix_center_rot, phi_pix_center_rot)
for each in m
]
# Rotate polarization
if len(m_rotated) > 1:
# Create a complex map from QU and apply the rotation in psi due to the rotation
# Slice from the end of the array so that it works both for QU and IQU
L_map = (m_rotated[-2] + m_rotated[-1] * 1j) * np.exp(
1j * 2 * self.angle_ref(theta_pix_center_rot, phi_pix_center_rot)
)
# Overwrite the Q and U maps with the correct values
m_rotated[-2] = np.real(L_map)
m_rotated[-1] = np.imag(L_map)
else:
m_rotated = m_rotated[0]
return m_rotated
| (self, m) |
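A usage sketch (illustrative only; the toy IQU map is fabricated with `hp.synfast` from flat spectra chosen so that the TT/EE/TE covariance stays positive definite):

```python
import numpy as np
import healpy as hp

nside, lmax = 64, 128
cl_tt = np.ones(lmax + 1)
cl_ee = 0.5 * np.ones(lmax + 1)
cl_bb = 0.1 * np.ones(lmax + 1)
cl_te = 0.3 * np.ones(lmax + 1)
# new=True means the 4 spectra are ordered TT, EE, BB, TE
m_gal = hp.synfast([cl_tt, cl_ee, cl_bb, cl_te], nside=nside, lmax=lmax, new=True)

rot = hp.Rotator(coord=["G", "C"])        # Galactic -> Celestial (equatorial)
m_cel = rot.rotate_map_pixel(m_gal)       # interpolates I, Q, U and corrects psi
```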
37,657 | healpy.sphtfunc | alm2cl | Computes (cross-)spectra from alm(s). If alm2 is given, cross-spectra between
alm and alm2 are computed. If alm (and alm2 if provided) contains n alm,
then n(n+1)/2 auto and cross-spectra are returned.
Parameters
----------
alm : complex, array or sequence of arrays
The alm from which to compute the power spectrum. If n>=2 arrays are given,
computes both auto- and cross-spectra.
alms2 : complex, array or sequence of 3 arrays, optional
If provided, computes cross-spectra between alm and alm2.
Default: alm2=alm, so auto-spectra are computed.
lmax : None or int, optional
The maximum l of the input alm. Default: computed from size of alm
and mmax_in
mmax : None or int, optional
The maximum m of the input alm. Default: assume mmax_in = lmax_in
lmax_out : None or int, optional
The maximum l of the returned spectra. By default: the lmax of the given
alm(s).
nspec : None or int, optional
The number of spectra to return. None means all, otherwise returns cl[:nspec]
Returns
-------
cl : array or tuple of n(n+1)/2 arrays
the spectrum <*alm* x *alm2*> if *alm* (and *alm2*) is one alm, or
the auto- and cross-spectra <*alm*[i] x *alm2*[j]> if alm (and alm2)
contains more than one spectra.
If more than one spectrum is returned, they are ordered by diagonal.
For example, if *alm* is almT, almE, almB, then the returned spectra are:
TT, EE, BB, TE, EB, TB.
| def alm2cl(alms1, alms2=None, lmax=None, mmax=None, lmax_out=None, nspec=None):
"""Computes (cross-)spectra from alm(s). If alm2 is given, cross-spectra between
alm and alm2 are computed. If alm (and alm2 if provided) contains n alm,
then n(n+1)/2 auto and cross-spectra are returned.
Parameters
----------
alm : complex, array or sequence of arrays
The alm from which to compute the power spectrum. If n>=2 arrays are given,
computes both auto- and cross-spectra.
alms2 : complex, array or sequence of 3 arrays, optional
If provided, computes cross-spectra between alm and alm2.
Default: alm2=alm, so auto-spectra are computed.
lmax : None or int, optional
The maximum l of the input alm. Default: computed from size of alm
and mmax_in
mmax : None or int, optional
The maximum m of the input alm. Default: assume mmax_in = lmax_in
lmax_out : None or int, optional
The maximum l of the returned spectra. By default: the lmax of the given
alm(s).
nspec : None or int, optional
The number of spectra to return. None means all, otherwise returns cl[:nspec]
Returns
-------
cl : array or tuple of n(n+1)/2 arrays
the spectrum <*alm* x *alm2*> if *alm* (and *alm2*) is one alm, or
the auto- and cross-spectra <*alm*[i] x *alm2*[j]> if alm (and alm2)
contains more than one spectra.
If more than one spectrum is returned, they are ordered by diagonal.
For example, if *alm* is almT, almE, almB, then the returned spectra are:
TT, EE, BB, TE, EB, TB.
"""
cls = _sphtools.alm2cl(alms1, alms2=alms2, lmax=lmax, mmax=mmax, lmax_out=lmax_out)
if nspec is None:
return np.array(cls)
else:
return np.array(cls[:nspec])
| (alms1, alms2=None, lmax=None, mmax=None, lmax_out=None, nspec=None) |
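A usage sketch (illustrative): alm are drawn from a known spectrum with `hp.synalm`, and `alm2cl` then returns a noisy realization of that spectrum, or a cross-spectrum when a second set of alm is passed.

```python
import numpy as np
import healpy as hp

lmax = 128
cl_in = 1.0 / (np.arange(lmax + 1) + 1.0) ** 2

alm1 = hp.synalm(cl_in, lmax=lmax)
alm2 = hp.synalm(cl_in, lmax=lmax)

cl_auto = hp.alm2cl(alm1, lmax=lmax)          # auto-spectrum, a realization of cl_in
cl_cross = hp.alm2cl(alm1, alm2, lmax=lmax)   # cross-spectrum of two independent alm sets
```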
37,658 | healpy.sphtfunc | alm2map | Computes a Healpix map given the alm.
The alm are given as a complex array. You can specify lmax
and mmax, or they will be computed from array size (assuming
lmax==mmax).
Parameters
----------
alms : complex, array or sequence of arrays
A complex array or a sequence of complex arrays.
Each array must have a size of the form: mmax * (2 * lmax + 1 - mmax) / 2 + lmax + 1
nside : int, scalar
The nside of the output map.
lmax : None or int, scalar, optional
Explicitly define lmax (needed if mmax!=lmax)
mmax : None or int, scalar, optional
Explicitly define mmax (needed if mmax!=lmax)
pixwin : bool, optional
Smooth the alm using the pixel window functions. Default: False.
fwhm : float, scalar, optional
The fwhm of the Gaussian used to smooth the map (applied on alm)
[in radians]
sigma : float, scalar, optional
The sigma of the Gaussian used to smooth the map (applied on alm)
[in radians]
pol : bool, optional
If True, assumes input alms are TEB. Output will be TQU maps.
(input must be 1 or 3 alms)
If False, apply spin 0 harmonic transform to each alm.
(input can be any number of alms)
If there is only one input alm, it has no effect. Default: True.
inplace : bool, optional
If True, input alms may be modified by pixel window function and beam
smoothing (if alm(s) are complex128 contiguous arrays).
Otherwise, input alms are not modified. A copy is made if needed to
apply beam smoothing or pixel window.
Returns
-------
maps : array or list of arrays
A Healpix map in RING scheme at nside or a list of T,Q,U maps (if
polarized input)
Notes
-----
Running map2alm then alm2map will not return exactly the same map if the discretized field you construct on the sphere is not band-limited (for example, if you have a map containing pixel-based noise rather than beam-smoothed noise). If you need a band-limited map, you have to start with random numbers in lm space and transform these via alm2map. With such an input, the accuracy of map2alm->alm2map should be quite good, depending on your choices of lmax, mmax and nside (for some typical values, see e.g., section 5.1 of https://arxiv.org/pdf/1010.2084).
| def synalm(cls, lmax=None, mmax=None, new=False, verbose=True):
"""Generate a set of alm given cl.
The cl are given as a float array. Corresponding alm are generated.
If lmax is None, it is assumed lmax=cl.size-1
If mmax is None, it is assumed mmax=lmax.
Parameters
----------
cls : float, array or tuple of arrays
Either one cl (1D array) or a tuple of either 4 cl
or of n*(n+1)/2 cl.
Some of the cl may be None, implying no
cross-correlation. See *new* parameter.
lmax : int, scalar, optional
The lmax (if None or <0, the largest size-1 of cls)
mmax : int, scalar, optional
The mmax (if None or <0, =lmax)
new : bool, optional
If True, use the new ordering of cl's, ie by diagonal
(e.g. TT, EE, BB, TE, EB, TB or TT, EE, BB, TE if 4 cl as input).
If False, use the old ordering, ie by row
(e.g. TT, TE, TB, EE, EB, BB or TT, TE, EE, BB if 4 cl as input).
Returns
-------
alms : array or list of arrays
the generated alm if one spectrum is given, or a list of n alms
(with n(n+1)/2 the number of input cl, or n=3 if there are 4 input cl).
Notes
-----
We don't plan to change the default order anymore, that would break old
code in a way difficult to debug.
"""
if not cb.is_seq(cls):
raise TypeError("cls must be an array or a sequence of arrays")
if not cb.is_seq_of_seq(cls, True):
# Only one spectrum
if lmax is None or lmax < 0:
lmax = len(cls) - 1
if mmax is None or mmax < 0:
mmax = lmax
cls_list = [np.asarray(cls, dtype=np.float64)]
szalm = Alm.getsize(lmax, mmax)
alm = np.zeros(szalm, "D")
alm.real = np.random.standard_normal(szalm)
alm.imag = np.random.standard_normal(szalm)
alms_list = [alm]
sphtlib._synalm(cls_list, alms_list, lmax, mmax)
return alm
# From here, we interpret cls as a list of spectra
cls_list = list(cls)
maxsize = max([len(c) for c in cls if c is not None])
if lmax is None or lmax < 0:
lmax = maxsize - 1
if mmax is None or mmax < 0:
mmax = lmax
Nspec = sphtlib._getn(len(cls_list))
if Nspec <= 0:
if len(cls_list) == 4:
if new: ## new input order: TT EE BB TE -> TT EE BB TE 0 0
cls_list = [cls[0], cls[1], cls[2], cls[3], None, None]
else: ## old input order: TT TE EE BB -> TT TE 0 EE 0 BB
cls_list = [cls[0], cls[1], None, cls[2], None, cls[3]]
Nspec = 3
else:
raise TypeError(
"The sequence of arrays must have either 4 elements "
"or n(n+1)/2 elements (some may be None)"
)
szalm = Alm.getsize(lmax, mmax)
alms_list = []
for i in range(Nspec):
alm = np.zeros(szalm, "D")
alm.real = np.random.standard_normal(szalm)
alm.imag = np.random.standard_normal(szalm)
alms_list.append(alm)
if new: # new input order: input given by diagonal, should be given by row
cls_list = new_to_old_spectra_order(cls_list)
# ensure cls are float64
cls_list = [
(np.asarray(cl, dtype=np.float64) if cl is not None else None)
for cl in cls_list
]
sphtlib._synalm(cls_list, alms_list, lmax, mmax)
return np.array(alms_list)
| (alms, nside, lmax=None, mmax=None, pixwin=False, fwhm=0.0, sigma=None, pol=True, inplace=False, verbose=True) |
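A usage sketch (illustrative; the band-limited alm are fabricated with `hp.synalm`, which is an assumption of the example):

```python
import numpy as np
import healpy as hp

nside, lmax = 64, 128
cl = 1.0 / (np.arange(lmax + 1) + 1.0) ** 2

alm = hp.synalm(cl, lmax=lmax)                  # band-limited input alm
m = hp.alm2map(alm, nside=nside, lmax=lmax)     # RING map with 12 * nside**2 pixels

# Beam smoothing can be applied in harmonic space during the synthesis (fwhm in radians)
m_smooth = hp.alm2map(alm, nside=nside, lmax=lmax, fwhm=np.radians(1.0))
```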
37,659 | healpy.sphtfunc | alm2map_der1 | Computes a Healpix map and its first derivatives given the alm.
The alm are given as a complex array. You can specify lmax
and mmax, or they will be computed from array size (assuming
lmax==mmax).
Parameters
----------
alm : array, complex
A complex array of alm. Size must be of the form mmax * (2 * lmax + 1 - mmax) / 2 + lmax + 1
nside : int
The nside of the output map.
lmax : None or int, optional
Explicitly define lmax (needed if mmax!=lmax)
mmax : None or int, optional
Explicitly define mmax (needed if mmax!=lmax)
Returns
-------
m, d_theta, d_phi : tuple of arrays
The maps corresponding to alm, and its derivatives with respect to
theta and phi. d_phi is already divided by sin(theta)
| def alm2map_der1(alm, nside, lmax=None, mmax=None):
"""Computes a Healpix map and its first derivatives given the alm.
The alm are given as a complex array. You can specify lmax
and mmax, or they will be computed from array size (assuming
lmax==mmax).
Parameters
----------
alm : array, complex
A complex array of alm. Size must be of the form mmax * (2 * lmax + 1 - mmax) / 2 + lmax + 1
nside : int
The nside of the output map.
lmax : None or int, optional
Explicitly define lmax (needed if mmax!=lmax)
mmax : None or int, optional
Explicitly define mmax (needed if mmax!=lmax)
Returns
-------
m, d_theta, d_phi : tuple of arrays
The maps corresponding to alm, and its derivatives with respect to
theta and phi. d_phi is already divided by sin(theta)
"""
check_max_nside(nside)
if lmax is None:
lmax = -1
if mmax is None:
mmax = -1
return np.array(sphtlib._alm2map_der1(alm, nside, lmax=lmax, mmax=mmax))
| (alm, nside, lmax=None, mmax=None) |
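A short sketch (illustrative) of the returned triple, the map plus its theta and phi derivatives:

```python
import numpy as np
import healpy as hp

nside, lmax = 32, 64
alm = hp.synalm(np.ones(lmax + 1), lmax=lmax)

m, d_theta, d_phi = hp.alm2map_der1(alm, nside, lmax=lmax)
# Three maps of 12 * nside**2 pixels; d_phi is already divided by sin(theta)
```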
37,660 | healpy.sphtfunc | almxfl | Multiply alm by a function of l. The function is assumed
to be zero where not defined.
Parameters
----------
alm : array
The alm to multiply
fl : array
The function (at l=0..fl.size-1) by which alm must be multiplied.
mmax : None or int, optional
The maximum m defining the alm layout. Default: lmax.
inplace : bool, optional
If True, modify the given alm, otherwise make a copy before multiplying.
Returns
-------
alm : array
The modified alm, either a new array or a reference to input alm,
if inplace is True.
| def almxfl(alm, fl, mmax=None, inplace=False):
"""Multiply alm by a function of l. The function is assumed
to be zero where not defined.
Parameters
----------
alm : array
The alm to multiply
fl : array
The function (at l=0..fl.size-1) by which alm must be multiplied.
mmax : None or int, optional
The maximum m defining the alm layout. Default: lmax.
inplace : bool, optional
If True, modify the given alm, otherwise make a copy before multiplying.
Returns
-------
alm : array
The modified alm, either a new array or a reference to input alm,
if inplace is True.
"""
# FIXME: Should handle multidimensional input
almout = _sphtools.almxfl(alm, fl, mmax=mmax, inplace=inplace)
return almout
| (alm, fl, mmax=None, inplace=False) |
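A usage sketch (illustrative): applying a Gaussian beam transfer function, built here with `hp.gauss_beam`, to a set of alm.

```python
import numpy as np
import healpy as hp

lmax = 128
alm = hp.synalm(np.ones(lmax + 1), lmax=lmax)

fl = hp.gauss_beam(np.radians(0.5), lmax=lmax)   # 30 arcmin Gaussian beam window
alm_smooth = hp.almxfl(alm, fl)                  # new array; inplace=True would modify alm
```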
37,661 | healpy.sphtfunc | anafast | Computes the power spectrum of a Healpix map, or the cross-spectrum
between two maps if *map2* is given.
No removal of monopole or dipole is performed. The input maps must be
in ring-ordering.
Spherical harmonics transforms in HEALPix are always on the full sky,
if the map is masked, those pixels are set to 0. It is recommended to
remove monopole from the map before running `anafast` to reduce
boundary effects.
For recommendations about how to set `lmax`, `iter`, and weights, see the
`Anafast documentation <https://healpix.sourceforge.io/html/fac_anafast.htm>`_
Parameters
----------
map1 : float, array-like shape (Npix,) or (3, Npix)
Either an array representing a map, or a sequence of 3 arrays
representing I, Q, U maps. Must be in ring ordering.
map2 : float, array-like shape (Npix,) or (3, Npix)
Either an array representing a map, or a sequence of 3 arrays
representing I, Q, U maps. Must be in ring ordering.
nspec : None or int, optional
The number of spectra to return. If None, returns all, otherwise
returns cls[:nspec]
lmax : int, scalar, optional
Maximum l of the power spectrum (default: 3*nside-1)
mmax : int, scalar, optional
Maximum m of the alm (default: lmax)
iter : int, scalar, optional
Number of iterations (default: 3)
alm : bool, scalar, optional
If True, returns both cl and alm, otherwise only cl is returned
pol : bool, optional
If True, assumes input maps are TQU. Output will be TEB cl's and
correlations (input must be 1 or 3 maps).
If False, maps are assumed to be described by spin 0 spherical harmonics.
(input can be any number of maps)
If there is only one input map, it has no effect. Default: True.
datapath : None or str, optional
If given, the directory where to find the weights data.
See the docstring of `map2alm` for details on how to set it up
gal_cut : float [degrees]
pixels at latitude in [-gal_cut;+gal_cut] are not taken into account
use_pixel_weights: bool, optional
If True, use pixel by pixel weighting, healpy will automatically download the weights, if needed
See the map2alm docs for details about weighting
Returns
-------
res : array or sequence of arrays
If *alm* is False, returns cl or a list of cl's (TT, EE, BB, TE, EB, TB for
polarized input map)
Otherwise, returns a tuple (cl, alm), where cl is as above and
alm is the spherical harmonic transform or a list of almT, almE, almB
for polarized input
| def anafast(
map1,
map2=None,
nspec=None,
lmax=None,
mmax=None,
iter=3,
alm=False,
pol=True,
use_weights=False,
datapath=None,
gal_cut=0,
use_pixel_weights=False,
):
"""Computes the power spectrum of a Healpix map, or the cross-spectrum
between two maps if *map2* is given.
No removal of monopole or dipole is performed. The input maps must be
in ring-ordering.
Spherical harmonics transforms in HEALPix are always on the full sky,
if the map is masked, those pixels are set to 0. It is recommended to
remove monopole from the map before running `anafast` to reduce
boundary effects.
For recommendations about how to set `lmax`, `iter`, and weights, see the
`Anafast documentation <https://healpix.sourceforge.io/html/fac_anafast.htm>`_
Parameters
----------
map1 : float, array-like shape (Npix,) or (3, Npix)
Either an array representing a map, or a sequence of 3 arrays
representing I, Q, U maps. Must be in ring ordering.
map2 : float, array-like shape (Npix,) or (3, Npix)
Either an array representing a map, or a sequence of 3 arrays
representing I, Q, U maps. Must be in ring ordering.
nspec : None or int, optional
The number of spectra to return. If None, returns all, otherwise
returns cls[:nspec]
lmax : int, scalar, optional
Maximum l of the power spectrum (default: 3*nside-1)
mmax : int, scalar, optional
Maximum m of the alm (default: lmax)
iter : int, scalar, optional
Number of iterations (default: 3)
alm : bool, scalar, optional
If True, returns both cl and alm, otherwise only cl is returned
pol : bool, optional
If True, assumes input maps are TQU. Output will be TEB cl's and
correlations (input must be 1 or 3 maps).
If False, maps are assumed to be described by spin 0 spherical harmonics.
(input can be any number of maps)
If there is only one input map, it has no effect. Default: True.
datapath : None or str, optional
If given, the directory where to find the weights data.
See the docstring of `map2alm` for details on how to set it up
gal_cut : float [degrees]
pixels at latitude in [-gal_cut;+gal_cut] are not taken into account
use_pixel_weights: bool, optional
If True, use pixel by pixel weighting, healpy will automatically download the weights, if needed
See the map2alm docs for details about weighting
Returns
-------
res : array or sequence of arrays
If *alm* is False, returns cl or a list of cl's (TT, EE, BB, TE, EB, TB for
polarized input map)
Otherwise, returns a tuple (cl, alm), where cl is as above and
alm is the spherical harmonic transform or a list of almT, almE, almB
for polarized input
"""
map1 = ma_to_array(map1)
alms1 = map2alm(
map1,
lmax=lmax,
mmax=mmax,
pol=pol,
iter=iter,
use_weights=use_weights,
datapath=datapath,
gal_cut=gal_cut,
use_pixel_weights=use_pixel_weights,
)
if map2 is not None:
map2 = ma_to_array(map2)
alms2 = map2alm(
map2,
lmax=lmax,
mmax=mmax,
pol=pol,
iter=iter,
use_weights=use_weights,
datapath=datapath,
gal_cut=gal_cut,
use_pixel_weights=use_pixel_weights,
)
else:
alms2 = None
cls = alm2cl(alms1, alms2=alms2, lmax=lmax, mmax=mmax, lmax_out=lmax, nspec=nspec)
if alm:
if map2 is not None:
return (cls, alms1, alms2)
else:
return (cls, alms1)
else:
return cls
| (map1, map2=None, nspec=None, lmax=None, mmax=None, iter=3, alm=False, pol=True, use_weights=False, datapath=None, gal_cut=0, use_pixel_weights=False) |
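A usage sketch (illustrative; the toy maps come from `hp.synfast`): auto-spectrum, cross-spectrum, and the variant that also returns the alm.

```python
import numpy as np
import healpy as hp

nside, lmax = 64, 128
cl_in = 1.0 / (np.arange(lmax + 1) + 1.0) ** 2
m1 = hp.synfast(cl_in, nside=nside, lmax=lmax)
m2 = hp.synfast(cl_in, nside=nside, lmax=lmax)

cl_auto = hp.anafast(m1, lmax=lmax)              # auto-spectrum, length lmax + 1
cl_cross = hp.anafast(m1, m2, lmax=lmax)         # cross-spectrum of the two maps
cl1, alm1 = hp.anafast(m1, lmax=lmax, alm=True)  # also return the alm of m1
```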
37,662 | healpy.pixelfunc | ang2pix | ang2pix : nside,theta[rad],phi[rad],nest=False,lonlat=False -> ipix (default:RING)
Parameters
----------
nside : int, scalar or array-like
The healpix nside parameter, must be a power of 2, less than 2**30
theta, phi : float, scalars or array-like
Angular coordinates of a point on the sphere
nest : bool, optional
if True, assume NESTED pixel ordering, otherwise, RING pixel ordering
lonlat : bool
If True, input angles are assumed to be longitude and latitude in degree,
otherwise, they are co-latitude and longitude in radians.
Returns
-------
pix : int or array of int
The healpix pixel numbers. Scalar if all input are scalar, array otherwise.
Usual numpy broadcasting rules apply.
See Also
--------
pix2ang, pix2vec, vec2pix
Examples
--------
Note that some of the test inputs below that are on pixel boundaries
such as theta=pi/2, phi=pi/2, have a tiny value of 1e-15 added to them
to make them reproducible on i386 machines using x87 floating point
instruction set (see https://github.com/healpy/healpy/issues/528).
>>> import healpy as hp
>>> hp.ang2pix(16, np.pi/2, 0)
1440
>>> print(hp.ang2pix(16, [np.pi/2, np.pi/4, np.pi/2, 0, np.pi], [0., np.pi/4, np.pi/2 + 1e-15, 0, 0]))
[1440 427 1520 0 3068]
>>> print(hp.ang2pix(16, np.pi/2, [0, np.pi/2 + 1e-15]))
[1440 1520]
>>> print(hp.ang2pix([1, 2, 4, 8, 16], np.pi/2, 0))
[ 4 12 72 336 1440]
>>> print(hp.ang2pix([1, 2, 4, 8, 16], 0, 0, lonlat=True))
[ 4 12 72 336 1440]
| def ang2pix(nside, theta, phi, nest=False, lonlat=False):
"""ang2pix : nside,theta[rad],phi[rad],nest=False,lonlat=False -> ipix (default:RING)
Parameters
----------
nside : int, scalar or array-like
The healpix nside parameter, must be a power of 2, less than 2**30
theta, phi : float, scalars or array-like
Angular coordinates of a point on the sphere
nest : bool, optional
if True, assume NESTED pixel ordering, otherwise, RING pixel ordering
lonlat : bool
If True, input angles are assumed to be longitude and latitude in degree,
otherwise, they are co-latitude and longitude in radians.
Returns
-------
pix : int or array of int
The healpix pixel numbers. Scalar if all input are scalar, array otherwise.
Usual numpy broadcasting rules apply.
See Also
--------
pix2ang, pix2vec, vec2pix
Examples
--------
Note that some of the test inputs below that are on pixel boundaries
such as theta=pi/2, phi=pi/2, have a tiny value of 1e-15 added to them
to make them reproducible on i386 machines using x87 floating point
instruction set (see https://github.com/healpy/healpy/issues/528).
>>> import healpy as hp
>>> hp.ang2pix(16, np.pi/2, 0)
1440
>>> print(hp.ang2pix(16, [np.pi/2, np.pi/4, np.pi/2, 0, np.pi], [0., np.pi/4, np.pi/2 + 1e-15, 0, 0]))
[1440 427 1520 0 3068]
>>> print(hp.ang2pix(16, np.pi/2, [0, np.pi/2 + 1e-15]))
[1440 1520]
>>> print(hp.ang2pix([1, 2, 4, 8, 16], np.pi/2, 0))
[ 4 12 72 336 1440]
>>> print(hp.ang2pix([1, 2, 4, 8, 16], 0, 0, lonlat=True))
[ 4 12 72 336 1440]
"""
check_nside(nside, nest=nest)
if lonlat:
theta, phi = lonlat2thetaphi(theta, phi)
check_theta_valid(theta)
check_nside(nside, nest=nest)
if nest:
return pixlib._ang2pix_nest(nside, theta, phi)
else:
return pixlib._ang2pix_ring(nside, theta, phi)
| (nside, theta, phi, nest=False, lonlat=False) |
37,663 | healpy.pixelfunc | ang2vec | ang2vec : convert angles to 3D position vector
Parameters
----------
theta : float, scalar or array-like
colatitude in radians measured southward from north pole (in [0,pi]).
phi : float, scalar or array-like
longitude in radians measured eastward (in [0, 2*pi]).
lonlat : bool
If True, input angles are assumed to be longitude and latitude in degree,
otherwise, they are co-latitude and longitude in radians.
Returns
-------
vec : float, array
if theta and phi are vectors, the result is a 2D array with a vector per row
otherwise, it is a 1D array of shape (3,)
See Also
--------
vec2ang, rotator.dir2vec, rotator.vec2dir
| def ang2vec(theta, phi, lonlat=False):
"""ang2vec : convert angles to 3D position vector
Parameters
----------
theta : float, scalar or array-like
colatitude in radians measured southward from north pole (in [0,pi]).
phi : float, scalar or array-like
longitude in radians measured eastward (in [0, 2*pi]).
lonlat : bool
If True, input angles are assumed to be longitude and latitude in degree,
otherwise, they are co-latitude and longitude in radians.
Returns
-------
vec : float, array
if theta and phi are vectors, the result is a 2D array with a vector per row
otherwise, it is a 1D array of shape (3,)
See Also
--------
vec2ang, rotator.dir2vec, rotator.vec2dir
"""
if lonlat:
theta, phi = lonlat2thetaphi(theta, phi)
check_theta_valid(theta)
sintheta = np.sin(theta)
return np.array([sintheta * np.cos(phi), sintheta * np.sin(phi), np.cos(theta)]).T
| (theta, phi, lonlat=False) |
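A short sketch (illustrative); the first two values follow directly from the sine/cosine expressions in the function.

```python
import numpy as np
import healpy as hp

print(hp.ang2vec(0.0, 0.0))                  # [0. 0. 1.], the north pole
print(hp.ang2vec(np.pi / 2, 0.0))            # ~[1, 0, 0] (equator, phi=0), up to rounding
print(hp.ang2vec(45.0, 30.0, lonlat=True))   # lon=45 deg, lat=30 deg input
print(hp.ang2vec([0.0, np.pi / 2], [0.0, np.pi / 2]).shape)  # (2, 3): one vector per row
```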
37,664 | healpy.visufunc | azeqview | Plot a healpix map (given as an array) in Azimuthal equidistant projection
or Lambert azimuthal equal-area projection.
Parameters
----------
map : float, array-like or None
An array containing the map,
supports masked maps, see the `ma` function.
If None, will display a blank map, useful for overplotting.
fig : int or None, optional
The figure number to use. Default: create a new figure
rot : scalar or sequence, optional
Describe the rotation to apply.
In the form (lon, lat, psi) (unit: degrees) : the point at
longitude *lon* and latitude *lat* will be at the center. An additional rotation
of angle *psi* around this direction is applied.
coord : sequence of character, optional
Either one of 'G', 'E' or 'C' to describe the coordinate
system of the map, or a sequence of 2 of these to rotate
the map from the first to the second coordinate system.
unit : str, optional
A text describing the unit of the data. Default: ''
xsize : int, optional
The size of the image. Default: 800
ysize : None or int, optional
The size of the image. Default: None= xsize
reso : float, optional
Resolution (in arcmin). Default: 1.5 arcmin
lamb : bool, optional
If True, plot Lambert azimuthal equal area instead of azimuthal
equidistant. Default: False (az equidistant)
half_sky : bool, optional
Plot only one side of the sphere. Default: False
title : str, optional
The title of the plot. Default: 'Azimuthal equidistant view'
or 'Lambert azimuthal equal-area view' (if lamb is True)
nest : bool, optional
If True, ordering scheme is NESTED. Default: False (RING)
min : float, optional
The minimum range value
max : float, optional
The maximum range value
flip : {'astro', 'geo'}, optional
Defines the convention of projection : 'astro' (default, east towards left, west towards right)
or 'geo' (east towards right, west towards left)
remove_dip : bool, optional
If :const:`True`, remove the dipole+monopole
remove_mono : bool, optional
If :const:`True`, remove the monopole
gal_cut : float, scalar, optional
Symmetric galactic cut for the dipole/monopole fit.
Removes points in latitude range [-gal_cut, +gal_cut]
format : str, optional
The format of the scale label. Default: '%g'
cbar : bool, optional
Display the colorbar. Default: True
notext : bool, optional
If True, no text is printed around the map
norm : {'hist', 'log', None}
Color normalization, hist= histogram equalized color mapping,
log= logarithmic color mapping, default: None (linear color mapping)
cmap : a color map
The colormap to use (see matplotlib.cm)
badcolor : str
Color to use to plot bad values
bgcolor : str
Color to use for background
hold : bool, optional
If True, replace the current Axes by an Equidistant AzimuthalAxes.
use this if you want to have multiple maps on the same
figure. Default: False
sub : int, scalar or sequence, optional
Use only a zone of the current figure (same syntax as subplot).
Default: None
reuse_axes : bool, optional
If True, reuse the current Axes (should be an AzimuthalAxes). This is
useful if you want to overplot with a partially transparent colormap,
such as for plotting a line integral convolution. Default: False
margins : None or sequence, optional
Either None, or a sequence (left,bottom,right,top)
giving the margins on left,bottom,right and top
of the axes. Values are relative to figure (0-1).
Default: None
return_projected_map : bool
if True returns the projected map in a 2d numpy array
alpha : float, array-like or None
An array containing the alpha channel, supports masked maps, see the `ma` function.
If None, no transparency will be applied.
See Also
--------
mollview, gnomview, cartview, orthview
| def azeqview(
map=None,
fig=None,
rot=None,
zat=None,
coord=None,
unit="",
xsize=800,
ysize=None,
reso=1.5,
lamb=False,
half_sky=False,
title=None,
nest=False,
remove_dip=False,
remove_mono=False,
gal_cut=0,
min=None,
max=None,
flip="astro",
format="%.3g",
cbar=True,
cmap=None,
badcolor="gray",
bgcolor="white",
norm=None,
aspect=None,
hold=False,
sub=None,
reuse_axes=False,
margins=None,
notext=False,
return_projected_map=False,
alpha=None,
):
"""Plot a healpix map (given as an array) in Azimuthal equidistant projection
or Lambert azimuthal equal-area projection.
Parameters
----------
map : float, array-like or None
An array containing the map,
supports masked maps, see the `ma` function.
If None, will display a blank map, useful for overplotting.
fig : int or None, optional
The figure number to use. Default: create a new figure
rot : scalar or sequence, optional
Describe the rotation to apply.
In the form (lon, lat, psi) (unit: degrees) : the point at
longitude *lon* and latitude *lat* will be at the center. An additional rotation
of angle *psi* around this direction is applied.
coord : sequence of character, optional
Either one of 'G', 'E' or 'C' to describe the coordinate
system of the map, or a sequence of 2 of these to rotate
the map from the first to the second coordinate system.
unit : str, optional
A text describing the unit of the data. Default: ''
xsize : int, optional
The size of the image. Default: 800
ysize : None or int, optional
The size of the image. Default: None (same as xsize)
reso : float, optional
Resolution (in arcmin). Default: 1.5 arcmin
lamb : bool, optional
If True, plot Lambert azimuthal equal area instead of azimuthal
equidistant. Default: False (az equidistant)
half_sky : bool, optional
Plot only one side of the sphere. Default: False
title : str, optional
The title of the plot. Default: 'Azimuthal equidistant view'
or 'Lambert azimuthal equal-area view' (if lamb is True)
nest : bool, optional
If True, ordering scheme is NESTED. Default: False (RING)
min : float, optional
The minimum range value
max : float, optional
The maximum range value
flip : {'astro', 'geo'}, optional
Defines the convention of projection : 'astro' (default, east towards left, west towards right)
or 'geo' (east towards right, west towards left)
remove_dip : bool, optional
If :const:`True`, remove the dipole+monopole
remove_mono : bool, optional
If :const:`True`, remove the monopole
gal_cut : float, scalar, optional
Symmetric galactic cut for the dipole/monopole fit.
Removes points in latitude range [-gal_cut, +gal_cut]
format : str, optional
The format of the scale label. Default: '%.3g'
cbar : bool, optional
Display the colorbar. Default: True
notext : bool, optional
If True, no text is printed around the map
norm : {'hist', 'log', None}
Color normalization, hist= histogram equalized color mapping,
log= logarithmic color mapping, default: None (linear color mapping)
cmap : a color map
The colormap to use (see matplotlib.cm)
badcolor : str
Color to use to plot bad values
bgcolor : str
Color to use for background
hold : bool, optional
If True, replace the current Axes by an Equidistant AzimuthalAxes.
Use this if you want to have multiple maps on the same
figure. Default: False
sub : int, scalar or sequence, optional
Use only a zone of the current figure (same syntax as subplot).
Default: None
reuse_axes : bool, optional
If True, reuse the current Axes (should be an AzimuthalAxes). This is
useful if you want to overplot with a partially transparent colormap,
such as for plotting a line integral convolution. Default: False
margins : None or sequence, optional
Either None, or a sequence (left,bottom,right,top)
giving the margins on left,bottom,right and top
of the axes. Values are relative to figure (0-1).
Default: None
return_projected_map : bool
if True returns the projected map in a 2d numpy array
alpha : float, array-like or None
An array containing the alpha channel, supports masked maps, see the `ma` function.
If None, no transparency will be applied.
See Also
--------
mollview, gnomview, cartview, orthview
"""
# Create the figure
import pylab
if map is None:
map = np.zeros(12) + np.inf
cbar = False
# Ensure that the nside is valid
nside = pixelfunc.get_nside(map)
pixelfunc.check_nside(nside, nest=nest)
if not (hold or sub or reuse_axes):
f = pylab.figure(fig, figsize=(8.5, 5.4))
if not margins:
margins = (0.02, 0.05, 0.02, 0.05)
extent = (0.0, 0.0, 1.0, 1.0)
elif hold:
f = pylab.gcf()
left, bottom, right, top = np.array(f.gca().get_position()).ravel()
if not margins:
margins = (0.0, 0.0, 0.0, 0.0)
extent = (left, bottom, right - left, top - bottom)
f.delaxes(f.gca())
elif reuse_axes:
f = pylab.gcf()
else: # using subplot syntax
f = pylab.gcf()
if hasattr(sub, "__len__"):
nrows, ncols, idx = sub
else:
nrows, ncols, idx = sub // 100, (sub % 100) // 10, (sub % 10)
if idx < 1 or idx > ncols * nrows:
raise ValueError("Wrong values for sub: %d, %d, %d" % (nrows, ncols, idx))
c, r = (idx - 1) % ncols, (idx - 1) // ncols
if not margins:
margins = (0.01, 0.0, 0.0, 0.02)
extent = (
c * 1.0 / ncols,
1.0 - (r + 1) * 1.0 / nrows,
1.0 / ncols,
1.0 / nrows,
)
if not reuse_axes:
extent = (
extent[0] + margins[0],
extent[1] + margins[1],
extent[2] - margins[2] - margins[0],
extent[3] - margins[3] - margins[1],
)
# Starting to draw : turn interactive off
wasinteractive = pylab.isinteractive()
pylab.ioff()
try:
if reuse_axes:
ax = f.gca()
else:
ax = PA.HpxAzimuthalAxes(
f, extent, coord=coord, rot=rot, format=format, flipconv=flip
)
f.add_axes(ax)
if remove_dip:
map = pixelfunc.remove_dipole(
map, gal_cut=gal_cut, nest=nest, copy=True
)
elif remove_mono:
map = pixelfunc.remove_monopole(
map, gal_cut=gal_cut, nest=nest, copy=True
)
img = ax.projmap(
map,
nest=nest,
xsize=xsize,
ysize=ysize,
reso=reso,
lamb=lamb,
half_sky=half_sky,
coord=coord,
vmin=min,
vmax=max,
cmap=cmap,
badcolor=badcolor,
bgcolor=bgcolor,
norm=norm,
alpha=alpha,
)
if cbar:
im = ax.get_images()[0]
b = im.norm.inverse(np.linspace(0, 1, im.cmap.N + 1))
v = np.linspace(im.norm.vmin, im.norm.vmax, im.cmap.N)
mappable = plt.cm.ScalarMappable(
norm=matplotlib.colors.Normalize(vmin=im.norm.vmin, vmax=im.norm.vmax),
cmap=cmap,
)
if matplotlib.__version__ >= "0.91.0":
cb = f.colorbar(
mappable,
ax=ax,
orientation="horizontal",
shrink=0.5,
aspect=25,
ticks=PA.BoundaryLocator(),
pad=0.05,
fraction=0.1,
boundaries=b,
values=v,
format=format,
)
else:
# for older matplotlib versions, no ax kwarg
cb = f.colorbar(
mappable,
orientation="horizontal",
shrink=0.5,
aspect=25,
ticks=PA.BoundaryLocator(),
pad=0.05,
fraction=0.1,
boundaries=b,
values=v,
format=format,
)
cb.solids.set_rasterized(True)
if title is None:
if lamb:
title = "Lambert azimuthal equal-area view"
else:
title = "Azimuthal equidistant view"
ax.set_title(title)
if not notext:
ax.text(
0.86,
0.05,
ax.proj.coordsysstr,
fontsize=14,
fontweight="bold",
transform=ax.transAxes,
)
if cbar:
cb.ax.text(
0.5,
-1.0,
unit,
fontsize=14,
transform=cb.ax.transAxes,
ha="center",
va="center",
)
f.sca(ax)
finally:
pylab.draw()
if wasinteractive:
pylab.ion()
# pylab.show()
if return_projected_map:
return img
| (map=None, fig=None, rot=None, zat=None, coord=None, unit='', xsize=800, ysize=None, reso=1.5, lamb=False, half_sky=False, title=None, nest=False, remove_dip=False, remove_mono=False, gal_cut=0, min=None, max=None, flip='astro', format='%.3g', cbar=True, cmap=None, badcolor='gray', bgcolor='white', norm=None, aspect=None, hold=False, sub=None, reuse_axes=False, margins=None, notext=False, return_projected_map=False, alpha=None) |
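A minimal usage sketch of azeqview, assuming healpy is importable as hp and matplotlib is available; the map contents, nside and pointing below are illustrative placeholders, not values taken from the record.

import numpy as np
import healpy as hp
import matplotlib.pyplot as plt

nside = 32                                        # illustrative resolution
m = np.arange(hp.nside2npix(nside), dtype=float)  # synthetic RING-ordered map
# Center the azimuthal equidistant view on the north pole; use a coarse
# resolution (arcmin per pixel) so the low-nside structure stays visible.
hp.azeqview(m, rot=(0.0, 90.0, 0.0), reso=30.0, unit="arbitrary",
            title="azeqview demo")
plt.show()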
37,665 | healpy.sphtfunc | beam2bl | Computes a transfer (or window) function b(l) in spherical
harmonic space from its circular beam profile b(theta) in real
space.
Parameters
----------
beam : array
Circular beam profile b(theta).
theta : array
Radius at which the beam profile is given. Has to be given
in radians with same size as beam.
lmax : integer
Maximum multipole moment at which to compute b(l).
Returns
-------
bl : array
Beam window function b(l).
| def beam2bl(beam, theta, lmax):
"""Computes a transfer (or window) function b(l) in spherical
harmonic space from its circular beam profile b(theta) in real
space.
Parameters
----------
beam : array
Circular beam profile b(theta).
theta : array
Radius at which the beam profile is given. Has to be given
in radians with same size as beam.
lmax : integer
Maximum multipole moment at which to compute b(l).
Returns
-------
bl : array
Beam window function b(l).
"""
nx = len(theta)
nb = len(beam)
if nb != nx:
raise ValueError("Beam and theta must have same size!")
x = np.cos(theta)
st = np.sin(theta)
window = np.zeros(lmax + 1)
p0 = np.ones(nx)
p1 = np.copy(x)
window[0] = trapz(beam * p0 * st, theta)
window[1] = trapz(beam * p1 * st, theta)
for l in np.arange(2, lmax + 1):
p2 = x * p1 * (2 * l - 1) / l - p0 * (l - 1) / l
window[l] = trapz(beam * p2 * st, theta)
p0 = p1
p1 = p2
window *= 2 * np.pi
return window
| (beam, theta, lmax) |
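A usage sketch for beam2bl, assuming healpy is installed; the Gaussian profile and lmax are illustrative, and the comparison against gauss_beam only holds approximately after normalizing b(0) to 1.

import numpy as np
import healpy as hp
from healpy.sphtfunc import beam2bl

fwhm = np.radians(0.5)                        # illustrative 0.5 deg FWHM
sigma = fwhm / np.sqrt(8.0 * np.log(2.0))
theta = np.linspace(0.0, 5.0 * fwhm, 2000)    # radii in radians
beam = np.exp(-0.5 * (theta / sigma) ** 2)    # circular Gaussian profile b(theta)
bl = beam2bl(beam, theta, lmax=512)           # window function b(l)
bl /= bl[0]                                   # normalize so that b(0) = 1
bl_ref = hp.gauss_beam(fwhm, lmax=512)        # analytic reference for a Gaussian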
37,666 | healpy.sphtfunc | bl2beam | Computes a circular beam profile b(theta) in real space from
its transfer (or window) function b(l) in spherical harmonic space.
Parameters
----------
bl : array
Window function b(l) of the beam.
theta : array
Radius at which the beam profile will be computed.
Has to be given in radians.
Returns
-------
beam : array
(Circular) beam profile b(theta).
| def bl2beam(bl, theta):
"""Computes a circular beam profile b(theta) in real space from
its transfer (or window) function b(l) in spherical harmonic space.
Parameters
----------
bl : array
Window function b(l) of the beam.
theta : array
Radius at which the beam profile will be computed.
Has to be given in radians.
Returns
-------
beam : array
(Circular) beam profile b(theta).
"""
lmax = len(bl) - 1
nx = len(theta)
x = np.cos(theta)
p0 = np.zeros(nx) + 1
p1 = x
beam = bl[0] * p0 + bl[1] * p1 * 3
for l in np.arange(2, lmax + 1):
p2 = x * p1 * (2 * l - 1) / l - p0 * (l - 1) / l
p0 = p1
p1 = p2
beam += bl[l] * p2 * (2 * l + 1)
beam /= 4 * np.pi
return beam
| (bl, theta) |
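A usage sketch for bl2beam, assuming healpy is installed; it recovers the real-space profile of a Gaussian window function produced by gauss_beam, with illustrative width and sampling.

import numpy as np
import healpy as hp
from healpy.sphtfunc import bl2beam

fwhm = np.radians(0.5)                        # illustrative beam width
bl = hp.gauss_beam(fwhm, lmax=1024)           # Gaussian window function b(l)
theta = np.linspace(0.0, 5.0 * fwhm, 1000)    # radii in radians
beam = bl2beam(bl, theta)                     # real-space profile b(theta)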
37,667 | healpy.sphtfunc | blm_gauss | Computes spherical harmonic coefficients of a circular Gaussian beam
pointing towards the North Pole
See an example of usage
`in the documentation <https://healpy.readthedocs.io/en/latest/blm_gauss_plot.html>`_
Parameters
----------
fwhm : float, scalar
desired FWHM of the beam, in radians
lmax : int, scalar
maximum l multipole moment to compute
pol : bool, scalar
if True, E and B coefficients will also be computed
Returns
-------
blm : array with dtype numpy.complex128
lmax will be as specified
mmax is 0 for pol==False, else 2
| def blm_gauss(fwhm, lmax, pol=False):
"""Computes spherical harmonic coefficients of a circular Gaussian beam
pointing towards the North Pole
See an example of usage
`in the documentation <https://healpy.readthedocs.io/en/latest/blm_gauss_plot.html>`_
Parameters
----------
fwhm : float, scalar
desired FWHM of the beam, in radians
lmax : int, scalar
maximum l multipole moment to compute
pol : bool, scalar
if True, E and B coefficients will also be computed
Returns
-------
blm : array with dtype numpy.complex128
lmax will be as specified
mmax is 0 for pol==False, else 2
"""
fwhm = float(fwhm)
lmax = int(lmax)
pol = bool(pol)
mmax = 2 if pol else 0
ncomp = 3 if pol else 1
nval = Alm.getsize(lmax, mmax)
if mmax > lmax:
raise ValueError("lmax value too small")
blm = np.zeros((ncomp, nval), dtype=np.complex128)
sigmasq = fwhm * fwhm / (8 * np.log(2.0))
for l in range(0, lmax + 1):
blm[0, Alm.getidx(lmax, l, 0)] = np.sqrt((2 * l + 1) / (4.0 * np.pi)) * np.exp(
-0.5 * sigmasq * l * l
)
if pol:
for l in range(2, lmax + 1):
blm[1, Alm.getidx(lmax, l, 2)] = np.sqrt(
(2 * l + 1) / (32 * np.pi)
) * np.exp(-0.5 * sigmasq * l * l)
blm[2] = 1j * blm[1]
return blm
| (fwhm, lmax, pol=False) |
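A usage sketch for blm_gauss, assuming healpy is installed; the FWHM and lmax are illustrative. With pol=True the result has three components (I, E, B) and mmax=2, as the record's code shows.

import numpy as np
from healpy.sphtfunc import blm_gauss, Alm

lmax = 128                                      # illustrative band limit
blm = blm_gauss(np.radians(0.5), lmax, pol=True)
print(blm.shape)                                # (3, Alm.getsize(lmax, mmax=2))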
37,668 | healpy.visufunc | cartview | Plot a healpix map (given as an array) in Cartesian projection.
Parameters
----------
map : float, array-like or None
An array containing the map,
supports masked maps, see the `ma` function.
If None, will display a blank map, useful for overplotting.
fig : int or None, optional
The figure number to use. Default: create a new figure
rot : scalar or sequence, optional
Describe the rotation to apply.
In the form (lon, lat, psi) (unit: degrees) : the point at
longitude *lon* and latitude *lat* will be at the center. An additional rotation
of angle *psi* around this direction is applied.
coord : sequence of character, optional
Either one of 'G', 'E' or 'C' to describe the coordinate
system of the map, or a sequence of 2 of these to rotate
the map from the first to the second coordinate system.
unit : str, optional
A text describing the unit of the data. Default: ''
xsize : int, optional
The size of the image. Default: 800
lonra : sequence, optional
Range in longitude. Default: [-180,180]
latra : sequence, optional
Range in latitude. Default: [-90,90]
title : str, optional
The title of the plot. Default: 'Cartesian view'
nest : bool, optional
If True, ordering scheme is NESTED. Default: False (RING)
min : float, optional
The minimum range value
max : float, optional
The maximum range value
flip : {'astro', 'geo'}, optional
Defines the convention of projection : 'astro' (default, east towards left, west towards right)
or 'geo' (east towards right, west towards left)
remove_dip : bool, optional
If :const:`True`, remove the dipole+monopole
remove_mono : bool, optional
If :const:`True`, remove the monopole
gal_cut : float, scalar, optional
Symmetric galactic cut for the dipole/monopole fit.
Removes points in latitude range [-gal_cut, +gal_cut]
format : str, optional
The format of the scale label. Default: '%.3g'
cbar : bool, optional
Display the colorbar. Default: True
notext : bool, optional
If True, no text is printed around the map
norm : {'hist', 'log', None}, optional
Color normalization, hist= histogram equalized color mapping,
log= logarithmic color mapping, default: None (linear color mapping)
cmap : a color map
The colormap to use (see matplotlib.cm)
badcolor : str
Color to use to plot bad values
bgcolor : str
Color to use for background
hold : bool, optional
If True, replace the current Axes by a CartesianAxes.
Use this if you want to have multiple maps on the same
figure. Default: False
sub : int, scalar or sequence, optional
Use only a zone of the current figure (same syntax as subplot).
Default: None
reuse_axes : bool, optional
If True, reuse the current Axes (should be a CartesianAxes). This is
useful if you want to overplot with a partially transparent colormap,
such as for plotting a line integral convolution. Default: False
margins : None or sequence, optional
Either None, or a sequence (left,bottom,right,top)
giving the margins on left,bottom,right and top
of the axes. Values are relative to figure (0-1).
Default: None
return_projected_map : bool
if True returns the projected map in a 2d numpy array
alpha : float, array-like or None
An array containing the alpha channel, supports masked maps, see the `ma` function.
If None, no transparency will be applied.
See Also
--------
mollview, gnomview, orthview, azeqview
| def cartview(
map=None,
fig=None,
rot=None,
zat=None,
coord=None,
unit="",
xsize=800,
ysize=None,
lonra=None,
latra=None,
title="Cartesian view",
nest=False,
remove_dip=False,
remove_mono=False,
gal_cut=0,
min=None,
max=None,
flip="astro",
format="%.3g",
cbar=True,
cmap=None,
badcolor="gray",
bgcolor="white",
norm=None,
aspect=None,
hold=False,
sub=None,
reuse_axes=False,
margins=None,
notext=False,
return_projected_map=False,
alpha=None,
):
"""Plot a healpix map (given as an array) in Cartesian projection.
Parameters
----------
map : float, array-like or None
An array containing the map,
supports masked maps, see the `ma` function.
If None, will display a blank map, useful for overplotting.
fig : int or None, optional
The figure number to use. Default: create a new figure
rot : scalar or sequence, optional
Describe the rotation to apply.
In the form (lon, lat, psi) (unit: degrees) : the point at
longitude *lon* and latitude *lat* will be at the center. An additional rotation
of angle *psi* around this direction is applied.
coord : sequence of character, optional
Either one of 'G', 'E' or 'C' to describe the coordinate
system of the map, or a sequence of 2 of these to rotate
the map from the first to the second coordinate system.
unit : str, optional
A text describing the unit of the data. Default: ''
xsize : int, optional
The size of the image. Default: 800
lonra : sequence, optional
Range in longitude. Default: [-180,180]
latra : sequence, optional
Range in latitude. Default: [-90,90]
title : str, optional
The title of the plot. Default: 'Cartesian view'
nest : bool, optional
If True, ordering scheme is NESTED. Default: False (RING)
min : float, optional
The minimum range value
max : float, optional
The maximum range value
flip : {'astro', 'geo'}, optional
Defines the convention of projection : 'astro' (default, east towards left, west towards right)
or 'geo' (east towards right, west towards left)
remove_dip : bool, optional
If :const:`True`, remove the dipole+monopole
remove_mono : bool, optional
If :const:`True`, remove the monopole
gal_cut : float, scalar, optional
Symmetric galactic cut for the dipole/monopole fit.
Removes points in latitude range [-gal_cut, +gal_cut]
format : str, optional
The format of the scale label. Default: '%.3g'
cbar : bool, optional
Display the colorbar. Default: True
notext : bool, optional
If True, no text is printed around the map
norm : {'hist', 'log', None}, optional
Color normalization, hist= histogram equalized color mapping,
log= logarithmic color mapping, default: None (linear color mapping)
cmap : a color map
The colormap to use (see matplotlib.cm)
badcolor : str
Color to use to plot bad values
bgcolor : str
Color to use for background
hold : bool, optional
If True, replace the current Axes by a CartesianAxes.
Use this if you want to have multiple maps on the same
figure. Default: False
sub : int, scalar or sequence, optional
Use only a zone of the current figure (same syntax as subplot).
Default: None
reuse_axes : bool, optional
If True, reuse the current Axes (should be a CartesianAxes). This is
useful if you want to overplot with a partially transparent colormap,
such as for plotting a line integral convolution. Default: False
margins : None or sequence, optional
Either None, or a sequence (left,bottom,right,top)
giving the margins on left,bottom,right and top
of the axes. Values are relative to figure (0-1).
Default: None
return_projected_map : bool
if True returns the projected map in a 2d numpy array
alpha : float, array-like or None
An array containing the alpha channel, supports masked maps, see the `ma` function.
If None, no transparency will be applied.
See Also
--------
mollview, gnomview, orthview, azeqview
"""
import pylab
if map is None:
map = np.zeros(12) + np.inf
cbar = False
# Ensure that the nside is valid
nside = pixelfunc.get_nside(map)
pixelfunc.check_nside(nside, nest=nest)
if not (hold or sub or reuse_axes):
f = pylab.figure(fig, figsize=(8.5, 5.4))
if not margins:
margins = (0.075, 0.05, 0.075, 0.05)
extent = (0.0, 0.0, 1.0, 1.0)
elif hold:
f = pylab.gcf()
left, bottom, right, top = np.array(pylab.gca().get_position()).ravel()
if not margins:
margins = (0.0, 0.0, 0.0, 0.0)
extent = (left, bottom, right - left, top - bottom)
f.delaxes(pylab.gca())
elif reuse_axes:
f = pylab.gcf()
else: # using subplot syntax
f = pylab.gcf()
if hasattr(sub, "__len__"):
nrows, ncols, idx = sub
else:
nrows, ncols, idx = sub // 100, (sub % 100) // 10, (sub % 10)
if idx < 1 or idx > ncols * nrows:
raise ValueError("Wrong values for sub: %d, %d, %d" % (nrows, ncols, idx))
c, r = (idx - 1) % ncols, (idx - 1) // ncols
if not margins:
margins = (0.01, 0.0, 0.0, 0.02)
extent = (
c * 1.0 / ncols,
1.0 - (r + 1) * 1.0 / nrows,
1.0 / ncols,
1.0 / nrows,
)
if not reuse_axes:
extent = (
extent[0] + margins[0],
extent[1] + margins[1],
extent[2] - margins[2] - margins[0],
extent[3] - margins[3] - margins[1],
)
# f=pylab.figure(fig,figsize=(5.5,6))
# Starting to draw : turn interactive off
wasinteractive = pylab.isinteractive()
pylab.ioff()
try:
map = pixelfunc.ma_to_array(map)
if zat and rot:
raise ValueError("Only give rot or zat, not both")
if zat:
rot = np.array(zat, dtype=np.float64)
rot.resize(3)
rot[1] -= 90
if reuse_axes:
ax = f.gca()
else:
ax = PA.HpxCartesianAxes(
f, extent, coord=coord, rot=rot, format=format, flipconv=flip
)
f.add_axes(ax)
if remove_dip:
map = pixelfunc.remove_dipole(map, gal_cut=gal_cut, nest=nest, copy=True)
elif remove_mono:
map = pixelfunc.remove_monopole(map, gal_cut=gal_cut, nest=nest, copy=True)
img = ax.projmap(
map,
nest=nest,
coord=coord,
vmin=min,
vmax=max,
xsize=xsize,
ysize=ysize,
lonra=lonra,
latra=latra,
cmap=cmap,
badcolor=badcolor,
bgcolor=bgcolor,
norm=norm,
aspect=aspect,
alpha=alpha,
)
if cbar:
im = ax.get_images()[0]
b = im.norm.inverse(np.linspace(0, 1, im.cmap.N + 1))
v = np.linspace(im.norm.vmin, im.norm.vmax, im.cmap.N)
mappable = plt.cm.ScalarMappable(
norm=matplotlib.colors.Normalize(vmin=im.norm.vmin, vmax=im.norm.vmax),
cmap=cmap,
)
if matplotlib.__version__ >= "0.91.0":
cb = f.colorbar(
mappable,
ax=ax,
orientation="horizontal",
shrink=0.5,
aspect=25,
ticks=PA.BoundaryLocator(),
pad=0.08,
fraction=0.1,
boundaries=b,
values=v,
format=format,
)
else:
cb = f.colorbar(
mappable,
orientation="horizontal",
shrink=0.5,
aspect=25,
ticks=PA.BoundaryLocator(),
pad=0.08,
fraction=0.1,
boundaries=b,
values=v,
format=format,
)
cb.solids.set_rasterized(True)
ax.set_title(title)
if not notext:
ax.text(
-0.07,
0.6,
ax.proj.coordsysstr,
fontsize=14,
fontweight="bold",
rotation=90,
transform=ax.transAxes,
)
if cbar:
cb.ax.text(
1.05,
0.30,
unit,
fontsize=14,
fontweight="bold",
transform=cb.ax.transAxes,
ha="left",
va="center",
)
f.sca(ax)
finally:
if wasinteractive:
pylab.ion()
pylab.draw()
# pylab.show()
if return_projected_map:
return img
| (map=None, fig=None, rot=None, zat=None, coord=None, unit='', xsize=800, ysize=None, lonra=None, latra=None, title='Cartesian view', nest=False, remove_dip=False, remove_mono=False, gal_cut=0, min=None, max=None, flip='astro', format='%.3g', cbar=True, cmap=None, badcolor='gray', bgcolor='white', norm=None, aspect=None, hold=False, sub=None, reuse_axes=False, margins=None, notext=False, return_projected_map=False, alpha=None) |
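A minimal usage sketch of cartview on a synthetic map, assuming healpy is importable as hp; the nside and longitude/latitude ranges are illustrative.

import numpy as np
import healpy as hp
import matplotlib.pyplot as plt

m = np.arange(hp.nside2npix(64), dtype=float)        # synthetic RING-ordered map
hp.cartview(m, lonra=[-30, 30], latra=[-15, 15],     # zoom on a patch
            unit="arbitrary", title="cartview demo")
hp.graticule()                                       # optional coordinate grid
plt.show()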
37,669 | healpy.sphtfunc | check_max_nside | Checks whether the nside used in a certain operation does not exceed the
maximum supported nside. The maximum nside is saved in MAX_NSIDE.
Parameters
----------
nside : int
nside of the map that is being checked
| def check_max_nside(nside):
"""Checks whether the nside used in a certain operation does not exceed the
maximum supported nside. The maximum nside is saved in MAX_NSIDE.
Parameters
----------
nside : int
nside of the map that is being checked
"""
if nside > MAX_NSIDE:
raise ValueError(
"nside {nside} of map cannot be larger than "
"MAX_NSIDE {max_nside}".format(nside=nside, max_nside=MAX_NSIDE)
)
return 0
| (nside) |
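A usage sketch for check_max_nside, assuming the import path shown in the record; 2048 is an illustrative value expected to be well below MAX_NSIDE.

from healpy.sphtfunc import check_max_nside

check_max_nside(2048)       # returns 0 when the nside is supported
# A value larger than MAX_NSIDE would raise ValueError instead.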
37,671 | healpy.visufunc | delgraticules | Delete all graticules previously created on the Axes.
See Also
--------
graticule
| def delgraticules():
"""Delete all graticules previously created on the Axes.
See Also
--------
graticule
"""
import pylab
f = pylab.gcf()
wasinteractive = pylab.isinteractive()
pylab.ioff()
try:
for ax in f.get_axes():
if isinstance(ax, PA.SphericalProjAxes):
ax.delgraticules()
finally:
pylab.draw()
if wasinteractive:
pylab.ion()
# pylab.show()
| () |
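A usage sketch for delgraticules, assuming healpy and matplotlib are available; the map and the graticule spacing are illustrative.

import numpy as np
import healpy as hp
from healpy.visufunc import delgraticules

m = np.arange(hp.nside2npix(16), dtype=float)   # synthetic map
hp.mollview(m, title="graticule demo")
hp.graticule(dpar=30, dmer=30)                  # draw a 30-degree grid
delgraticules()                                 # remove all graticules again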
37,672 | healpy.utils.deprecation | deprecated |
Used to mark a function or class as deprecated.
Reuses Astropy's deprecated decorator.
Check arguments and usage in `~astropy.utils.decorator.deprecated`
Parameters
----------
since : str
The release at which this API became deprecated. This is required.
| def deprecated(since, **kwargs):
"""
Used to mark a function or class as deprecated.
Reuses Astropy's deprecated decorator.
Check arguments and usage in `~astropy.utils.decorator.deprecated`
Parameters
----------
since : str
The release at which this API became deprecated. This is required.
"""
from astropy.utils import deprecated
kwargs["warning_type"] = HealpyDeprecationWarning
return deprecated(since, **kwargs)
| (since, **kwargs) |
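A usage sketch for the deprecated decorator, assuming the import path shown in the record; old_helper, new_helper, and the version string are hypothetical names used only for illustration.

from healpy.utils.deprecation import deprecated

@deprecated("1.15.0", alternative="new_helper")   # hypothetical release and replacement
def old_helper(x):
    """Kept only for backward compatibility (hypothetical example)."""
    return 2 * x

old_helper(3)   # emits a HealpyDeprecationWarning via Astropy's machinery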
37,673 | healpy.rotator | dir2vec | Transform a direction theta,phi to a unit vector.
Parameters
----------
theta : float, scalar or array-like
The angle theta (scalar or shape (N,))
or both angles (scalar or shape (2, N)) if phi is not given.
phi : float, scalar or array-like, optional
The angle phi (scalar or shape (N,)).
lonlat : bool
If True, input angles are assumed to be longitude and latitude in degrees,
otherwise, they are co-latitude and longitude in radians.
Returns
-------
vec : array
The vector(s) corresponding to given angles, shape is (3,) or (3, N).
See Also
--------
:func:`vec2dir`, :func:`pixelfunc.ang2vec`, :func:`pixelfunc.vec2ang`
| def dir2vec(theta, phi=None, lonlat=False):
"""Transform a direction theta,phi to a unit vector.
Parameters
----------
theta : float, scalar or array-like
The angle theta (scalar or shape (N,))
or both angles (scalar or shape (2, N)) if phi is not given.
phi : float, scalar or array-like, optional
The angle phi (scalar or shape (N,)).
lonlat : bool
If True, input angles are assumed to be longitude and latitude in degrees,
otherwise, they are co-latitude and longitude in radians.
Returns
-------
vec : array
The vector(s) corresponding to given angles, shape is (3,) or (3, N).
See Also
--------
:func:`vec2dir`, :func:`pixelfunc.ang2vec`, :func:`pixelfunc.vec2ang`
"""
if phi is None:
theta, phi = theta
if lonlat:
lon, lat = theta, phi
theta, phi = np.pi / 2.0 - np.radians(lat), np.radians(lon)
ct, st, cp, sp = np.cos(theta), np.sin(theta), np.cos(phi), np.sin(phi)
vec = np.empty((3, ct.size), np.float64)
vec[0, :] = st * cp
vec[1, :] = st * sp
vec[2, :] = ct
return vec.squeeze()
| (theta, phi=None, lonlat=False) |
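A usage sketch for dir2vec, assuming the import path shown in the record; the angles are illustrative.

import numpy as np
from healpy.rotator import dir2vec

vec = dir2vec(np.pi / 2, 0.0)              # co-latitude/longitude in radians -> ~[1, 0, 0]
vec_ll = dir2vec(45.0, 30.0, lonlat=True)  # longitude/latitude in degrees
print(vec, vec_ll)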