index
int64 0
731k
| package
stringlengths 2
98
⌀ | name
stringlengths 1
76
| docstring
stringlengths 0
281k
⌀ | code
stringlengths 4
1.07M
⌀ | signature
stringlengths 2
42.8k
⌀ |
---|---|---|---|---|---|
36,973 | typing | __repr__ | null | def __repr__(self):
return f'{self.__module__}.{self.__qualname__}'
| (self) |
36,975 | docutils.frontend | OptionParser |
Settings parser for command-line and library use.
The `settings_spec` specification here and in other Docutils components
are merged to build the set of command-line options and runtime settings
for this process.
Common settings (defined below) and component-specific settings must not
conflict. Short options are reserved for common settings, and components
are restricted to using long options.
Deprecated.
Will be replaced by a subclass of `argparse.ArgumentParser`.
| class OptionParser(optparse.OptionParser, docutils.SettingsSpec):
"""
Settings parser for command-line and library use.
The `settings_spec` specification here and in other Docutils components
are merged to build the set of command-line options and runtime settings
for this process.
Common settings (defined below) and component-specific settings must not
conflict. Short options are reserved for common settings, and components
are restricted to using long options.
Deprecated.
Will be replaced by a subclass of `argparse.ArgumentParser`.
"""
standard_config_files = [
'/etc/docutils.conf', # system-wide
'./docutils.conf', # project-specific
'~/.docutils'] # user-specific
"""Docutils configuration files, using ConfigParser syntax.
Filenames will be tilde-expanded later. Later files override earlier ones.
"""
threshold_choices = 'info 1 warning 2 error 3 severe 4 none 5'.split()
"""Possible inputs for for --report and --halt threshold values."""
thresholds = {'info': 1, 'warning': 2, 'error': 3, 'severe': 4, 'none': 5}
"""Lookup table for --report and --halt threshold values."""
booleans = {'1': True, 'on': True, 'yes': True, 'true': True, '0': False,
'off': False, 'no': False, 'false': False, '': False}
"""Lookup table for boolean configuration file settings."""
default_error_encoding = (getattr(sys.stderr, 'encoding', None)
or io._locale_encoding # noqa
or 'ascii')
default_error_encoding_error_handler = 'backslashreplace'
settings_spec = (
'General Docutils Options',
None,
(('Output destination name. Obsoletes the <destination> '
'positional argument. Default: None (stdout).',
['--output'], {'metavar': '<destination>'}),
('Specify the document title as metadata.',
['--title'], {'metavar': '<title>'}),
('Include a "Generated by Docutils" credit and link.',
['--generator', '-g'], {'action': 'store_true',
'validator': validate_boolean}),
('Do not include a generator credit.',
['--no-generator'], {'action': 'store_false', 'dest': 'generator'}),
('Include the date at the end of the document (UTC).',
['--date', '-d'], {'action': 'store_const', 'const': '%Y-%m-%d',
'dest': 'datestamp'}),
('Include the time & date (UTC).',
['--time', '-t'], {'action': 'store_const',
'const': '%Y-%m-%d %H:%M UTC',
'dest': 'datestamp'}),
('Do not include a datestamp of any kind.',
['--no-datestamp'], {'action': 'store_const', 'const': None,
'dest': 'datestamp'}),
('Base directory for absolute paths when reading '
'from the local filesystem. Default "/".',
['--root-prefix'],
{'default': '/', 'metavar': '<path>'}),
('Include a "View document source" link.',
['--source-link', '-s'], {'action': 'store_true',
'validator': validate_boolean}),
('Use <URL> for a source link; implies --source-link.',
['--source-url'], {'metavar': '<URL>'}),
('Do not include a "View document source" link.',
['--no-source-link'],
{'action': 'callback', 'callback': store_multiple,
'callback_args': ('source_link', 'source_url')}),
('Link from section headers to TOC entries. (default)',
['--toc-entry-backlinks'],
{'dest': 'toc_backlinks', 'action': 'store_const', 'const': 'entry',
'default': 'entry'}),
('Link from section headers to the top of the TOC.',
['--toc-top-backlinks'],
{'dest': 'toc_backlinks', 'action': 'store_const', 'const': 'top'}),
('Disable backlinks to the table of contents.',
['--no-toc-backlinks'],
{'dest': 'toc_backlinks', 'action': 'store_false'}),
('Link from footnotes/citations to references. (default)',
['--footnote-backlinks'],
{'action': 'store_true', 'default': True,
'validator': validate_boolean}),
('Disable backlinks from footnotes and citations.',
['--no-footnote-backlinks'],
{'dest': 'footnote_backlinks', 'action': 'store_false'}),
('Enable section numbering by Docutils. (default)',
['--section-numbering'],
{'action': 'store_true', 'dest': 'sectnum_xform',
'default': True, 'validator': validate_boolean}),
('Disable section numbering by Docutils.',
['--no-section-numbering'],
{'action': 'store_false', 'dest': 'sectnum_xform'}),
('Remove comment elements from the document tree.',
['--strip-comments'],
{'action': 'store_true', 'validator': validate_boolean}),
('Leave comment elements in the document tree. (default)',
['--leave-comments'],
{'action': 'store_false', 'dest': 'strip_comments'}),
('Remove all elements with classes="<class>" from the document tree. '
'Warning: potentially dangerous; use with caution. '
'(Multiple-use option.)',
['--strip-elements-with-class'],
{'action': 'append', 'dest': 'strip_elements_with_classes',
'metavar': '<class>', 'validator': validate_strip_class}),
('Remove all classes="<class>" attributes from elements in the '
'document tree. Warning: potentially dangerous; use with caution. '
'(Multiple-use option.)',
['--strip-class'],
{'action': 'append', 'dest': 'strip_classes',
'metavar': '<class>', 'validator': validate_strip_class}),
('Report system messages at or higher than <level>: "info" or "1", '
'"warning"/"2" (default), "error"/"3", "severe"/"4", "none"/"5"',
['--report', '-r'], {'choices': threshold_choices, 'default': 2,
'dest': 'report_level', 'metavar': '<level>',
'validator': validate_threshold}),
('Report all system messages. (Same as "--report=1".)',
['--verbose', '-v'], {'action': 'store_const', 'const': 1,
'dest': 'report_level'}),
('Report no system messages. (Same as "--report=5".)',
['--quiet', '-q'], {'action': 'store_const', 'const': 5,
'dest': 'report_level'}),
('Halt execution at system messages at or above <level>. '
'Levels as in --report. Default: 4 (severe).',
['--halt'], {'choices': threshold_choices, 'dest': 'halt_level',
'default': 4, 'metavar': '<level>',
'validator': validate_threshold}),
('Halt at the slightest problem. Same as "--halt=info".',
['--strict'], {'action': 'store_const', 'const': 1,
'dest': 'halt_level'}),
('Enable a non-zero exit status for non-halting system messages at '
'or above <level>. Default: 5 (disabled).',
['--exit-status'], {'choices': threshold_choices,
'dest': 'exit_status_level',
'default': 5, 'metavar': '<level>',
'validator': validate_threshold}),
('Enable debug-level system messages and diagnostics.',
['--debug'], {'action': 'store_true',
'validator': validate_boolean}),
('Disable debug output. (default)',
['--no-debug'], {'action': 'store_false', 'dest': 'debug'}),
('Send the output of system messages to <file>.',
['--warnings'], {'dest': 'warning_stream', 'metavar': '<file>'}),
('Enable Python tracebacks when Docutils is halted.',
['--traceback'], {'action': 'store_true', 'default': None,
'validator': validate_boolean}),
('Disable Python tracebacks. (default)',
['--no-traceback'], {'dest': 'traceback', 'action': 'store_false'}),
('Specify the encoding and optionally the '
'error handler of input text. Default: <auto-detect>:strict.',
['--input-encoding', '-i'],
{'metavar': '<name[:handler]>',
'validator': validate_encoding_and_error_handler}),
('Specify the error handler for undecodable characters. '
'Choices: "strict" (default), "ignore", and "replace".',
['--input-encoding-error-handler'],
{'default': 'strict', 'validator': validate_encoding_error_handler}),
('Specify the text encoding and optionally the error handler for '
'output. Default: utf-8:strict.',
['--output-encoding', '-o'],
{'metavar': '<name[:handler]>', 'default': 'utf-8',
'validator': validate_encoding_and_error_handler}),
('Specify error handler for unencodable output characters; '
'"strict" (default), "ignore", "replace", '
'"xmlcharrefreplace", "backslashreplace".',
['--output-encoding-error-handler'],
{'default': 'strict', 'validator': validate_encoding_error_handler}),
('Specify text encoding and optionally error handler '
'for error output. Default: %s:%s.'
% (default_error_encoding, default_error_encoding_error_handler),
['--error-encoding', '-e'],
{'metavar': '<name[:handler]>', 'default': default_error_encoding,
'validator': validate_encoding_and_error_handler}),
('Specify the error handler for unencodable characters in '
'error output. Default: %s.'
% default_error_encoding_error_handler,
['--error-encoding-error-handler'],
{'default': default_error_encoding_error_handler,
'validator': validate_encoding_error_handler}),
('Specify the language (as BCP 47 language tag). Default: en.',
['--language', '-l'], {'dest': 'language_code', 'default': 'en',
'metavar': '<name>'}),
('Write output file dependencies to <file>.',
['--record-dependencies'],
{'metavar': '<file>', 'validator': validate_dependency_file,
'default': None}), # default set in Values class
('Read configuration settings from <file>, if it exists.',
['--config'], {'metavar': '<file>', 'type': 'string',
'action': 'callback', 'callback': read_config_file}),
("Show this program's version number and exit.",
['--version', '-V'], {'action': 'version'}),
('Show this help message and exit.',
['--help', '-h'], {'action': 'help'}),
# Typically not useful for non-programmatical use:
(SUPPRESS_HELP, ['--id-prefix'], {'default': ''}),
(SUPPRESS_HELP, ['--auto-id-prefix'], {'default': '%'}),
# Hidden options, for development use only:
(SUPPRESS_HELP, ['--dump-settings'], {'action': 'store_true'}),
(SUPPRESS_HELP, ['--dump-internals'], {'action': 'store_true'}),
(SUPPRESS_HELP, ['--dump-transforms'], {'action': 'store_true'}),
(SUPPRESS_HELP, ['--dump-pseudo-xml'], {'action': 'store_true'}),
(SUPPRESS_HELP, ['--expose-internal-attribute'],
{'action': 'append', 'dest': 'expose_internals',
'validator': validate_colon_separated_string_list}),
(SUPPRESS_HELP, ['--strict-visitor'], {'action': 'store_true'}),
))
"""Runtime settings and command-line options common to all Docutils front
ends. Setting specs specific to individual Docutils components are also
used (see `populate_from_components()`)."""
settings_defaults = {'_disable_config': None,
'_source': None,
'_destination': None,
'_config_files': None}
"""Defaults for settings without command-line option equivalents.
See https://docutils.sourceforge.io/docs/user/config.html#internal-settings
"""
config_section = 'general'
version_template = ('%%prog (Docutils %s%s, Python %s, on %s)'
% (docutils.__version__,
docutils.__version_details__
and ' [%s]'%docutils.__version_details__ or '',
sys.version.split()[0], sys.platform))
"""Default version message."""
def __init__(self, components=(), defaults=None, read_config_files=False,
*args, **kwargs):
"""Set up OptionParser instance.
`components` is a list of Docutils components each containing a
``.settings_spec`` attribute.
`defaults` is a mapping of setting default overrides.
"""
self.lists = {}
"""Set of list-type settings."""
self.config_files = []
"""List of paths of applied configuration files."""
self.relative_path_settings = ['warning_stream'] # will be modified
warnings.warn('The frontend.OptionParser class will be replaced '
'by a subclass of argparse.ArgumentParser '
'in Docutils 0.21 or later.',
DeprecationWarning, stacklevel=2)
super().__init__(option_class=Option, add_help_option=None,
formatter=optparse.TitledHelpFormatter(width=78),
*args, **kwargs)
if not self.version:
self.version = self.version_template
self.components = (self, *components)
self.populate_from_components(self.components)
self.defaults.update(defaults or {})
if read_config_files and not self.defaults['_disable_config']:
try:
config_settings = self.get_standard_config_settings()
except ValueError as err:
self.error(err)
self.defaults.update(config_settings.__dict__)
def populate_from_components(self, components):
"""Collect settings specification from components.
For each component, populate from the `SettingsSpec.settings_spec`
structure, then from the `SettingsSpec.settings_defaults` dictionary.
After all components have been processed, check for and populate from
each component's `SettingsSpec.settings_default_overrides` dictionary.
"""
for component in components:
if component is None:
continue
settings_spec = component.settings_spec
self.relative_path_settings.extend(
component.relative_path_settings)
for i in range(0, len(settings_spec), 3):
title, description, option_spec = settings_spec[i:i+3]
if title:
group = optparse.OptionGroup(self, title, description)
self.add_option_group(group)
else:
group = self # single options
for (help_text, option_strings, kwargs) in option_spec:
option = group.add_option(help=help_text, *option_strings,
**kwargs)
if kwargs.get('action') == 'append':
self.lists[option.dest] = True
if component.settings_defaults:
self.defaults.update(component.settings_defaults)
for component in components:
if component and component.settings_default_overrides:
self.defaults.update(component.settings_default_overrides)
@classmethod
def get_standard_config_files(cls):
"""Return list of config files, from environment or standard."""
if 'DOCUTILSCONFIG' in os.environ:
config_files = os.environ['DOCUTILSCONFIG'].split(os.pathsep)
else:
config_files = cls.standard_config_files
return [os.path.expanduser(f) for f in config_files if f.strip()]
def get_standard_config_settings(self):
with warnings.catch_warnings():
warnings.filterwarnings('ignore', category=DeprecationWarning)
settings = Values()
for filename in self.get_standard_config_files():
settings.update(self.get_config_file_settings(filename), self)
return settings
def get_config_file_settings(self, config_file):
"""Returns a dictionary containing appropriate config file settings."""
config_parser = ConfigParser()
# parse config file, add filename if found and successfully read.
applied = set()
with warnings.catch_warnings():
warnings.filterwarnings('ignore', category=DeprecationWarning)
self.config_files += config_parser.read(config_file, self)
settings = Values()
for component in self.components:
if not component:
continue
for section in (tuple(component.config_section_dependencies or ())
+ (component.config_section,)):
if section in applied:
continue
applied.add(section)
if config_parser.has_section(section):
settings.update(config_parser[section], self)
make_paths_absolute(settings.__dict__,
self.relative_path_settings,
os.path.dirname(config_file))
return settings.__dict__
def check_values(self, values, args):
"""Store positional arguments as runtime settings."""
values._source, values._destination = self.check_args(args)
make_paths_absolute(values.__dict__, self.relative_path_settings)
values._config_files = self.config_files
return values
def check_args(self, args):
source = destination = None
if args:
source = args.pop(0)
if source == '-': # means stdin
source = None
if args:
destination = args.pop(0)
if destination == '-': # means stdout
destination = None
if args:
self.error('Maximum 2 arguments allowed.')
if source and source == destination:
self.error('Do not specify the same file for both source and '
'destination. It will clobber the source file.')
return source, destination
def set_defaults_from_dict(self, defaults):
# deprecated, will be removed
warnings.warn('OptionParser.set_defaults_from_dict() will be removed '
'in Docutils 0.22 or with the switch to ArgumentParser.',
DeprecationWarning, stacklevel=2)
self.defaults.update(defaults)
def get_default_values(self):
"""Needed to get custom `Values` instances."""
with warnings.catch_warnings():
warnings.filterwarnings('ignore', category=DeprecationWarning)
defaults = Values(self.defaults)
defaults._config_files = self.config_files
return defaults
def get_option_by_dest(self, dest):
"""
Get an option by its dest.
If you're supplying a dest which is shared by several options,
it is undefined which option of those is returned.
A KeyError is raised if there is no option with the supplied
dest.
"""
for group in self.option_groups + [self]:
for option in group.option_list:
if option.dest == dest:
return option
raise KeyError('No option with dest == %r.' % dest)
| (components=(), defaults=None, read_config_files=False, *args, **kwargs) |
36,976 | docutils.frontend | __init__ | Set up OptionParser instance.
`components` is a list of Docutils components each containing a
``.settings_spec`` attribute.
`defaults` is a mapping of setting default overrides.
| def __init__(self, components=(), defaults=None, read_config_files=False,
*args, **kwargs):
"""Set up OptionParser instance.
`components` is a list of Docutils components each containing a
``.settings_spec`` attribute.
`defaults` is a mapping of setting default overrides.
"""
self.lists = {}
"""Set of list-type settings."""
self.config_files = []
"""List of paths of applied configuration files."""
self.relative_path_settings = ['warning_stream'] # will be modified
warnings.warn('The frontend.OptionParser class will be replaced '
'by a subclass of argparse.ArgumentParser '
'in Docutils 0.21 or later.',
DeprecationWarning, stacklevel=2)
super().__init__(option_class=Option, add_help_option=None,
formatter=optparse.TitledHelpFormatter(width=78),
*args, **kwargs)
if not self.version:
self.version = self.version_template
self.components = (self, *components)
self.populate_from_components(self.components)
self.defaults.update(defaults or {})
if read_config_files and not self.defaults['_disable_config']:
try:
config_settings = self.get_standard_config_settings()
except ValueError as err:
self.error(err)
self.defaults.update(config_settings.__dict__)
| (self, components=(), defaults=None, read_config_files=False, *args, **kwargs) |
36,977 | optparse | _add_help_option | null | def _add_help_option(self):
self.add_option("-h", "--help",
action="help",
help=_("show this help message and exit"))
| (self) |
36,978 | optparse | _add_version_option | null | def _add_version_option(self):
self.add_option("--version",
action="version",
help=_("show program's version number and exit"))
| (self) |
36,979 | optparse | _check_conflict | null | def _check_conflict(self, option):
conflict_opts = []
for opt in option._short_opts:
if opt in self._short_opt:
conflict_opts.append((opt, self._short_opt[opt]))
for opt in option._long_opts:
if opt in self._long_opt:
conflict_opts.append((opt, self._long_opt[opt]))
if conflict_opts:
handler = self.conflict_handler
if handler == "error":
raise OptionConflictError(
"conflicting option string(s): %s"
% ", ".join([co[0] for co in conflict_opts]),
option)
elif handler == "resolve":
for (opt, c_option) in conflict_opts:
if opt.startswith("--"):
c_option._long_opts.remove(opt)
del self._long_opt[opt]
else:
c_option._short_opts.remove(opt)
del self._short_opt[opt]
if not (c_option._short_opts or c_option._long_opts):
c_option.container.option_list.remove(c_option)
| (self, option) |
36,980 | optparse | _create_option_list | null | def _create_option_list(self):
self.option_list = []
self.option_groups = []
self._create_option_mappings()
| (self) |
36,981 | optparse | _create_option_mappings | null | def _create_option_mappings(self):
# For use by OptionParser constructor -- create the main
# option mappings used by this OptionParser and all
# OptionGroups that it owns.
self._short_opt = {} # single letter -> Option instance
self._long_opt = {} # long option -> Option instance
self.defaults = {} # maps option dest -> default value
| (self) |
36,982 | optparse | _get_all_options | null | def _get_all_options(self):
options = self.option_list[:]
for group in self.option_groups:
options.extend(group.option_list)
return options
| (self) |
36,983 | optparse | _get_args | null | def _get_args(self, args):
if args is None:
return sys.argv[1:]
else:
return args[:] # don't modify caller's list
| (self, args) |
36,984 | optparse | _init_parsing_state | null | def _init_parsing_state(self):
# These are set in parse_args() for the convenience of callbacks.
self.rargs = None
self.largs = None
self.values = None
| (self) |
36,985 | optparse | _match_long_opt | _match_long_opt(opt : string) -> string
Determine which long option string 'opt' matches, ie. which one
it is an unambiguous abbreviation for. Raises BadOptionError if
'opt' doesn't unambiguously match any long option string.
| def _match_long_opt(self, opt):
"""_match_long_opt(opt : string) -> string
Determine which long option string 'opt' matches, ie. which one
it is an unambiguous abbreviation for. Raises BadOptionError if
'opt' doesn't unambiguously match any long option string.
"""
return _match_abbrev(opt, self._long_opt)
| (self, opt) |
36,986 | optparse | _populate_option_list | null | def _populate_option_list(self, option_list, add_help=True):
if self.standard_option_list:
self.add_options(self.standard_option_list)
if option_list:
self.add_options(option_list)
if self.version:
self._add_version_option()
if add_help:
self._add_help_option()
| (self, option_list, add_help=True) |
36,987 | optparse | _process_args | _process_args(largs : [string],
rargs : [string],
values : Values)
Process command-line arguments and populate 'values', consuming
options and arguments from 'rargs'. If 'allow_interspersed_args' is
false, stop at the first non-option argument. If true, accumulate any
interspersed non-option arguments in 'largs'.
| def _process_args(self, largs, rargs, values):
"""_process_args(largs : [string],
rargs : [string],
values : Values)
Process command-line arguments and populate 'values', consuming
options and arguments from 'rargs'. If 'allow_interspersed_args' is
false, stop at the first non-option argument. If true, accumulate any
interspersed non-option arguments in 'largs'.
"""
while rargs:
arg = rargs[0]
# We handle bare "--" explicitly, and bare "-" is handled by the
# standard arg handler since the short arg case ensures that the
# len of the opt string is greater than 1.
if arg == "--":
del rargs[0]
return
elif arg[0:2] == "--":
# process a single long option (possibly with value(s))
self._process_long_opt(rargs, values)
elif arg[:1] == "-" and len(arg) > 1:
# process a cluster of short options (possibly with
# value(s) for the last one only)
self._process_short_opts(rargs, values)
elif self.allow_interspersed_args:
largs.append(arg)
del rargs[0]
else:
return # stop now, leave this arg in rargs
# Say this is the original argument list:
# [arg0, arg1, ..., arg(i-1), arg(i), arg(i+1), ..., arg(N-1)]
# ^
# (we are about to process arg(i)).
#
# Then rargs is [arg(i), ..., arg(N-1)] and largs is a *subset* of
# [arg0, ..., arg(i-1)] (any options and their arguments will have
# been removed from largs).
#
# The while loop will usually consume 1 or more arguments per pass.
# If it consumes 1 (eg. arg is an option that takes no arguments),
# then after _process_arg() is done the situation is:
#
# largs = subset of [arg0, ..., arg(i)]
# rargs = [arg(i+1), ..., arg(N-1)]
#
# If allow_interspersed_args is false, largs will always be
# *empty* -- still a subset of [arg0, ..., arg(i-1)], but
# not a very interesting subset!
| (self, largs, rargs, values) |
36,988 | optparse | _process_long_opt | null | def _process_long_opt(self, rargs, values):
arg = rargs.pop(0)
# Value explicitly attached to arg? Pretend it's the next
# argument.
if "=" in arg:
(opt, next_arg) = arg.split("=", 1)
rargs.insert(0, next_arg)
had_explicit_value = True
else:
opt = arg
had_explicit_value = False
opt = self._match_long_opt(opt)
option = self._long_opt[opt]
if option.takes_value():
nargs = option.nargs
if len(rargs) < nargs:
self.error(ngettext(
"%(option)s option requires %(number)d argument",
"%(option)s option requires %(number)d arguments",
nargs) % {"option": opt, "number": nargs})
elif nargs == 1:
value = rargs.pop(0)
else:
value = tuple(rargs[0:nargs])
del rargs[0:nargs]
elif had_explicit_value:
self.error(_("%s option does not take a value") % opt)
else:
value = None
option.process(opt, value, values, self)
| (self, rargs, values) |
36,989 | optparse | _process_short_opts | null | def _process_short_opts(self, rargs, values):
arg = rargs.pop(0)
stop = False
i = 1
for ch in arg[1:]:
opt = "-" + ch
option = self._short_opt.get(opt)
i += 1 # we have consumed a character
if not option:
raise BadOptionError(opt)
if option.takes_value():
# Any characters left in arg? Pretend they're the
# next arg, and stop consuming characters of arg.
if i < len(arg):
rargs.insert(0, arg[i:])
stop = True
nargs = option.nargs
if len(rargs) < nargs:
self.error(ngettext(
"%(option)s option requires %(number)d argument",
"%(option)s option requires %(number)d arguments",
nargs) % {"option": opt, "number": nargs})
elif nargs == 1:
value = rargs.pop(0)
else:
value = tuple(rargs[0:nargs])
del rargs[0:nargs]
else: # option doesn't take a value
value = None
option.process(opt, value, values, self)
if stop:
break
| (self, rargs, values) |
36,990 | optparse | _share_option_mappings | null | def _share_option_mappings(self, parser):
# For use by OptionGroup constructor -- use shared option
# mappings from the OptionParser that owns this OptionGroup.
self._short_opt = parser._short_opt
self._long_opt = parser._long_opt
self.defaults = parser.defaults
| (self, parser) |
36,991 | optparse | add_option | add_option(Option)
add_option(opt_str, ..., kwarg=val, ...)
| def add_option(self, *args, **kwargs):
"""add_option(Option)
add_option(opt_str, ..., kwarg=val, ...)
"""
if isinstance(args[0], str):
option = self.option_class(*args, **kwargs)
elif len(args) == 1 and not kwargs:
option = args[0]
if not isinstance(option, Option):
raise TypeError("not an Option instance: %r" % option)
else:
raise TypeError("invalid arguments")
self._check_conflict(option)
self.option_list.append(option)
option.container = self
for opt in option._short_opts:
self._short_opt[opt] = option
for opt in option._long_opts:
self._long_opt[opt] = option
if option.dest is not None: # option has a dest, we need a default
if option.default is not NO_DEFAULT:
self.defaults[option.dest] = option.default
elif option.dest not in self.defaults:
self.defaults[option.dest] = None
return option
| (self, *args, **kwargs) |
36,992 | optparse | add_option_group | null | def add_option_group(self, *args, **kwargs):
# XXX lots of overlap with OptionContainer.add_option()
if isinstance(args[0], str):
group = OptionGroup(self, *args, **kwargs)
elif len(args) == 1 and not kwargs:
group = args[0]
if not isinstance(group, OptionGroup):
raise TypeError("not an OptionGroup instance: %r" % group)
if group.parser is not self:
raise ValueError("invalid OptionGroup (wrong parser)")
else:
raise TypeError("invalid arguments")
self.option_groups.append(group)
return group
| (self, *args, **kwargs) |
36,993 | optparse | add_options | null | def add_options(self, option_list):
for option in option_list:
self.add_option(option)
| (self, option_list) |
36,994 | docutils.frontend | check_args | null | def check_args(self, args):
source = destination = None
if args:
source = args.pop(0)
if source == '-': # means stdin
source = None
if args:
destination = args.pop(0)
if destination == '-': # means stdout
destination = None
if args:
self.error('Maximum 2 arguments allowed.')
if source and source == destination:
self.error('Do not specify the same file for both source and '
'destination. It will clobber the source file.')
return source, destination
| (self, args) |
36,995 | docutils.frontend | check_values | Store positional arguments as runtime settings. | def check_values(self, values, args):
"""Store positional arguments as runtime settings."""
values._source, values._destination = self.check_args(args)
make_paths_absolute(values.__dict__, self.relative_path_settings)
values._config_files = self.config_files
return values
| (self, values, args) |
36,996 | optparse | destroy |
Declare that you are done with this OptionParser. This cleans up
reference cycles so the OptionParser (and all objects referenced by
it) can be garbage-collected promptly. After calling destroy(), the
OptionParser is unusable.
| def destroy(self):
"""
Declare that you are done with this OptionParser. This cleans up
reference cycles so the OptionParser (and all objects referenced by
it) can be garbage-collected promptly. After calling destroy(), the
OptionParser is unusable.
"""
OptionContainer.destroy(self)
for group in self.option_groups:
group.destroy()
del self.option_list
del self.option_groups
del self.formatter
| (self) |
36,997 | optparse | disable_interspersed_args | Set parsing to stop on the first non-option. Use this if
you have a command processor which runs another command that
has options of its own and you want to make sure these options
don't get confused.
| def disable_interspersed_args(self):
"""Set parsing to stop on the first non-option. Use this if
you have a command processor which runs another command that
has options of its own and you want to make sure these options
don't get confused.
"""
self.allow_interspersed_args = False
| (self) |
36,998 | optparse | enable_interspersed_args | Set parsing to not stop on the first non-option, allowing
interspersing switches with command arguments. This is the
default behavior. See also disable_interspersed_args() and the
class documentation description of the attribute
allow_interspersed_args. | def enable_interspersed_args(self):
"""Set parsing to not stop on the first non-option, allowing
interspersing switches with command arguments. This is the
default behavior. See also disable_interspersed_args() and the
class documentation description of the attribute
allow_interspersed_args."""
self.allow_interspersed_args = True
| (self) |
36,999 | optparse | error | error(msg : string)
Print a usage message incorporating 'msg' to stderr and exit.
If you override this in a subclass, it should not return -- it
should either exit or raise an exception.
| def error(self, msg):
"""error(msg : string)
Print a usage message incorporating 'msg' to stderr and exit.
If you override this in a subclass, it should not return -- it
should either exit or raise an exception.
"""
self.print_usage(sys.stderr)
self.exit(2, "%s: error: %s\n" % (self.get_prog_name(), msg))
| (self, msg) |
37,000 | optparse | exit | null | def exit(self, status=0, msg=None):
if msg:
sys.stderr.write(msg)
sys.exit(status)
| (self, status=0, msg=None) |
37,001 | optparse | expand_prog_name | null | def expand_prog_name(self, s):
return s.replace("%prog", self.get_prog_name())
| (self, s) |
37,002 | optparse | format_description | null | def format_description(self, formatter):
return formatter.format_description(self.get_description())
| (self, formatter) |
37,003 | optparse | format_epilog | null | def format_epilog(self, formatter):
return formatter.format_epilog(self.epilog)
| (self, formatter) |
37,004 | optparse | format_help | null | def format_help(self, formatter=None):
if formatter is None:
formatter = self.formatter
result = []
if self.usage:
result.append(self.get_usage() + "\n")
if self.description:
result.append(self.format_description(formatter) + "\n")
result.append(self.format_option_help(formatter))
result.append(self.format_epilog(formatter))
return "".join(result)
| (self, formatter=None) |
37,005 | optparse | format_option_help | null | def format_option_help(self, formatter=None):
if formatter is None:
formatter = self.formatter
formatter.store_option_strings(self)
result = []
result.append(formatter.format_heading(_("Options")))
formatter.indent()
if self.option_list:
result.append(OptionContainer.format_option_help(self, formatter))
result.append("\n")
for group in self.option_groups:
result.append(group.format_help(formatter))
result.append("\n")
formatter.dedent()
# Drop the last "\n", or the header if no options or option groups:
return "".join(result[:-1])
| (self, formatter=None) |
37,006 | docutils.frontend | get_config_file_settings | Returns a dictionary containing appropriate config file settings. | def get_config_file_settings(self, config_file):
"""Returns a dictionary containing appropriate config file settings."""
config_parser = ConfigParser()
# parse config file, add filename if found and successfully read.
applied = set()
with warnings.catch_warnings():
warnings.filterwarnings('ignore', category=DeprecationWarning)
self.config_files += config_parser.read(config_file, self)
settings = Values()
for component in self.components:
if not component:
continue
for section in (tuple(component.config_section_dependencies or ())
+ (component.config_section,)):
if section in applied:
continue
applied.add(section)
if config_parser.has_section(section):
settings.update(config_parser[section], self)
make_paths_absolute(settings.__dict__,
self.relative_path_settings,
os.path.dirname(config_file))
return settings.__dict__
| (self, config_file) |
37,007 | docutils.frontend | get_default_values | Needed to get custom `Values` instances. | def get_default_values(self):
"""Needed to get custom `Values` instances."""
with warnings.catch_warnings():
warnings.filterwarnings('ignore', category=DeprecationWarning)
defaults = Values(self.defaults)
defaults._config_files = self.config_files
return defaults
| (self) |
37,008 | optparse | get_description | null | def get_description(self):
return self.expand_prog_name(self.description)
| (self) |
37,009 | optparse | get_option | null | def get_option(self, opt_str):
return (self._short_opt.get(opt_str) or
self._long_opt.get(opt_str))
| (self, opt_str) |
37,010 | docutils.frontend | get_option_by_dest |
Get an option by its dest.
If you're supplying a dest which is shared by several options,
it is undefined which option of those is returned.
A KeyError is raised if there is no option with the supplied
dest.
| def get_option_by_dest(self, dest):
"""
Get an option by its dest.
If you're supplying a dest which is shared by several options,
it is undefined which option of those is returned.
A KeyError is raised if there is no option with the supplied
dest.
"""
for group in self.option_groups + [self]:
for option in group.option_list:
if option.dest == dest:
return option
raise KeyError('No option with dest == %r.' % dest)
| (self, dest) |
37,011 | optparse | get_option_group | null | def get_option_group(self, opt_str):
option = (self._short_opt.get(opt_str) or
self._long_opt.get(opt_str))
if option and option.container is not self:
return option.container
return None
| (self, opt_str) |
37,012 | optparse | get_prog_name | null | def get_prog_name(self):
if self.prog is None:
return os.path.basename(sys.argv[0])
else:
return self.prog
| (self) |
37,013 | docutils.frontend | get_standard_config_settings | null | def get_standard_config_settings(self):
with warnings.catch_warnings():
warnings.filterwarnings('ignore', category=DeprecationWarning)
settings = Values()
for filename in self.get_standard_config_files():
settings.update(self.get_config_file_settings(filename), self)
return settings
| (self) |
37,014 | optparse | get_usage | null | def get_usage(self):
if self.usage:
return self.formatter.format_usage(
self.expand_prog_name(self.usage))
else:
return ""
| (self) |
37,015 | optparse | get_version | null | def get_version(self):
if self.version:
return self.expand_prog_name(self.version)
else:
return ""
| (self) |
37,016 | optparse | has_option | null | def has_option(self, opt_str):
return (opt_str in self._short_opt or
opt_str in self._long_opt)
| (self, opt_str) |
37,017 | optparse | parse_args |
parse_args(args : [string] = sys.argv[1:],
values : Values = None)
-> (values : Values, args : [string])
Parse the command-line options found in 'args' (default:
sys.argv[1:]). Any errors result in a call to 'error()', which
by default prints the usage message to stderr and calls
sys.exit() with an error message. On success returns a pair
(values, args) where 'values' is a Values instance (with all
your option values) and 'args' is the list of arguments left
over after parsing options.
| def parse_args(self, args=None, values=None):
"""
parse_args(args : [string] = sys.argv[1:],
values : Values = None)
-> (values : Values, args : [string])
Parse the command-line options found in 'args' (default:
sys.argv[1:]). Any errors result in a call to 'error()', which
by default prints the usage message to stderr and calls
sys.exit() with an error message. On success returns a pair
(values, args) where 'values' is a Values instance (with all
your option values) and 'args' is the list of arguments left
over after parsing options.
"""
rargs = self._get_args(args)
if values is None:
values = self.get_default_values()
# Store the halves of the argument list as attributes for the
# convenience of callbacks:
# rargs
# the rest of the command-line (the "r" stands for
# "remaining" or "right-hand")
# largs
# the leftover arguments -- ie. what's left after removing
# options and their arguments (the "l" stands for "leftover"
# or "left-hand")
self.rargs = rargs
self.largs = largs = []
self.values = values
try:
stop = self._process_args(largs, rargs, values)
except (BadOptionError, OptionValueError) as err:
self.error(str(err))
args = largs + rargs
return self.check_values(values, args)
| (self, args=None, values=None) |
37,018 | docutils.frontend | populate_from_components | Collect settings specification from components.
For each component, populate from the `SettingsSpec.settings_spec`
structure, then from the `SettingsSpec.settings_defaults` dictionary.
After all components have been processed, check for and populate from
each component's `SettingsSpec.settings_default_overrides` dictionary.
| def populate_from_components(self, components):
"""Collect settings specification from components.
For each component, populate from the `SettingsSpec.settings_spec`
structure, then from the `SettingsSpec.settings_defaults` dictionary.
After all components have been processed, check for and populate from
each component's `SettingsSpec.settings_default_overrides` dictionary.
"""
for component in components:
if component is None:
continue
settings_spec = component.settings_spec
self.relative_path_settings.extend(
component.relative_path_settings)
for i in range(0, len(settings_spec), 3):
title, description, option_spec = settings_spec[i:i+3]
if title:
group = optparse.OptionGroup(self, title, description)
self.add_option_group(group)
else:
group = self # single options
for (help_text, option_strings, kwargs) in option_spec:
option = group.add_option(help=help_text, *option_strings,
**kwargs)
if kwargs.get('action') == 'append':
self.lists[option.dest] = True
if component.settings_defaults:
self.defaults.update(component.settings_defaults)
for component in components:
if component and component.settings_default_overrides:
self.defaults.update(component.settings_default_overrides)
| (self, components) |
37,019 | optparse | print_help | print_help(file : file = stdout)
Print an extended help message, listing all options and any
help text provided with them, to 'file' (default stdout).
| def print_help(self, file=None):
"""print_help(file : file = stdout)
Print an extended help message, listing all options and any
help text provided with them, to 'file' (default stdout).
"""
if file is None:
file = sys.stdout
file.write(self.format_help())
| (self, file=None) |
37,020 | optparse | print_usage | print_usage(file : file = stdout)
Print the usage message for the current program (self.usage) to
'file' (default stdout). Any occurrence of the string "%prog" in
self.usage is replaced with the name of the current program
(basename of sys.argv[0]). Does nothing if self.usage is empty
or not defined.
| def print_usage(self, file=None):
"""print_usage(file : file = stdout)
Print the usage message for the current program (self.usage) to
'file' (default stdout). Any occurrence of the string "%prog" in
self.usage is replaced with the name of the current program
(basename of sys.argv[0]). Does nothing if self.usage is empty
or not defined.
"""
if self.usage:
print(self.get_usage(), file=file)
| (self, file=None) |
37,021 | optparse | print_version | print_version(file : file = stdout)
Print the version message for this program (self.version) to
'file' (default stdout). As with print_usage(), any occurrence
of "%prog" in self.version is replaced by the current program's
name. Does nothing if self.version is empty or undefined.
| def print_version(self, file=None):
"""print_version(file : file = stdout)
Print the version message for this program (self.version) to
'file' (default stdout). As with print_usage(), any occurrence
of "%prog" in self.version is replaced by the current program's
name. Does nothing if self.version is empty or undefined.
"""
if self.version:
print(self.get_version(), file=file)
| (self, file=None) |
37,022 | optparse | remove_option | null | def remove_option(self, opt_str):
option = self._short_opt.get(opt_str)
if option is None:
option = self._long_opt.get(opt_str)
if option is None:
raise ValueError("no such option %r" % opt_str)
for opt in option._short_opts:
del self._short_opt[opt]
for opt in option._long_opts:
del self._long_opt[opt]
option.container.option_list.remove(option)
| (self, opt_str) |
37,023 | optparse | set_conflict_handler | null | def set_conflict_handler(self, handler):
if handler not in ("error", "resolve"):
raise ValueError("invalid conflict_resolution value %r" % handler)
self.conflict_handler = handler
| (self, handler) |
37,024 | optparse | set_default | null | def set_default(self, dest, value):
self.defaults[dest] = value
| (self, dest, value) |
37,025 | optparse | set_defaults | null | def set_defaults(self, **kwargs):
self.defaults.update(kwargs)
| (self, **kwargs) |
37,026 | docutils.frontend | set_defaults_from_dict | null | def set_defaults_from_dict(self, defaults):
# deprecated, will be removed
warnings.warn('OptionParser.set_defaults_from_dict() will be removed '
'in Docutils 0.22 or with the switch to ArgumentParser.',
DeprecationWarning, stacklevel=2)
self.defaults.update(defaults)
| (self, defaults) |
37,027 | optparse | set_description | null | def set_description(self, description):
self.description = description
| (self, description) |
37,028 | optparse | set_process_default_values | null | def set_process_default_values(self, process):
self.process_default_values = process
| (self, process) |
37,029 | optparse | set_usage | null | def set_usage(self, usage):
if usage is None:
self.usage = _("%prog [options]")
elif usage is SUPPRESS_USAGE:
self.usage = None
# For backwards compatibility with Optik 1.3 and earlier.
elif usage.lower().startswith("usage: "):
self.usage = usage[7:]
else:
self.usage = usage
| (self, usage) |
37,030 | sphinx.parsers | RSTParser | A reST parser for Sphinx. | class RSTParser(docutils.parsers.rst.Parser, Parser):
"""A reST parser for Sphinx."""
def get_transforms(self) -> list[type[Transform]]:
"""
Sphinx's reST parser replaces a transform class for smart-quotes by its own
refs: sphinx.io.SphinxStandaloneReader
"""
transforms = super().get_transforms()
transforms.remove(SmartQuotes)
return transforms
def parse(self, inputstring: str | StringList, document: nodes.document) -> None:
"""Parse text and generate a document tree."""
self.setup_parse(inputstring, document) # type: ignore[arg-type]
self.statemachine = states.RSTStateMachine(
state_classes=self.state_classes,
initial_state=self.initial_state,
debug=document.reporter.debug_flag,
)
# preprocess inputstring
if isinstance(inputstring, str):
lines = docutils.statemachine.string2lines(
inputstring, tab_width=document.settings.tab_width, convert_whitespace=True
)
inputlines = StringList(lines, document.current_source)
else:
inputlines = inputstring
self.decorate(inputlines)
self.statemachine.run(inputlines, document, inliner=self.inliner)
self.finish_parse()
def decorate(self, content: StringList) -> None:
"""Preprocess reST content before parsing."""
prepend_prolog(content, self.config.rst_prolog)
append_epilog(content, self.config.rst_epilog)
| (rfc2822=False, inliner=None) |
37,031 | docutils.parsers.rst | __init__ | null | def __init__(self, rfc2822=False, inliner=None):
if rfc2822:
self.initial_state = 'RFC2822Body'
else:
self.initial_state = 'Body'
self.state_classes = states.state_classes
self.inliner = inliner
| (self, rfc2822=False, inliner=None) |
37,032 | sphinx.parsers | decorate | Preprocess reST content before parsing. | def decorate(self, content: StringList) -> None:
"""Preprocess reST content before parsing."""
prepend_prolog(content, self.config.rst_prolog)
append_epilog(content, self.config.rst_epilog)
| (self, content: docutils.statemachine.StringList) -> NoneType |
37,033 | docutils.parsers | finish_parse | Finalize parse details. Call at end of `self.parse()`. | def finish_parse(self):
"""Finalize parse details. Call at end of `self.parse()`."""
self.document.reporter.detach_observer(
self.document.note_parse_message)
| (self) |
37,034 | sphinx.parsers | get_transforms |
Sphinx's reST parser replaces a transform class for smart-quotes by its own
refs: sphinx.io.SphinxStandaloneReader
| def get_transforms(self) -> list[type[Transform]]:
"""
Sphinx's reST parser replaces a transform class for smart-quotes by its own
refs: sphinx.io.SphinxStandaloneReader
"""
transforms = super().get_transforms()
transforms.remove(SmartQuotes)
return transforms
| (self) -> 'list[type[Transform]]' |
37,035 | sphinx.parsers | parse | Parse text and generate a document tree. | def parse(self, inputstring: str | StringList, document: nodes.document) -> None:
"""Parse text and generate a document tree."""
self.setup_parse(inputstring, document) # type: ignore[arg-type]
self.statemachine = states.RSTStateMachine(
state_classes=self.state_classes,
initial_state=self.initial_state,
debug=document.reporter.debug_flag,
)
# preprocess inputstring
if isinstance(inputstring, str):
lines = docutils.statemachine.string2lines(
inputstring, tab_width=document.settings.tab_width, convert_whitespace=True
)
inputlines = StringList(lines, document.current_source)
else:
inputlines = inputstring
self.decorate(inputlines)
self.statemachine.run(inputlines, document, inliner=self.inliner)
self.finish_parse()
| (self, inputstring: str | docutils.statemachine.StringList, document: docutils.nodes.document) -> NoneType |
37,036 | sphinx.parsers | set_application | set_application will be called from Sphinx to set app and other instance variables
:param sphinx.application.Sphinx app: Sphinx application object
| def set_application(self, app: Sphinx) -> None:
"""set_application will be called from Sphinx to set app and other instance variables
:param sphinx.application.Sphinx app: Sphinx application object
"""
self._app = app
self.config = app.config
self.env = app.env
| (self, app: 'Sphinx') -> 'None' |
37,037 | docutils.parsers | setup_parse | Initial parse setup. Call at start of `self.parse()`. | def setup_parse(self, inputstring, document):
"""Initial parse setup. Call at start of `self.parse()`."""
self.inputstring = inputstring
# provide fallbacks in case the document has only generic settings
document.settings.setdefault('file_insertion_enabled', False)
document.settings.setdefault('raw_enabled', False)
document.settings.setdefault('line_length_limit', 10000)
self.document = document
document.reporter.attach_observer(document.note_parse_message)
| (self, inputstring, document) |
37,038 | docutils | supports |
Is `format` supported by this component?
To be used by transforms to ask the dependent component if it supports
a certain input context or output format.
| def supports(self, format):
"""
Is `format` supported by this component?
To be used by transforms to ask the dependent component if it supports
a certain input context or output format.
"""
return format in self.supported
| (self, format) |
37,047 | sphinx_autodoc_typehints | _execute_guarded_code | null | def _execute_guarded_code(autodoc_mock_imports: list[str], obj: Any, module_code: str) -> None:
for _, part in _TYPE_GUARD_IMPORT_RE.findall(module_code):
guarded_code = textwrap.dedent(part)
try:
try:
with mock(autodoc_mock_imports):
exec(guarded_code, getattr(obj, "__globals__", obj.__dict__)) # noqa: S102
except ImportError as exc:
# ImportError might have occurred because the module has guarded code as well,
# so we recurse on the module.
if exc.name:
_resolve_type_guarded_imports(autodoc_mock_imports, importlib.import_module(exc.name))
# Retry the guarded code and see if it works now after resolving all nested type guards.
with mock(autodoc_mock_imports):
exec(guarded_code, getattr(obj, "__globals__", obj.__dict__)) # noqa: S102
except Exception as exc: # noqa: BLE001
_LOGGER.warning("Failed guarded type import with %r", exc)
| (autodoc_mock_imports: list[str], obj: Any, module_code: str) -> NoneType |
37,048 | sphinx_autodoc_typehints | _future_annotations_imported | null | def _future_annotations_imported(obj: Any) -> bool:
_annotations = getattr(inspect.getmodule(obj), "annotations", None)
if _annotations is None:
return False
# Make sure that annotations is imported from __future__ - defined in cpython/Lib/__future__.py
# annotations become strings at runtime
future_annotations = 0x100000 if sys.version_info[0:2] == (3, 7) else 0x1000000
return bool(_annotations.compiler_flag == future_annotations)
| (obj: Any) -> bool |
37,049 | sphinx_autodoc_typehints | _get_sphinx_line_keyword_and_argument |
Extract a keyword, and its optional argument out of a sphinx field option line.
For example
>>> _get_sphinx_line_keyword_and_argument(":param parameter:")
("param", "parameter")
>>> _get_sphinx_line_keyword_and_argument(":return:")
("return", None)
>>> _get_sphinx_line_keyword_and_argument("some invalid line")
None
| def _get_sphinx_line_keyword_and_argument(line: str) -> tuple[str, str | None] | None:
"""
Extract a keyword, and its optional argument out of a sphinx field option line.
For example
>>> _get_sphinx_line_keyword_and_argument(":param parameter:")
("param", "parameter")
>>> _get_sphinx_line_keyword_and_argument(":return:")
("return", None)
>>> _get_sphinx_line_keyword_and_argument("some invalid line")
None
"""
param_line_without_description = line.split(":", maxsplit=2)
if len(param_line_without_description) != 3: # noqa: PLR2004
return None
split_directive_and_name = param_line_without_description[1].split(maxsplit=1)
if len(split_directive_and_name) != 2: # noqa: PLR2004
if not len(split_directive_and_name):
return None
return split_directive_and_name[0], None
return tuple(split_directive_and_name) # type: ignore[return-value]
| (line: str) -> tuple[str, str | None] | None |
37,050 | sphinx_autodoc_typehints | _get_type_hint | null | def _get_type_hint(autodoc_mock_imports: list[str], name: str, obj: Any) -> dict[str, Any]:
_resolve_type_guarded_imports(autodoc_mock_imports, obj)
try:
result = get_type_hints(obj)
except (AttributeError, TypeError, RecursionError) as exc:
# TypeError - slot wrapper, PEP-563 when part of new syntax not supported
# RecursionError - some recursive type definitions https://github.com/python/typing/issues/574
if isinstance(exc, TypeError) and _future_annotations_imported(obj) and "unsupported operand type" in str(exc):
result = obj.__annotations__
else:
result = {}
except NameError as exc:
_LOGGER.warning('Cannot resolve forward reference in type annotations of "%s": %s', name, exc)
result = obj.__annotations__
return result
| (autodoc_mock_imports: list[str], name: str, obj: Any) -> dict[str, typing.Any] |
37,051 | sphinx_autodoc_typehints | _get_types_type | null | def _get_types_type(obj: Any) -> str | None:
try:
return _TYPES_DICT.get(obj)
except Exception: # noqa: BLE001
# e.g. exception: unhashable type
return None
| (obj: Any) -> str | None |
37,052 | sphinx_autodoc_typehints | _inject_rtype | null | def _inject_rtype( # noqa: PLR0913, PLR0917
type_hints: dict[str, Any],
original_obj: Any,
app: Sphinx,
what: str,
name: str,
lines: list[str],
) -> None:
if inspect.isclass(original_obj) or inspect.isdatadescriptor(original_obj):
return
if what == "method" and name.endswith(".__init__"): # avoid adding a return type for data class __init__
return
if not app.config.typehints_document_rtype:
return
r = get_insert_index(app, lines)
if r is None:
return
insert_index = r.insert_index
if not app.config.typehints_use_rtype and r.found_return and " -- " in lines[insert_index]:
return
formatted_annotation = add_type_css_class(format_annotation(type_hints["return"], app.config))
if r.found_param and insert_index < len(lines) and lines[insert_index].strip():
insert_index -= 1
if insert_index == len(lines) and not r.found_param:
# ensure that :rtype: doesn't get joined with a paragraph of text
lines.append("")
insert_index += 1
if app.config.typehints_use_rtype or not r.found_return:
line = f":rtype: {formatted_annotation}"
lines.insert(insert_index, line)
if r.found_directive:
lines.insert(insert_index + 1, "")
else:
line = lines[insert_index]
lines[insert_index] = f":return: {formatted_annotation} --{line[line.find(' ') :]}"
| (type_hints: 'dict[str, Any]', original_obj: 'Any', app: 'Sphinx', what: 'str', name: 'str', lines: 'list[str]') -> 'None' |
37,053 | sphinx_autodoc_typehints | _inject_signature | null | def _inject_signature( # noqa: C901
type_hints: dict[str, Any],
signature: inspect.Signature,
app: Sphinx,
lines: list[str],
) -> None:
for arg_name in signature.parameters:
annotation = type_hints.get(arg_name)
default = signature.parameters[arg_name].default
if arg_name.endswith("_"):
arg_name = f"{arg_name[:-1]}\\_" # noqa: PLW2901
insert_index = None
for at, line in enumerate(lines):
if _line_is_param_line_for_arg(line, arg_name):
# Get the arg_name from the doc to match up for type in case it has a star prefix.
# Line is in the correct format so this is guaranteed to return tuple[str, str].
func = _get_sphinx_line_keyword_and_argument
_, arg_name = func(line) # type: ignore[assignment, misc] # noqa: PLW2901
insert_index = at
break
if annotation is not None and insert_index is None and app.config.always_document_param_types:
lines.append(f":param {arg_name}:")
insert_index = len(lines)
if insert_index is not None:
if annotation is None:
type_annotation = f":type {arg_name}: "
else:
formatted_annotation = add_type_css_class(format_annotation(annotation, app.config))
type_annotation = f":type {arg_name}: {formatted_annotation}"
if app.config.typehints_defaults:
formatted_default = format_default(app, default, annotation is not None)
if formatted_default:
if app.config.typehints_defaults.endswith("after"):
lines[insert_index] += formatted_default
else: # add to last param doc line
type_annotation += formatted_default
lines.insert(insert_index, type_annotation)
| (type_hints: 'dict[str, Any]', signature: 'inspect.Signature', app: 'Sphinx', lines: 'list[str]') -> 'None' |
37,054 | sphinx_autodoc_typehints | _inject_types_to_docstring | null | def _inject_types_to_docstring( # noqa: PLR0913, PLR0917
type_hints: dict[str, Any],
signature: inspect.Signature | None,
original_obj: Any,
app: Sphinx,
what: str,
name: str,
lines: list[str],
) -> None:
if signature is not None:
_inject_signature(type_hints, signature, app, lines)
if "return" in type_hints:
_inject_rtype(type_hints, original_obj, app, what, name, lines)
| (type_hints: 'dict[str, Any]', signature: 'inspect.Signature | None', original_obj: 'Any', app: 'Sphinx', what: 'str', name: 'str', lines: 'list[str]') -> 'None' |
37,055 | sphinx_autodoc_typehints | _is_dataclass | null | def _is_dataclass(name: str, what: str, qualname: str) -> bool:
# generated dataclass __init__() and class need extra checks, as the function operates on the generated class
# and methods (not an instantiated dataclass object) it cannot be replaced by a call to
# `dataclasses.is_dataclass()` => check manually for either generated __init__ or generated class
return (what == "method" and name.endswith(".__init__")) or (what == "class" and qualname.endswith(".__init__"))
| (name: str, what: str, qualname: str) -> bool |
37,056 | sphinx_autodoc_typehints | _is_newtype | null | def _is_newtype(annotation: Any) -> bool:
if sys.version_info < (3, 10):
return inspect.isfunction(annotation) and hasattr(annotation, "__supertype__")
return isinstance(annotation, NewType)
| (annotation: Any) -> bool |
37,057 | sphinx_autodoc_typehints | _line_is_param_line_for_arg | Return True if `line` is a valid parameter line for `arg_name`, false otherwise. | def _line_is_param_line_for_arg(line: str, arg_name: str) -> bool:
"""Return True if `line` is a valid parameter line for `arg_name`, false otherwise."""
keyword_and_name = _get_sphinx_line_keyword_and_argument(line)
if keyword_and_name is None:
return False
keyword, doc_name = keyword_and_name
if doc_name is None:
return False
if keyword not in {"param", "parameter", "arg", "argument"}:
return False
return any(doc_name == prefix + arg_name for prefix in ("", "\\*", "\\**", "\\*\\*"))
| (line: str, arg_name: str) -> bool |
37,058 | sphinx_autodoc_typehints | _resolve_type_guarded_imports | null | def _resolve_type_guarded_imports(autodoc_mock_imports: list[str], obj: Any) -> None:
if _should_skip_guarded_import_resolution(obj):
return
if hasattr(obj, "__globals__"):
_TYPE_GUARD_IMPORTS_RESOLVED_GLOBALS_ID.add(id(obj.__globals__))
module = inspect.getmodule(obj)
if module:
try:
module_code = inspect.getsource(module)
except (TypeError, OSError):
... # no source code => no type guards
else:
_TYPE_GUARD_IMPORTS_RESOLVED.add(module.__name__)
_execute_guarded_code(autodoc_mock_imports, obj, module_code)
| (autodoc_mock_imports: list[str], obj: Any) -> NoneType |
37,059 | sphinx_autodoc_typehints | _should_skip_guarded_import_resolution | null | def _should_skip_guarded_import_resolution(obj: Any) -> bool:
if isinstance(obj, types.ModuleType):
return False # Don't skip modules
if not hasattr(obj, "__globals__"):
return True # Skip objects without __globals__
if hasattr(obj, "__module__"):
return obj.__module__ in _TYPE_GUARD_IMPORTS_RESOLVED or obj.__module__ in sys.builtin_module_names
return id(obj.__globals__) in _TYPE_GUARD_IMPORTS_RESOLVED_GLOBALS_ID
| (obj: Any) -> bool |
37,060 | sphinx_autodoc_typehints | add_type_css_class | null | def add_type_css_class(type_rst: str) -> str:
return f":sphinx_autodoc_typehints_type:`{rst.escape(type_rst)}`"
| (type_rst: str) -> str |
37,063 | sphinx_autodoc_typehints | backfill_type_hints |
Backfill type hints.
:param obj: the object
:param name: the name
:return: backfilled value
| def backfill_type_hints(obj: Any, name: str) -> dict[str, Any]: # noqa: C901, PLR0911, PLR0912
"""
Backfill type hints.
:param obj: the object
:param name: the name
:return: backfilled value
"""
parse_kwargs = {"type_comments": True}
def _one_child(module: Module) -> stmt | None:
children = module.body # use the body to ignore type comments
if len(children) != 1:
_LOGGER.warning('Did not get exactly one node from AST for "%s", got %s', name, len(children))
return None
return children[0]
try:
code = textwrap.dedent(normalize_source_lines(inspect.getsource(obj)))
obj_ast = ast.parse(code, **parse_kwargs) # type: ignore[call-overload] # dynamic kwargs
except (OSError, TypeError, SyntaxError):
return {}
obj_ast = _one_child(obj_ast)
if obj_ast is None:
return {}
try:
type_comment = obj_ast.type_comment
except AttributeError:
return {}
if not type_comment:
return {}
try:
comment_args_str, comment_returns = type_comment.split(" -> ")
except ValueError:
_LOGGER.warning('Unparseable type hint comment for "%s": Expected to contain ` -> `', name)
return {}
rv = {}
if comment_returns:
rv["return"] = comment_returns
args = load_args(obj_ast)
comment_args = split_type_comment_args(comment_args_str)
is_inline = len(comment_args) == 1 and comment_args[0] == "..."
if not is_inline:
if args and args[0].arg in {"self", "cls"} and len(comment_args) != len(args):
comment_args.insert(0, None) # self/cls may be omitted in type comments, insert blank
if len(args) != len(comment_args):
_LOGGER.warning('Not enough type comments found on "%s"', name)
return rv
for at, arg in enumerate(args):
arg_key = getattr(arg, "arg", None)
if arg_key is None:
continue
value = getattr(arg, "type_comment", None) if is_inline else comment_args[at]
if value is not None:
rv[arg_key] = value
return rv
| (obj: Any, name: str) -> dict[str, typing.Any] |
37,065 | sphinx_autodoc_typehints | format_annotation |
Format the annotation.
:param annotation:
:param config:
:return:
def format_annotation(annotation: Any, config: Config) -> str: # noqa: C901, PLR0911, PLR0912, PLR0915, PLR0914
    """
    Format *annotation* as a reStructuredText cross-reference string.

    :param annotation: the annotation object to render (a class, a ``typing``
        construct, a ``ForwardRef``, ``None``, ``Ellipsis``, or a bare tuple)
    :param config: the Sphinx config; read for ``typehints_formatter``,
        ``typehints_fully_qualified``, ``always_use_bars_union`` and
        ``simplify_optional_unions``
    :return: an RST snippet cross-referencing the annotation
    """
    # A user-supplied formatter takes precedence; returning None from it
    # falls through to the default rendering, so it can override selectively.
    typehints_formatter: Callable[..., str] | None = getattr(config, "typehints_formatter", None)
    if typehints_formatter is not None:
        formatted = typehints_formatter(annotation, config)
        if formatted is not None:
            return formatted
    # Special cases that never go through module/class-name resolution
    if isinstance(annotation, ForwardRef):
        return annotation.__forward_arg__
    if annotation is None or annotation is type(None):
        return ":py:obj:`None`"
    if annotation is Ellipsis:
        return ":py:data:`...<Ellipsis>`"
    if isinstance(annotation, tuple):
        return format_internal_tuple(annotation, config)
    try:
        module = get_annotation_module(annotation)
        class_name = get_annotation_class_name(annotation, module)
        args = get_annotation_args(annotation, module, class_name)
    except ValueError:
        # Could not resolve the annotation's module -- fall back to its repr
        return str(annotation).strip("'")
    # Redirect all typing_extensions types to the stdlib typing module
    if module == "typing_extensions":
        module = "typing"
    if module == "_io":
        module = "io"
    full_name = f"{module}.{class_name}" if module != "builtins" else class_name
    fully_qualified: bool = getattr(config, "typehints_fully_qualified", False)
    # "~" makes Sphinx display only the last component of a qualified name
    prefix = "" if fully_qualified or full_name == class_name else "~"
    role = "data" if module == "typing" and class_name in _PYDATA_ANNOTATIONS else "class"
    args_format = "\\[{}]"
    formatted_args: str | None = ""
    # NOTE(review): getattr default True here disagrees with the False default
    # registered in setup() for "always_use_bars_union" -- confirm intended.
    always_use_bars_union: bool = getattr(config, "always_use_bars_union", True)
    is_bars_union = full_name == "types.UnionType" or (
        always_use_bars_union and type(annotation).__qualname__ == "_UnionGenericAlias"
    )
    if is_bars_union:
        full_name = ""
    # Some types require special handling
    if full_name == "typing.NewType":
        args_format = f"\\(``{annotation.__name__}``, {{}})"
        role = "class" if sys.version_info >= (3, 10) else "func"
    elif full_name in {"typing.TypeVar", "typing.ParamSpec"}:
        # Render bound/covariant/contravariant flags alongside the name
        params = {k: getattr(annotation, f"__{k}__") for k in ("bound", "covariant", "contravariant")}
        params = {k: v for k, v in params.items() if v}
        if "bound" in params:
            params["bound"] = f" {format_annotation(params['bound'], config)}"
        args_format = f"\\(``{annotation.__name__}``{', {}' if args else ''}"
        if params:
            args_format += "".join(f", {k}={v}" for k, v in params.items())
        args_format += ")"
        formatted_args = None if args else args_format
    elif full_name == "typing.Optional":
        args = tuple(x for x in args if x is not type(None))
    elif full_name in {"typing.Union", "types.UnionType"} and type(None) in args:
        if len(args) == 2: # noqa: PLR2004
            # Union[X, None] collapses to Optional[X]
            full_name = "typing.Optional"
            role = "data"
            args = tuple(x for x in args if x is not type(None))
        else:
            simplify_optional_unions: bool = getattr(config, "simplify_optional_unions", True)
            if not simplify_optional_unions:
                # Render as Optional[Union[...]] instead of a flat Union
                full_name = "typing.Optional"
                role = "data"
                args_format = f"\\[:py:data:`{prefix}typing.Union`\\[{{}}]]"
                args = tuple(x for x in args if x is not type(None))
    elif full_name in {"typing.Callable", "collections.abc.Callable"} and args and args[0] is not ...:
        # Callable[[P1, P2], R]: all but the last arg are parameters
        fmt = [format_annotation(arg, config) for arg in args]
        formatted_args = f"\\[\\[{', '.join(fmt[:-1])}], {fmt[-1]}]"
    elif full_name == "typing.Literal":
        formatted_args = f"\\[{', '.join(f'``{arg!r}``' for arg in args)}]"
    elif is_bars_union:
        return " | ".join([format_annotation(arg, config) for arg in args])
    if args and not formatted_args:
        try:
            iter(args)
        except TypeError:
            # presumably a scalar __args__ value -- TODO confirm which types hit this
            fmt = [format_annotation(args, config)]
        else:
            fmt = [format_annotation(arg, config) for arg in args]
        formatted_args = args_format.format(", ".join(fmt))
    # "\ " suppresses the space Sphinx would otherwise insert before the args
    escape = "\\ " if formatted_args else ""
    return f":py:{role}:`{prefix}{full_name}`{escape}{formatted_args}"
| (annotation: 'Any', config: 'Config') -> 'str' |
37,066 | sphinx_autodoc_typehints | format_default | null | def format_default(app: Sphinx, default: Any, is_annotated: bool) -> str | None: # noqa: FBT001
if default is inspect.Parameter.empty:
return None
formatted = repr(default).replace("\\", "\\\\")
if is_annotated:
if app.config.typehints_defaults.startswith("braces"):
return f" (default: ``{formatted}``)"
return f", default: ``{formatted}``"
if app.config.typehints_defaults == "braces-after":
return f" (default: ``{formatted}``)"
return f"default: ``{formatted}``"
| (app: 'Sphinx', default: 'Any', is_annotated: 'bool') -> 'str | None' |
37,067 | sphinx_autodoc_typehints | format_internal_tuple | null | def format_internal_tuple(t: tuple[Any, ...], config: Config) -> str:
# An annotation can be a tuple, e.g., for nptyping:
# In this case, format_annotation receives:
# This solution should hopefully be general for *any* type that allows tuples in annotations
fmt = [format_annotation(a, config) for a in t]
if len(fmt) == 0:
return "()"
if len(fmt) == 1:
return f"({fmt[0]}, )"
return f"({', '.join(fmt)})"
| (t: 'tuple[Any, ...]', config: 'Config') -> 'str' |
37,068 | sphinx_autodoc_typehints | get_all_type_hints | null | def get_all_type_hints(autodoc_mock_imports: list[str], obj: Any, name: str) -> dict[str, Any]:
result = _get_type_hint(autodoc_mock_imports, name, obj)
if not result:
result = backfill_type_hints(obj, name)
try:
obj.__annotations__ = result
except (AttributeError, TypeError):
pass
else:
result = _get_type_hint(autodoc_mock_imports, name, obj)
return result
| (autodoc_mock_imports: list[str], obj: Any, name: str) -> dict[str, typing.Any] |
37,069 | sphinx_autodoc_typehints | get_annotation_args |
Get annotation arguments.
:param annotation:
:param module:
:param class_name:
:return:
def get_annotation_args(annotation: Any, module: str, class_name: str) -> tuple[Any, ...]:  # noqa: PLR0911
    """
    Get the type arguments an annotation was parametrized with.

    :param annotation: the annotation object
    :param module: the module the annotation belongs to
    :param class_name: the annotation's class name
    :return: a tuple of type arguments; empty for unparametrized annotations
    """
    try:
        unparametrized = getattr(sys.modules[module], class_name)
    except (KeyError, AttributeError):
        pass
    else:
        if annotation is unparametrized:
            return ()  # This is the original, not parametrized type
    # Special cases that store their arguments outside __args__
    if class_name in {"Pattern", "Match"} and hasattr(annotation, "type_var"):  # Python < 3.7
        return (annotation.type_var,)
    if class_name == "ClassVar" and hasattr(annotation, "__type__"):  # ClassVar on Python < 3.7
        return (annotation.__type__,)
    if class_name == "TypeVar" and hasattr(annotation, "__constraints__"):
        return annotation.__constraints__  # type: ignore[no-any-return]
    if class_name == "NewType" and hasattr(annotation, "__supertype__"):
        return (annotation.__supertype__,)
    if class_name == "Literal" and hasattr(annotation, "__values__"):
        return annotation.__values__  # type: ignore[no-any-return]
    if class_name == "Generic":
        return annotation.__parameters__  # type: ignore[no-any-return]
    found = getattr(annotation, "__args__", ())
    # 3.10 and earlier Tuple[()] returns ((), ) instead of () the tuple does
    return () if len(found) == 1 and found[0] == () else found  # type: ignore[misc]
| (annotation: Any, module: str, class_name: str) -> tuple[typing.Any, ...] |
37,070 | sphinx_autodoc_typehints | get_annotation_class_name |
Get class name for annotation.
:param annotation:
:param module:
:return:
def get_annotation_class_name(annotation: Any, module: str) -> str:  # noqa: C901, PLR0911
    """
    Get the class name for an annotation, probing the spots different
    typing constructs store it in.

    :param annotation: the annotation object
    :param module: the module the annotation was resolved to
    :return: the class name to use in the cross-reference
    """
    # Special cases first
    if annotation is None:
        return "None"
    if annotation is AnyStr:
        return "AnyStr"
    types_name = _get_types_type(annotation)
    if types_name is not None:
        return types_name
    if _is_newtype(annotation):
        return "NewType"
    if getattr(annotation, "__qualname__", None):
        return annotation.__qualname__  # type: ignore[no-any-return]
    if getattr(annotation, "_name", None):  # generic aliases on Python 3.7+
        return annotation._name  # type: ignore[no-any-return] # noqa: SLF001
    if module in {"typing", "typing_extensions"} and isinstance(getattr(annotation, "name", None), str):
        # Required for at least Pattern and Match
        return annotation.name  # type: ignore[no-any-return]
    origin = getattr(annotation, "__origin__", None)
    if origin:
        if getattr(origin, "__qualname__", None):  # Required for Protocol subclasses
            return origin.__qualname__  # type: ignore[no-any-return]
        if getattr(origin, "_name", None):  # Required for Union on Python 3.7+
            return origin._name  # type: ignore[no-any-return] # noqa: SLF001
    # Fall back to the class of the annotation itself, dropping a leading "_"
    annotation_cls = annotation if inspect.isclass(annotation) else type(annotation)
    return annotation_cls.__qualname__.lstrip("_")
| (annotation: Any, module: str) -> str |
37,071 | sphinx_autodoc_typehints | get_annotation_module |
Get module for an annotation.
:param annotation:
:return:
def get_annotation_module(annotation: Any) -> str:
    """
    Get the name of the module an annotation belongs to.

    :param annotation: the annotation object
    :return: the module name
    :raises ValueError: when no module can be determined
    """
    if annotation is None:
        return "builtins"
    if _get_types_type(annotation) is not None:
        return "types"
    # NewType is only a class (so isinstance works) from Python 3.10 on
    looks_like_new_type = sys.version_info >= (3, 10) and isinstance(annotation, NewType)
    looks_like_param_spec = type(annotation).__name__ in {"ParamSpec", "ParamSpecArgs", "ParamSpecKwargs"}
    if looks_like_new_type or looks_like_param_spec or isinstance(annotation, TypeVar):
        return "typing"
    if hasattr(annotation, "__module__"):
        return annotation.__module__  # type: ignore[no-any-return]
    if hasattr(annotation, "__origin__"):
        return annotation.__origin__.__module__  # type: ignore[no-any-return]
    msg = f"Cannot determine the module of {annotation}"
    raise ValueError(msg)
| (annotation: Any) -> str |
37,072 | sphinx_autodoc_typehints | get_insert_index | null | def get_insert_index(app: Sphinx, lines: list[str]) -> InsertIndexInfo | None:
# 1. If there is an existing :rtype: anywhere, don't insert anything.
if any(line.startswith(":rtype:") for line in lines):
return None
# 2. If there is a :returns: anywhere, either modify that line or insert
# just before it.
for at, line in enumerate(lines):
if line.startswith((":return:", ":returns:")):
return InsertIndexInfo(insert_index=at, found_return=True)
# 3. Insert after the parameters.
# To find the parameters, parse as a docutils tree.
settings = OptionParser(components=(RSTParser,)).get_default_values()
settings.env = app.env
doc = parse("\n".join(lines), settings)
# Find a top level child which is a field_list that contains a field whose
# name starts with one of the PARAM_SYNONYMS. This is the parameter list. We
# hope there is at most of these.
for child in doc.children:
if tag_name(child) != "field_list":
continue
if not any(c.children[0].astext().startswith(PARAM_SYNONYMS) for c in child.children):
continue
# Found it! Try to insert before the next sibling. If there is no next
# sibling, insert at end.
# If there is a next sibling but we can't locate a line number, insert
# at end. (I don't know of any input where this happens.)
next_sibling = child.next_node(descend=False, siblings=True)
line_no = node_line_no(next_sibling) if next_sibling else None
at = line_no - 2 if line_no else len(lines)
return InsertIndexInfo(insert_index=at, found_param=True)
# 4. Insert before examples
for child in doc.children:
if tag_name(child) in {"literal_block", "paragraph", "field_list"}:
continue
line_no = node_line_no(child)
at = line_no - 2 if line_no else len(lines)
return InsertIndexInfo(insert_index=at, found_directive=True)
# 5. Otherwise, insert at end
return InsertIndexInfo(insert_index=len(lines))
| (app: 'Sphinx', lines: 'list[str]') -> 'InsertIndexInfo | None' |
37,076 | sphinx_autodoc_typehints.patches | install_patches |
Install the patches.
:param app: the Sphinx app
def install_patches(app: Sphinx) -> None:
    """
    Install all runtime monkey-patches this extension relies on.

    Applies the autodoc/napoleon fixes in sequence; each helper documents
    its own patch target.

    :param app: the Sphinx app
    """
    fix_autodoc_typehints_for_overloaded_methods()
    patch_attribute_handling(app)
    _patch_google_docstring_lookup_annotation()
    fix_napoleon_numpy_docstring_return_type(app)
    _patch_line_numbers()
| (app: 'Sphinx') -> 'None' |
37,077 | sphinx_autodoc_typehints | load_args | null | def load_args(obj_ast: FunctionDef) -> list[Any]:
func_args = obj_ast.args
args = []
pos_only = getattr(func_args, "posonlyargs", None)
if pos_only:
args.extend(pos_only)
args.extend(func_args.args)
if func_args.vararg:
args.append(func_args.vararg)
args.extend(func_args.kwonlyargs)
if func_args.kwarg:
args.append(func_args.kwarg)
return args
| (obj_ast: 'FunctionDef') -> 'list[Any]' |
37,079 | sphinx.ext.autodoc.mock | mock | Insert mock modules during context::
with mock(['target.module.name']):
# mock modules are enabled here
...
| null | (modnames: 'list[str]') -> 'Iterator[None]' |
37,080 | sphinx_autodoc_typehints | node_line_no |
Get the 1-indexed line on which the node starts if possible. If not, return None.
Descend through the first children until we locate one with a line number or return None if None of them have one.
I'm not aware of any rst on which this returns None, to find out would require a more detailed analysis of the
docutils rst parser source code. An example where the node doesn't have a line number but the first child does is
all `definition_list` nodes. It seems like bullet_list and option_list get line numbers, but enum_list also doesn't.
def node_line_no(node: Node) -> int | None:
    """Return the 1-indexed start line of *node*, or ``None`` if unknown.

    docutils leaves ``line`` unset on some container nodes (for example every
    ``definition_list``), so descend through first children until one carries
    a line number; give up with ``None`` when none of them do.
    """
    if node is None:
        return None
    current = node
    while current.line is None and current.children:
        current = current.children[0]
    return current.line
| (node: 'Node') -> 'int | None' |
37,082 | sphinx_autodoc_typehints | normalize_source_lines |
Normalize the source lines.
It finds the indentation level of the function definition (`def`), then it indents all lines in the function body to
a point at or greater than that level. This allows for comments and continued string literals that are at a lower
indentation than the rest of the code.
:param source_lines: source code
:return: source lines that have been correctly aligned
def normalize_source_lines(source_lines: str) -> str:
    """
    Re-align source so every line shares the ``def`` line's indentation.

    Finds the function definition line (``def`` or ``async def``), reads its
    leading whitespace, and prefixes that whitespace onto every other line
    (stripping it first when already present).  This tolerates comments and
    continued string literals indented less than the rest of the code.

    :param source_lines: source code
    :return: source lines that have been correctly aligned
    """
    lines = source_lines.split("\n")

    def strip_prefix(text: str, prefix: str) -> str:
        return text[len(prefix):] if text.startswith(prefix) else text

    # Locate the definition line and remember which keyword introduced it
    def_index = None
    keyword = ""
    for lineno, line in enumerate(lines):
        stripped = line.lstrip()
        if stripped.startswith("def "):
            def_index, keyword = lineno, "def"
            break
        if stripped.startswith("async def"):
            def_index, keyword = lineno, "async def"
            break
    if def_index is None:
        return "\n".join(lines)
    def_line = lines[def_index]
    # Everything before the keyword on the def line is its indentation
    indent = def_line.split(keyword)[0]
    realigned = [indent + strip_prefix(line, indent) for line in lines[:def_index]]
    realigned.append(def_line)
    realigned.extend(indent + strip_prefix(line, indent) for line in lines[def_index + 1:])
    return "\n".join(realigned)
| (source_lines: str) -> str |
37,083 | sphinx_autodoc_typehints.parser | parse | Parse inputstr and return a docutils document. | def parse(inputstr: str, settings: Values | optparse.Values) -> nodes.document:
"""Parse inputstr and return a docutils document."""
doc = new_document("", settings=settings)
with sphinx_domains(settings.env):
parser = _RstSnippetParser()
parser.set_application(settings.env.app)
parser.parse(inputstr, doc)
return doc
| (inputstr: 'str', settings: 'Values | optparse.Values') -> 'nodes.document' |
37,086 | sphinx_autodoc_typehints | process_docstring |
Process the docstring for an entry.
:param app: the Sphinx app
:param what: the target
:param name: the name
:param obj: the object
:param options: the options
:param lines: the lines
:return:
def process_docstring( # noqa: PLR0913, PLR0917
    app: Sphinx,
    what: str,
    name: str,
    obj: Any,
    options: Options | None, # noqa: ARG001
    lines: list[str],
) -> None:
    """
    Process the docstring for an entry (``autodoc-process-docstring`` handler).

    Resolves the documented callable (property getter, class ``__init__``,
    unwrapped decorator target), collects its type hints, and hands them to
    the injector together with the docstring lines.

    :param app: the Sphinx app
    :param what: the target type (module/class/method/...)
    :param name: the fully qualified name of the object
    :param obj: the object being documented
    :param options: the autodoc options (unused)
    :param lines: the docstring lines, passed to the injector
    :return:
    """
    original_obj = obj
    # Document the getter of a property, not the property object itself
    obj = obj.fget if isinstance(obj, property) else obj
    if not callable(obj):
        return
    # For classes the hints of interest are on the constructor
    obj = obj.__init__ if inspect.isclass(obj) else obj
    obj = inspect.unwrap(obj)
    try:
        signature = sphinx_signature(obj)
    except (ValueError, TypeError):
        signature = None
    type_hints = get_all_type_hints(app.config.autodoc_mock_imports, obj, name)
    # Stash the object's globals on the config so downstream formatting can
    # resolve names; always cleaned up in the finally below.
    app.config._annotation_globals = getattr(obj, "__globals__", {})  # type: ignore[attr-defined] # noqa: SLF001
    try:
        _inject_types_to_docstring(type_hints, signature, original_obj, app, what, name, lines)
    finally:
        delattr(app.config, "_annotation_globals")
| (app: 'Sphinx', what: 'str', name: 'str', obj: 'Any', options: 'Options | None', lines: 'list[str]') -> 'None' |
37,087 | sphinx_autodoc_typehints | process_signature |
Process the signature.
:param app:
:param what:
:param name:
:param obj:
:param options:
:param signature:
:param return_annotation:
:return:
def process_signature( # noqa: C901, PLR0913, PLR0917
    app: Sphinx,
    what: str,
    name: str,
    obj: Any,
    options: Options, # noqa: ARG001
    signature: str, # noqa: ARG001
    return_annotation: str, # noqa: ARG001
) -> tuple[str, None] | None:
    """
    Process the signature (``autodoc-process-signature`` handler).

    Rebuilds the displayed signature from the object's real signature,
    optionally stripping annotations and the implicit first argument
    (``self``/``cls``).

    :param app: the Sphinx app
    :param what: the target type (module/class/method/...)
    :param name: the fully qualified name of the object
    :param obj: the object being documented
    :param options: the autodoc options (unused)
    :param signature: the autodoc-provided signature string (unused)
    :param return_annotation: the autodoc-provided return annotation (unused)
    :return: a ``(signature, return_annotation)`` pair, or ``None`` to keep
        autodoc's default
    """
    if not callable(obj):
        return None
    original_obj = obj
    # For classes, the signature of interest is the constructor's
    obj = getattr(obj, "__init__", getattr(obj, "__new__", None)) if inspect.isclass(obj) else obj
    if not getattr(obj, "__annotations__", None): # when has no annotation we cannot autodoc typehints so bail
        return None
    obj = inspect.unwrap(obj)
    sph_signature = sphinx_signature(obj)
    if app.config.typehints_use_signature:
        parameters = list(sph_signature.parameters.values())
    else:
        # Drop per-parameter annotations; they are documented separately
        parameters = [param.replace(annotation=inspect.Parameter.empty) for param in sph_signature.parameters.values()]
    # if we have parameters we may need to delete first argument that's not documented, e.g. self
    start = 0
    if parameters:
        if inspect.isclass(original_obj) or (what == "method" and name.endswith(".__init__")):
            start = 1
        elif what == "method":
            # bail if it is a local method as we cannot determine if first argument needs to be deleted or not
            if "<locals>" in obj.__qualname__ and not _is_dataclass(name, what, obj.__qualname__):
                _LOGGER.warning('Cannot handle as a local function: "%s" (use @functools.wraps)', name)
                return None
            # Walk the qualname to find the raw attribute on the defining class,
            # so classmethod/staticmethod wrappers are still visible
            outer = inspect.getmodule(obj)
            for class_name in obj.__qualname__.split(".")[:-1]:
                outer = getattr(outer, class_name)
            method_name = obj.__name__
            if method_name.startswith("__") and not method_name.endswith("__"):
                # when method starts with double underscore Python applies mangling -> prepend the class name
                method_name = f"_{obj.__qualname__.split('.')[-2]}{method_name}"
            method_object = outer.__dict__[method_name] if outer else obj
            if not isinstance(method_object, (classmethod, staticmethod)):
                start = 1
    sph_signature = sph_signature.replace(parameters=parameters[start:])
    show_return_annotation = app.config.typehints_use_signature_return
    unqualified_typehints = not getattr(app.config, "typehints_fully_qualified", False)
    # Double the backslashes so RST processing keeps them intact
    return (
        stringify_signature(
            sph_signature,
            show_return_annotation=show_return_annotation,
            unqualified_typehints=unqualified_typehints,
        ).replace("\\", "\\\\"),
        None,
    )
| (app: 'Sphinx', what: 'str', name: 'str', obj: 'Any', options: 'Options', signature: 'str', return_annotation: 'str') -> 'tuple[str, None] | None' |
37,090 | sphinx_autodoc_typehints | setup | null | def setup(app: Sphinx) -> dict[str, bool]:
app.add_config_value("always_document_param_types", False, "html") # noqa: FBT003
app.add_config_value("typehints_fully_qualified", False, "env") # noqa: FBT003
app.add_config_value("typehints_document_rtype", True, "env") # noqa: FBT003
app.add_config_value("typehints_use_rtype", True, "env") # noqa: FBT003
app.add_config_value("typehints_defaults", None, "env")
app.add_config_value("simplify_optional_unions", True, "env") # noqa: FBT003
app.add_config_value("always_use_bars_union", False, "env") # noqa: FBT003
app.add_config_value("typehints_formatter", None, "env")
app.add_config_value("typehints_use_signature", False, "env") # noqa: FBT003
app.add_config_value("typehints_use_signature_return", False, "env") # noqa: FBT003
app.add_role("sphinx_autodoc_typehints_type", sphinx_autodoc_typehints_type_role)
app.connect("env-before-read-docs", validate_config) # config may be changed after “config-inited” event
app.connect("autodoc-process-signature", process_signature)
app.connect("autodoc-process-docstring", process_docstring)
install_patches(app)
return {"parallel_read_safe": True, "parallel_write_safe": True}
| (app: 'Sphinx') -> 'dict[str, bool]' |
37,091 | sphinx_autodoc_typehints | sphinx_autodoc_typehints_type_role |
Add css tag around rendered type.
The body should be escaped rst. This renders its body as rst and wraps the
result in <span class="sphinx_autodoc_typehints-type"> </span>
def sphinx_autodoc_typehints_type_role(
    _role: str,
    _rawtext: str,
    text: str,
    _lineno: int,
    inliner: states.Inliner,
    _options: dict[str, Any] | None = None,
    _content: list[str] | None = None,
) -> tuple[list[Node], list[Node]]:
    """
    Add a css tag around a rendered type.

    The body should be escaped rst. This renders its body as rst and wraps the
    result in <span class="sphinx_autodoc_typehints-type"> </span>.

    :return: the usual docutils role result: ``(nodes, system_messages)``
    """
    unescaped = unescape(text)
    doc = parse(unescaped, inliner.document.settings)
    n = nodes.inline(text)
    n["classes"].append("sphinx_autodoc_typehints-type")
    # Graft the parsed snippet's inline children into our span.
    # NOTE(review): assumes the parse yields exactly one top-level node -- confirm
    n += doc.children[0].children
    return [n], []
| (_role: 'str', _rawtext: 'str', text: 'str', _lineno: 'int', inliner: 'states.Inliner', _options: 'dict[str, Any] | None' = None, _content: 'list[str] | None' = None) -> 'tuple[list[Node], list[Node]]' |
37,092 | sphinx.util.inspect | signature | Return a Signature object for the given *subject*.
:param bound_method: Specify *subject* is a bound method or not
def signature(
    subject: Callable[..., Any],
    bound_method: bool = False,
    type_aliases: Mapping[str, str] | None = None,
) -> Signature:
    """Return a Signature object for the given *subject*.

    :param subject: the callable to inspect
    :param bound_method: Specify *subject* is a bound method or not
    :param type_aliases: mapping of type-alias names used to resolve annotations
    """
    if type_aliases is None:
        type_aliases = {}
    try:
        if _should_unwrap(subject):
            signature = inspect.signature(subject)
        else:
            signature = inspect.signature(subject, follow_wrapped=True)
    except ValueError:
        # follow built-in wrappers up (ex. functools.lru_cache)
        signature = inspect.signature(subject)
    parameters = list(signature.parameters.values())
    return_annotation = signature.return_annotation
    try:
        # Resolve annotations using ``get_type_hints()`` and type_aliases.
        localns = TypeAliasNamespace(type_aliases)
        annotations = typing.get_type_hints(subject, None, localns)
        for i, param in enumerate(parameters):
            if param.name in annotations:
                annotation = annotations[param.name]
                # Alias forward references keep their textual name
                if isinstance(annotation, TypeAliasForwardRef):
                    annotation = annotation.name
                parameters[i] = param.replace(annotation=annotation)
        if 'return' in annotations:
            if isinstance(annotations['return'], TypeAliasForwardRef):
                return_annotation = annotations['return'].name
            else:
                return_annotation = annotations['return']
    except Exception:
        # ``get_type_hints()`` does not support some kind of objects like partial,
        # ForwardRef and so on.
        pass
    if bound_method:
        if inspect.ismethod(subject):
            # ``inspect.signature()`` considers the subject is a bound method and removes
            # first argument from signature. Therefore no skips are needed here.
            pass
        else:
            if len(parameters) > 0:
                parameters.pop(0)
    # To allow to create signature object correctly for pure python functions,
    # pass an internal parameter __validate_parameters__=False to Signature
    #
    # For example, this helps a function having a default value `inspect._empty`.
    # refs: https://github.com/sphinx-doc/sphinx/issues/7935
    return Signature(
        parameters, return_annotation=return_annotation, __validate_parameters__=False
    )
| (subject: 'Callable[..., Any]', bound_method: 'bool' = False, type_aliases: 'Mapping[str, str] | None' = None) -> 'Signature' |
37,093 | sphinx_autodoc_typehints | split_type_comment_args | null | def split_type_comment_args(comment: str) -> list[str | None]:
def add(val: str) -> None:
result.append(val.strip().lstrip("*")) # remove spaces, and var/kw arg marker
comment = comment.strip().lstrip("(").rstrip(")")
result: list[str | None] = []
if not comment:
return result
brackets, start_arg_at, at = 0, 0, 0
for at, char in enumerate(comment):
if char in {"[", "("}:
brackets += 1
elif char in {"]", ")"}:
brackets -= 1
elif char == "," and brackets == 0:
add(comment[start_arg_at:at])
start_arg_at = at + 1
add(comment[start_arg_at : at + 1])
return result
| (comment: str) -> list[str | None] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.