index
int64
0
731k
package
stringlengths
2
98
name
stringlengths
1
76
docstring
stringlengths
0
281k
code
stringlengths
4
1.07M
signature
stringlengths
2
42.8k
35,205
bqplot.traits
array_squeeze
null
def array_squeeze(trait, value): if len(value.shape) > 1: return np.squeeze(value) else: return value
(trait, value)
35,206
bqplot.traits
array_supported_kinds
null
def array_supported_kinds(kinds='biufMSUO'): def validator(trait, value): if value.dtype.kind not in kinds: raise TraitError('Array type not supported for trait %s of class %s: expected a \ array of kind in list %r and got an array of type %s (kind %s)' % ( trait.name, trait.this_class, list(kinds), value.dtype, value.dtype.kind)) return value return validator
(kinds='biufMSUO')
35,218
ipywidgets.widgets.widget
register
A decorator registering a widget class in the widget registry.
def register(widget): """A decorator registering a widget class in the widget registry.""" w = widget.class_traits() _registry.register(w['_model_module'].default_value, w['_model_module_version'].default_value, w['_model_name'].default_value, w['_view_module'].default_value, w['_view_module_version'].default_value, w['_view_name'].default_value, widget) return widget
(widget)
35,219
bqplot.axes
register_axis
Returns a decorator registering an axis class in the axis type registry. If no key is provided, the class name is used as a key. A key is provided for each core bqplot axis so that the frontend can use this key regardless of the kernel language.
def register_axis(key=None): """Returns a decorator registering an axis class in the axis type registry. If no key is provided, the class name is used as a key. A key is provided for each core bqplot axis so that the frontend can use this key regardless of the kernel language. """ def wrap(axis): name = key if key is not None else axis.__module__ + axis.__name__ BaseAxis.axis_types[name] = axis return axis return wrap
(key=None)
35,220
bqplot.marks
register_mark
Returns a decorator registering a mark class in the mark type registry. If no key is provided, the class name is used as a key. A key is provided for each core bqplot mark so that the frontend can use this key regardless of the kernel language.
def register_mark(key=None): """Returns a decorator registering a mark class in the mark type registry. If no key is provided, the class name is used as a key. A key is provided for each core bqplot mark so that the frontend can use this key regardless of the kernel language. """ def wrap(mark): name = key if key is not None else mark.__module__ + mark.__name__ Mark.mark_types[name] = mark return mark return wrap
(key=None)
35,221
bqplot.scales
register_scale
Returns a decorator to register a scale type in the scale type registry. If no key is provided, the class name is used as a key. A key is provided for each core bqplot scale type so that the frontend can use this key regardless of the kernel language.
def register_scale(key=None): """Returns a decorator to register a scale type in the scale type registry. If no key is provided, the class name is used as a key. A key is provided for each core bqplot scale type so that the frontend can use this key regardless of the kernel language. """ def wrap(scale): label = key if key is not None else scale.__module__ + scale.__name__ Scale.scale_types[label] = scale return scale return wrap
(key=None)
35,223
bqplot.marks
shape
null
def shape(*dimensions): def validator(trait, value): err_msg_tmpl = 'Expected an array of shape {} ' + \ 'but got an array of shape {}' if value.shape != dimensions: raise TraitError(err_msg_tmpl.format(dimensions, value.shape)) else: return value return validator
(*dimensions)
35,225
bqplot.marks
topo_load
null
def topo_load(name): with open(os.path.join(os.path.split(os.path.realpath(__file__))[0], name)) as data_file: data = json.load(data_file) return data
(name)
35,229
distro_info
DebianDistroInfo
provides information about Debian's distributions
class DebianDistroInfo(DistroInfo): """provides information about Debian's distributions""" def __init__(self): super().__init__("Debian") def codename(self, release, date=None, default=None): """Map 'unstable', 'testing', etc. to their codenames.""" if release == "unstable": codename = self.devel(date) elif release == "testing": codename = self.testing(date) elif release == "stable": codename = self.stable(date) elif release == "oldstable": codename = self.old(date) else: codename = default return codename def devel(self, date=None, result="codename"): """Get latest development distribution based on the given date.""" if date is None: date = self._date distros = [ x for x in self._avail(date) if x.release is None or (date < x.release and (x.eol is None or date <= x.eol)) ] if len(distros) < 2: raise DistroDataOutdated() return self._format(result, distros[-2]) def old(self, date=None, result="codename"): """Get old (stable) Debian distribution based on the given date.""" if date is None: date = self._date distros = [x for x in self._avail(date) if x.release is not None and date >= x.release] if len(distros) < 2: raise DistroDataOutdated() return self._format(result, distros[-2]) def supported(self, date=None, result="codename"): """Get list of all supported Debian distributions based on the given date.""" if date is None: date = self._date distros = [ self._format(result, x) for x in self._avail(date) if x.eol is None or date <= x.eol ] return distros def lts_supported(self, date=None, result="codename"): """Get list of all LTS supported Debian distributions based on the given date.""" if date is None: date = self._date distros = [ self._format(result, x) for x in self._avail(date) if (x.eol is not None and date > x.eol) and (x.eol_lts is not None and date <= x.eol_lts) ] return distros def elts_supported(self, date=None, result="codename"): """Get list of all Extended LTS supported Debian distributions based on the given date.""" if date is None: date = 
self._date distros = [ self._format(result, x) for x in self._avail(date) if (x.eol_lts is not None and date > x.eol_lts) and (x.eol_elts is not None and date <= x.eol_elts) ] return distros def testing(self, date=None, result="codename"): """Get latest testing Debian distribution based on the given date.""" if date is None: date = self._date distros = [ x for x in self._avail(date) if (x.release is None and x.version) or (x.release is not None and date < x.release and (x.eol is None or date <= x.eol)) ] if not distros: raise DistroDataOutdated() return self._format(result, distros[-1]) def valid(self, codename): """Check if the given codename is known.""" return DistroInfo.valid(self, codename) or codename in [ "unstable", "testing", "stable", "oldstable", ]
()
35,230
distro_info
__init__
null
def __init__(self): super().__init__("Debian")
(self)
35,231
distro_info
_avail
Return all distributions that were available on the given date.
def _avail(self, date): """Return all distributions that were available on the given date.""" return [x for x in self._releases if date >= x.created]
(self, date)
35,232
distro_info
_format
Format a given distribution entry.
def _format(self, format_string, release): """Format a given distribution entry.""" if format_string == "object": return release if format_string == "codename": return release.series if format_string == "fullname": return self._distro + " " + release.version + ' "' + release.codename + '"' if format_string == "release": return release.version raise ValueError( "Only codename, fullname, object, and release are allowed " "result values, but not '" + format_string + "'." )
(self, format_string, release)
35,233
distro_info
codename
Map 'unstable', 'testing', etc. to their codenames.
def codename(self, release, date=None, default=None): """Map 'unstable', 'testing', etc. to their codenames.""" if release == "unstable": codename = self.devel(date) elif release == "testing": codename = self.testing(date) elif release == "stable": codename = self.stable(date) elif release == "oldstable": codename = self.old(date) else: codename = default return codename
(self, release, date=None, default=None)
35,234
distro_info
devel
Get latest development distribution based on the given date.
def devel(self, date=None, result="codename"): """Get latest development distribution based on the given date.""" if date is None: date = self._date distros = [ x for x in self._avail(date) if x.release is None or (date < x.release and (x.eol is None or date <= x.eol)) ] if len(distros) < 2: raise DistroDataOutdated() return self._format(result, distros[-2])
(self, date=None, result='codename')
35,235
distro_info
elts_supported
Get list of all Extended LTS supported Debian distributions based on the given date.
def elts_supported(self, date=None, result="codename"): """Get list of all Extended LTS supported Debian distributions based on the given date.""" if date is None: date = self._date distros = [ self._format(result, x) for x in self._avail(date) if (x.eol_lts is not None and date > x.eol_lts) and (x.eol_elts is not None and date <= x.eol_elts) ] return distros
(self, date=None, result='codename')
35,236
distro_info
get_all
List all known distributions.
def get_all(self, result="codename"): """List all known distributions.""" return [self._format(result, x) for x in self._releases]
(self, result='codename')
35,237
distro_info
lts_supported
Get list of all LTS supported Debian distributions based on the given date.
def lts_supported(self, date=None, result="codename"): """Get list of all LTS supported Debian distributions based on the given date.""" if date is None: date = self._date distros = [ self._format(result, x) for x in self._avail(date) if (x.eol is not None and date > x.eol) and (x.eol_lts is not None and date <= x.eol_lts) ] return distros
(self, date=None, result='codename')
35,238
distro_info
old
Get old (stable) Debian distribution based on the given date.
def old(self, date=None, result="codename"): """Get old (stable) Debian distribution based on the given date.""" if date is None: date = self._date distros = [x for x in self._avail(date) if x.release is not None and date >= x.release] if len(distros) < 2: raise DistroDataOutdated() return self._format(result, distros[-2])
(self, date=None, result='codename')
35,239
distro_info
stable
Get latest stable distribution based on the given date.
def stable(self, date=None, result="codename"): """Get latest stable distribution based on the given date.""" if date is None: date = self._date distros = [ x for x in self._avail(date) if x.release is not None and date >= x.release and (x.eol is None or date <= x.eol) ] if not distros: raise DistroDataOutdated() return self._format(result, distros[-1])
(self, date=None, result='codename')
35,240
distro_info
supported
Get list of all supported Debian distributions based on the given date.
def supported(self, date=None, result="codename"): """Get list of all supported Debian distributions based on the given date.""" if date is None: date = self._date distros = [ self._format(result, x) for x in self._avail(date) if x.eol is None or date <= x.eol ] return distros
(self, date=None, result='codename')
35,241
distro_info
testing
Get latest testing Debian distribution based on the given date.
def testing(self, date=None, result="codename"): """Get latest testing Debian distribution based on the given date.""" if date is None: date = self._date distros = [ x for x in self._avail(date) if (x.release is None and x.version) or (x.release is not None and date < x.release and (x.eol is None or date <= x.eol)) ] if not distros: raise DistroDataOutdated() return self._format(result, distros[-1])
(self, date=None, result='codename')
35,242
distro_info
unsupported
Get list of all unsupported distributions based on the given date.
def unsupported(self, date=None, result="codename"): """Get list of all unsupported distributions based on the given date.""" if date is None: date = self._date supported = self.supported(date) distros = [self._format(result, x) for x in self._avail(date) if x.series not in supported] return distros
(self, date=None, result='codename')
35,243
distro_info
valid
Check if the given codename is known.
def valid(self, codename): """Check if the given codename is known.""" return DistroInfo.valid(self, codename) or codename in [ "unstable", "testing", "stable", "oldstable", ]
(self, codename)
35,244
distro_info
version
Map codename or series to version
def version(self, name, default=None): """Map codename or series to version""" for release in self._releases: if name in (release.codename, release.series): return release.version return default
(self, name, default=None)
35,245
distro_info
DistroDataOutdated
Distribution data outdated.
class DistroDataOutdated(Exception): """Distribution data outdated.""" def __init__(self): super().__init__( "Distribution data outdated. Please check for an update for distro-info-data. " "See /usr/share/doc/distro-info-data/README.Debian for details." )
()
35,246
distro_info
__init__
null
def __init__(self): super().__init__( "Distribution data outdated. Please check for an update for distro-info-data. " "See /usr/share/doc/distro-info-data/README.Debian for details." )
(self)
35,247
distro_info
DistroInfo
Base class for distribution information. Use DebianDistroInfo or UbuntuDistroInfo instead of using this directly.
class DistroInfo(object): """Base class for distribution information. Use DebianDistroInfo or UbuntuDistroInfo instead of using this directly. """ def __init__(self, distro): self._distro = distro filename = os.path.join(_get_data_dir(), distro.lower() + ".csv") csvfile = open(filename) csv_reader = csv.DictReader(csvfile) self._releases = [] for row in csv_reader: release = DistroRelease( row["version"], row["codename"], row["series"], _get_date(row, "created"), _get_date(row, "release"), _get_date(row, "eol"), _get_date(row, "eol-esm"), _get_date(row, "eol-lts"), _get_date(row, "eol-elts"), _get_date(row, "eol-server"), ) self._releases.append(release) csvfile.close() self._date = datetime.date.today() @property def all(self): """List codenames of all known distributions.""" return [x.series for x in self._releases] def get_all(self, result="codename"): """List all known distributions.""" return [self._format(result, x) for x in self._releases] def _avail(self, date): """Return all distributions that were available on the given date.""" return [x for x in self._releases if date >= x.created] def codename(self, release, date=None, default=None): """Map codename aliases to the codename they describe.""" # pylint: disable=no-self-use,unused-argument return release def version(self, name, default=None): """Map codename or series to version""" for release in self._releases: if name in (release.codename, release.series): return release.version return default def devel(self, date=None, result="codename"): """Get latest development distribution based on the given date.""" if date is None: date = self._date distros = [ x for x in self._avail(date) if x.release is None or (date < x.release and (x.eol is None or date <= x.eol)) ] if not distros: raise DistroDataOutdated() return self._format(result, distros[-1]) def _format(self, format_string, release): """Format a given distribution entry.""" if format_string == "object": return release if format_string == "codename": 
return release.series if format_string == "fullname": return self._distro + " " + release.version + ' "' + release.codename + '"' if format_string == "release": return release.version raise ValueError( "Only codename, fullname, object, and release are allowed " "result values, but not '" + format_string + "'." ) def stable(self, date=None, result="codename"): """Get latest stable distribution based on the given date.""" if date is None: date = self._date distros = [ x for x in self._avail(date) if x.release is not None and date >= x.release and (x.eol is None or date <= x.eol) ] if not distros: raise DistroDataOutdated() return self._format(result, distros[-1]) def supported(self, date=None, result=None): """Get list of all supported distributions based on the given date.""" raise NotImplementedError() def valid(self, codename): """Check if the given codename is known.""" return codename in self.all def unsupported(self, date=None, result="codename"): """Get list of all unsupported distributions based on the given date.""" if date is None: date = self._date supported = self.supported(date) distros = [self._format(result, x) for x in self._avail(date) if x.series not in supported] return distros
(distro)
35,248
distro_info
__init__
null
def __init__(self, distro): self._distro = distro filename = os.path.join(_get_data_dir(), distro.lower() + ".csv") csvfile = open(filename) csv_reader = csv.DictReader(csvfile) self._releases = [] for row in csv_reader: release = DistroRelease( row["version"], row["codename"], row["series"], _get_date(row, "created"), _get_date(row, "release"), _get_date(row, "eol"), _get_date(row, "eol-esm"), _get_date(row, "eol-lts"), _get_date(row, "eol-elts"), _get_date(row, "eol-server"), ) self._releases.append(release) csvfile.close() self._date = datetime.date.today()
(self, distro)
35,251
distro_info
codename
Map codename aliases to the codename they describe.
def codename(self, release, date=None, default=None): """Map codename aliases to the codename they describe.""" # pylint: disable=no-self-use,unused-argument return release
(self, release, date=None, default=None)
35,252
distro_info
devel
Get latest development distribution based on the given date.
def devel(self, date=None, result="codename"): """Get latest development distribution based on the given date.""" if date is None: date = self._date distros = [ x for x in self._avail(date) if x.release is None or (date < x.release and (x.eol is None or date <= x.eol)) ] if not distros: raise DistroDataOutdated() return self._format(result, distros[-1])
(self, date=None, result='codename')
35,255
distro_info
supported
Get list of all supported distributions based on the given date.
def supported(self, date=None, result=None): """Get list of all supported distributions based on the given date.""" raise NotImplementedError()
(self, date=None, result=None)
35,257
distro_info
valid
Check if the given codename is known.
def valid(self, codename): """Check if the given codename is known.""" return codename in self.all
(self, codename)
35,259
distro_info
DistroRelease
Represents a distributions release
class DistroRelease(object): """Represents a distributions release""" # pylint: disable=too-few-public-methods # pylint: disable=too-many-instance-attributes def __init__( self, version, codename, series, created=None, release=None, eol=None, eol_esm=None, eol_lts=None, eol_elts=None, eol_server=None, ): # pylint: disable=too-many-arguments self.version = version self.codename = codename self.series = series self.created = created self.release = release self.eol = eol self.eol_lts = eol_lts self.eol_elts = eol_elts self.eol_esm = eol_esm self.eol_server = eol_server def is_supported(self, date): """Check whether this release is supported on the given date.""" return date >= self.created and ( self.eol is None or date <= self.eol or (self.eol_server is not None and date <= self.eol_server) )
(version, codename, series, created=None, release=None, eol=None, eol_esm=None, eol_lts=None, eol_elts=None, eol_server=None)
35,260
distro_info
__init__
null
def __init__( self, version, codename, series, created=None, release=None, eol=None, eol_esm=None, eol_lts=None, eol_elts=None, eol_server=None, ): # pylint: disable=too-many-arguments self.version = version self.codename = codename self.series = series self.created = created self.release = release self.eol = eol self.eol_lts = eol_lts self.eol_elts = eol_elts self.eol_esm = eol_esm self.eol_server = eol_server
(self, version, codename, series, created=None, release=None, eol=None, eol_esm=None, eol_lts=None, eol_elts=None, eol_server=None)
35,261
distro_info
is_supported
Check whether this release is supported on the given date.
def is_supported(self, date): """Check whether this release is supported on the given date.""" return date >= self.created and ( self.eol is None or date <= self.eol or (self.eol_server is not None and date <= self.eol_server) )
(self, date)
35,262
distro_info
UbuntuDistroInfo
provides information about Ubuntu's distributions
class UbuntuDistroInfo(DistroInfo): """provides information about Ubuntu's distributions""" def __init__(self): super().__init__("Ubuntu") def lts(self, date=None, result="codename"): """Get latest long term support (LTS) Ubuntu distribution based on the given date.""" if date is None: date = self._date distros = [ x for x in self._releases if x.version.find("LTS") >= 0 and x.release <= date <= x.eol ] if not distros: raise DistroDataOutdated() return self._format(result, distros[-1]) def is_lts(self, codename): """Is codename an LTS release?""" distros = [x for x in self._releases if x.series == codename] if not distros: return False return "LTS" in distros[0].version def supported(self, date=None, result="codename"): """Get list of all supported Ubuntu distributions based on the given date.""" if date is None: date = self._date distros = [ self._format(result, x) for x in self._avail(date) if date <= x.eol or (x.eol_server is not None and date <= x.eol_server) ] return distros def supported_esm(self, date=None, result="codename"): """Get list of all ESM supported Ubuntu distributions based on the given date.""" if date is None: date = self._date distros = [ self._format(result, x) for x in self._avail(date) if x.eol_esm is not None and date <= x.eol_esm ] return distros
()
35,263
distro_info
__init__
null
def __init__(self): super().__init__("Ubuntu")
(self)
35,269
distro_info
is_lts
Is codename an LTS release?
def is_lts(self, codename): """Is codename an LTS release?""" distros = [x for x in self._releases if x.series == codename] if not distros: return False return "LTS" in distros[0].version
(self, codename)
35,270
distro_info
lts
Get latest long term support (LTS) Ubuntu distribution based on the given date.
def lts(self, date=None, result="codename"): """Get latest long term support (LTS) Ubuntu distribution based on the given date.""" if date is None: date = self._date distros = [ x for x in self._releases if x.version.find("LTS") >= 0 and x.release <= date <= x.eol ] if not distros: raise DistroDataOutdated() return self._format(result, distros[-1])
(self, date=None, result='codename')
35,272
distro_info
supported
Get list of all supported Ubuntu distributions based on the given date.
def supported(self, date=None, result="codename"): """Get list of all supported Ubuntu distributions based on the given date.""" if date is None: date = self._date distros = [ self._format(result, x) for x in self._avail(date) if date <= x.eol or (x.eol_server is not None and date <= x.eol_server) ] return distros
(self, date=None, result='codename')
35,273
distro_info
supported_esm
Get list of all ESM supported Ubuntu distributions based on the given date.
def supported_esm(self, date=None, result="codename"): """Get list of all ESM supported Ubuntu distributions based on the given date.""" if date is None: date = self._date distros = [ self._format(result, x) for x in self._avail(date) if x.eol_esm is not None and date <= x.eol_esm ] return distros
(self, date=None, result='codename')
35,277
distro_info
_get_data_dir
Get the data directory based on the module location.
def _get_data_dir(): """Get the data directory based on the module location.""" return "/usr/share/distro-info"
()
35,278
distro_info
_get_date
null
def _get_date(row, column): return convert_date(row[column]) if column in row else None
(row, column)
35,279
distro_info
convert_date
Convert a date string in ISO 8601 into a datetime object.
def convert_date(string): """Convert a date string in ISO 8601 into a datetime object.""" if not string: date = None else: parts = [int(x) for x in string.split("-")] if len(parts) == 3: (year, month, day) = parts date = datetime.date(year, month, day) elif len(parts) == 2: (year, month) = parts if month == 12: date = datetime.date(year, month, 31) else: date = datetime.date(year, month + 1, 1) - datetime.timedelta(1) else: raise ValueError("Date not in ISO 8601 format.") return date
(string)
35,283
pycoparsec.parser
Parser
This class implements a parser-combinator style parser on arbitrary None-less iterators. :ivar matcher: The meat-and-potatoes of the parser. Takes in the next token from the stream, and the rest of the token iterator. Gives back either a constructed *output object* or None if the parse failed. :ivar choices: A list of other Parsers to try in order if this Parser fails. By default, a newly constructed parser always fails, so something like ``Parser().choice(parser1, parser2)`` will always defer to ``parser1`` and then ``parser2``.
class Parser(Generic[S, O]): """This class implements a parser-combinator style parser on arbitrary None-less iterators. :ivar matcher: The meat-and-potatoes of the parser. Takes in the next token from the stream, and the rest of the token iterator. Gives back either a constructed *output object* or None if the parse failed. :ivar choices: A list of other Parsers to try in order if this Parser fails. By default, a newly constructed parser always fails, so something like ``Parser().choice(parser1, parser2)`` will always defer to ``parser1`` and then ``parser2``. """ def __init__(self) -> None: self.matcher: Callable[[S, Iterator[S]], Optional[O]] = lambda tok, rest: None self.choices: List["Parser[S, O]"] = [] def exactly(self, token: S, factory: Callable[[S], O]) -> "Parser[S, O]": """Match one element of the input stream exactly, then exit.""" self.matcher = lambda tok, rest: factory(tok) if tok == token else None return self def then(self, parser: "Parser[S, O]") -> "Parser[S, O]": """Chain another parser onto this one, linking their success states together. Successful parse chains call `__add__` on the output object to append them together. If the default behavior of `__add__` does not support the behavior you want, please make a new class which overrides `__add__` and inherits behavior from your desired output type. """ capturedMatcher = self.matcher def _matcher(tok, rest): if (out := capturedMatcher(tok, rest)) is None: return None try: subout = parser.run(rest) if subout is None: return None return out + subout except FailedParsing: return None self.matcher = _matcher return self def choice(self, choices: List["Parser[S, O]"]) -> "Parser[S, O]": """Add a list of alternative Parsers in the case that this Parser fails.""" self.choices.extend(choices) return self def __ror__(self, other: "Parser[S, O]") -> "Parser[S, O]": """Cute syntax for supplying alternatives. 
Allows you to use something like ``(parser1 | parser2).run()``""" self.choice([other]) return self def __or__(self, other: "Parser[S, O]") -> "Parser[S, O]": """Cute syntax for supplying alternatives. Allows you to use something like ``(parser1 | parser2).run()``""" self.choice([other]) return self def run(self, iter: Iterator[S]) -> O: """Run this parser.""" ourTee, *tees = tee(iter, len(self.choices) + 1) tok = next(ourTee, None) if (out := self.matcher(tok, ourTee)) is not None: return out for teenum, subparser in enumerate(self.choices): try: subout = subparser.run(tees[teenum]) if subout is not None: return subout except FailedParsing: print(f"Failed subparser {teenum}") raise FailedParsing
() -> None
35,284
pycoparsec.parser
__init__
null
def __init__(self) -> None: self.matcher: Callable[[S, Iterator[S]], Optional[O]] = lambda tok, rest: None self.choices: List["Parser[S, O]"] = []
(self) -> NoneType
35,285
pycoparsec.parser
__or__
Cute syntax for supplying alternatives. Allows you to use something like ``(parser1 | parser2).run()``
def __or__(self, other: "Parser[S, O]") -> "Parser[S, O]": """Cute syntax for supplying alternatives. Allows you to use something like ``(parser1 | parser2).run()``""" self.choice([other]) return self
(self, other: pycoparsec.parser.Parser[~S, ~O]) -> pycoparsec.parser.Parser[~S, ~O]
35,286
pycoparsec.parser
__ror__
Cute syntax for supplying alternatives. Allows you to use something like ``(parser1 | parser2).run()``
def __ror__(self, other: "Parser[S, O]") -> "Parser[S, O]": """Cute syntax for supplying alternatives. Allows you to use something like ``(parser1 | parser2).run()``""" self.choice([other]) return self
(self, other: pycoparsec.parser.Parser[~S, ~O]) -> pycoparsec.parser.Parser[~S, ~O]
35,287
pycoparsec.parser
choice
Add a list of alternative Parsers in the case that this Parser fails.
def choice(self, choices: List["Parser[S, O]"]) -> "Parser[S, O]": """Add a list of alternative Parsers in the case that this Parser fails.""" self.choices.extend(choices) return self
(self, choices: List[pycoparsec.parser.Parser[~S, ~O]]) -> pycoparsec.parser.Parser[~S, ~O]
35,288
pycoparsec.parser
exactly
Match one element of the input stream exactly, then exit.
def exactly(self, token: S, factory: Callable[[S], O]) -> "Parser[S, O]": """Match one element of the input stream exactly, then exit.""" self.matcher = lambda tok, rest: factory(tok) if tok == token else None return self
(self, token: ~S, factory: Callable[[~S], ~O]) -> pycoparsec.parser.Parser[~S, ~O]
35,289
pycoparsec.parser
run
Run this parser.
def run(self, iter: Iterator[S]) -> O: """Run this parser.""" ourTee, *tees = tee(iter, len(self.choices) + 1) tok = next(ourTee, None) if (out := self.matcher(tok, ourTee)) is not None: return out for teenum, subparser in enumerate(self.choices): try: subout = subparser.run(tees[teenum]) if subout is not None: return subout except FailedParsing: print(f"Failed subparser {teenum}") raise FailedParsing
(self, iter: Iterator[~S]) -> ~O
35,290
pycoparsec.parser
then
Chain another parser onto this one, linking their success states together. Successful parse chains call `__add__` on the output object to append them together. If the default behavior of `__add__` does not support the behavior you want, please make a new class which overrides `__add__` and inherits behavior from your desired output type.
def then(self, parser: "Parser[S, O]") -> "Parser[S, O]": """Chain another parser onto this one, linking their success states together. Successful parse chains call `__add__` on the output object to append them together. If the default behavior of `__add__` does not support the behavior you want, please make a new class which overrides `__add__` and inherits behavior from your desired output type. """ capturedMatcher = self.matcher def _matcher(tok, rest): if (out := capturedMatcher(tok, rest)) is None: return None try: subout = parser.run(rest) if subout is None: return None return out + subout except FailedParsing: return None self.matcher = _matcher return self
(self, parser: pycoparsec.parser.Parser[~S, ~O]) -> pycoparsec.parser.Parser[~S, ~O]
35,359
authcaptureproxy.auth_capture_proxy
AuthCaptureProxy
Class to handle proxy login connections. This class relies on tests to be provided to indicate the proxy has completed. At proxy completion all data can be found in self.session, self.data, and self.query.
class AuthCaptureProxy: """Class to handle proxy login connections. This class relies on tests to be provided to indicate the proxy has completed. At proxy completion all data can be found in self.session, self.data, and self.query. """ def __init__( self, proxy_url: URL, host_url: URL, session: Optional[httpx.AsyncClient] = None, session_factory: Optional[Callable[[], httpx.AsyncClient]] = None, preserve_headers: bool = False, ) -> None: """Initialize proxy object. Args: proxy_url (URL): url for proxy location. e.g., http://192.168.1.1/. If there is any path, the path is considered part of the base url. If no explicit port is specified, a random port will be generated. If https is passed in, ssl_context must be provided at start_proxy() or the url will be downgraded to http. host_url (URL): original url for login, e.g., http://amazon.com session (httpx.AsyncClient): httpx client to make queries. Optional session_factory (lambda: httpx.AsyncClient): factory to create the aforementioned httpx client if having one fixed session is insufficient. preserve_headers (bool): Whether to preserve headers from the backend. Useful in circumventing CSRF protection. Defaults to False. """ self._preserve_headers = preserve_headers self.session_factory: Callable[[], httpx.AsyncClient] = session_factory or ( lambda: httpx.AsyncClient() ) self.session: httpx.AsyncClient = session if session else self.session_factory() self._proxy_url: URL = proxy_url self._host_url: URL = host_url self._port: int = proxy_url.explicit_port if proxy_url.explicit_port else 0 # type: ignore self.runner: Optional[web.AppRunner] = None self.last_resp: Optional[httpx.Response] = None self.init_query: Dict[Text, Any] = {} self.query: Dict[Text, Any] = {} self.data: Dict[Text, Any] = {} # tests and modifiers should be initialized after port is actually assigned and not during init. 
# however, to ensure defaults go first, they should have a dummy key set self._tests: Dict[Text, Callable] = {} self._modifiers: Dict[Text, Union[Callable, Dict[Text, Callable]]] = { "text/html": { "prepend_relative_urls": lambda x: x, "change_host_to_proxy": lambda x: x, } } self._old_tests: Dict[Text, Callable] = {} self._old_modifiers: Dict[Text, Union[Callable, Dict[Text, Callable]]] = {} self._active = False self._all_handler_active = True self.headers: Dict[Text, Text] = {} self.redirect_filters: Dict[Text, List[Text]] = { "url": [] } # dictionary of lists of regex strings to filter against @property def active(self) -> bool: """Return whether proxy is started.""" return self._active @property def all_handler_active(self) -> bool: """Return whether all handler is active.""" return self._all_handler_active @all_handler_active.setter def all_handler_active(self, value: bool) -> None: """Set all handler to value.""" self._all_handler_active = value @property def port(self) -> int: """Return port setting.""" return self._port @property def tests(self) -> Dict[Text, Callable]: """Return tests setting. :setter: value (Dict[Text, Any]): A dictionary of tests. The key should be the name of the test and the value should be a function or coroutine that takes a httpx.Response, a dictionary of post variables, and a dictioary of query variables and returns a URL or string. See :mod:`authcaptureproxy.examples.testers` for examples. """ return self._tests @tests.setter def tests(self, value: Dict[Text, Callable]) -> None: """Set tests. Args: value (Dict[Text, Any]): A dictionary of tests. """ self.refresh_tests() # refresh in case of pending change self._old_tests = self._tests.copy() self._tests = value @property def modifiers(self) -> Dict[Text, Union[Callable, Dict[Text, Callable]]]: """Return modifiers setting. :setter: value (Dict[Text, Dict[Text, Callable]): A nested dictionary of modifiers. 
The key shoud be a MIME type and the value should be a dictionary of modifiers for that MIME type where the key should be the name of the modifier and the value should be a function or couroutine that takes a string and returns a modified string. If parameters are necessary, functools.partial should be used. See :mod:`authcaptureproxy.examples.modifiers` for examples. """ return self._modifiers @modifiers.setter def modifiers(self, value: Dict[Text, Union[Callable, Dict[Text, Callable]]]) -> None: """Set tests. Args: value (Dict[Text, Any]): A dictionary of tests. """ self.refresh_modifiers() # refresh in case of pending change self._old_modifiers = self._modifiers self._modifiers = value def access_url(self) -> URL: """Return access url for proxy with port.""" return self._proxy_url.with_port(self.port) if self.port != 0 else self._proxy_url async def change_host_url(self, new_url: URL) -> None: """Change the host url of the proxy. This will also reset all stored data. Args: new_url (URL): original url for login, e.g., http://amazon.com """ if not isinstance(new_url, URL): raise ValueError("URL required") self._host_url = new_url await self.reset_data() async def reset_data(self) -> None: """Reset all stored data. A proxy may need to service multiple login requests if the route is not torn down. This function will reset all data between logins. """ if self.session: await self.session.aclose() self.session = self.session_factory() self.last_resp = None self.init_query = {} self.query = {} self.data = {} self._active = False self._all_handler_active = True _LOGGER.debug("Proxy data reset.") def refresh_tests(self) -> None: """Refresh tests. Because tests may use partials, they will freeze their parameters which is a problem with self.access() if the port hasn't been assigned. 
""" if self._tests != self._old_tests: self.tests.update({}) self.old_tests = self.tests.copy() _LOGGER.debug("Refreshed %s tests: %s", len(self.tests), list(self.tests.keys())) def refresh_modifiers(self, site: Optional[URL] = None) -> None: """Refresh modifiers. Because modifiers may use partials, they will freeze their parameters which is a problem with self.access() if the port hasn't been assigned. Args: site (Optional[URL], optional): The current site. Defaults to None. """ DEFAULT_MODIFIERS = { # noqa: N806 "prepend_relative_urls": partial(prepend_relative_urls, self.access_url()), "change_host_to_proxy": partial( replace_matching_urls, self._host_url.with_query({}).with_path("/"), self.access_url(), ), } if self._modifiers != self._old_modifiers: if self.modifiers.get("text/html") is None: self.modifiers["text/html"] = DEFAULT_MODIFIERS # type: ignore elif self.modifiers.get("text/html") and isinstance(self.modifiers["text/html"], dict): self.modifiers["text/html"].update(DEFAULT_MODIFIERS) if site and isinstance(self.modifiers["text/html"], dict): self.modifiers["text/html"].update( { "change_empty_to_proxy": partial( replace_empty_action_urls, swap_url( old_url=self._host_url.with_query({}), new_url=self.access_url().with_query({}), url=site, ), ), } ) self._old_modifiers = self.modifiers.copy() refreshed_modifers = get_nested_dict_keys(self.modifiers) _LOGGER.debug("Refreshed %s modifiers: %s", len(refreshed_modifers), refreshed_modifers) async def _build_response( self, response: Optional[httpx.Response] = None, *args, **kwargs ) -> web.Response: """ Build a response. 
""" if "headers" not in kwargs and response is not None: kwargs["headers"] = response.headers.copy() if self._preserve_headers else CIMultiDict() if hdrs.CONTENT_TYPE in kwargs["headers"] and "content_type" in kwargs: del kwargs["headers"][hdrs.CONTENT_TYPE] if hdrs.CONTENT_LENGTH in kwargs["headers"]: del kwargs["headers"][hdrs.CONTENT_LENGTH] if hdrs.CONTENT_ENCODING in kwargs["headers"]: del kwargs["headers"][hdrs.CONTENT_ENCODING] if hdrs.CONTENT_TRANSFER_ENCODING in kwargs["headers"]: del kwargs["headers"][hdrs.CONTENT_TRANSFER_ENCODING] if hdrs.TRANSFER_ENCODING in kwargs["headers"]: del kwargs["headers"][hdrs.TRANSFER_ENCODING] if "x-connection-hash" in kwargs["headers"]: del kwargs["headers"]["x-connection-hash"] while hdrs.SET_COOKIE in kwargs["headers"]: del kwargs["headers"][hdrs.SET_COOKIE] # cache control if hdrs.CACHE_CONTROL in kwargs["headers"]: del kwargs["headers"][hdrs.CACHE_CONTROL] kwargs["headers"][hdrs.CACHE_CONTROL] = "no-cache, no-store, must-revalidate" return web.Response(*args, **kwargs) async def all_handler(self, request: web.Request, **kwargs) -> web.Response: """Handle all requests. This handler will exit on succesful test found in self.tests or if a /stop url is seen. This handler can be used with any aiohttp webserver and disabled after registered using self.all_handler_active. Args request (web.Request): The request to process **kwargs: Additional keyword arguments access_url (URL): The access url for the proxy. Defaults to self.access_url() host_url (URL): The host url for the proxy. 
Defaults to self._host_url Returns web.Response: The webresponse to the browser Raises web.HTTPFound: Redirect URL upon success web.HTTPNotFound: Return 404 when all_handler is disabled """ if "access_url" in kwargs: access_url = kwargs.pop("access_url") else: access_url = self.access_url() if "host_url" in kwargs: host_url = kwargs.pop("host_url") else: host_url = self._host_url async def _process_multipart(reader: MultipartReader, writer: MultipartWriter) -> None: """Process multipart. Args: reader (MultipartReader): Response multipart to process. writer (MultipartWriter): Multipart to write out. """ while True: part = await reader.next() # noqa: B305 # https://github.com/PyCQA/flake8-bugbear/issues/59 if part is None: break if isinstance(part, MultipartReader): await _process_multipart(part, writer) elif part.headers.get("hdrs.CONTENT_TYPE"): if part.headers[hdrs.CONTENT_TYPE] == "application/json": part_data: Optional[ Union[Text, Dict[Text, Any], List[Tuple[Text, Text]], bytes] ] = await part.json() writer.append_json(part_data) elif part.headers[hdrs.CONTENT_TYPE].startswith("text"): part_data = await part.text() writer.append(part_data) elif part.headers[hdrs.CONTENT_TYPE] == "application/www-urlform-encode": part_data = await part.form() writer.append_form(part_data) else: part_data = await part.read() writer.append(part_data) else: part_data = await part.read() if part.name: self.data.update({part.name: part_data}) elif part.filename: part_data = await part.read() self.data.update({part.filename: part_data}) writer.append(part_data) if not self.all_handler_active: _LOGGER.debug("%s all_handler is disabled; returning 404.", self) raise web.HTTPNotFound() # if not self.session: # self.session = httpx.AsyncClient() method = request.method.lower() _LOGGER.debug("Received %s: %s for %s", method, str(request.url), host_url) resp: Optional[httpx.Response] = None old_url: URL = ( access_url.with_host(request.url.host) if request.url.host and request.url.host != 
access_url.host else access_url ) if request.scheme == "http" and access_url.scheme == "https": # detect reverse proxy downgrade _LOGGER.debug("Detected http while should be https; switching to https") site: str = str( swap_url( ignore_query=True, old_url=old_url.with_scheme("https"), new_url=host_url.with_path("/"), url=URL(str(request.url)).with_scheme("https"), ), ) else: site = str( swap_url( ignore_query=True, old_url=old_url, new_url=host_url.with_path("/"), url=URL(str(request.url)), ), ) self.query.update(request.query) data: Optional[Dict] = None mpwriter = None if request.content_type == "multipart/form-data": mpwriter = MultipartWriter() await _process_multipart(await request.multipart(), mpwriter) else: data = convert_multidict_to_dict(await request.post()) json_data = None if request.has_body: json_data = await request.json() if data: self.data.update(data) _LOGGER.debug("Storing data %s", data) elif json_data: self.data.update(json_data) _LOGGER.debug("Storing json %s", json_data) if URL(str(request.url)).path == re.sub( r"/+", "/", self._proxy_url.with_path(f"{self._proxy_url.path}/stop").path ): self.all_handler_active = False if self.active: asyncio.create_task(self.stop_proxy(3)) return await self._build_response(text="Proxy stopped.") elif ( URL(str(request.url)).path == re.sub(r"/+", "/", self._proxy_url.with_path(f"{self._proxy_url.path}/resume").path) and self.last_resp and isinstance(self.last_resp, httpx.Response) ): self.init_query = self.query.copy() _LOGGER.debug("Resuming request: %s", self.last_resp) resp = self.last_resp else: if URL(str(request.url)).path in [ self._proxy_url.path, re.sub( r"/+", "/", self._proxy_url.with_path(f"{self._proxy_url.path}/resume").path ), ]: # either base path or resume without anything to resume site = str(URL(host_url)) if method == "get": self.init_query = self.query.copy() _LOGGER.debug( "Starting auth capture proxy for %s", host_url, ) headers = await self.modify_headers(URL(site), request) 
skip_auto_headers: List[str] = headers.get(SKIP_AUTO_HEADERS, []) if skip_auto_headers: _LOGGER.debug("Discovered skip_auto_headers %s", skip_auto_headers) headers.pop(SKIP_AUTO_HEADERS) _LOGGER.debug( "Attempting %s to %s\nheaders: %s \ncookies: %s", method, site, headers, self.session.cookies.jar, ) try: if mpwriter: resp = await getattr(self.session, method)( site, data=mpwriter, headers=headers, follow_redirects=True ) elif data: resp = await getattr(self.session, method)( site, data=data, headers=headers, follow_redirects=True ) elif json_data: for item in ["Host", "Origin", "User-Agent", "dnt", "Accept-Encoding"]: # remove proxy headers if headers.get(item): headers.pop(item) resp = await getattr(self.session, method)( site, json=json_data, headers=headers, follow_redirects=True ) else: resp = await getattr(self.session, method)( site, headers=headers, follow_redirects=True ) except ClientConnectionError as ex: return await self._build_response( text=f"Error connecting to {site}; please retry: {ex}" ) except TooManyRedirects as ex: return await self._build_response( text=f"Error connecting to {site}; too may redirects: {ex}" ) if resp is None: return await self._build_response(text=f"Error connecting to {site}; please retry") self.last_resp = resp print_resp(resp) self.check_redirects() self.refresh_tests() if self.tests: for test_name, test in self.tests.items(): result = None result = await run_func(test, test_name, resp, self.data, self.query) if result: _LOGGER.debug("Test %s triggered", test_name) if isinstance(result, URL): _LOGGER.debug( "Redirecting to callback: %s", result, ) raise web.HTTPFound(location=result) elif isinstance(result, str): _LOGGER.debug("Displaying page:\n%s", result) return await self._build_response( resp, text=result, content_type="text/html" ) else: _LOGGER.warning("Proxy has no tests; please set.") content_type = get_content_type(resp) self.refresh_modifiers(URL(str(resp.url))) if self.modifiers: modified: bool = False if 
content_type != "text/html" and content_type not in self.modifiers.keys(): text: Text = "" elif content_type != "text/html" and content_type in self.modifiers.keys(): text = resp.text else: text = resp.text if not isinstance(text, str): # process aiohttp text text = await resp.text() if text: for name, modifier in self.modifiers.items(): if isinstance(modifier, dict): if name != content_type: continue for sub_name, sub_modifier in modifier.items(): try: text = await run_func(sub_modifier, sub_name, text) modified = True except TypeError as ex: _LOGGER.warning("Modifier %s is not callable: %s", sub_name, ex) else: # default run against text/html only if content_type == "text/html": try: text = await run_func(modifier, name, text) modified = True except TypeError as ex: _LOGGER.warning("Modifier %s is not callable: %s", name, ex) # _LOGGER.debug("Returning modified text:\n%s", text) if modified: return await self._build_response( resp, text=text, content_type=content_type, ) # pass through non parsed content _LOGGER.debug( "Passing through %s as %s", URL(str(request.url)).name if URL(str(request.url)).name else URL(str(request.url)).path, content_type, ) return await self._build_response(resp, body=resp.content, content_type=content_type) async def start_proxy( self, host: Optional[Text] = None, ssl_context: Optional[SSLContext] = None ) -> None: """Start proxy. Args: host (Optional[Text], optional): The host interface to bind to. Defaults to None which is "0.0.0.0" all interfaces. ssl_context (Optional[SSLContext], optional): SSL Context for the server. Defaults to None. 
""" app = web.Application() app.add_routes( [ web.route("*", "/{tail:.*}", self.all_handler), ] ) self.runner = web.AppRunner(app) await self.runner.setup() if not self.port: self._port = get_open_port() if self._proxy_url.scheme == "https" and ssl_context is None: _LOGGER.debug("Proxy url is https but no SSL Context set, downgrading to http") self._proxy_url = self._proxy_url.with_scheme("http") site = web.TCPSite(runner=self.runner, host=host, port=self.port, ssl_context=ssl_context) await site.start() self._active = True _LOGGER.debug("Started proxy at %s", self.access_url()) async def stop_proxy(self, delay: int = 0) -> None: """Stop proxy server. Args: delay (int, optional): How many seconds to delay. Defaults to 0. """ if not self.active: _LOGGER.debug("Proxy is not started; ignoring stop command") return _LOGGER.debug("Stopping proxy at %s after %s seconds", self.access_url(), delay) await asyncio.sleep(delay) _LOGGER.debug("Closing site runner") if self.runner: await self.runner.cleanup() await self.runner.shutdown() _LOGGER.debug("Site runner closed") # close session if self.session: _LOGGER.debug("Closing session") await self.session.aclose() _LOGGER.debug("Session closed") self._active = False _LOGGER.debug("Proxy stopped") def _swap_proxy_and_host(self, text: Text, domain_only: bool = False) -> Text: """Replace host with proxy address or proxy with host address. Args text (Text): text to replace domain (bool): Whether only the domains should be swapped. 
Returns Text: Result of replacing """ host_string: Text = str(self._host_url.with_path("/")) proxy_string: Text = str( self.access_url() if not domain_only else self.access_url().with_path("/") ) if str(self.access_url().with_path("/")).replace("https", "http") in text: _LOGGER.debug( "Replacing %s with %s", str(self.access_url().with_path("/")).replace("https", "http"), str(self.access_url().with_path("/")), ) text = text.replace( str(self.access_url().with_path("/")).replace("https", "http"), str(self.access_url().with_path("/")), ) if proxy_string in text: if host_string[-1] == "/" and ( not proxy_string or proxy_string == "/" or proxy_string[-1] != "/" ): proxy_string = f"{proxy_string}/" _LOGGER.debug("Replacing %s with %s in %s", proxy_string, host_string, text) return text.replace(proxy_string, host_string) elif host_string in text: if host_string[-1] == "/" and ( not proxy_string or proxy_string == "/" or proxy_string[-1] != "/" ): proxy_string = f"{proxy_string}/" _LOGGER.debug("Replacing %s with %s", host_string, proxy_string) return text.replace(host_string, proxy_string) else: _LOGGER.debug("Unable to find %s and %s in %s", host_string, proxy_string, text) return text async def modify_headers(self, site: URL, request: web.Request) -> dict: """Modify headers. Return modified headers based on site and request. To disable auto header generation, pass in to the header a key const.SKIP_AUTO_HEADERS with a list of keys to not generate. For example, to prevent User-Agent generation: {SKIP_AUTO_HEADERS : ["User-Agent"]} Args: site (URL): URL of the next host request. request (web.Request): Proxy directed request. This will need to be changed for the actual host request. 
Returns: dict: Headers after modifications """ result: Dict[str, Any] = {} result.update(request.headers) # _LOGGER.debug("Original headers %s", headers) if result.get("Host"): result.pop("Host") if result.get("Origin"): result["Origin"] = f"{site.with_path('')}" # remove any cookies in header received from browser. If not removed, httpx will not send session cookies if result.get("Cookie"): result.pop("Cookie") if result.get("Referer") and ( URL(result.get("Referer", "")).query == self.init_query or URL(result.get("Referer", "")).path == "/config/integrations" # home-assistant referer ): # Change referer for starting request; this may have query items we shouldn't pass result["Referer"] = str(self._host_url) elif result.get("Referer"): result["Referer"] = self._swap_proxy_and_host( result.get("Referer", ""), domain_only=True ) for item in [ "Content-Length", "X-Forwarded-For", "X-Forwarded-Host", "X-Forwarded-Port", "X-Forwarded-Proto", "X-Forwarded-Scheme", "X-Forwarded-Server", "X-Real-IP", ]: # remove proxy headers if result.get(item): result.pop(item) result.update(self.headers if self.headers else {}) _LOGGER.debug("Final headers %s", result) return result def check_redirects(self) -> None: """Change host if redirect detected and regex does not match self.redirect_filters. Self.redirect_filters is a dict with key as attr in resp and value as list of regex expressions to filter against. 
""" if not self.last_resp: return resp: httpx.Response = self.last_resp if resp.history: for item in resp.history: if ( item.status_code in [301, 302, 303, 304, 305, 306, 307, 308] and item.url and resp.url and resp.url.host != self._host_url.host ): filtered = False for attr, regex_list in self.redirect_filters.items(): if getattr(resp, attr) and list( filter( lambda regex_string: re.search( regex_string, str(getattr(resp, attr)) ), regex_list, ) ): _LOGGER.debug( "Check_redirects: Filtered out on %s in %s for resp attribute %s", list( filter( lambda regex_string: re.search( regex_string, str(getattr(resp, attr)) ), regex_list, ) ), str(getattr(resp, attr)), attr, ) filtered = True if filtered: return _LOGGER.debug( "Detected %s redirect from %s to %s; changing proxy host", item.status_code, item.url.host, resp.url.host, ) self._host_url = self._host_url.with_host(resp.url.host)
(proxy_url: yarl.URL, host_url: yarl.URL, session: Optional[httpx.AsyncClient] = None, session_factory: Optional[Callable[[], httpx.AsyncClient]] = None, preserve_headers: bool = False) -> None
35,360
authcaptureproxy.auth_capture_proxy
__init__
Initialize proxy object. Args: proxy_url (URL): url for proxy location. e.g., http://192.168.1.1/. If there is any path, the path is considered part of the base url. If no explicit port is specified, a random port will be generated. If https is passed in, ssl_context must be provided at start_proxy() or the url will be downgraded to http. host_url (URL): original url for login, e.g., http://amazon.com session (httpx.AsyncClient): httpx client to make queries. Optional session_factory (lambda: httpx.AsyncClient): factory to create the aforementioned httpx client if having one fixed session is insufficient. preserve_headers (bool): Whether to preserve headers from the backend. Useful in circumventing CSRF protection. Defaults to False.
def __init__( self, proxy_url: URL, host_url: URL, session: Optional[httpx.AsyncClient] = None, session_factory: Optional[Callable[[], httpx.AsyncClient]] = None, preserve_headers: bool = False, ) -> None: """Initialize proxy object. Args: proxy_url (URL): url for proxy location. e.g., http://192.168.1.1/. If there is any path, the path is considered part of the base url. If no explicit port is specified, a random port will be generated. If https is passed in, ssl_context must be provided at start_proxy() or the url will be downgraded to http. host_url (URL): original url for login, e.g., http://amazon.com session (httpx.AsyncClient): httpx client to make queries. Optional session_factory (lambda: httpx.AsyncClient): factory to create the aforementioned httpx client if having one fixed session is insufficient. preserve_headers (bool): Whether to preserve headers from the backend. Useful in circumventing CSRF protection. Defaults to False. """ self._preserve_headers = preserve_headers self.session_factory: Callable[[], httpx.AsyncClient] = session_factory or ( lambda: httpx.AsyncClient() ) self.session: httpx.AsyncClient = session if session else self.session_factory() self._proxy_url: URL = proxy_url self._host_url: URL = host_url self._port: int = proxy_url.explicit_port if proxy_url.explicit_port else 0 # type: ignore self.runner: Optional[web.AppRunner] = None self.last_resp: Optional[httpx.Response] = None self.init_query: Dict[Text, Any] = {} self.query: Dict[Text, Any] = {} self.data: Dict[Text, Any] = {} # tests and modifiers should be initialized after port is actually assigned and not during init. 
# however, to ensure defaults go first, they should have a dummy key set self._tests: Dict[Text, Callable] = {} self._modifiers: Dict[Text, Union[Callable, Dict[Text, Callable]]] = { "text/html": { "prepend_relative_urls": lambda x: x, "change_host_to_proxy": lambda x: x, } } self._old_tests: Dict[Text, Callable] = {} self._old_modifiers: Dict[Text, Union[Callable, Dict[Text, Callable]]] = {} self._active = False self._all_handler_active = True self.headers: Dict[Text, Text] = {} self.redirect_filters: Dict[Text, List[Text]] = { "url": [] } # dictionary of lists of regex strings to filter against
(self, proxy_url: yarl.URL, host_url: yarl.URL, session: Optional[httpx.AsyncClient] = None, session_factory: Optional[Callable[[], httpx.AsyncClient]] = None, preserve_headers: bool = False) -> NoneType
35,361
authcaptureproxy.auth_capture_proxy
_build_response
Build a response.
def refresh_modifiers(self, site: Optional[URL] = None) -> None: """Refresh modifiers. Because modifiers may use partials, they will freeze their parameters which is a problem with self.access() if the port hasn't been assigned. Args: site (Optional[URL], optional): The current site. Defaults to None. """ DEFAULT_MODIFIERS = { # noqa: N806 "prepend_relative_urls": partial(prepend_relative_urls, self.access_url()), "change_host_to_proxy": partial( replace_matching_urls, self._host_url.with_query({}).with_path("/"), self.access_url(), ), } if self._modifiers != self._old_modifiers: if self.modifiers.get("text/html") is None: self.modifiers["text/html"] = DEFAULT_MODIFIERS # type: ignore elif self.modifiers.get("text/html") and isinstance(self.modifiers["text/html"], dict): self.modifiers["text/html"].update(DEFAULT_MODIFIERS) if site and isinstance(self.modifiers["text/html"], dict): self.modifiers["text/html"].update( { "change_empty_to_proxy": partial( replace_empty_action_urls, swap_url( old_url=self._host_url.with_query({}), new_url=self.access_url().with_query({}), url=site, ), ), } ) self._old_modifiers = self.modifiers.copy() refreshed_modifers = get_nested_dict_keys(self.modifiers) _LOGGER.debug("Refreshed %s modifiers: %s", len(refreshed_modifers), refreshed_modifers)
(self, response: Optional[httpx.Response] = None, *args, **kwargs) -> aiohttp.web_response.Response
35,362
authcaptureproxy.auth_capture_proxy
_swap_proxy_and_host
Replace host with proxy address or proxy with host address. Args text (Text): text to replace domain (bool): Whether only the domains should be swapped. Returns Text: Result of replacing
def _swap_proxy_and_host(self, text: Text, domain_only: bool = False) -> Text:
    """Replace host with proxy address or proxy with host address.

    Whichever of the two base URLs is found in ``text`` is swapped for the
    other; if neither is present, ``text`` is returned unchanged.

    Args:
        text (Text): text to replace
        domain_only (bool): Whether only the domains should be swapped.

    Returns:
        Text: Result of replacing
    """
    host_string: Text = str(self._host_url.with_path("/"))
    proxy_string: Text = str(
        self.access_url() if not domain_only else self.access_url().with_path("/")
    )
    # Normalize any http:// spelling of the (https) access URL back to the
    # canonical scheme so the swap below matches it.
    # NOTE(review): .replace("https", "http") operates on the whole URL
    # string, so it would also rewrite "https" appearing outside the scheme —
    # presumably the access URL never contains that substring elsewhere;
    # confirm.
    if str(self.access_url().with_path("/")).replace("https", "http") in text:
        _LOGGER.debug(
            "Replacing %s with %s",
            str(self.access_url().with_path("/")).replace("https", "http"),
            str(self.access_url().with_path("/")),
        )
        text = text.replace(
            str(self.access_url().with_path("/")).replace("https", "http"),
            str(self.access_url().with_path("/")),
        )
    if proxy_string in text:
        # host_string always ends in "/" (built with with_path("/")); append a
        # trailing slash to proxy_string so the replacement stays aligned.
        if host_string[-1] == "/" and (
            not proxy_string or proxy_string == "/" or proxy_string[-1] != "/"
        ):
            proxy_string = f"{proxy_string}/"
        _LOGGER.debug("Replacing %s with %s in %s", proxy_string, host_string, text)
        return text.replace(proxy_string, host_string)
    elif host_string in text:
        # Same trailing-slash alignment for the reverse direction.
        if host_string[-1] == "/" and (
            not proxy_string or proxy_string == "/" or proxy_string[-1] != "/"
        ):
            proxy_string = f"{proxy_string}/"
        _LOGGER.debug("Replacing %s with %s", host_string, proxy_string)
        return text.replace(host_string, proxy_string)
    else:
        _LOGGER.debug("Unable to find %s and %s in %s", host_string, proxy_string, text)
        return text
(self, text: str, domain_only: bool = False) -> str
35,363
authcaptureproxy.auth_capture_proxy
access_url
Return access url for proxy with port.
def access_url(self) -> URL:
    """Return the URL clients use to reach the proxy, including its port.

    When no port has been assigned yet (``self.port == 0``), the bare proxy
    URL is returned as-is.
    """
    if self.port == 0:
        return self._proxy_url
    return self._proxy_url.with_port(self.port)
(self) -> yarl.URL
35,364
authcaptureproxy.auth_capture_proxy
all_handler
Handle all requests. This handler will exit on succesful test found in self.tests or if a /stop url is seen. This handler can be used with any aiohttp webserver and disabled after registered using self.all_handler_active. Args request (web.Request): The request to process **kwargs: Additional keyword arguments access_url (URL): The access url for the proxy. Defaults to self.access_url() host_url (URL): The host url for the proxy. Defaults to self._host_url Returns web.Response: The webresponse to the browser Raises web.HTTPFound: Redirect URL upon success web.HTTPNotFound: Return 404 when all_handler is disabled
def refresh_modifiers(self, site: Optional[URL] = None) -> None: """Refresh modifiers. Because modifiers may use partials, they will freeze their parameters which is a problem with self.access() if the port hasn't been assigned. Args: site (Optional[URL], optional): The current site. Defaults to None. """ DEFAULT_MODIFIERS = { # noqa: N806 "prepend_relative_urls": partial(prepend_relative_urls, self.access_url()), "change_host_to_proxy": partial( replace_matching_urls, self._host_url.with_query({}).with_path("/"), self.access_url(), ), } if self._modifiers != self._old_modifiers: if self.modifiers.get("text/html") is None: self.modifiers["text/html"] = DEFAULT_MODIFIERS # type: ignore elif self.modifiers.get("text/html") and isinstance(self.modifiers["text/html"], dict): self.modifiers["text/html"].update(DEFAULT_MODIFIERS) if site and isinstance(self.modifiers["text/html"], dict): self.modifiers["text/html"].update( { "change_empty_to_proxy": partial( replace_empty_action_urls, swap_url( old_url=self._host_url.with_query({}), new_url=self.access_url().with_query({}), url=site, ), ), } ) self._old_modifiers = self.modifiers.copy() refreshed_modifers = get_nested_dict_keys(self.modifiers) _LOGGER.debug("Refreshed %s modifiers: %s", len(refreshed_modifers), refreshed_modifers)
(self, request: aiohttp.web_request.Request, **kwargs) -> aiohttp.web_response.Response
35,365
authcaptureproxy.auth_capture_proxy
change_host_url
Change the host url of the proxy. This will also reset all stored data. Args: new_url (URL): original url for login, e.g., http://amazon.com
def access_url(self) -> URL: """Return access url for proxy with port.""" return self._proxy_url.with_port(self.port) if self.port != 0 else self._proxy_url
(self, new_url: yarl.URL) -> NoneType
35,366
authcaptureproxy.auth_capture_proxy
check_redirects
Change host if redirect detected and regex does not match self.redirect_filters. Self.redirect_filters is a dict with key as attr in resp and value as list of regex expressions to filter against.
def check_redirects(self) -> None:
    """Change host if redirect detected and regex does not match self.redirect_filters.

    Self.redirect_filters is a dict with key as attr in resp and value as list of
    regex expressions to filter against.
    """
    # No proxied response captured yet; nothing to inspect.
    if not self.last_resp:
        return
    resp: httpx.Response = self.last_resp
    if resp.history:
        # resp.history holds the chain of intermediate (redirect) responses.
        for item in resp.history:
            if (
                # NOTE(review): the list includes 304/305/306, which are not
                # redirect statuses — confirm they are intentionally matched.
                item.status_code in [301, 302, 303, 304, 305, 306, 307, 308]
                and item.url
                and resp.url
                and resp.url.host != self._host_url.host
            ):
                filtered = False
                for attr, regex_list in self.redirect_filters.items():
                    # Any regex match on the configured resp attribute
                    # suppresses the host change.
                    if getattr(resp, attr) and list(
                        filter(
                            lambda regex_string: re.search(
                                regex_string, str(getattr(resp, attr))
                            ),
                            regex_list,
                        )
                    ):
                        # The matching filter list is recomputed here purely
                        # so it can be included in the debug message.
                        _LOGGER.debug(
                            "Check_redirects: Filtered out on %s in %s for resp attribute %s",
                            list(
                                filter(
                                    lambda regex_string: re.search(
                                        regex_string, str(getattr(resp, attr))
                                    ),
                                    regex_list,
                                )
                            ),
                            str(getattr(resp, attr)),
                            attr,
                        )
                        filtered = True
                if filtered:
                    return
                _LOGGER.debug(
                    "Detected %s redirect from %s to %s; changing proxy host",
                    item.status_code,
                    item.url.host,
                    resp.url.host,
                )
                # Follow the final destination host for subsequent requests.
                self._host_url = self._host_url.with_host(resp.url.host)
(self) -> NoneType
35,367
authcaptureproxy.auth_capture_proxy
modify_headers
Modify headers. Return modified headers based on site and request. To disable auto header generation, pass in to the header a key const.SKIP_AUTO_HEADERS with a list of keys to not generate. For example, to prevent User-Agent generation: {SKIP_AUTO_HEADERS : ["User-Agent"]} Args: site (URL): URL of the next host request. request (web.Request): Proxy directed request. This will need to be changed for the actual host request. Returns: dict: Headers after modifications
def _swap_proxy_and_host(self, text: Text, domain_only: bool = False) -> Text:
    """Swap the host address for the proxy address (or vice versa) inside text.

    Args:
        text (Text): text to replace
        domain_only (bool): Whether only the domains should be swapped.

    Returns:
        Text: Result of replacing
    """
    host_string: Text = str(self._host_url.with_path("/"))
    proxy_string: Text = str(
        self.access_url() if not domain_only else self.access_url().with_path("/")
    )
    # Normalize any http-scheme variant of the proxy root back to the real
    # scheme before attempting the swap.
    proxy_root = str(self.access_url().with_path("/"))
    http_proxy_root = proxy_root.replace("https", "http")
    if http_proxy_root in text:
        _LOGGER.debug(
            "Replacing %s with %s",
            http_proxy_root,
            proxy_root,
        )
        text = text.replace(http_proxy_root, proxy_root)
    if proxy_string in text:
        # Proxy address found: replace it with the host address,
        # matching trailing-slash style first.
        if host_string[-1] == "/" and (
            not proxy_string or proxy_string == "/" or proxy_string[-1] != "/"
        ):
            proxy_string = f"{proxy_string}/"
        _LOGGER.debug("Replacing %s with %s in %s", proxy_string, host_string, text)
        return text.replace(proxy_string, host_string)
    if host_string in text:
        # Host address found: replace it with the proxy address.
        if host_string[-1] == "/" and (
            not proxy_string or proxy_string == "/" or proxy_string[-1] != "/"
        ):
            proxy_string = f"{proxy_string}/"
        _LOGGER.debug("Replacing %s with %s", host_string, proxy_string)
        return text.replace(host_string, proxy_string)
    _LOGGER.debug("Unable to find %s and %s in %s", host_string, proxy_string, text)
    return text
(self, site: yarl.URL, request: aiohttp.web_request.Request) -> dict
35,368
authcaptureproxy.auth_capture_proxy
refresh_modifiers
Refresh modifiers. Because modifiers may use partials, they will freeze their parameters which is a problem with self.access() if the port hasn't been assigned. Args: site (Optional[URL], optional): The current site. Defaults to None.
def refresh_modifiers(self, site: Optional[URL] = None) -> None:
    """Refresh modifiers.

    Because modifiers may use partials, they will freeze their parameters
    which is a problem with self.access() if the port hasn't been assigned.

    Args:
        site (Optional[URL], optional): The current site. Defaults to None.
    """
    # Rebuilt on every call so the partials capture the *current* access_url()
    # (the port may have been assigned since the last refresh).
    DEFAULT_MODIFIERS = {  # noqa: N806
        "prepend_relative_urls": partial(prepend_relative_urls, self.access_url()),
        "change_host_to_proxy": partial(
            replace_matching_urls,
            self._host_url.with_query({}).with_path("/"),
            self.access_url(),
        ),
    }
    # Only resync the html modifiers when the configured modifiers changed.
    if self._modifiers != self._old_modifiers:
        if self.modifiers.get("text/html") is None:
            self.modifiers["text/html"] = DEFAULT_MODIFIERS  # type: ignore
        elif self.modifiers.get("text/html") and isinstance(self.modifiers["text/html"], dict):
            # Defaults overwrite caller entries that share the same keys.
            self.modifiers["text/html"].update(DEFAULT_MODIFIERS)
    # NOTE(review): direct index below assumes "text/html" exists by now; if the
    # modifiers were unchanged and the key absent this raises KeyError — confirm
    # upstream guarantees the key is always present.
    if site and isinstance(self.modifiers["text/html"], dict):
        # Site-dependent modifier is rebuilt per call because the partial bakes
        # in the current site url.
        self.modifiers["text/html"].update(
            {
                "change_empty_to_proxy": partial(
                    replace_empty_action_urls,
                    swap_url(
                        old_url=self._host_url.with_query({}),
                        new_url=self.access_url().with_query({}),
                        url=site,
                    ),
                ),
            }
        )
    self._old_modifiers = self.modifiers.copy()
    refreshed_modifers = get_nested_dict_keys(self.modifiers)
    _LOGGER.debug("Refreshed %s modifiers: %s", len(refreshed_modifers), refreshed_modifers)
(self, site: Optional[yarl.URL] = None) -> NoneType
35,369
authcaptureproxy.auth_capture_proxy
refresh_tests
Refresh tests. Because tests may use partials, they will freeze their parameters which is a problem with self.access() if the port hasn't been assigned.
def refresh_tests(self) -> None:
    """Refresh tests.

    Because tests may use partials, they will freeze their parameters
    which is a problem with self.access() if the port hasn't been assigned.
    """
    if self._tests != self._old_tests:
        # NOTE(review): update({}) is a no-op — presumably a placeholder for
        # rebuilding default tests; confirm intent.
        self.tests.update({})
        # NOTE(review): assigns self.old_tests while the comparison above reads
        # self._old_tests — verify both names resolve to the same attribute
        # (e.g., via a property) or this refresh re-runs every call.
        self.old_tests = self.tests.copy()
    _LOGGER.debug("Refreshed %s tests: %s", len(self.tests), list(self.tests.keys()))
(self) -> NoneType
35,370
authcaptureproxy.auth_capture_proxy
reset_data
Reset all stored data. A proxy may need to service multiple login requests if the route is not torn down. This function will reset all data between logins.
def access_url(self) -> URL:
    """Return the externally reachable proxy url, including the port once assigned."""
    # Port 0 means "not yet bound"; expose the bare proxy url in that case.
    if self.port != 0:
        return self._proxy_url.with_port(self.port)
    return self._proxy_url
(self) -> NoneType
35,371
authcaptureproxy.auth_capture_proxy
start_proxy
Start proxy. Args: host (Optional[Text], optional): The host interface to bind to. Defaults to None which is "0.0.0.0" all interfaces. ssl_context (Optional[SSLContext], optional): SSL Context for the server. Defaults to None.
def refresh_modifiers(self, site: Optional[URL] = None) -> None:
    """Refresh modifiers.

    Because modifiers may use partials, they will freeze their parameters
    which is a problem with self.access() if the port hasn't been assigned.

    Args:
        site (Optional[URL], optional): The current site. Defaults to None.
    """
    # Rebuilt on every call so the partials capture the *current* access_url()
    # (the port may have been assigned since the last refresh).
    DEFAULT_MODIFIERS = {  # noqa: N806
        "prepend_relative_urls": partial(prepend_relative_urls, self.access_url()),
        "change_host_to_proxy": partial(
            replace_matching_urls,
            self._host_url.with_query({}).with_path("/"),
            self.access_url(),
        ),
    }
    # Only resync the html modifiers when the configured modifiers changed.
    if self._modifiers != self._old_modifiers:
        if self.modifiers.get("text/html") is None:
            self.modifiers["text/html"] = DEFAULT_MODIFIERS  # type: ignore
        elif self.modifiers.get("text/html") and isinstance(self.modifiers["text/html"], dict):
            # Defaults overwrite caller entries that share the same keys.
            self.modifiers["text/html"].update(DEFAULT_MODIFIERS)
    # NOTE(review): direct index below assumes "text/html" exists by now; if the
    # modifiers were unchanged and the key absent this raises KeyError — confirm
    # upstream guarantees the key is always present.
    if site and isinstance(self.modifiers["text/html"], dict):
        # Site-dependent modifier is rebuilt per call because the partial bakes
        # in the current site url.
        self.modifiers["text/html"].update(
            {
                "change_empty_to_proxy": partial(
                    replace_empty_action_urls,
                    swap_url(
                        old_url=self._host_url.with_query({}),
                        new_url=self.access_url().with_query({}),
                        url=site,
                    ),
                ),
            }
        )
    self._old_modifiers = self.modifiers.copy()
    refreshed_modifers = get_nested_dict_keys(self.modifiers)
    _LOGGER.debug("Refreshed %s modifiers: %s", len(refreshed_modifers), refreshed_modifers)
(self, host: Optional[str] = None, ssl_context: Optional[ssl.SSLContext] = None) -> NoneType
35,372
authcaptureproxy.auth_capture_proxy
stop_proxy
Stop proxy server. Args: delay (int, optional): How many seconds to delay. Defaults to 0.
def refresh_modifiers(self, site: Optional[URL] = None) -> None:
    """Refresh modifiers.

    Because modifiers may use partials, they will freeze their parameters
    which is a problem with self.access() if the port hasn't been assigned.

    Args:
        site (Optional[URL], optional): The current site. Defaults to None.
    """
    # Rebuilt on every call so the partials capture the *current* access_url()
    # (the port may have been assigned since the last refresh).
    DEFAULT_MODIFIERS = {  # noqa: N806
        "prepend_relative_urls": partial(prepend_relative_urls, self.access_url()),
        "change_host_to_proxy": partial(
            replace_matching_urls,
            self._host_url.with_query({}).with_path("/"),
            self.access_url(),
        ),
    }
    # Only resync the html modifiers when the configured modifiers changed.
    if self._modifiers != self._old_modifiers:
        if self.modifiers.get("text/html") is None:
            self.modifiers["text/html"] = DEFAULT_MODIFIERS  # type: ignore
        elif self.modifiers.get("text/html") and isinstance(self.modifiers["text/html"], dict):
            # Defaults overwrite caller entries that share the same keys.
            self.modifiers["text/html"].update(DEFAULT_MODIFIERS)
    # NOTE(review): direct index below assumes "text/html" exists by now; if the
    # modifiers were unchanged and the key absent this raises KeyError — confirm
    # upstream guarantees the key is always present.
    if site and isinstance(self.modifiers["text/html"], dict):
        # Site-dependent modifier is rebuilt per call because the partial bakes
        # in the current site url.
        self.modifiers["text/html"].update(
            {
                "change_empty_to_proxy": partial(
                    replace_empty_action_urls,
                    swap_url(
                        old_url=self._host_url.with_query({}),
                        new_url=self.access_url().with_query({}),
                        url=site,
                    ),
                ),
            }
        )
    self._old_modifiers = self.modifiers.copy()
    refreshed_modifers = get_nested_dict_keys(self.modifiers)
    _LOGGER.debug("Refreshed %s modifiers: %s", len(refreshed_modifers), refreshed_modifers)
(self, delay: int = 0) -> NoneType
35,446
authcaptureproxy.examples.modifiers
find_regex_urls
Find urls based on regex. Seen in Tesla login with MFA enabled. Args: modifier (Optional[Callable], optional): The function to call. It will take in the html_tag, tag, and attribute and modify the html_tag. Defaults to None. patterns (Dict[Text, Text], optional): A dictionary of regex patterns to search. Key is name and value is regex string. html (Text, optional): Text to replace. Defaults to "". Returns: Text: Text after applying the modifier to the urls found using the search.
def autofill(items: dict, html: Text) -> Text:
    """Autofill input tags in form.

    WARNING: This modifier does not obfuscate debug logs.

    Args:
        items (dict): Dictionary of values to fill. The key is the name or id of
            the form input to fill and the value is the value.
        html (Text): html to convert

    Returns:
        Text: html with values filled in
    """
    soup: BeautifulSoup = BeautifulSoup(html, "html.parser")
    if not soup or not html:
        _LOGGER.debug("Soup is empty")
        return ""
    if not items:
        _LOGGER.debug("No items specified; no modifications made")
        return html
    for field_key, fill_value in items.items():
        # Match either the name attribute or the id attribute.
        matches = soup.find_all(attrs={"name": field_key}) + soup.find_all(
            attrs={"id": field_key}
        )
        for tag in matches:
            # Only fill inputs that do not already carry a value.
            if not tag.get("value"):
                tag["value"] = fill_value
                _LOGGER.debug("Filled %s", str(tag))
    return str(soup)
(modifier: Optional[Callable] = None, patterns: Optional[Dict[str, str]] = None, html: str = '') -> str
35,449
authcaptureproxy.helper
prepend_url
Prepend the url. Args: base_url (URL): Base URL to prepend. The URL must end with a "/" so it's a folder or domain. url (URL): url to prepend encoded (bool): Whether to treat the url as already encoded. This may be needed if the url is JavaScript.
def prepend_url(base_url: URL, url: URL, encoded: bool = False) -> URL:
    """Prepend the url.

    Args:
        base_url (URL): Base URL to prepend. The URL must end with a "/" so it's
            a folder or domain.
        url (URL): url to prepend
        encoded (bool): Whether to treat the url as already encoded. This may be
            needed if the url is JavaScript.
    """
    # Accept plain strings for convenience.
    base_url = URL(base_url) if isinstance(base_url, str) else base_url
    url = URL(url) if isinstance(url, str) else url
    if base_url.name:
        # A trailing filename means base_url is not a folder; coerce it to one.
        _LOGGER.debug('Base URL is to file "%s", treating as path', base_url.name)
        base_url = base_url.with_path(f"{base_url.path}/")
    if url.is_absolute():
        # Absolute urls are already anchored; nothing to prepend.
        return url
    query = url.query
    path = url.path
    # Join paths, collapse duplicate slashes, then re-attach the query.
    joined_path = re.sub(r"/+", "/", f"{base_url.path}/{path}")
    return base_url.with_path(joined_path, encoded=encoded).with_query(query)
(base_url: yarl.URL, url: yarl.URL, encoded: bool = False) -> yarl.URL
35,450
authcaptureproxy.stackoverflow
return_timer_countdown_refresh_html
Return JavaScript timer countdown for html injection. This is to use for tester success. https://stackoverflow.com/questions/16532577/javascript-refresh-countdown-text/16532611#16532611 Args: seconds (int): Seconds to delay text (Text): HTML text to display before the timer text. hard_refresh (bool): Whether to force refresh of cache Returns: Text: HTML for injection
def return_timer_countdown_refresh_html(
    seconds: int, text: Text, hard_refresh: bool = True
) -> Text:
    """Return JavaScript timer countdown for html injection.

    This is to use for tester success.
    https://stackoverflow.com/questions/16532577/javascript-refresh-countdown-text/16532611#16532611

    Args:
        seconds (int): Seconds to delay
        text (Text): HTML text to display before the timer text.
        hard_refresh (bool): Whether to force refresh of cache

    Returns:
        Text: HTML for injection
    """
    # Fix: user-facing countdown text previously misspelled "Automaticaly".
    return f"""<html><head></head><body>{text}<div id="countdown"></div></body>
<script defer="defer">(function countdown(remaining) {{
if(remaining < 0)
    location.reload({"true" if hard_refresh else "false"});
else{{
    document.getElementById('countdown').innerHTML = 'Automatically reloading page in ' + Math.max(Math.floor(remaining), 0) + ' seconds.';
    setTimeout(function(){{
        countdown(remaining - 1);
    }}, 1000);
}}}})({seconds});</script></html>"""
(seconds: int, text: str, hard_refresh: bool = True) -> str
35,452
authcaptureproxy.helper
swap_url
Swap any instances of the old url with the new url. Will not replace query info. Args: ignore_query (bool): Whether the url.query should be ignored. Defaults to True. old_url (URL): Old url to find and replace. If there is any additional path, it will be added to the new_url. new_url (URL): New url to replace. url (URL): url to modify
def swap_url(
    ignore_query: bool = True,
    old_url: URL = URL(""),  # noqa: B008
    new_url: URL = URL(""),  # noqa: B008
    url: URL = URL(""),  # noqa: B008
) -> URL:
    """Swap any instances of the old url with the new url. Will not replace query info.

    Args:
        ignore_query (bool): Whether the url.query should be ignored. Defaults to True.
        old_url (URL): Old url to find and replace. If there is any additional path,
            it will be added to the new_url.
        new_url (URL): New url to replace.
        url (URL): url to modify
    """
    # Accept plain strings for convenience.
    if isinstance(old_url, str):
        old_url = URL(old_url)
    if isinstance(new_url, str):
        new_url = URL(new_url)
    if isinstance(url, str):
        url = URL(url)
    # Compare/replace on the query-less string forms; the query is re-attached below.
    old_url_string: Text = str(old_url.with_query({}))
    new_url_string: Text = str(new_url.with_query({}))
    old_query: MultiDict = url.query
    url_string = str(url.with_query({}))
    # ensure both paths end with "/" if one of them does
    if (
        new_url_string
        and new_url_string[-1] == "/"
        and old_url_string
        and old_url_string[-1] != "/"
    ):
        old_url_string += "/"
    elif (
        old_url_string
        and old_url_string[-1] == "/"
        and new_url_string
        and new_url_string[-1] != "/"
    ):
        new_url_string += "/"
    if ignore_query:
        result = URL(url_string.replace(old_url_string, new_url_string))
        # clean up any // in path
        return result.with_path(re.sub(r"/+", "/", result.path)).with_query(old_query)
    # When the query is not ignored, swap inside each query value as well.
    new_query = {}
    for key, value in old_query.items():
        if value:
            new_query[key] = value.replace(old_url_string, new_url_string)
    # NOTE(review): keys with empty/falsy values are dropped from the rebuilt
    # query here — confirm that losing empty params is intended.
    result = URL(url_string.replace(old_url_string, new_url_string))
    return result.with_path(re.sub(r"/+", "/", result.path)).update_query(new_query)
(ignore_query: bool = True, old_url: yarl.URL = URL(''), new_url: yarl.URL = URL(''), url: yarl.URL = URL('')) -> yarl.URL
35,453
deprecation
DeprecatedWarning
A warning class for deprecated methods This is a specialization of the built-in :class:`DeprecationWarning`, adding parameters that allow us to get information into the __str__ that ends up being sent through the :mod:`warnings` system. The attributes aren't able to be retrieved after the warning gets raised and passed through the system as only the class--not the instance--and message are what gets preserved. :param function: The function being deprecated. :param deprecated_in: The version that ``function`` is deprecated in :param removed_in: The version or :class:`datetime.date` specifying when ``function`` gets removed. :param details: Optional details about the deprecation. Most often this will include directions on what to use instead of the now deprecated code.
class DeprecatedWarning(DeprecationWarning):
    """A warning class for deprecated methods

    This is a specialization of the built-in :class:`DeprecationWarning`,
    adding parameters that allow us to get information into the __str__
    that ends up being sent through the :mod:`warnings` system.
    The attributes aren't able to be retrieved after the warning gets
    raised and passed through the system as only the class--not the
    instance--and message are what gets preserved.

    :param function: The function being deprecated.
    :param deprecated_in: The version that ``function`` is deprecated in
    :param removed_in: The version or :class:`datetime.date` specifying
                       when ``function`` gets removed.
    :param details: Optional details about the deprecation. Most often
                    this will include directions on what to use instead
                    of the now deprecated code.
    """

    def __init__(self, function, deprecated_in, removed_in, details=""):
        # NOTE: The docstring only works for this class if it appears up
        # near the class name, not here inside __init__. I think it has
        # to do with being an exception class.
        self.function = function
        self.deprecated_in = deprecated_in
        self.removed_in = removed_in
        self.details = details
        # Forward everything to the base exception so the parts survive
        # in the warning's args even though the attributes do not.
        super(DeprecatedWarning, self).__init__(
            function, deprecated_in, removed_in, details
        )

    def __str__(self):
        # Build each optional fragment as "" when absent so the final
        # message reads naturally for any combination of parts.
        deprecated = f" as of {self.deprecated_in}" if self.deprecated_in else ""
        if self.removed_in:
            preposition = "on" if isinstance(self.removed_in, date) else "in"
            removed = f" and will be removed {preposition} {self.removed_in}"
        else:
            removed = ""
        period = "." if (self.deprecated_in or self.removed_in or self.details) else ""
        details = f" {self.details}" if self.details else ""
        return f"{self.function} is deprecated{deprecated}{removed}{period}{details}"
(function, deprecated_in, removed_in, details='')
35,454
deprecation
__init__
null
def __init__(self, function, deprecated_in, removed_in, details=""):
    # NOTE: The docstring only works for this class if it appears up
    # near the class name, not here inside __init__. I think it has
    # to do with being an exception class.
    self.function = function
    self.deprecated_in = deprecated_in
    self.removed_in = removed_in
    self.details = details
    # Forward all parts to the base exception so they survive in the
    # warning's args tuple even though the attributes themselves do not
    # propagate through the warnings system (see class docstring).
    super(DeprecatedWarning, self).__init__(function, deprecated_in, removed_in, details)
(self, function, deprecated_in, removed_in, details='')
35,455
deprecation
__str__
null
def __str__(self):
    """Render the deprecation message from whichever parts are present."""
    # Each optional fragment collapses to "" when absent so the sentence
    # reads naturally for any combination of parts.
    deprecated = f" as of {self.deprecated_in}" if self.deprecated_in else ""
    if self.removed_in:
        preposition = "on" if isinstance(self.removed_in, date) else "in"
        removed = f" and will be removed {preposition} {self.removed_in}"
    else:
        removed = ""
    period = "." if (self.deprecated_in or self.removed_in or self.details) else ""
    details = f" {self.details}" if self.details else ""
    return f"{self.function} is deprecated{deprecated}{removed}{period}{details}"
(self)
35,456
deprecation
UnsupportedWarning
A warning class for methods to be removed This is a subclass of :class:`~deprecation.DeprecatedWarning` and is used to output a proper message about a function being unsupported. Additionally, the :func:`~deprecation.fail_if_not_removed` decorator will handle this warning and cause any tests to fail if the system under test uses code that raises this warning.
class UnsupportedWarning(DeprecatedWarning):
    """A warning class for methods to be removed

    This is a subclass of :class:`~deprecation.DeprecatedWarning` and is used
    to output a proper message about a function being unsupported.
    Additionally, the :func:`~deprecation.fail_if_not_removed` decorator
    will handle this warning and cause any tests to fail if the system
    under test uses code that raises this warning.
    """

    def __str__(self):
        # details collapses to "" when absent, same as the parent class.
        details = f" {self.details}" if self.details else ""
        return f"{self.function} is unsupported as of {self.removed_in}.{details}"
(function, deprecated_in, removed_in, details='')
35,458
deprecation
__str__
null
def __str__(self):
    """Render the 'unsupported as of <removed_in>' message."""
    # details collapses to "" when absent.
    details = f" {self.details}" if self.details else ""
    return f"{self.function} is unsupported as of {self.removed_in}.{details}"
(self)
35,461
deprecation
deprecated
Decorate a function to signify its deprecation This function wraps a method that will soon be removed and does two things: * The docstring of the method will be modified to include a notice about deprecation, e.g., "Deprecated since 0.9.11. Use foo instead." * Raises a :class:`~deprecation.DeprecatedWarning` via the :mod:`warnings` module, which is a subclass of the built-in :class:`DeprecationWarning`. Note that built-in :class:`DeprecationWarning`s are ignored by default, so for users to be informed of said warnings they will need to enable them--see the :mod:`warnings` module documentation for more details. :param deprecated_in: The version at which the decorated method is considered deprecated. This will usually be the next version to be released when the decorator is added. The default is **None**, which effectively means immediate deprecation. If this is not specified, then the `removed_in` and `current_version` arguments are ignored. :param removed_in: The version or :class:`datetime.date` when the decorated method will be removed. The default is **None**, specifying that the function is not currently planned to be removed. Note: This parameter cannot be set to a value if `deprecated_in=None`. :param current_version: The source of version information for the currently running code. This will usually be a `__version__` attribute on your library. The default is `None`. When `current_version=None` the automation to determine if the wrapped function is actually in a period of deprecation or time for removal does not work, causing a :class:`~deprecation.DeprecatedWarning` to be raised in all cases. :param details: Extra details to be added to the method docstring and warning. For example, the details may point users to a replacement method, such as "Use the foo_bar method instead". By default there are no details.
def deprecated(deprecated_in=None, removed_in=None, current_version=None, details=""):
    """Decorate a function to signify its deprecation

    This function wraps a method that will soon be removed and does two things:
        * The docstring of the method will be modified to include a notice
          about deprecation, e.g., "Deprecated since 0.9.11. Use foo instead."
        * Raises a :class:`~deprecation.DeprecatedWarning`
          via the :mod:`warnings` module, which is a subclass of the built-in
          :class:`DeprecationWarning`. Note that built-in
          :class:`DeprecationWarning`s are ignored by default, so for users
          to be informed of said warnings they will need to enable them--see
          the :mod:`warnings` module documentation for more details.

    :param deprecated_in: The version at which the decorated method is
                          considered deprecated. This will usually be the
                          next version to be released when the decorator is
                          added. The default is **None**, which effectively
                          means immediate deprecation. If this is not
                          specified, then the `removed_in` and
                          `current_version` arguments are ignored.
    :param removed_in: The version or :class:`datetime.date` when the decorated
                       method will be removed. The default is **None**,
                       specifying that the function is not currently planned
                       to be removed.
                       Note: This parameter cannot be set to a value if
                       `deprecated_in=None`.
    :param current_version: The source of version information for the
                            currently running code. This will usually be
                            a `__version__` attribute on your library.
                            The default is `None`. When
                            `current_version=None` the automation to
                            determine if the wrapped function is actually
                            in a period of deprecation or time for removal
                            does not work, causing a
                            :class:`~deprecation.DeprecatedWarning`
                            to be raised in all cases.
    :param details: Extra details to be added to the method docstring and
                    warning. For example, the details may point users to a
                    replacement method, such as "Use the foo_bar
                    method instead". By default there are no details.
    """
    # You can't just jump to removal. It's weird, unfair, and also makes
    # building up the docstring weird.
    if deprecated_in is None and removed_in is not None:
        raise TypeError("Cannot set removed_in to a value "
                        "without also setting deprecated_in")

    # Only warn when it's appropriate. There may be cases when it makes sense
    # to add this decorator before a formal deprecation period begins.
    # In CPython, PendingDeprecatedWarning gets used in that period,
    # so perhaps mimick that at some point.
    is_deprecated = False
    is_unsupported = False

    # StrictVersion won't take a None or a "", so make whatever goes to it
    # is at least *something*. Compare versions only if removed_in is not
    # of type datetime.date
    if isinstance(removed_in, date):
        # Date-based removal: unsupported once the date has passed.
        if date.today() >= removed_in:
            is_unsupported = True
        else:
            is_deprecated = True
    elif current_version:
        # Version-based removal: compare parsed versions.
        # `version` here is the module-level packaging.version import.
        current_version = version.parse(current_version)

        if (removed_in
                and current_version >= version.parse(removed_in)):
            is_unsupported = True
        elif (deprecated_in
              and current_version >= version.parse(deprecated_in)):
            is_deprecated = True
    else:
        # If we can't actually calculate that we're in a period of
        # deprecation...well, they used the decorator, so it's deprecated.
        # This will cover the case of someone just using
        # @deprecated("1.0") without the other advantages.
        is_deprecated = True

    should_warn = any([is_deprecated, is_unsupported])

    def _function_wrapper(function):
        if should_warn:
            # Everything *should* have a docstring, but just in case...
            existing_docstring = function.__doc__ or ""

            # The various parts of this decorator being optional makes for
            # a number of ways the deprecation notice could go. The following
            # makes for a nicely constructed sentence with or without any
            # of the parts.
            # If removed_in is a date, use "removed on"
            # If removed_in is a version, use "removed in"
            parts = {
                "deprecated_in":
                    " %s" % deprecated_in if deprecated_in else "",
                "removed_in":
                    "\n This will be removed {} {}.".format(
                        "on" if isinstance(removed_in, date) else "in",
                        removed_in) if removed_in else "",
                "details":
                    " %s" % details if details else ""}

            deprecation_note = (".. deprecated::{deprecated_in}"
                                "{removed_in}{details}".format(**parts))

            # default location for insertion of deprecation note
            loc = 1

            # split docstring at first occurrence of newline
            string_list = existing_docstring.split("\n", 1)

            if len(string_list) > 1:
                # With a multi-line docstring, when we modify
                # existing_docstring to add our deprecation_note,
                # if we're not careful we'll interfere with the
                # indentation levels of the contents below the
                # first line, or as PEP 257 calls it, the summary
                # line. Since the summary line can start on the
                # same line as the """, dedenting the whole thing
                # won't help. Split the summary and contents up,
                # dedent the contents independently, then join
                # summary, dedent'ed contents, and our
                # deprecation_note.

                # in-place dedent docstring content
                string_list[1] = textwrap.dedent(string_list[1])

                # we need another newline
                string_list.insert(loc, "\n")

                # change the message_location if we add to end of docstring
                # do this always if not "top"
                # NOTE: message_location is a module-level setting, not a
                # parameter of this decorator.
                if message_location != "top":
                    loc = 3

            # insert deprecation note and dual newline
            string_list.insert(loc, deprecation_note)
            string_list.insert(loc, "\n\n")

            function.__doc__ = "".join(string_list)

        @functools.wraps(function)
        def _inner(*args, **kwargs):
            if should_warn:
                if is_unsupported:
                    cls = UnsupportedWarning
                else:
                    cls = DeprecatedWarning

                the_warning = cls(function.__name__, deprecated_in,
                                  removed_in, details)
                warnings.warn(the_warning, category=DeprecationWarning,
                              stacklevel=2)

            return function(*args, **kwargs)
        return _inner

    return _function_wrapper
(deprecated_in=None, removed_in=None, current_version=None, details='')
35,462
deprecation
fail_if_not_removed
Decorate a test method to track removal of deprecated code This decorator catches :class:`~deprecation.UnsupportedWarning` warnings that occur during testing and causes unittests to fail, making it easier to keep track of when code should be removed. :raises: :class:`AssertionError` if an :class:`~deprecation.UnsupportedWarning` is raised while running the test method.
def fail_if_not_removed(method):
    """Decorate a test method to track removal of deprecated code

    This decorator catches :class:`~deprecation.UnsupportedWarning`
    warnings that occur during testing and causes unittests to fail,
    making it easier to keep track of when code should be removed.

    :raises: :class:`AssertionError` if an
             :class:`~deprecation.UnsupportedWarning`
             is raised while running the test method.
    """
    # NOTE(briancurtin): Unless this is named test_inner, nose won't work
    # properly. See Issue #32.
    @functools.wraps(method)
    def test_inner(*args, **kwargs):
        with warnings.catch_warnings(record=True) as caught_warnings:
            warnings.simplefilter("always")
            result = method(*args, **kwargs)
        # Fail on the first UnsupportedWarning the method emitted, if any.
        offender = next(
            (w for w in caught_warnings if w.category == UnsupportedWarning),
            None,
        )
        if offender is not None:
            raise AssertionError(
                "%s uses a function that should be removed: %s"
                % (method, str(offender.message)))
        return result
    return test_inner
(method)
35,467
bug_trail.config
BugTrailConfig
BugTrailConfig(app_name: str, app_author: str, report_folder: str, database_path: str, source_folder: str, ctags_file: str)
class BugTrailConfig:
    """Bug Trail configuration record.

    NOTE(review): the generated __init__ signature indicates this is a
    @dataclass; the decorator appears to be missing in this snippet — confirm
    against the config module.
    """

    # Application identity; used to derive platform data/config directories.
    app_name: str
    app_author: str
    # Folder where generated reports are written.
    report_folder: str
    # Path to the bug_trail SQLite database file.
    database_path: str
    # Inputs read from [tool.bug_trail] config; empty string when unset.
    source_folder: str
    ctags_file: str
(app_name: str, app_author: str, report_folder: str, database_path: str, source_folder: str, ctags_file: str) -> None
35,468
bug_trail.config
__eq__
null
""" Configuration module for Bug Trail. """ import os from dataclasses import dataclass import toml from platformdirs import user_config_dir, user_data_dir @dataclass class BugTrailConfig: app_name: str app_author: str report_folder: str database_path: str source_folder: str ctags_file: str
(self, other)
35,471
bug_trail.handlers
BugTrailHandler
A custom logging handler that logs to a SQLite database.
class BugTrailHandler(logging.Handler):
    """Logging handler that persists log records to a SQLite database.

    All database work is delegated to a shared BaseErrorLogHandler instance.
    """

    def __init__(self, db_path: str, minimum_level: int = logging.ERROR) -> None:
        """Initialize the handler.

        Args:
            db_path (str): Path to the SQLite database
            minimum_level (int): Passed through to BaseErrorLogHandler;
                defaults to logging.ERROR.
        """
        self.base_handler = BaseErrorLogHandler(db_path, minimum_level=minimum_level)
        super().__init__()

    def emit(self, record: logging.LogRecord) -> None:
        """Insert a log record into the database.

        Args:
            record (logging.LogRecord): The log record to be inserted
        """
        self.base_handler.emit(record)

    def close(self) -> None:
        """Close the connection to the database, then finish handler teardown."""
        self.base_handler.close()
        super().close()
(db_path: str, minimum_level: int = 40) -> None
35,472
bug_trail.handlers
__init__
Initialize the handler Args: db_path (str): Path to the SQLite database
def __init__(self, db_path: str, minimum_level: int = logging.ERROR) -> None:
    """
    Initialize the handler

    Args:
        db_path (str): Path to the SQLite database
        minimum_level (int): Passed through to BaseErrorLogHandler
            (presumably the minimum level persisted — confirm).
            Defaults to logging.ERROR.
    """
    # Database work is delegated to the shared BaseErrorLogHandler.
    self.base_handler = BaseErrorLogHandler(db_path, minimum_level=minimum_level)
    super().__init__()
(self, db_path: str, minimum_level: int = 40) -> NoneType
35,477
bug_trail.handlers
close
Close the connection to the database
def close(self) -> None:
    """
    Close the connection to the database
    """
    # Close the database delegate first, then let logging.Handler
    # finish its own teardown.
    self.base_handler.close()
    super().close()
(self) -> NoneType
35,479
bug_trail.handlers
emit
Insert a log record into the database Args: record (logging.LogRecord): The log record to be inserted
def emit(self, record: logging.LogRecord) -> None:
    """
    Insert a log record into the database

    Args:
        record (logging.LogRecord): The log record to be inserted
    """
    # Persistence is fully delegated to the shared base handler.
    self.base_handler.emit(record)
(self, record: logging.LogRecord) -> NoneType
35,491
bug_trail.handlers
PicoBugTrailHandler
A custom logging handler that logs to a SQLite database.
class PicoBugTrailHandler(picologging.Handler): """ A custom logging handler that logs to a SQLite database. """ def __init__(self, db_path: str, minimum_level: int = logging.ERROR) -> None: """ Initialize the handler Args: db_path (str): Path to the SQLite database """ super().__init__() self.base_handler = BaseErrorLogHandler(db_path, pico=True, minimum_level=minimum_level) def emit(self, record: picologging.LogRecord) -> None: """ Insert a log record into the database Args: record (logging.LogRecord): The log record to be inserted """ self.base_handler.pico_emit(record) def close(self) -> None: """ Close the connection to the database """ self.base_handler.close() super().close()
(db_path: str, minimum_level: int = 40) -> None
35,492
bug_trail.handlers
__init__
Initialize the handler Args: db_path (str): Path to the SQLite database
def __init__(self, db_path: str, minimum_level: int = logging.ERROR) -> None: """ Initialize the handler Args: db_path (str): Path to the SQLite database """ super().__init__() self.base_handler = BaseErrorLogHandler(db_path, pico=True, minimum_level=minimum_level)
(self, db_path: str, minimum_level: int = 40) -> NoneType
35,494
bug_trail.handlers
emit
Insert a log record into the database Args: record (logging.LogRecord): The log record to be inserted
def emit(self, record: picologging.LogRecord) -> None: """ Insert a log record into the database Args: record (logging.LogRecord): The log record to be inserted """ self.base_handler.pico_emit(record)
(self, record: picologging.LogRecord) -> NoneType
35,497
bug_trail.config
read_config
Read the Bug Trail configuration from a pyproject.toml file. Args: config_path (str): Path to the pyproject.toml file. Returns: BugTrailConfig: Configuration object for Bug Trail.
def read_config(config_path: str) -> BugTrailConfig: """ Read the Bug Trail configuration from a pyproject.toml file. Args: config_path (str): Path to the pyproject.toml file. Returns: BugTrailConfig: Configuration object for Bug Trail. """ # Read the TOML file try: bug_trail_config = toml.load(config_path) except (FileNotFoundError, toml.TomlDecodeError): bug_trail_config = {} except TypeError: # print(f"Error reading config file: {e}") bug_trail_config = {} section = bug_trail_config.get("tool", {}).get("bug_trail", {}) # Set default values app_name = section.get("app_name", "bug_trail") app_author = section.get("app_author", "bug_trail") default_data_dir = user_data_dir("bug_trail", app_author, ensure_exists=True) default_config_dir = user_config_dir("bug_trail", app_author, ensure_exists=True) report_folder = section.get("report_folder", os.path.join(default_data_dir, "reports")) database_path = section.get("database_path", os.path.join(default_config_dir, "bug_trail.db")) # input! source_folder = section.get("source_folder", "") ctags_file = section.get("ctags_file", "") return BugTrailConfig(app_name, app_author, report_folder, database_path, source_folder, ctags_file)
(config_path: str) -> bug_trail.config.BugTrailConfig
35,499
authsys.AuthSys
AuthSys
null
class AuthSys(): def __init__(self) -> None: self.main_api: str = "http://authsys.catway.org" self.mac: str = str(get_mac_address()) def _send_request(self, endpoint: str, params: Dict[str, str]) -> Dict[str, str]: """ Send a request to the API endpoint. Args: endpoint: The API endpoint to send the request to. params: A dictionary containing the request parameters. Returns: A dictionary containing the API response. """ req: str = requests.get(f"{self.main_api}/{endpoint}", params=params, timeout=10).text response: Dict[str, str] = json.loads(req) return response def login(self, key: str, access_token: str) -> Dict[str, str]: """ Log in with the provided key and access token. Args: key: The user's key. access_token: The user's access token. Returns: A dictionary containing the API response. """ params: Dict[str, str] = {"key": key, "access_token": access_token, "mac": self.mac} return self._send_request("login", params) def register(self, time: str) -> Dict[str, str]: """ Register a new user with the provided time. Args: time: The registration time. Returns: A dictionary containing the API response. """ params: Dict[str, str] = {"time": time} return self._send_request("register", params) def remove(self, key: str, secret_auth: str) -> Dict[str, str]: """ Remove a user with the provided key and secret authentication. Args: key: The user's key. secret_auth: The user's secret authentication. Returns: A dictionary containing the API response. """ params: Dict[str, str] = {"key": key, "auth": secret_auth} return self._send_request("delete", params) def edit(self, key: str, auth: str, mac: Optional[str] = None, time: Optional[str] = None) -> Dict[str, str]: """ Edit user information. Args: key: The user's key. auth: The user's authentication. mac: (Optional) The new MAC address. time: (Optional) The new time. Returns: A dictionary containing the API response. 
""" params: Dict[str, str] = {"key": key, "auth": auth} if mac: params["mac"] = mac if time: params["time"] = time return self._send_request("edit", params)
() -> None
35,500
authsys.AuthSys
__init__
null
def __init__(self) -> None: self.main_api: str = "http://authsys.catway.org" self.mac: str = str(get_mac_address())
(self) -> NoneType
35,501
authsys.AuthSys
_send_request
Send a request to the API endpoint. Args: endpoint: The API endpoint to send the request to. params: A dictionary containing the request parameters. Returns: A dictionary containing the API response.
def _send_request(self, endpoint: str, params: Dict[str, str]) -> Dict[str, str]: """ Send a request to the API endpoint. Args: endpoint: The API endpoint to send the request to. params: A dictionary containing the request parameters. Returns: A dictionary containing the API response. """ req: str = requests.get(f"{self.main_api}/{endpoint}", params=params, timeout=10).text response: Dict[str, str] = json.loads(req) return response
(self, endpoint: str, params: Dict[str, str]) -> Dict[str, str]
35,502
authsys.AuthSys
edit
Edit user information. Args: key: The user's key. auth: The user's authentication. mac: (Optional) The new MAC address. time: (Optional) The new time. Returns: A dictionary containing the API response.
def edit(self, key: str, auth: str, mac: Optional[str] = None, time: Optional[str] = None) -> Dict[str, str]: """ Edit user information. Args: key: The user's key. auth: The user's authentication. mac: (Optional) The new MAC address. time: (Optional) The new time. Returns: A dictionary containing the API response. """ params: Dict[str, str] = {"key": key, "auth": auth} if mac: params["mac"] = mac if time: params["time"] = time return self._send_request("edit", params)
(self, key: str, auth: str, mac: Optional[str] = None, time: Optional[str] = None) -> Dict[str, str]
35,503
authsys.AuthSys
login
Log in with the provided key and access token. Args: key: The user's key. access_token: The user's access token. Returns: A dictionary containing the API response.
def login(self, key: str, access_token: str) -> Dict[str, str]: """ Log in with the provided key and access token. Args: key: The user's key. access_token: The user's access token. Returns: A dictionary containing the API response. """ params: Dict[str, str] = {"key": key, "access_token": access_token, "mac": self.mac} return self._send_request("login", params)
(self, key: str, access_token: str) -> Dict[str, str]
35,504
authsys.AuthSys
register
Register a new user with the provided time. Args: time: The registration time. Returns: A dictionary containing the API response.
def register(self, time: str) -> Dict[str, str]: """ Register a new user with the provided time. Args: time: The registration time. Returns: A dictionary containing the API response. """ params: Dict[str, str] = {"time": time} return self._send_request("register", params)
(self, time: str) -> Dict[str, str]
35,505
authsys.AuthSys
remove
Remove a user with the provided key and secret authentication. Args: key: The user's key. secret_auth: The user's secret authentication. Returns: A dictionary containing the API response.
def remove(self, key: str, secret_auth: str) -> Dict[str, str]: """ Remove a user with the provided key and secret authentication. Args: key: The user's key. secret_auth: The user's secret authentication. Returns: A dictionary containing the API response. """ params: Dict[str, str] = {"key": key, "auth": secret_auth} return self._send_request("delete", params)
(self, key: str, secret_auth: str) -> Dict[str, str]
35,506
pefile
AddressSet
null
class AddressSet(set): def __init__(self): super().__init__() self.min = None self.max = None def add(self, value): super().add(value) self.min = value if self.min is None else min(self.min, value) self.max = value if self.max is None else max(self.max, value) def diff(self): return 0 if self.min is None or self.max is None else self.max - self.min
()
35,507
pefile
__init__
null
def __init__(self): super().__init__() self.min = None self.max = None
(self)
35,508
pefile
add
null
def add(self, value): super().add(value) self.min = value if self.min is None else min(self.min, value) self.max = value if self.max is None else max(self.max, value)
(self, value)
35,509
pefile
diff
null
def diff(self): return 0 if self.min is None or self.max is None else self.max - self.min
(self)
35,510
pefile
BaseRelocationData
Holds base relocation information. struct: IMAGE_BASE_RELOCATION structure entries: list of relocation data (RelocationData instances)
class BaseRelocationData(DataContainer): """Holds base relocation information. struct: IMAGE_BASE_RELOCATION structure entries: list of relocation data (RelocationData instances) """
(**args)
35,511
pefile
__init__
null
def __init__(self, **args): bare_setattr = super(DataContainer, self).__setattr__ for key, value in args.items(): bare_setattr(key, value)
(self, **args)