code
string | signature
string | docstring
string | loss_without_docstring
float64 | loss_with_docstring
float64 | factor
float64 |
---|---|---|---|---|---|
warnings.warn("/oauth/ro will be deprecated in future releases", DeprecationWarning)
return self.post(
'https://{}/oauth/ro'.format(self.domain),
data={
'client_id': client_id,
'username': username,
'password': password,
'id_token': id_token,
'connection': connection,
'device': device,
'grant_type': grant_type,
'scope': scope,
},
headers={'Content-Type': 'application/json'}
) | def login(self, client_id, username, password, connection, id_token=None,
grant_type='password', device=None, scope='openid') | Login using username and password
Given the user credentials and the connection specified, it will do
the authentication on the provider and return a dict with the
access_token and id_token. This endpoint only works for database
connections, passwordless connections, Active Directory/LDAP,
Windows Azure AD and ADFS. | 2.400555 | 2.531396 | 0.948313 |
body = {
'client_id': client_id,
'email': email,
'password': password,
'connection': connection,
'username': username,
'user_metadata': user_metadata
}
return self.post(
'https://{}/dbconnections/signup'.format(self.domain),
data=body,
headers={'Content-Type': 'application/json'}
) | def signup(self, client_id, email, password, connection, username=None,
user_metadata=None) | Signup using email and password.
Args:
client_id (str): ID of the application to use.
email (str): The user's email address.
password (str): The user's desired password.
connection (str): The name of the database connection where this user should be created.
username (str, optional): The user's username, if required by the database connection.
user_metadata (dict, optional): Additional key-value information to store for the user.
Some limitations apply, see: https://auth0.com/docs/metadata#metadata-restrictions
See: https://auth0.com/docs/api/authentication#signup | 2.242429 | 2.306466 | 0.972236 |
return self.post(
'https://{}/dbconnections/change_password'.format(self.domain),
data={
'client_id': client_id,
'email': email,
'password': password,
'connection': connection,
},
headers={'Content-Type': 'application/json'}
) | def change_password(self, client_id, email, connection, password=None) | Asks to change a password for a given user. | 2.694785 | 2.916484 | 0.923984 |
no_setup_requires_arguments = (
'-h', '--help',
'-n', '--dry-run',
'-q', '--quiet',
'-v', '--verbose',
'-V', '--version',
'--author',
'--author-email',
'--classifiers',
'--contact',
'--contact-email',
'--description',
'--egg-base',
'--fullname',
'--help-commands',
'--keywords',
'--licence',
'--license',
'--long-description',
'--maintainer',
'--maintainer-email',
'--name',
'--no-user-cfg',
'--obsoletes',
'--platforms',
'--provides',
'--requires',
'--url',
'clean',
'egg_info',
'register',
'sdist',
'upload',
)
def is_short_option(argument):
return len(argument) >= 2 and argument[0] == '-' and argument[1] != '-'
def expand_short_options(argument):
return ('-' + char for char in argument[1:])
def argument_without_setup_requirements(argv, i):
if argv[i] in no_setup_requires_arguments:
# Simple case: An argument which is either an option or a command
# which doesn't need setup requirements.
return True
elif (is_short_option(argv[i]) and
all(option in no_setup_requires_arguments
for option in expand_short_options(argv[i]))):
# Not so simple case: Combined short options none of which need
# setup requirements.
return True
elif argv[i - 1:i] == ['--egg-base']:
# Tricky case: --egg-info takes an argument which should not make
# us use setup_requires (defeating the purpose of this code).
return True
else:
return False
if all(argument_without_setup_requirements(argv, i)
for i in range(1, len(argv))):
return {
"cmdclass": {
"build": DummyCFFIBuild,
"install": DummyCFFIInstall,
"test": DummyPyTest,
}
}
else:
return {
"setup_requires": [CFFI_DEPENDENCY],
"cmdclass": {
"test": PyTest,
},
"cffi_modules": CFFI_MODULES,
} | def keywords_with_side_effects(argv) | Get a dictionary with setup keywords that (can) have side effects.
:param argv: A list of strings with command line arguments.
:returns: A dictionary with keyword arguments for the ``setup()`` function.
This setup.py script uses the setuptools 'setup_requires' feature because
this is required by the cffi package to compile extension modules. The
purpose of ``keywords_with_side_effects()`` is to avoid triggering the cffi
build process as a result of setup.py invocations that don't need the cffi
module to be built (setup.py serves the dual purpose of exposing package
metadata).
All of the options listed by ``python setup.py --help`` that print
information should be recognized here. The commands ``clean``,
``egg_info``, ``register``, ``sdist`` and ``upload`` are also recognized.
Any combination of these options and commands is also supported.
This function was originally based on the `setup.py script`_ of SciPy (see
also the discussion in `pip issue #25`_).
.. _pip issue #25: https://github.com/pypa/pip/issues/25
.. _setup.py script: https://github.com/scipy/scipy/blob/master/setup.py | 2.968305 | 2.940276 | 1.009533 |
hkey = k[:_inbytes(self.keysize)]
ekey = k[_inbytes(self.keysize):]
# encrypt
iv = _randombits(self.blocksize)
cipher = Cipher(algorithms.AES(ekey), modes.CBC(iv),
backend=self.backend)
encryptor = cipher.encryptor()
padder = PKCS7(self.blocksize).padder()
padded_data = padder.update(m) + padder.finalize()
e = encryptor.update(padded_data) + encryptor.finalize()
# mac
t = self._mac(hkey, a, iv, e)
return (iv, e, t) | def encrypt(self, k, a, m) | Encrypt according to the selected encryption and hashing
functions.
:param k: Encryption key (optional)
:param a: Additional Authentication Data
:param m: Plaintext
Returns a dictionary with the computed data. | 2.831542 | 3.167868 | 0.893832 |
hkey = k[:_inbytes(self.keysize)]
dkey = k[_inbytes(self.keysize):]
# verify mac
if not constant_time.bytes_eq(t, self._mac(hkey, a, iv, e)):
raise InvalidSignature('Failed to verify MAC')
# decrypt
cipher = Cipher(algorithms.AES(dkey), modes.CBC(iv),
backend=self.backend)
decryptor = cipher.decryptor()
d = decryptor.update(e) + decryptor.finalize()
unpadder = PKCS7(self.blocksize).unpadder()
return unpadder.update(d) + unpadder.finalize() | def decrypt(self, k, a, iv, e, t) | Decrypt according to the selected encryption and hashing
functions.
:param k: Encryption key (optional)
:param a: Additional Authenticated Data
:param iv: Initialization Vector
:param e: Ciphertext
:param t: Authentication Tag
Returns plaintext or raises an error | 2.828671 | 3.058453 | 0.92487 |
iv = _randombits(96)
cipher = Cipher(algorithms.AES(k), modes.GCM(iv),
backend=self.backend)
encryptor = cipher.encryptor()
encryptor.authenticate_additional_data(a)
e = encryptor.update(m) + encryptor.finalize()
return (iv, e, encryptor.tag) | def encrypt(self, k, a, m) | Encrypt accoriding to the selected encryption and hashing
functions.
:param k: Encryption key (optional)
:param a: Additional Authentication Data
:param m: Plaintext
Returns a dictionary with the computed data. | 2.860318 | 3.319065 | 0.861784 |
cipher = Cipher(algorithms.AES(k), modes.GCM(iv, t),
backend=self.backend)
decryptor = cipher.decryptor()
decryptor.authenticate_additional_data(a)
return decryptor.update(e) + decryptor.finalize() | def decrypt(self, k, a, iv, e, t) | Decrypt accoriding to the selected encryption and hashing
functions.
:param k: Encryption key (optional)
:param a: Additional Authenticated Data
:param iv: Initialization Vector
:param e: Ciphertext
:param t: Authentication Tag
Returns plaintext or raises an error | 2.051314 | 2.404383 | 0.853156 |
obj = cls()
try:
jkey = json_decode(key)
except Exception as e: # pylint: disable=broad-except
raise InvalidJWKValue(e)
obj.import_key(**jkey)
return obj | def from_json(cls, key) | Creates a RFC 7517 JWK from the standard JSON format.
:param key: The RFC 7517 representation of a JWK. | 4.802056 | 4.786366 | 1.003278 |
if private_key is True:
# Use _export_all for backwards compatibility, as this
# function allows to export symmetrict keys too
return self._export_all()
else:
return self.export_public() | def export(self, private_key=True) | Exports the key in the standard JSON format.
Exports the key regardless of type, if private_key is False
and the key is_symmetric an exceptionis raised.
:param private_key(bool): Whether to export the private key.
Defaults to True. | 10.64411 | 9.082932 | 1.17188 |
if self.is_symmetric:
return False
reg = JWKValuesRegistry[self._params['kty']]
for value in reg:
if reg[value].public and value in self._key:
return True | def has_public(self) | Whether this JWK has an asymmetric Public key. | 13.555452 | 8.692871 | 1.559376 |
k = self._key
if self._params['kty'] not in ['EC', 'OKP']:
raise InvalidJWKType('Not an EC or OKP key')
if arg and k['crv'] != arg:
raise InvalidJWKValue('Curve requested is "%s", but '
'key curve is "%s"' % (arg, k['crv']))
return self._get_curve_by_name(k['crv']) | def get_curve(self, arg) | Gets the Elliptic Curve associated with the key.
:param arg: an optional curve name
:raises InvalidJWKType: the key is not an EC or OKP key.
:raises InvalidJWKValue: if the curve names is invalid. | 4.265827 | 3.168746 | 1.346219 |
validops = self._params.get('key_ops',
list(JWKOperationsRegistry.keys()))
if validops is not list:
validops = [validops]
if operation is None:
if self._params['kty'] == 'oct':
return self._key['k']
raise InvalidJWKOperation(operation, validops)
elif operation == 'sign':
self._check_constraints('sig', operation)
return self._get_private_key(arg)
elif operation == 'verify':
self._check_constraints('sig', operation)
return self._get_public_key(arg)
elif operation == 'encrypt' or operation == 'wrapKey':
self._check_constraints('enc', operation)
return self._get_public_key(arg)
elif operation == 'decrypt' or operation == 'unwrapKey':
self._check_constraints('enc', operation)
return self._get_private_key(arg)
else:
raise NotImplementedError | def get_op_key(self, operation=None, arg=None) | Get the key object associated to the requested opration.
For example the public RSA key for the 'verify' operation or
the private EC key for the 'decrypt' operation.
:param operation: The requested operation.
The valid set of operations is availble in the
:data:`JWKOperationsRegistry` registry.
:param arg: an optional, context specific, argument
For example a curve name.
:raises InvalidJWKOperation: if the operation is unknown or
not permitted with this key.
:raises InvalidJWKUsage: if the use constraints do not permit
the operation. | 2.686903 | 2.336835 | 1.149804 |
try:
key = serialization.load_pem_private_key(
data, password=password, backend=default_backend())
except ValueError as e:
if password is not None:
raise e
try:
key = serialization.load_pem_public_key(
data, backend=default_backend())
except ValueError:
try:
cert = x509.load_pem_x509_certificate(
data, backend=default_backend())
key = cert.public_key()
except ValueError:
raise e
self.import_from_pyca(key)
self._params['kid'] = self.thumbprint() | def import_from_pem(self, data, password=None) | Imports a key from data loaded from a PEM file.
The key may be encrypted with a password.
Private keys (PKCS#8 format), public keys, and X509 certificate's
public keys can be imported with this interface.
:param data(bytes): The data contained in a PEM file.
:param password(bytes): An optional password to unwrap the key. | 2.171796 | 2.286235 | 0.949944 |
e = serialization.Encoding.PEM
if private_key:
if not self.has_private:
raise InvalidJWKType("No private key available")
f = serialization.PrivateFormat.PKCS8
if password is None:
a = serialization.NoEncryption()
elif isinstance(password, bytes):
a = serialization.BestAvailableEncryption(password)
elif password is False:
raise ValueError("The password must be None or a bytes string")
else:
raise TypeError("The password string must be bytes")
return self._get_private_key().private_bytes(
encoding=e, format=f, encryption_algorithm=a)
else:
if not self.has_public:
raise InvalidJWKType("No public key available")
f = serialization.PublicFormat.SubjectPublicKeyInfo
return self._get_public_key().public_bytes(encoding=e, format=f) | def export_to_pem(self, private_key=False, password=False) | Exports keys to a data buffer suitable to be stored as a PEM file.
Either the public or the private key can be exported to a PEM file.
For private keys the PKCS#8 format is used. If a password is provided
the best encryption method available as determined by the cryptography
module is used to wrap the key.
:param private_key: Whether the private key should be exported.
Defaults to `False` which means the public key is exported by default.
:param password(bytes): A password for wrapping the private key.
Defaults to False which will cause the operation to fail. To avoid
encryption the user must explicitly pass None, otherwise the user
needs to provide a password in a bytes buffer. | 2.328697 | 2.388658 | 0.974897 |
obj = cls()
obj.import_from_pem(data, password)
return obj | def from_pem(cls, data, password=None) | Creates a key from PKCS#8 formatted data loaded from a PEM file.
See the function `import_from_pem` for details.
:param data(bytes): The data contained in a PEM file.
:param password(bytes): An optional password to unwrap the key. | 4.569929 | 5.687638 | 0.803484 |
t = {'kty': self._params['kty']}
for name, val in iteritems(JWKValuesRegistry[t['kty']]):
if val.required:
t[name] = self._key[name]
digest = hashes.Hash(hashalg, backend=default_backend())
digest.update(bytes(json_encode(t).encode('utf8')))
return base64url_encode(digest.finalize()) | def thumbprint(self, hashalg=hashes.SHA256()) | Returns the key thumbprint as specified by RFC 7638.
:param hashalg: A hash function (defaults to SHA256) | 4.661511 | 4.824299 | 0.966257 |
if not isinstance(elem, JWK):
raise TypeError('Only JWK objects are valid elements')
set.add(self, elem) | def add(self, elem) | Adds a JWK object to the set
:param elem: the JWK object to add.
:raises TypeError: if the object is not a JWK. | 7.603588 | 6.387132 | 1.190454 |
exp_dict = dict()
for k, v in iteritems(self):
if k == 'keys':
keys = list()
for jwk in v:
keys.append(json_decode(jwk.export(private_keys)))
v = keys
exp_dict[k] = v
return json_encode(exp_dict) | def export(self, private_keys=True) | Exports a RFC 7517 keyset using the standard JSON format
:param private_key(bool): Whether to export private keys.
Defaults to True. | 3.486623 | 3.718165 | 0.937727 |
try:
jwkset = json_decode(keyset)
except Exception: # pylint: disable=broad-except
raise InvalidJWKValue()
if 'keys' not in jwkset:
raise InvalidJWKValue()
for k, v in iteritems(jwkset):
if k == 'keys':
for jwk in v:
self['keys'].add(JWK(**jwk))
else:
self[k] = v | def import_keyset(self, keyset) | Imports a RFC 7517 keyset using the standard JSON format.
:param keyset: The RFC 7517 representation of a JOSE Keyset. | 2.7898 | 2.934964 | 0.95054 |
if self.plaintext is None:
raise ValueError('Missing plaintext')
if not isinstance(self.plaintext, bytes):
raise ValueError("Plaintext must be 'bytes'")
if isinstance(header, dict):
header = json_encode(header)
jh = self._get_jose_header(header)
alg, enc = self._get_alg_enc_from_headers(jh)
rec = dict()
if header:
rec['header'] = header
wrapped = alg.wrap(key, enc.wrap_key_size, self.cek, jh)
self.cek = wrapped['cek']
if 'ek' in wrapped:
rec['encrypted_key'] = wrapped['ek']
if 'header' in wrapped:
h = json_decode(rec.get('header', '{}'))
nh = self._merge_headers(h, wrapped['header'])
rec['header'] = json_encode(nh)
if 'ciphertext' not in self.objects:
self._encrypt(alg, enc, jh)
if 'recipients' in self.objects:
self.objects['recipients'].append(rec)
elif 'encrypted_key' in self.objects or 'header' in self.objects:
self.objects['recipients'] = list()
n = dict()
if 'encrypted_key' in self.objects:
n['encrypted_key'] = self.objects.pop('encrypted_key')
if 'header' in self.objects:
n['header'] = self.objects.pop('header')
self.objects['recipients'].append(n)
self.objects['recipients'].append(rec)
else:
self.objects.update(rec) | def add_recipient(self, key, header=None) | Encrypt the plaintext with the given key.
:param key: A JWK key or password of appropriate type for the 'alg'
provided in the JOSE Headers.
:param header: A JSON string representing the per-recipient header.
:raises ValueError: if the plaintext is missing or not of type bytes.
:raises ValueError: if the compression type is unknown.
:raises InvalidJWAAlgorithm: if the 'alg' provided in the JOSE
headers is missing or unknown, or otherwise not implemented. | 2.770923 | 2.675777 | 1.035558 |
if 'ciphertext' not in self.objects:
raise InvalidJWEOperation("No available ciphertext")
if compact:
for invalid in 'aad', 'unprotected':
if invalid in self.objects:
raise InvalidJWEOperation(
"Can't use compact encoding when the '%s' parameter"
"is set" % invalid)
if 'protected' not in self.objects:
raise InvalidJWEOperation(
"Can't use compat encoding without protected headers")
else:
ph = json_decode(self.objects['protected'])
for required in 'alg', 'enc':
if required not in ph:
raise InvalidJWEOperation(
"Can't use compat encoding, '%s' must be in the "
"protected header" % required)
if 'recipients' in self.objects:
if len(self.objects['recipients']) != 1:
raise InvalidJWEOperation("Invalid number of recipients")
rec = self.objects['recipients'][0]
else:
rec = self.objects
if 'header' in rec:
# The AESGCMKW algorithm generates data (iv, tag) we put in the
# per-recipient unpotected header by default. Move it to the
# protected header and re-encrypt the payload, as the protected
# header is used as additional authenticated data.
h = json_decode(rec['header'])
ph = json_decode(self.objects['protected'])
nph = self._merge_headers(h, ph)
self.objects['protected'] = json_encode(nph)
jh = self._get_jose_header()
alg, enc = self._get_alg_enc_from_headers(jh)
self._encrypt(alg, enc, jh)
del rec['header']
return '.'.join([base64url_encode(self.objects['protected']),
base64url_encode(rec.get('encrypted_key', '')),
base64url_encode(self.objects['iv']),
base64url_encode(self.objects['ciphertext']),
base64url_encode(self.objects['tag'])])
else:
obj = self.objects
enc = {'ciphertext': base64url_encode(obj['ciphertext']),
'iv': base64url_encode(obj['iv']),
'tag': base64url_encode(self.objects['tag'])}
if 'protected' in obj:
enc['protected'] = base64url_encode(obj['protected'])
if 'unprotected' in obj:
enc['unprotected'] = json_decode(obj['unprotected'])
if 'aad' in obj:
enc['aad'] = base64url_encode(obj['aad'])
if 'recipients' in obj:
enc['recipients'] = list()
for rec in obj['recipients']:
e = dict()
if 'encrypted_key' in rec:
e['encrypted_key'] = \
base64url_encode(rec['encrypted_key'])
if 'header' in rec:
e['header'] = json_decode(rec['header'])
enc['recipients'].append(e)
else:
if 'encrypted_key' in obj:
enc['encrypted_key'] = \
base64url_encode(obj['encrypted_key'])
if 'header' in obj:
enc['header'] = json_decode(obj['header'])
return json_encode(enc) | def serialize(self, compact=False) | Serializes the object into a JWE token.
:param compact(boolean): if True generates the compact
representation, otherwise generates a standard JSON format.
:raises InvalidJWEOperation: if the object cannot serialized
with the compact representation and `compact` is True.
:raises InvalidJWEOperation: if no recipients have been added
to the object. | 2.501411 | 2.448091 | 1.02178 |
if 'ciphertext' not in self.objects:
raise InvalidJWEOperation("No available ciphertext")
self.decryptlog = list()
if 'recipients' in self.objects:
for rec in self.objects['recipients']:
try:
self._decrypt(key, rec)
except Exception as e: # pylint: disable=broad-except
self.decryptlog.append('Failed: [%s]' % repr(e))
else:
try:
self._decrypt(key, self.objects)
except Exception as e: # pylint: disable=broad-except
self.decryptlog.append('Failed: [%s]' % repr(e))
if not self.plaintext:
raise InvalidJWEData('No recipient matched the provided '
'key' + repr(self.decryptlog)) | def decrypt(self, key) | Decrypt a JWE token.
:param key: The (:class:`jwcrypto.jwk.JWK`) decryption key.
:param key: A (:class:`jwcrypto.jwk.JWK`) decryption key or a password
string (optional).
:raises InvalidJWEOperation: if the key is not a JWK object.
:raises InvalidJWEData: if the ciphertext can't be decrypted or
the object is otherwise malformed. | 3.621127 | 3.236636 | 1.118793 |
self.objects = dict()
self.plaintext = None
self.cek = None
o = dict()
try:
try:
djwe = json_decode(raw_jwe)
o['iv'] = base64url_decode(djwe['iv'])
o['ciphertext'] = base64url_decode(djwe['ciphertext'])
o['tag'] = base64url_decode(djwe['tag'])
if 'protected' in djwe:
p = base64url_decode(djwe['protected'])
o['protected'] = p.decode('utf-8')
if 'unprotected' in djwe:
o['unprotected'] = json_encode(djwe['unprotected'])
if 'aad' in djwe:
o['aad'] = base64url_decode(djwe['aad'])
if 'recipients' in djwe:
o['recipients'] = list()
for rec in djwe['recipients']:
e = dict()
if 'encrypted_key' in rec:
e['encrypted_key'] = \
base64url_decode(rec['encrypted_key'])
if 'header' in rec:
e['header'] = json_encode(rec['header'])
o['recipients'].append(e)
else:
if 'encrypted_key' in djwe:
o['encrypted_key'] = \
base64url_decode(djwe['encrypted_key'])
if 'header' in djwe:
o['header'] = json_encode(djwe['header'])
except ValueError:
c = raw_jwe.split('.')
if len(c) != 5:
raise InvalidJWEData()
p = base64url_decode(c[0])
o['protected'] = p.decode('utf-8')
ekey = base64url_decode(c[1])
if ekey != b'':
o['encrypted_key'] = base64url_decode(c[1])
o['iv'] = base64url_decode(c[2])
o['ciphertext'] = base64url_decode(c[3])
o['tag'] = base64url_decode(c[4])
self.objects = o
except Exception as e: # pylint: disable=broad-except
raise InvalidJWEData('Invalid format', repr(e))
if key:
self.decrypt(key) | def deserialize(self, raw_jwe, key=None) | Deserialize a JWE token.
NOTE: Destroys any current status and tries to import the raw
JWE provided.
:param raw_jwe: a 'raw' JWE token (JSON Encoded or Compact
notation) string.
:param key: A (:class:`jwcrypto.jwk.JWK`) decryption key or a password
string (optional).
If a key is provided a decryption step will be attempted after
the object is successfully deserialized.
:raises InvalidJWEData: if the raw object is an invaid JWE token.
:raises InvalidJWEOperation: if the decryption fails. | 1.727251 | 1.724397 | 1.001655 |
payload = self._payload()
sigin = b'.'.join([self.protected.encode('utf-8'), payload])
signature = self.engine.sign(self.key, sigin)
return {'protected': self.protected,
'payload': payload,
'signature': base64url_encode(signature)} | def sign(self) | Generates a signature | 4.445624 | 4.181938 | 1.063054 |
try:
payload = self._payload()
sigin = b'.'.join([self.protected.encode('utf-8'), payload])
self.engine.verify(self.key, sigin, signature)
except Exception as e: # pylint: disable=broad-except
raise InvalidJWSSignature('Verification failed', repr(e))
return True | def verify(self, signature) | Verifies a signature
:raises InvalidJWSSignature: if the verification fails. | 5.577739 | 4.838322 | 1.152825 |
self.verifylog = list()
self.objects['valid'] = False
obj = self.objects
if 'signature' in obj:
try:
self._verify(alg, key,
obj['payload'],
obj['signature'],
obj.get('protected', None),
obj.get('header', None))
obj['valid'] = True
except Exception as e: # pylint: disable=broad-except
self.verifylog.append('Failed: [%s]' % repr(e))
elif 'signatures' in obj:
for o in obj['signatures']:
try:
self._verify(alg, key,
obj['payload'],
o['signature'],
o.get('protected', None),
o.get('header', None))
# Ok if at least one verifies
obj['valid'] = True
except Exception as e: # pylint: disable=broad-except
self.verifylog.append('Failed: [%s]' % repr(e))
else:
raise InvalidJWSSignature('No signatures availble')
if not self.is_valid:
raise InvalidJWSSignature('Verification failed for all '
'signatures' + repr(self.verifylog)) | def verify(self, key, alg=None) | Verifies a JWS token.
:param key: The (:class:`jwcrypto.jwk.JWK`) verification key.
:param alg: The signing algorithm (optional). usually the algorithm
is known as it is provided with the JOSE Headers of the token.
:raises InvalidJWSSignature: if the verification fails. | 2.940171 | 2.795123 | 1.051893 |
self.objects = dict()
o = dict()
try:
try:
djws = json_decode(raw_jws)
if 'signatures' in djws:
o['signatures'] = list()
for s in djws['signatures']:
os = self._deserialize_signature(s)
o['signatures'].append(os)
self._deserialize_b64(o, os.get('protected'))
else:
o = self._deserialize_signature(djws)
self._deserialize_b64(o, o.get('protected'))
if 'payload' in djws:
if o.get('b64', True):
o['payload'] = base64url_decode(str(djws['payload']))
else:
o['payload'] = djws['payload']
except ValueError:
c = raw_jws.split('.')
if len(c) != 3:
raise InvalidJWSObject('Unrecognized representation')
p = base64url_decode(str(c[0]))
if len(p) > 0:
o['protected'] = p.decode('utf-8')
self._deserialize_b64(o, o['protected'])
o['payload'] = base64url_decode(str(c[1]))
o['signature'] = base64url_decode(str(c[2]))
self.objects = o
except Exception as e: # pylint: disable=broad-except
raise InvalidJWSObject('Invalid format', repr(e))
if key:
self.verify(key, alg) | def deserialize(self, raw_jws, key=None, alg=None) | Deserialize a JWS token.
NOTE: Destroys any current status and tries to import the raw
JWS provided.
:param raw_jws: a 'raw' JWS token (JSON Encoded or Compact
notation) string.
:param key: A (:class:`jwcrypto.jwk.JWK`) verification key (optional).
If a key is provided a verification step will be attempted after
the object is successfully deserialized.
:param alg: The signing algorithm (optional). usually the algorithm
is known as it is provided with the JOSE Headers of the token.
:raises InvalidJWSObject: if the raw object is an invaid JWS token.
:raises InvalidJWSSignature: if the verification fails. | 2.35972 | 2.383301 | 0.990106 |
if not self.objects.get('payload', None):
raise InvalidJWSObject('Missing Payload')
b64 = True
p = dict()
if protected:
if isinstance(protected, dict):
p = protected
protected = json_encode(p)
else:
p = json_decode(protected)
# If b64 is present we must enforce criticality
if 'b64' in list(p.keys()):
crit = p.get('crit', [])
if 'b64' not in crit:
raise InvalidJWSObject('b64 header must always be critical')
b64 = p['b64']
if 'b64' in self.objects:
if b64 != self.objects['b64']:
raise InvalidJWSObject('Mixed b64 headers on signatures')
h = None
if header:
if isinstance(header, dict):
h = header
header = json_encode(header)
else:
h = json_decode(header)
p = self._merge_check_headers(p, h)
if 'alg' in p:
if alg is None:
alg = p['alg']
elif alg != p['alg']:
raise ValueError('"alg" value mismatch, specified "alg" '
'does not match JOSE header value')
if alg is None:
raise ValueError('"alg" not specified')
c = JWSCore(alg, key, protected, self.objects['payload'])
sig = c.sign()
o = dict()
o['signature'] = base64url_decode(sig['signature'])
if protected:
o['protected'] = protected
if header:
o['header'] = h
o['valid'] = True
if 'signatures' in self.objects:
self.objects['signatures'].append(o)
elif 'signature' in self.objects:
self.objects['signatures'] = list()
n = dict()
n['signature'] = self.objects.pop('signature')
if 'protected' in self.objects:
n['protected'] = self.objects.pop('protected')
if 'header' in self.objects:
n['header'] = self.objects.pop('header')
if 'valid' in self.objects:
n['valid'] = self.objects.pop('valid')
self.objects['signatures'].append(n)
self.objects['signatures'].append(o)
else:
self.objects.update(o)
self.objects['b64'] = b64 | def add_signature(self, key, alg=None, protected=None, header=None) | Adds a new signature to the object.
:param key: A (:class:`jwcrypto.jwk.JWK`) key of appropriate for
the "alg" provided.
:param alg: An optional algorithm name. If already provided as an
element of the protected or unprotected header it can be safely
omitted.
:param potected: The Protected Header (optional)
:param header: The Unprotected Header (optional)
:raises InvalidJWSObject: if no payload has been set on the object,
or invalid headers are provided.
:raises ValueError: if the key is not a :class:`JWK` object.
:raises ValueError: if the algorithm is missing or is not provided
by one of the headers.
:raises InvalidJWAAlgorithm: if the algorithm is not valid, is
unknown or otherwise not yet implemented. | 2.483904 | 2.420238 | 1.026306 |
if compact:
if 'signatures' in self.objects:
raise InvalidJWSOperation("Can't use compact encoding with "
"multiple signatures")
if 'signature' not in self.objects:
raise InvalidJWSSignature("No available signature")
if not self.objects.get('valid', False):
raise InvalidJWSSignature("No valid signature found")
if 'protected' in self.objects:
protected = base64url_encode(self.objects['protected'])
else:
protected = ''
if self.objects.get('payload', False):
if self.objects.get('b64', True):
payload = base64url_encode(self.objects['payload'])
else:
if isinstance(self.objects['payload'], bytes):
payload = self.objects['payload'].decode('utf-8')
else:
payload = self.objects['payload']
if '.' in payload:
raise InvalidJWSOperation(
"Can't use compact encoding with unencoded "
"payload that uses the . character")
else:
payload = ''
return '.'.join([protected, payload,
base64url_encode(self.objects['signature'])])
else:
obj = self.objects
sig = dict()
if self.objects.get('payload', False):
if self.objects.get('b64', True):
sig['payload'] = base64url_encode(self.objects['payload'])
else:
sig['payload'] = self.objects['payload']
if 'signature' in obj:
if not obj.get('valid', False):
raise InvalidJWSSignature("No valid signature found")
sig['signature'] = base64url_encode(obj['signature'])
if 'protected' in obj:
sig['protected'] = base64url_encode(obj['protected'])
if 'header' in obj:
sig['header'] = obj['header']
elif 'signatures' in obj:
sig['signatures'] = list()
for o in obj['signatures']:
if not o.get('valid', False):
continue
s = {'signature': base64url_encode(o['signature'])}
if 'protected' in o:
s['protected'] = base64url_encode(o['protected'])
if 'header' in o:
s['header'] = o['header']
sig['signatures'].append(s)
if len(sig['signatures']) == 0:
raise InvalidJWSSignature("No valid signature found")
else:
raise InvalidJWSSignature("No available signature")
return json_encode(sig) | def serialize(self, compact=False) | Serializes the object into a JWS token.
:param compact(boolean): if True generates the compact
representation, otherwise generates a standard JSON format.
:raises InvalidJWSOperation: if the object cannot serialized
with the compact representation and `compat` is True.
:raises InvalidJWSSignature: if no signature has been added
to the object, or no valid signature can be found. | 1.93893 | 1.825569 | 1.062096 |
t = JWS(self.claims)
t.add_signature(key, protected=self.header)
self.token = t | def make_signed_token(self, key) | Signs the payload.
Creates a JWS token with the header as the JWS protected header and
the claims as the payload. See (:class:`jwcrypto.jws.JWS`) for
details on the exceptions that may be reaised.
:param key: A (:class:`jwcrypto.jwk.JWK`) key. | 7.634037 | 6.975766 | 1.094366 |
t = JWE(self.claims, self.header)
t.add_recipient(key)
self.token = t | def make_encrypted_token(self, key) | Encrypts the payload.
Creates a JWE token with the header as the JWE protected header and
the claims as the plaintext. See (:class:`jwcrypto.jwe.JWE`) for
details on the exceptions that may be reaised.
:param key: A (:class:`jwcrypto.jwk.JWK`) key. | 8.200864 | 7.28502 | 1.125716 |
c = jwt.count('.')
if c == 2:
self.token = JWS()
elif c == 4:
self.token = JWE()
else:
raise ValueError("Token format unrecognized")
# Apply algs restrictions if any, before performing any operation
if self._algs:
self.token.allowed_algs = self._algs
self.deserializelog = list()
# now deserialize and also decrypt/verify (or raise) if we
# have a key
if key is None:
self.token.deserialize(jwt, None)
elif isinstance(key, JWK):
self.token.deserialize(jwt, key)
self.deserializelog.append("Success")
elif isinstance(key, JWKSet):
self.token.deserialize(jwt, None)
if 'kid' in self.token.jose_header:
kid_key = key.get_key(self.token.jose_header['kid'])
if not kid_key:
raise JWTMissingKey('Key ID %s not in key set'
% self.token.jose_header['kid'])
self.token.deserialize(jwt, kid_key)
else:
for k in key:
try:
self.token.deserialize(jwt, k)
self.deserializelog.append("Success")
break
except Exception as e: # pylint: disable=broad-except
keyid = k.key_id
if keyid is None:
keyid = k.thumbprint()
self.deserializelog.append('Key [%s] failed: [%s]' % (
keyid, repr(e)))
continue
if "Success" not in self.deserializelog:
raise JWTMissingKey('No working key found in key set')
else:
raise ValueError("Unrecognized Key Type")
if key is not None:
self.header = self.token.jose_header
self.claims = self.token.payload.decode('utf-8')
self._check_provided_claims() | def deserialize(self, jwt, key=None) | Deserialize a JWT token.
NOTE: Destroys any current status and tries to import the raw
token provided.
:param jwt: a 'raw' JWT token.
:param key: A (:class:`jwcrypto.jwk.JWK`) verification or
decryption key, or a (:class:`jwcrypto.jwk.JWKSet`) that
contains a key indexed by the 'kid' header. | 3.200133 | 3.134585 | 1.020911 |
iter_valid = copy.copy(self.iter_valid)
losses, lbl_trues, lbl_preds = [], [], []
vizs = []
dataset = iter_valid.dataset
desc = 'valid [iteration=%08d]' % self.iteration
for batch in tqdm.tqdm(iter_valid, desc=desc, total=len(dataset),
ncols=80, leave=False):
img, lbl_true = zip(*batch)
batch = map(datasets.transform_lsvrc2012_vgg16, batch)
with chainer.no_backprop_mode(), \
chainer.using_config('train', False):
in_vars = utils.batch_to_vars(batch, device=self.device)
loss = self.model(*in_vars)
losses.append(float(loss.data))
score = self.model.score
lbl_pred = chainer.functions.argmax(score, axis=1)
lbl_pred = chainer.cuda.to_cpu(lbl_pred.data)
for im, lt, lp in zip(img, lbl_true, lbl_pred):
lbl_trues.append(lt)
lbl_preds.append(lp)
if len(vizs) < n_viz:
viz = utils.visualize_segmentation(
lbl_pred=lp, lbl_true=lt,
img=im, n_class=self.model.n_class)
vizs.append(viz)
# save visualization
out_viz = osp.join(self.out, 'visualizations_valid',
'iter%08d.jpg' % self.iteration)
if not osp.exists(osp.dirname(out_viz)):
os.makedirs(osp.dirname(out_viz))
viz = utils.get_tile_image(vizs)
skimage.io.imsave(out_viz, viz)
# generate log
acc = utils.label_accuracy_score(
lbl_trues, lbl_preds, self.model.n_class)
self._write_log(**{
'epoch': self.epoch,
'iteration': self.iteration,
'elapsed_time': time.time() - self.stamp_start,
'valid/loss': np.mean(losses),
'valid/acc': acc[0],
'valid/acc_cls': acc[1],
'valid/mean_iu': acc[2],
'valid/fwavacc': acc[3],
})
self._save_model() | def validate(self, n_viz=9) | Validate current model using validation dataset.
Parameters
----------
n_viz: int
Number fo visualization.
Returns
-------
log: dict
Log values. | 2.596512 | 2.633215 | 0.986062 |
self.stamp_start = time.time()
for iteration, batch in tqdm.tqdm(enumerate(self.iter_train),
desc='train', total=self.max_iter,
ncols=80):
self.epoch = self.iter_train.epoch
self.iteration = iteration
############
# validate #
############
if self.interval_validate and \
self.iteration % self.interval_validate == 0:
self.validate()
#########
# train #
#########
batch = map(datasets.transform_lsvrc2012_vgg16, batch)
in_vars = utils.batch_to_vars(batch, device=self.device)
self.model.zerograds()
loss = self.model(*in_vars)
if loss is not None:
loss.backward()
self.optimizer.update()
lbl_true = zip(*batch)[1]
lbl_pred = chainer.functions.argmax(self.model.score, axis=1)
lbl_pred = chainer.cuda.to_cpu(lbl_pred.data)
acc = utils.label_accuracy_score(
lbl_true, lbl_pred, self.model.n_class)
self._write_log(**{
'epoch': self.epoch,
'iteration': self.iteration,
'elapsed_time': time.time() - self.stamp_start,
'train/loss': float(loss.data),
'train/acc': acc[0],
'train/acc_cls': acc[1],
'train/mean_iu': acc[2],
'train/fwavacc': acc[3],
})
if iteration >= self.max_iter:
self._save_model()
break | def train(self) | Train the network using the training dataset.
Parameters
----------
None
Returns
-------
None | 2.958436 | 2.998869 | 0.986517 |
if src.shape[:2] == dst_shape[:2]:
return src
centerized = np.zeros(dst_shape, dtype=src.dtype)
if margin_color:
centerized[:, :] = margin_color
pad_vertical, pad_horizontal = 0, 0
h, w = src.shape[:2]
dst_h, dst_w = dst_shape[:2]
if h < dst_h:
pad_vertical = (dst_h - h) // 2
if w < dst_w:
pad_horizontal = (dst_w - w) // 2
centerized[pad_vertical:pad_vertical + h,
pad_horizontal:pad_horizontal + w] = src
return centerized | def centerize(src, dst_shape, margin_color=None) | Centerize image for specified image size
@param src: image to centerize
@param dst_shape: image shape (height, width) or (height, width, channel) | 1.679561 | 1.773497 | 0.947033 |
y_num, x_num = tile_shape
one_width = imgs[0].shape[1]
one_height = imgs[0].shape[0]
if concatenated_image is None:
if len(imgs[0].shape) == 3:
n_channels = imgs[0].shape[2]
assert all(im.shape[2] == n_channels for im in imgs)
concatenated_image = np.zeros(
(one_height * y_num, one_width * x_num, n_channels),
dtype=np.uint8,
)
else:
concatenated_image = np.zeros(
(one_height * y_num, one_width * x_num), dtype=np.uint8)
for y in six.moves.range(y_num):
for x in six.moves.range(x_num):
i = x + y * x_num
if i >= len(imgs):
pass
else:
concatenated_image[y * one_height:(y + 1) * one_height,
x * one_width:(x + 1) * one_width] = imgs[i]
return concatenated_image | def _tile_images(imgs, tile_shape, concatenated_image) | Concatenate images whose sizes are same.
@param imgs: image list which should be concatenated
@param tile_shape: shape for which images should be concatenated
@param concatenated_image: returned image.
if it is None, new image will be created. | 1.638103 | 1.670523 | 0.980593 |
def resize(*args, **kwargs):
# anti_aliasing arg cannot be passed to skimage<0.14
# use LooseVersion to allow 0.14dev.
if LooseVersion(skimage.__version__) < LooseVersion('0.14'):
kwargs.pop('anti_aliasing', None)
return skimage.transform.resize(*args, **kwargs)
def get_tile_shape(img_num):
x_num = 0
y_num = int(math.sqrt(img_num))
while x_num * y_num < img_num:
x_num += 1
return y_num, x_num
if tile_shape is None:
tile_shape = get_tile_shape(len(imgs))
# get max tile size to which each image should be resized
max_height, max_width = np.inf, np.inf
for img in imgs:
max_height = min([max_height, img.shape[0]])
max_width = min([max_width, img.shape[1]])
# resize and concatenate images
for i, img in enumerate(imgs):
h, w = img.shape[:2]
dtype = img.dtype
h_scale, w_scale = max_height / h, max_width / w
scale = min([h_scale, w_scale])
h, w = int(scale * h), int(scale * w)
img = resize(
image=img,
output_shape=(h, w),
mode='reflect',
preserve_range=True,
anti_aliasing=True,
).astype(dtype)
if len(img.shape) == 3:
img = centerize(img, (max_height, max_width, 3), margin_color)
else:
img = centerize(img, (max_height, max_width), margin_color)
imgs[i] = img
return _tile_images(imgs, tile_shape, result_img) | def get_tile_image(imgs, tile_shape=None, result_img=None, margin_color=None) | Concatenate images whose sizes are different.
@param imgs: image list which should be concatenated
@param tile_shape: shape for which images should be concatenated
@param result_img: numpy array to put result image | 2.337143 | 2.328938 | 1.003523 |
img = kwargs.pop('img', None)
lbl_true = kwargs.pop('lbl_true', None)
lbl_pred = kwargs.pop('lbl_pred', None)
n_class = kwargs.pop('n_class', None)
label_names = kwargs.pop('label_names', None)
if kwargs:
raise RuntimeError(
'Unexpected keys in kwargs: {}'.format(kwargs.keys()))
if lbl_true is None and lbl_pred is None:
raise ValueError('lbl_true or lbl_pred must be not None.')
lbl_true = copy.deepcopy(lbl_true)
lbl_pred = copy.deepcopy(lbl_pred)
mask_unlabeled = None
viz_unlabeled = None
if lbl_true is not None:
mask_unlabeled = lbl_true == -1
lbl_true[mask_unlabeled] = 0
viz_unlabeled = (
np.random.random((lbl_true.shape[0], lbl_true.shape[1], 3)) * 255
).astype(np.uint8)
if lbl_pred is not None:
lbl_pred[mask_unlabeled] = 0
vizs = []
if lbl_true is not None:
viz_trues = [
img,
label2rgb(lbl_true, label_names=label_names, n_labels=n_class),
label2rgb(lbl_true, img, label_names=label_names,
n_labels=n_class),
]
viz_trues[1][mask_unlabeled] = viz_unlabeled[mask_unlabeled]
viz_trues[2][mask_unlabeled] = viz_unlabeled[mask_unlabeled]
vizs.append(get_tile_image(viz_trues, (1, 3)))
if lbl_pred is not None:
viz_preds = [
img,
label2rgb(lbl_pred, label_names=label_names, n_labels=n_class),
label2rgb(lbl_pred, img, label_names=label_names,
n_labels=n_class),
]
if mask_unlabeled is not None and viz_unlabeled is not None:
viz_preds[1][mask_unlabeled] = viz_unlabeled[mask_unlabeled]
viz_preds[2][mask_unlabeled] = viz_unlabeled[mask_unlabeled]
vizs.append(get_tile_image(viz_preds, (1, 3)))
if len(vizs) == 1:
return vizs[0]
elif len(vizs) == 2:
return get_tile_image(vizs, (2, 1))
else:
raise RuntimeError | def visualize_segmentation(**kwargs) | Visualize segmentation.
Parameters
----------
img: ndarray
Input image to predict label.
lbl_true: ndarray
Ground truth of the label.
lbl_pred: ndarray
Label predicted.
n_class: int
Number of classes.
label_names: dict or list
Names of each label value.
Key or index is label_value and value is its name.
Returns
-------
img_array: ndarray
Visualized image. | 1.72553 | 1.66237 | 1.037994 |
factor = (size + 1) // 2
if size % 2 == 1:
center = factor - 1
else:
center = factor - 0.5
og = np.ogrid[:size, :size]
filter = (1 - abs(og[0] - center) / factor) * \
(1 - abs(og[1] - center) / factor)
return filter | def _get_upsampling_filter(size) | Make a 2D bilinear kernel suitable for upsampling | 1.390924 | 1.323959 | 1.050579 |
if output_format is None:
file_name, file_ext = path.splitext(output_path)
output_format = file_ext[len(extsep):].lower()
self.LOG.debug("Output format is not explicitly set, determined format is {0}.".format(output_format))
if not dry_run:
if output_format in self.ZIPFILE_FORMATS:
from zipfile import ZipFile, ZipInfo, ZIP_DEFLATED
if compresslevel is not None:
if sys.version_info > (3, 7):
archive = ZipFile(path.abspath(output_path), 'w', compresslevel=compresslevel)
else:
raise ValueError("Compression level for zip archives requires Python 3.7+")
else:
archive = ZipFile(path.abspath(output_path), 'w')
def add_file(file_path, arcname):
if not path.islink(file_path):
archive.write(file_path, arcname, ZIP_DEFLATED)
else:
i = ZipInfo(arcname)
i.create_system = 3
i.external_attr = 0xA1ED0000
archive.writestr(i, readlink(file_path))
elif output_format in self.TARFILE_FORMATS:
import tarfile
mode = self.TARFILE_FORMATS[output_format]
if compresslevel is not None:
try:
archive = tarfile.open(path.abspath(output_path), mode, compresslevel=compresslevel)
except TypeError:
raise ValueError("{0} cannot be compressed".format(output_format))
else:
archive = tarfile.open(path.abspath(output_path), mode)
def add_file(file_path, arcname):
archive.add(file_path, arcname)
else:
raise ValueError("unknown format: {0}".format(output_format))
def archiver(file_path, arcname):
self.LOG.debug("{0} => {1}".format(file_path, arcname))
add_file(file_path, arcname)
else:
archive = None
def archiver(file_path, arcname):
self.LOG.info("{0} => {1}".format(file_path, arcname))
self.archive_all_files(archiver)
if archive is not None:
archive.close() | def create(self, output_path, dry_run=False, output_format=None, compresslevel=None) | Create the archive at output_file_path.
Type of the archive is determined either by extension of output_file_path or by output_format.
Supported formats are: gz, zip, bz2, xz, tar, tgz, txz
@param output_path: Output file path.
@type output_path: str
@param dry_run: Determines whether create should do nothing but print what it would archive.
@type dry_run: bool
@param output_format: Determines format of the output archive. If None, format is determined from extension
of output_file_path.
@type output_format: str | 2.044438 | 2.075337 | 0.985111 |
next(self._check_attr_gens[repo_abspath])
attrs = self._check_attr_gens[repo_abspath].send(repo_file_path)
return attrs['export-ignore'] == 'set' | def is_file_excluded(self, repo_abspath, repo_file_path) | Checks whether file at a given path is excluded.
@param repo_abspath: Absolute path to the git repository.
@type repo_abspath: str
@param repo_file_path: Path to a file relative to repo_abspath.
@type repo_file_path: str
@return: True if file should be excluded. Otherwise False.
@rtype: bool | 9.142146 | 10.661208 | 0.857515 |
for file_path in self.extra:
archiver(path.abspath(file_path), path.join(self.prefix, file_path))
for file_path in self.walk_git_files():
archiver(path.join(self.main_repo_abspath, file_path), path.join(self.prefix, file_path)) | def archive_all_files(self, archiver) | Archive all files using archiver.
@param archiver: Callable that accepts 2 arguments:
abspath to file on the system and relative path within archive.
@type archiver: Callable | 4.06413 | 4.1071 | 0.989538 |
repo_abspath = path.join(self.main_repo_abspath, repo_path)
assert repo_abspath not in self._check_attr_gens
self._check_attr_gens[repo_abspath] = self.check_attr(repo_abspath, ['export-ignore'])
try:
repo_file_paths = self.run_git_shell(
'git ls-files -z --cached --full-name --no-empty-directory',
repo_abspath
).split('\0')[:-1]
for repo_file_path in repo_file_paths:
repo_file_abspath = path.join(repo_abspath, repo_file_path) # absolute file path
main_repo_file_path = path.join(repo_path, repo_file_path) # relative to main_repo_abspath
# Only list symlinks and files.
if not path.islink(repo_file_abspath) and path.isdir(repo_file_abspath):
continue
if self.is_file_excluded(repo_abspath, repo_file_path):
continue
yield main_repo_file_path
if self.force_sub:
self.run_git_shell('git submodule init', repo_abspath)
self.run_git_shell('git submodule update', repo_abspath)
try:
repo_gitmodules_abspath = path.join(repo_abspath, ".gitmodules")
with open(repo_gitmodules_abspath) as f:
lines = f.readlines()
for l in lines:
m = re.match("^\\s*path\\s*=\\s*(.*)\\s*$", l)
if m:
repo_submodule_path = m.group(1) # relative to repo_path
main_repo_submodule_path = path.join(repo_path, repo_submodule_path) # relative to main_repo_abspath
if self.is_file_excluded(repo_abspath, repo_submodule_path):
continue
for main_repo_submodule_file_path in self.walk_git_files(main_repo_submodule_path):
repo_submodule_file_path = path.relpath(main_repo_submodule_file_path, repo_path) # relative to repo_path
if self.is_file_excluded(repo_abspath, repo_submodule_file_path):
continue
yield main_repo_submodule_file_path
except IOError:
pass
finally:
self._check_attr_gens[repo_abspath].close()
del self._check_attr_gens[repo_abspath] | def walk_git_files(self, repo_path='') | An iterator method that yields a file path relative to main_repo_abspath
for each file that should be included in the archive.
Skips those that match the exclusion patterns found in
any discovered .gitattributes files along the way.
Recurs into submodules as well.
@param repo_path: Path to the git submodule repository relative to main_repo_abspath.
@type repo_path: str
@return: Iterator to traverse files under git control relative to main_repo_abspath.
@rtype: Iterable | 2.232218 | 2.157279 | 1.034738 |
def make_process():
env = dict(environ, GIT_FLUSH='1')
cmd = 'git check-attr --stdin -z {0}'.format(' '.join(attrs))
return Popen(cmd, shell=True, stdin=PIPE, stdout=PIPE, cwd=repo_abspath, env=env)
def read_attrs(process, repo_file_path):
process.stdin.write(repo_file_path.encode('utf-8') + b'\0')
process.stdin.flush()
# For every attribute check-attr will output: <path> NUL <attribute> NUL <info> NUL
path, attr, info = b'', b'', b''
nuls_count = 0
nuls_expected = 3 * len(attrs)
while nuls_count != nuls_expected:
b = process.stdout.read(1)
if b == b'' and process.poll() is not None:
raise RuntimeError("check-attr exited prematurely")
elif b == b'\0':
nuls_count += 1
if nuls_count % 3 == 0:
yield map(self.decode_git_output, (path, attr, info))
path, attr, info = b'', b'', b''
elif nuls_count % 3 == 0:
path += b
elif nuls_count % 3 == 1:
attr += b
elif nuls_count % 3 == 2:
info += b
def read_attrs_old(process, repo_file_path):
process.stdin.write(repo_file_path.encode('utf-8') + b'\0')
process.stdin.flush()
# For every attribute check-attr will output: <path>: <attribute>: <info>\n
# where <path> is c-quoted
path, attr, info = b'', b'', b''
lines_count = 0
lines_expected = len(attrs)
while lines_count != lines_expected:
line = process.stdout.readline()
info_start = line.rfind(b': ')
if info_start == -1:
raise RuntimeError("unexpected output of check-attr: {0}".format(line))
attr_start = line.rfind(b': ', 0, info_start)
if attr_start == -1:
raise RuntimeError("unexpected output of check-attr: {0}".format(line))
info = line[info_start + 2:len(line) - 1] # trim leading ": " and trailing \n
attr = line[attr_start + 2:info_start] # trim leading ": "
path = line[:attr_start]
yield map(self.decode_git_output, (path, attr, info))
lines_count += 1
if not attrs:
return
process = make_process()
try:
while True:
repo_file_path = yield
repo_file_attrs = {}
if self.git_version is None or self.git_version > (1, 8, 5):
reader = read_attrs
else:
reader = read_attrs_old
for path, attr, value in reader(process, repo_file_path):
repo_file_attrs[attr] = value
yield repo_file_attrs
finally:
process.stdin.close()
process.wait() | def check_attr(self, repo_abspath, attrs) | Generator that returns attributes for given paths relative to repo_abspath.
>>> g = GitArchiver.check_attr('repo_path', ['export-ignore'])
>>> next(g)
>>> attrs = g.send('relative_path')
>>> print(attrs['export-ignore'])
@param repo_abspath: Absolute path to a git repository.
@type repo_abspath: str
@param attrs: Attributes to check.
@type attrs: [str]
@rtype: generator | 2.367434 | 2.344548 | 1.009761 |
p = Popen(cmd, shell=True, stdout=PIPE, cwd=cwd)
output, _ = p.communicate()
output = cls.decode_git_output(output)
if p.returncode:
if sys.version_info > (2, 6):
raise CalledProcessError(returncode=p.returncode, cmd=cmd, output=output)
else:
raise CalledProcessError(returncode=p.returncode, cmd=cmd)
return output | def run_git_shell(cls, cmd, cwd=None) | Runs git shell command, reads output and decodes it into unicode string.
@param cmd: Command to be executed.
@type cmd: str
@type cwd: str
@param cwd: Working directory.
@rtype: str
@return: Output of the command.
@raise CalledProcessError: Raises exception if return code of the command is non-zero. | 2.153832 | 2.222888 | 0.968934 |
try:
output = cls.run_git_shell('git version')
except CalledProcessError:
cls.LOG.warning("Unable to get Git version.")
return None
try:
version = output.split()[2]
except IndexError:
cls.LOG.warning("Unable to parse Git version \"%s\".", output)
return None
try:
return tuple(int(v) for v in version.split('.'))
except ValueError:
cls.LOG.warning("Unable to parse Git version \"%s\".", version)
return None | def get_git_version(cls) | Return version of git current shell points to.
If version cannot be parsed None is returned.
@rtype: tuple or None | 2.408863 | 2.346238 | 1.026692 |
'''Base function for one time http requests.
Args:
method (str): The http method to use. For example 'GET'
uri (str): The url of the resource.
Example: 'https://example.com/stuff'
kwargs: Any number of arguments supported, found here:
http://asks.rtfd.io/en/latest/overview-of-funcs-and-args.html
Returns:
Response (asks.Response): The Response object.
'''
c_interact = kwargs.pop('persist_cookies', None)
ssl_context = kwargs.pop('ssl_context', None)
async with Session(persist_cookies=c_interact, ssl_context=ssl_context) as s:
r = await s.request(method, url=uri, **kwargs)
return r | async def request(method, uri, **kwargs) | Base function for one time http requests.
Args:
method (str): The http method to use. For example 'GET'
uri (str): The url of the resource.
Example: 'https://example.com/stuff'
kwargs: Any number of arguments supported, found here:
http://asks.rtfd.io/en/latest/overview-of-funcs-and-args.html
Returns:
Response (asks.Response): The Response object. | 5.027074 | 1.920265 | 2.617906 |
parts = uri.split('%')
for i in range(1, len(parts)):
h = parts[i][0:2]
if len(h) == 2 and h.isalnum():
try:
c = chr(int(h, 16))
except ValueError:
raise ValueError("Invalid percent-escape sequence: '%s'" % h)
if c in UNRESERVED_SET:
parts[i] = c + parts[i][2:]
else:
parts[i] = '%' + parts[i]
else:
parts[i] = '%' + parts[i]
return ''.join(parts) | def unquote_unreserved(uri) | Un-escape any percent-escape sequences in a URI that are unreserved
characters. This leaves all reserved, illegal and non-ASCII bytes encoded.
:rtype: str | 2.1639 | 2.11253 | 1.024317 |
safe_with_percent = "!#$%&'()*+,/:;=?@[]~"
safe_without_percent = "!#$&'()*+,/:;=?@[]~"
try:
# Unquote only the unreserved characters
# Then quote only illegal characters (do not quote reserved,
# unreserved, or '%')
return quote(unquote_unreserved(uri), safe=safe_with_percent)
except ValueError:
# We couldn't unquote the given URI, so let's try quoting it, but
# there may be unquoted '%'s in the URI. We need to make sure they're
# properly quoted so they do not cause issues elsewhere.
return quote(uri, safe=safe_without_percent) | def requote_uri(uri) | Re-quote the given URI.
This function passes the given URI through an unquote/quote cycle to
ensure that it is fully and consistently quoted.
:rtype: str | 4.589056 | 4.676187 | 0.981367 |
'''
Takes care of the i/o side of the request once it's been built,
and calls a couple of cleanup functions to check for redirects / store
cookies and the likes.
Args:
h11_request (h11.Request): A h11.Request object
h11_body (h11.Data): A h11.Data object, representing the request
body.
h11_connection (h11.Connection): The h11 connection for the request.
Returns:
(Response): The final response object, including any response
objects in `.history` generated by redirects.
Notes:
This function sets off a possible call to `_redirect` which
is semi-recursive.
'''
await self._send(h11_request, h11_body, h11_connection)
response_obj = await self._catch_response(h11_connection)
parse_cookies(response_obj, self.host)
# If there's a cookie tracker object, store any cookies we
# might've picked up along our travels.
if self.persist_cookies is not None:
self.persist_cookies._store_cookies(response_obj)
# Have a crack at guessing the encoding of the response.
response_obj._guess_encoding()
# Check to see if there's a PostResponseAuth set, and does magic.
if self.auth is not None:
response_obj = await self._auth_handler_post_check_retry(
response_obj)
# check redirects
if self.method != 'HEAD':
if self.max_redirects < 0:
raise TooManyRedirects
response_obj = await self._redirect(response_obj)
response_obj.history = self.history_objects
return response_obj | async def _request_io(self, h11_request, h11_body, h11_connection) | Takes care of the i/o side of the request once it's been built,
and calls a couple of cleanup functions to check for redirects / store
cookies and the likes.
Args:
h11_request (h11.Request): A h11.Request object
h11_body (h11.Data): A h11.Data object, representing the request
body.
h11_connection (h11.Connection): The h11 connection for the request.
Returns:
(Response): The final response object, including any response
objects in `.history` generated by redirects.
Notes:
This function sets off a possible call to `_redirect` which
is semi-recursive. | 6.145422 | 2.808915 | 2.187828 |
'''
Constructs the actual request URL with accompanying query if any.
Returns:
None: But does modify self.path, which contains the final
request path sent to the server.
'''
if not self.path:
self.path = '/'
if self.uri_parameters:
self.path = self.path + ';' + requote_uri(self.uri_parameters)
if self.query:
self.path = (self.path + '?' + self.query)
if self.params:
try:
if self.query:
self.path = self.path + self._dict_to_query(
self.params, base_query=True)
else:
self.path = self.path + self._dict_to_query(self.params)
except AttributeError:
self.path = self.path + '?' + self.params
self.path = requote_uri(self.path)
self.req_url = urlunparse(
(self.scheme, self.host, (self.path or ''), '', '', '')) | def _build_path(self) | Constructs the actual request URL with accompanying query if any.
Returns:
None: But does modify self.path, which contains the final
request path sent to the server. | 3.619014 | 2.329011 | 1.553885 |
'''
Calls the _check_redirect method of the supplied response object
in order to determine if the http status code indicates a redirect.
Returns:
Response: May or may not be the result of recursive calls due
to redirects!
Notes:
If it does redirect, it calls the appropriate method with the
redirect location, returning the response object. Furthermore,
if there is a redirect, this function is recursive in a roundabout
way, storing the previous response object in `.history_objects`.
'''
redirect, force_get, location = False, None, None
if 300 <= response_obj.status_code < 400:
if response_obj.status_code == 303:
self.data, self.json, self.files = None, None, None
if response_obj.status_code in [301, 305]:
# redirect / force GET / location
redirect = True
force_get = False
else:
redirect = True
force_get = True
location = response_obj.headers['Location']
if redirect:
allow_redirect = True
redirect_uri = urlparse(location.strip())
# relative redirect
if not redirect_uri.netloc:
self.uri = urlunparse(
(self.scheme, self.host, *redirect_uri[2:]))
# absolute-redirect
else:
location = location.strip()
if self.auth is not None:
if not self.auth_off_domain:
allow_redirect = self._location_auth_protect(location)
self.uri = location
l_scheme, l_netloc, *_ = urlparse(location)
if l_scheme != self.scheme or l_netloc != self.host:
await self._get_new_sock()
# follow redirect with correct http method type
if force_get:
self.history_objects.append(response_obj)
self.method = 'GET'
else:
self.history_objects.append(response_obj)
self.max_redirects -= 1
try:
if response_obj.headers['connection'].lower() == 'close':
await self._get_new_sock()
except KeyError:
pass
if allow_redirect:
_, response_obj = await self.make_request()
return response_obj | async def _redirect(self, response_obj) | Calls the _check_redirect method of the supplied response object
in order to determine if the http status code indicates a redirect.
Returns:
Response: May or may not be the result of recursive calls due
to redirects!
Notes:
If it does redirect, it calls the appropriate method with the
redirect location, returning the response object. Furthermore,
if there is a redirect, this function is recursive in a roundabout
way, storing the previous response object in `.history_objects`. | 4.612394 | 2.792785 | 1.651539 |
'''
On 'Connection: close' headers we've to create a new connection.
This reaches in to the parent session and pulls a switcheroo, dunking
the current connection and requesting a new one.
'''
self.sock._active = False
self.sock = await self.session._grab_connection(self.uri)
self.port = self.sock.port | async def _get_new_sock(self) | On 'Connection: close' headers we've to create a new connection.
This reaches in to the parent session and pulls a switcheroo, dunking
the current connection and requesting a new one. | 16.938375 | 2.598813 | 6.517736 |
'''
Takes user supplied data / files and forms it / them
appropriately, returning the contents type, len,
and the request body its self.
Returns:
The str mime type for the Content-Type header.
The len of the body.
The body as a str.
'''
c_type, body = None, ''
multipart_ctype = 'multipart/form-data; boundary={}'.format(_BOUNDARY)
if self.data is not None:
if self.files or self.json is not None:
raise TypeError('data arg cannot be used in conjunction with'
'files or json arg.')
c_type = 'application/x-www-form-urlencoded'
try:
body = self._dict_to_query(self.data, params=False)
except AttributeError:
body = self.data
c_type = self.mimetype or 'text/plain'
elif self.files is not None:
if self.data or self.json is not None:
raise TypeError('files arg cannot be used in conjunction with'
'data or json arg.')
c_type = multipart_ctype
body = await self._multipart(self.files)
elif self.json is not None:
if self.data or self.files:
raise TypeError('json arg cannot be used in conjunction with'
'data or files arg.')
c_type = 'application/json'
body = _json.dumps(self.json)
return c_type, str(len(body)), body | async def _formulate_body(self) | Takes user supplied data / files and forms it / them
appropriately, returning the contents type, len,
and the request body its self.
Returns:
The str mime type for the Content-Type header.
The len of the body.
The body as a str. | 3.394696 | 2.037191 | 1.666361 |
'''
Turns python dicts in to valid body-queries or queries for use directly
in the request url. Unlike the stdlib quote() and it's variations,
this also works on iterables like lists which are normally not valid.
The use of lists in this manner is not a great idea unless
the server supports it. Caveat emptor.
Returns:
Query part of url (or body).
'''
query = []
for k, v in data.items():
if v is None:
continue
if isinstance(v, (str, Number)):
query.append('='.join(quote_plus(x) for x in (k, str(v))))
elif isinstance(v, dict):
for key in v:
query.append('='.join(quote_plus(x) for x in (k, key)))
elif hasattr(v, '__iter__'):
for elm in v:
query.append('='.join(quote_plus(x) for x in (k,
quote_plus('+'.join(str(elm).split())))))
if params and query:
if not base_query:
return requote_uri('?' + '&'.join(query))
else:
return requote_uri('&' + '&'.join(query))
return requote_uri('&'.join(query)) | def _dict_to_query(data, params=True, base_query=False) | Turns python dicts in to valid body-queries or queries for use directly
in the request url. Unlike the stdlib quote() and it's variations,
this also works on iterables like lists which are normally not valid.
The use of lists in this manner is not a great idea unless
the server supports it. Caveat emptor.
Returns:
Query part of url (or body). | 4.660281 | 1.896441 | 2.457383 |
'''
Forms multipart requests from a dict with name, path k/vs. Name
does not have to be the actual file name.
Args:
files_dict (dict): A dict of `filename:filepath`s, to be sent
as multipart files.
Returns:
multip_pkg (str): The strings representation of the content body,
multipart formatted.
'''
boundary = bytes(_BOUNDARY, self.encoding)
hder_format = 'Content-Disposition: form-data; name="{}"'
hder_format_io = '; filename="{}"'
multip_pkg = b''
num_of_parts = len(files_dict)
for index, kv in enumerate(files_dict.items(), start=1):
multip_pkg += (b'--' + boundary + b'\r\n')
k, v = kv
try:
pkg_body = await self._file_manager(v)
multip_pkg += bytes(hder_format.format(k) +
hder_format_io.format(basename(v)),
self.encoding)
mime_type = mimetypes.guess_type(basename(v))
if not mime_type[1]:
mime_type = 'application/octet-stream'
else:
mime_type = '/'.join(mime_type)
multip_pkg += bytes('; Content-Type: ' + mime_type,
self.encoding)
multip_pkg += b'\r\n'*2 + pkg_body
except (TypeError, FileNotFoundError):
pkg_body = bytes(v, self.encoding) + b'\r\n'
multip_pkg += bytes(hder_format.format(k) +
'\r\n'*2, self.encoding)
multip_pkg += pkg_body
if index == num_of_parts:
multip_pkg += b'--' + boundary + b'--\r\n'
return multip_pkg | async def _multipart(self, files_dict) | Forms multipart requests from a dict with name, path k/vs. Name
does not have to be the actual file name.
Args:
files_dict (dict): A dict of `filename:filepath`s, to be sent
as multipart files.
Returns:
multip_pkg (str): The strings representation of the content body,
multipart formatted. | 3.621479 | 2.262479 | 1.600669 |
'''
Instantiates the parser which manages incoming data, first getting
the headers, storing cookies, and then parsing the response's body,
if any.
This function also instances the Response class in which the response
status line, headers, cookies, and body is stored.
It should be noted that in order to remain preformant, if the user
wishes to do any file IO it should use async files or risk long wait
times and risk connection issues server-side when using callbacks.
If a callback is used, the response's body will be None.
Returns:
The most recent response object.
'''
response = await self._recv_event(h11_connection)
resp_data = {'encoding': self.encoding,
'method': self.method,
'status_code': response.status_code,
'reason_phrase': str(response.reason, 'utf-8'),
'http_version': str(response.http_version, 'utf-8'),
'headers': c_i_dict(
[(str(name, 'utf-8'), str(value, 'utf-8'))
for name, value in response.headers]),
'body': b'',
'url': self.req_url
}
for header in response.headers:
if header[0] == b'set-cookie':
try:
resp_data['headers']['set-cookie'].append(str(header[1],
'utf-8'))
except (KeyError, AttributeError):
resp_data['headers']['set-cookie'] = [str(header[1],
'utf-8')]
# check whether we should receive body according to RFC 7230
# https://tools.ietf.org/html/rfc7230#section-3.3.3
get_body = False
try:
if int(resp_data['headers']['content-length']) > 0:
get_body = True
except KeyError:
try:
if 'chunked' in resp_data['headers']['transfer-encoding'].lower():
get_body = True
except KeyError:
if resp_data['headers'].get('connection', '').lower() == 'close':
get_body = True
if get_body:
if self.callback is not None:
endof = await self._body_callback(h11_connection)
elif self.stream:
if not ((self.scheme == self.initial_scheme and
self.host == self.initial_netloc) or
resp_data['headers']['connection'].lower() == 'close'):
self.sock._active = False
resp_data['body'] = StreamBody(
h11_connection,
self.sock,
resp_data['headers'].get('content-encoding', None),
resp_data['encoding'])
self.streaming = True
else:
while True:
data = await self._recv_event(h11_connection)
if isinstance(data, h11.Data):
resp_data['body'] += data.data
elif isinstance(data, h11.EndOfMessage):
break
else:
endof = await self._recv_event(h11_connection)
assert isinstance(endof, h11.EndOfMessage)
if self.streaming:
return StreamResponse(**resp_data)
return Response(**resp_data) | async def _catch_response(self, h11_connection) | Instantiates the parser which manages incoming data, first getting
the headers, storing cookies, and then parsing the response's body,
if any.
This function also instances the Response class in which the response
status line, headers, cookies, and body is stored.
It should be noted that in order to remain preformant, if the user
wishes to do any file IO it should use async files or risk long wait
times and risk connection issues server-side when using callbacks.
If a callback is used, the response's body will be None.
Returns:
The most recent response object. | 3.623669 | 2.256892 | 1.605601 |
'''
Takes a package and body, combines then, then shoots 'em off in to
the ether.
Args:
package (list of str): The header package.
body (str): The str representation of the body.
'''
await self.sock.send_all(h11_connection.send(request_bytes))
if body_bytes is not None:
await self.sock.send_all(h11_connection.send(body_bytes))
await self.sock.send_all(h11_connection.send(h11.EndOfMessage())) | async def _send(self, request_bytes, body_bytes, h11_connection) | Takes a package and body, combines then, then shoots 'em off in to
the ether.
Args:
package (list of str): The header package.
body (str): The str representation of the body. | 4.960176 | 1.733603 | 2.861196 |
'''
If the user supplied auth does rely on a response
(is a PostResponseAuth object) then we call the auth's __call__
returning a dict to update the request's headers with, as long
as there is an appropriate 401'd response object to calculate auth
details from.
'''
# pylint: disable=not-callable
if isinstance(self.auth, PostResponseAuth):
if self.history_objects:
authable_resp = self.history_objects[-1]
if authable_resp.status_code == 401:
if not self.auth.auth_attempted:
self.auth.auth_attempted = True
return await self.auth(authable_resp, self)
return {} | async def _auth_handler_post_get_auth(self) | If the user supplied auth does rely on a response
(is a PostResponseAuth object) then we call the auth's __call__
returning a dict to update the request's headers with, as long
as there is an appropriate 401'd response object to calculate auth
details from. | 7.733469 | 2.369389 | 3.263909 |
'''
The other half of _auth_handler_post_check_retry (what a mouthful).
If auth has not yet been attempted and the most recent response
object is a 401, we store that response object and retry the request
in exactly the same manner as before except with the correct auth.
If it fails a second time, we simply return the failed response.
'''
if isinstance(self.auth, PostResponseAuth):
if response_obj.status_code == 401:
if not self.auth.auth_attempted:
self.history_objects.append(response_obj)
_, r = await self.make_request()
self.auth.auth_attempted = False
return r
else:
response_obj.history = self.history_objects
return response_obj
return response_obj | async def _auth_handler_post_check_retry(self, response_obj) | The other half of _auth_handler_post_check_retry (what a mouthful).
If auth has not yet been attempted and the most recent response
object is a 401, we store that response object and retry the request
in exactly the same manner as before except with the correct auth.
If it fails a second time, we simply return the failed response. | 5.772559 | 2.190066 | 2.635793 |
'''
Checks to see if the new location is
1. The same top level domain
2. As or more secure than the current connection type
Returns:
True (bool): If the current top level domain is the same
and the connection type is equally or more secure.
False otherwise.
'''
netloc_sans_port = self.host.split(':')[0]
netloc_sans_port = netloc_sans_port.replace(
(re.match(_WWX_MATCH, netloc_sans_port)[0]), '')
base_domain = '.'.join(netloc_sans_port.split('.')[-2:])
l_scheme, l_netloc, _, _, _, _ = urlparse(location)
location_sans_port = l_netloc.split(':')[0]
location_sans_port = location_sans_port.replace(
(re.match(_WWX_MATCH, location_sans_port)[0]), '')
location_domain = '.'.join(location_sans_port.split('.')[-2:])
if base_domain == location_domain:
if l_scheme < self.scheme:
return False
else:
return True | async def _location_auth_protect(self, location) | Checks to see if the new location is
1. The same top level domain
2. As or more secure than the current connection type
Returns:
True (bool): If the current top level domain is the same
and the connection type is equally or more secure.
False otherwise. | 3.902331 | 2.220527 | 1.75739 |
'''
A callback func to be supplied if the user wants to do something
directly with the response body's stream.
'''
# pylint: disable=not-callable
while True:
next_event = await self._recv_event(h11_connection)
if isinstance(next_event, h11.Data):
await self.callback(next_event.data)
else:
return next_event | async def _body_callback(self, h11_connection) | A callback func to be supplied if the user wants to do something
directly with the response body's stream. | 5.468048 | 2.622687 | 2.084903 |
'''
Creates a normal async socket, returns it.
Args:
location (tuple(str, int)): A tuple of net location (eg
'127.0.0.1' or 'example.org') and port (eg 80 or 25000).
'''
sock = await connect_tcp(location[0], location[1], bind_host=self.source_address)
sock._active = True
return sock | async def _open_connection_http(self, location) | Creates a normal async socket, returns it.
Args:
location (tuple(str, int)): A tuple of net location (eg
'127.0.0.1' or 'example.org') and port (eg 80 or 25000). | 7.955544 | 2.436662 | 3.264935 |
'''
Creates an async SSL socket, returns it.
Args:
location (tuple(str, int)): A tuple of net location (eg
'127.0.0.1' or 'example.org') and port (eg 80 or 25000).
'''
sock = await connect_tcp(location[0],
location[1],
ssl_context=self.ssl_context or ssl.SSLContext(),
bind_host=self.source_address,
autostart_tls=True)
sock._active = True
return sock | async def _open_connection_https(self, location) | Creates an async SSL socket, returns it.
Args:
location (tuple(str, int)): A tuple of net location (eg
'127.0.0.1' or 'example.org') and port (eg 80 or 25000). | 6.670624 | 2.731613 | 2.442009 |
'''
Simple enough stuff to figure out where we should connect, and creates
the appropriate connection.
'''
scheme, host, path, parameters, query, fragment = urlparse(
host_loc)
if parameters or query or fragment:
raise ValueError('Supplied info beyond scheme, host.' +
' Host should be top level only: ', path)
host, port = get_netloc_port(scheme, host)
if scheme == 'http':
return await self._open_connection_http(
(host, int(port))), port
else:
return await self._open_connection_https(
(host, int(port))), port | async def _connect(self, host_loc) | Simple enough stuff to figure out where we should connect, and creates
the appropriate connection. | 7.072578 | 4.471385 | 1.581742 |
if isinstance(e, (RemoteProtocolError, AssertionError)):
await sock.close()
raise BadHttpResponse('Invalid HTTP response from server.') from e
if isinstance(e, Exception):
await sock.close()
raise e | async def _handle_exception(self, e, sock) | Given an exception, we want to handle it appropriately. Some exceptions we
prefer to shadow with an asks exception, and some we want to raise directly.
In all cases we clean up the underlying socket. | 6.626234 | 6.256636 | 1.059073 |
'''
The connection pool handler. Returns a connection
to the caller. If there are no connections ready, and
as many connections checked out as there are available total,
we yield control to the event loop.
If there is a connection ready or space to create a new one, we
pop/create it, register it as checked out, and return it.
Args:
url (str): breaks the url down and uses the top level location
info to see if we have any connections to the location already
lying around.
'''
scheme, host, _, _, _, _ = urlparse(url)
host_loc = urlunparse((scheme, host, '', '', '', ''))
sock = self._checkout_connection(host_loc)
if sock is None:
sock = await self._make_connection(host_loc)
return sock | async def _grab_connection(self, url) | The connection pool handler. Returns a connection
to the caller. If there are no connections ready, and
as many connections checked out as there are available total,
we yield control to the event loop.
If there is a connection ready or space to create a new one, we
pop/create it, register it as checked out, and return it.
Args:
url (str): breaks the url down and uses the top level location
info to see if we have any connections to the location already
lying around. | 8.599785 | 1.804423 | 4.765948 |
'''
If the response's body is valid json, we load it as a python dict
and return it.
'''
body = self._decompress(self.encoding)
return _json.loads(body, **kwargs) | def json(self, **kwargs) | If the response's body is valid json, we load it as a python dict
and return it. | 8.143477 | 3.67439 | 2.21628 |
'''
Raise BadStatus if one occurred.
'''
if 400 <= self.status_code < 500:
raise BadStatus('{} Client Error: {} for url: {}'.format(self.status_code, self.reason_phrase, self.url), self.status_code)
elif 500 <= self.status_code < 600:
raise BadStatus('{} Server Error: {} for url: {}'.format(self.status_code, self.reason_phrase, self.url), self.status_code) | def raise_for_status(self) | Raise BadStatus if one occurred. | 2.081643 | 1.732742 | 1.201358 |
cookie_pie = []
try:
for cookie in response.headers['set-cookie']:
cookie_jar = {}
name_val, *rest = cookie.split(';')
name, value = name_val.split('=', 1)
cookie_jar['name'] = name.strip()
cookie_jar['value'] = value
for item in rest:
try:
name, value = item.split('=')
if value.startswith('.'):
value = value[1:]
cookie_jar[name.lower().lstrip()] = value
except ValueError:
cookie_jar[item.lower().lstrip()] = True
cookie_pie.append(cookie_jar)
response.cookies = [Cookie(host, x) for x in cookie_pie]
except KeyError:
pass | def parse_cookies(response, host) | Sticks cookies to a response. | 2.640919 | 2.613609 | 1.010449 |
'''
takes all the images coming from the redactor editor and
stores it in the database and returns all the files
'''
upurl = ''
if request.FILES.get("upload"):
f = request.FILES.get("upload")
obj = Image_File.objects.create(upload=f, is_image=True)
obj.save()
thumbnail_name = 'thumb' + f.name
if getattr(settings, 'AWS_ENABLED', False):
image_file = requests.get(obj.upload.url, stream=True)
with open(thumbnail_name, 'wb') as destination:
for chunk in image_file.iter_content():
destination.write(chunk)
else:
image_file = f
with open(thumbnail_name, 'wb') as destination:
for chunk in image_file.chunks():
destination.write(chunk)
im = Image.open(destination.name)
size = (128, 128)
im.thumbnail(size)
im.save(thumbnail_name)
with open(thumbnail_name, 'rb') as imdata:
obj.thumbnail.save(thumbnail_name, File(imdata))
obj.save()
os.remove(os.path.join(settings.BASE_DIR, thumbnail_name))
upurl = "/" + obj.upload.url
return HttpResponse(
.format(request.GET['CKEditorFuncNum'], upurl)
) | def upload_photos(request) | takes all the images coming from the redactor editor and
stores it in the database and returns all the files | 3.112222 | 2.638504 | 1.179541 |
''' returns all the images from the data base '''
imgs = []
for obj in Image_File.objects.filter(is_image=True).order_by("-date_created"):
upurl = "/" + obj.upload.url
thumburl = ""
if obj.thumbnail:
thumburl = "/" + obj.thumbnail.url
imgs.append({'src': upurl, 'thumb': thumburl, 'is_image': True})
return render_to_response('dashboard/browse.html', {'files': imgs}) | def recent_photos(request) | returns all the images from the data base | 4.276783 | 3.865848 | 1.106299 |
error = request.args.get('error')
state = request.args.get('state')
if error:
return render_template('login_error.html', error=error)
else:
code = request.args.get('code')
client = Client()
access_token = client.exchange_code_for_token(client_id=app.config['STRAVA_CLIENT_ID'],
client_secret=app.config['STRAVA_CLIENT_SECRET'],
code=code)
# Probably here you'd want to store this somewhere -- e.g. in a database.
strava_athlete = client.get_athlete()
return render_template('login_results.html', athlete=strava_athlete, access_token=access_token) | def logged_in() | Method called by Strava (redirect) that includes parameters.
- state
- code
- error | 2.465686 | 2.380681 | 1.035706 |
if self.units:
# Note that we don't want to cast to type in this case!
if not isinstance(v, Quantity):
v = self.units(v)
elif not isinstance(v, self.type):
v = self.type(v)
return v | def unmarshal(self, v) | Convert the value from parsed JSON structure to native python representation.
By default this will leave the value as-is since the JSON parsing routines
typically convert to native types. The exception may be date strings or other
more complex types, where subclasses will override this behavior. | 4.682058 | 4.953691 | 0.945165 |
if not isinstance(v, date):
# 2012-12-13
v = datetime.strptime(v, "%Y-%m-%d").date()
return v | def unmarshal(self, v) | Convert a date in "2012-12-13" format to a :class:`datetime.date` object. | 3.495254 | 2.286607 | 1.528577 |
if not isinstance(v, datetime):
if isinstance(v, six.integer_types):
v = arrow.get(v)
else:
try:
# Most dates are in this format 2012-12-13T03:43:19Z
v = datetime.strptime(v, "%Y-%m-%dT%H:%M:%SZ")
except ValueError:
# ... but not all.
v = arrow.get(v).datetime
# Translate to specified TZ
v = v.replace(tzinfo=self.tzinfo)
return v | def unmarshal(self, v) | Convert a timestamp in "2012-12-13T03:43:19Z" format to a `datetime.datetime` object. | 3.617261 | 2.916704 | 1.240188 |
return "{lat},{lon}".format(lat=v.lat, lon=v.lon) if v else None | def marshal(self, v) | Turn this value into format for wire (JSON).
:param v: The lat/lon.
:type v: LatLon
:return: Serialized format.
:rtype: str | 6.471543 | 4.795624 | 1.349469 |
if not isinstance(v, tzinfo):
# (GMT-08:00) America/Los_Angeles
tzname = v.split(' ', 1)[1]
v = pytz.timezone(tzname)
return v | def unmarshal(self, v) | Convert a timestamp in format "(GMT-08:00) America/Los_Angeles" to
a `pytz.timestamp` object. | 4.026596 | 2.918782 | 1.379547 |
if not isinstance(v, timedelta):
v = timedelta(seconds=v)
return v | def unmarshal(self, v) | Convert the value from parsed JSON structure to native python representation.
By default this will leave the value as-is since the JSON parsing routines
typically convert to native types. The exception may be date strings or other
more complex types, where subclasses will override this behavior. | 4.508466 | 6.933887 | 0.650208 |
if v:
orig = [i for i in self.choices if self.choices[i] == v]
if len(orig) == 1:
return orig[0]
elif len(orig) == 0:
# No such choice
raise NotImplementedError("No such reverse choice {0} for field {1}.".format(v, self))
else:
# Too many choices. We could return one possible choice (e.g. orig[0]).
raise NotImplementedError("Too many reverse choices {0} for value {1} for field {2}".format(orig, v, self)) | def marshal(self, v) | Turn this value into API format.
Do a reverse dictionary lookup on choices to find the original value. If
there are no keys or too many keys for now we raise a NotImplementedError
as marshal is not used anywhere currently. In the future we will want to
fail gracefully. | 4.184234 | 3.367084 | 1.242688 |
try:
return self.choices[v]
except KeyError:
self.log.warning("No such choice {0} for field {1}.".format(v, self))
# Just return the value from the API
return v | def unmarshal(self, v) | Convert the value from Strava API format to useful python representation.
If the value does not appear in the choices attribute we log an error rather
than raising an exception as this may be caused by a change to the API upstream
so we want to fail gracefully. | 5.544923 | 4.259021 | 1.301924 |
#self.log.debug("Unmarshall {0!r}: {1!r}".format(self, value))
if not isinstance(value, self.type):
o = self.type()
if bind_client is not None and hasattr(o.__class__, 'bind_client'):
o.bind_client = bind_client
if isinstance(value, dict):
for (k, v) in value.items():
if not hasattr(o.__class__, k):
self.log.warning("Unable to set attribute {0} on entity {1!r}".format(k, o))
else:
#self.log.debug("Setting attribute {0} on entity {1!r}".format(k, o))
setattr(o, k, v)
value = o
else:
raise Exception("Unable to unmarshall object {0!r}".format(value))
return value | def unmarshal(self, value, bind_client=None) | Cast the specified value to the entity type. | 2.36779 | 2.277408 | 1.039686 |
if values is not None:
return [super(EntityCollection, self).marshal(v) for v in values] | def marshal(self, values) | Turn a list of entities into a list of dictionaries.
:param values: The entities to serialize.
:type values: List[stravalib.model.BaseEntity]
:return: List of dictionaries of attributes
:rtype: List[Dict[str, Any]] | 5.62053 | 5.663761 | 0.992367 |
if values is not None:
return [super(EntityCollection, self).unmarshal(v, bind_client=bind_client) for v in values] | def unmarshal(self, values, bind_client=None) | Cast the list. | 3.731094 | 3.141307 | 1.187752 |
return self.protocol.authorization_url(client_id=client_id,
redirect_uri=redirect_uri,
approval_prompt=approval_prompt,
scope=scope, state=state) | def authorization_url(self, client_id, redirect_uri, approval_prompt='auto',
scope=None, state=None) | Get the URL needed to authorize your application to access a Strava user's information.
:param client_id: The numeric developer client id.
:type client_id: int
:param redirect_uri: The URL that Strava will redirect to after successful (or failed) authorization.
:type redirect_uri: str
:param approval_prompt: Whether to prompt for approval even if approval already granted to app.
Choices are 'auto' or 'force'. (Default is 'auto')
:type approval_prompt: str
:param scope: The access scope required. Omit to imply "public".
Valid values are 'read', 'read_all', 'profile:read_all', 'profile:write', 'profile:read_all',
'activity:read_all', 'activity:write'
:type scope: str
:param state: An arbitrary variable that will be returned to your application in the redirect URI.
:type state: str
:return: The URL to use for authorization link.
:rtype: str | 2.034898 | 2.765749 | 0.735749 |
return self.protocol.exchange_code_for_token(client_id=client_id,
client_secret=client_secret,
code=code) | def exchange_code_for_token(self, client_id, client_secret, code) | Exchange the temporary authorization code (returned with redirect from strava authorization URL)
for a temporary access token and a refresh token (used to obtain the next access token later on).
:param client_id: The numeric developer client id.
:type client_id: int
:param client_secret: The developer client secret
:type client_secret: str
:param code: The temporary authorization code
:type code: str
:return: Dictionary containing the access_token, refresh_token
and expires_at (number of seconds since Epoch when the provided access token will expire)
:rtype: dict | 2.833885 | 3.466931 | 0.817405 |
return self.protocol.refresh_access_token(client_id=client_id,
client_secret=client_secret,
refresh_token=refresh_token) | def refresh_access_token(self, client_id, client_secret, refresh_token) | Exchange the temporary authorization code (returned with redirect from strava authorization URL)
for a temporary access token and a refresh token (used to obtain the next access token later on).
:param client_id: The numeric developer client id.
:type client_id: int
:param client_secret: The developer client secret
:type client_secret: str
:param refresh_token: The refresh token obtain from a previous authorization request
:type refresh_token: str
:return: Dictionary containing the access_token, refresh_token
and expires_at (number of seconds since Epoch when the provided access token will expire)
:rtype: dict | 2.69734 | 3.119871 | 0.864568 |
if isinstance(activity_datetime, str):
activity_datetime = arrow.get(activity_datetime).datetime
assert isinstance(activity_datetime, datetime)
if activity_datetime.tzinfo:
activity_datetime = activity_datetime.astimezone(pytz.utc)
return calendar.timegm(activity_datetime.timetuple()) | def _utc_datetime_to_epoch(self, activity_datetime) | Convert the specified datetime value to a unix epoch timestamp (seconds since epoch).
:param activity_datetime: A string which may contain tzinfo (offset) or a datetime object (naive datetime will
be considered to be UTC).
:return: Epoch timestamp.
:rtype: int | 2.128711 | 2.238888 | 0.950789 |
if before:
before = self._utc_datetime_to_epoch(before)
if after:
after = self._utc_datetime_to_epoch(after)
params = dict(before=before, after=after)
result_fetcher = functools.partial(self.protocol.get,
'/athlete/activities',
**params)
return BatchedResultsIterator(entity=model.Activity,
bind_client=self,
result_fetcher=result_fetcher,
limit=limit) | def get_activities(self, before=None, after=None, limit=None) | Get activities for authenticated user sorted by newest first.
http://strava.github.io/api/v3/activities/
:param before: Result will start with activities whose start date is
before specified date. (UTC)
:type before: datetime.datetime or str or None
:param after: Result will start with activities whose start date is after
specified value. (UTC)
:type after: datetime.datetime or str or None
:param limit: How many maximum activities to return.
:type limit: int or None
:return: An iterator of :class:`stravalib.model.Activity` objects.
:rtype: :class:`BatchedResultsIterator` | 4.013159 | 3.346936 | 1.199054 |
if athlete_id is None:
raw = self.protocol.get('/athlete')
else:
raise NotImplementedError("The /athletes/{id} endpoint was removed by Strava. "
"See https://developers.strava.com/docs/january-2018-update/")
# raw = self.protocol.get('/athletes/{athlete_id}', athlete_id=athlete_id)
return model.Athlete.deserialize(raw, bind_client=self) | def get_athlete(self, athlete_id=None) | Gets the specified athlete; if athlete_id is None then retrieves a
detail-level representation of currently authenticated athlete;
otherwise summary-level representation returned of athlete.
http://strava.github.io/api/v3/athlete/#get-details
http://strava.github.io/api/v3/athlete/#get-another-details
:return: The athlete model object.
:rtype: :class:`stravalib.model.Athlete` | 4.792219 | 4.492564 | 1.0667 |
if athlete_id is None:
result_fetcher = functools.partial(self.protocol.get, '/athlete/friends')
else:
raise NotImplementedError("The /athletes/{id}/friends endpoint was removed by Strava. "
"See https://developers.strava.com/docs/january-2018-update/")
# result_fetcher = functools.partial(self.protocol.get,
# '/athletes/{id}/friends',
# id=athlete_id)
return BatchedResultsIterator(entity=model.Athlete,
bind_client=self,
result_fetcher=result_fetcher,
limit=limit) | def get_athlete_friends(self, athlete_id=None, limit=None) | Gets friends for current (or specified) athlete.
http://strava.github.io/api/v3/follow/#friends
:param: athlete_id
:type: athlete_id: int
:param limit: Maximum number of athletes to return (default unlimited).
:type limit: int
:return: An iterator of :class:`stravalib.model.Athlete` objects.
:rtype: :class:`BatchedResultsIterator` | 4.249773 | 3.803138 | 1.117438 |
params = {'city': city,
'state': state,
'country': country,
'sex': sex}
params = {k: v for (k, v) in params.items() if v is not None}
if weight is not None:
params['weight'] = float(weight)
raw_athlete = self.protocol.put('/athlete', **params)
return model.Athlete.deserialize(raw_athlete, bind_client=self) | def update_athlete(self, city=None, state=None, country=None, sex=None, weight=None) | Updates the properties of the authorized athlete.
http://strava.github.io/api/v3/athlete/#update
:param city: City the athlete lives in
:param state: State the athlete lives in
:param country: Country the athlete lives in
:param sex: Sex of the athlete
:param weight: Weight of the athlete in kg (float)
:return: The updated athlete
:rtype: :class:`stravalib.model.Athlete` | 2.833462 | 3.039382 | 0.932249 |
result_fetcher = functools.partial(self.protocol.get,
'/athletes/{id}/koms',
id=athlete_id)
return BatchedResultsIterator(entity=model.SegmentEffort,
bind_client=self,
result_fetcher=result_fetcher,
limit=limit) | def get_athlete_koms(self, athlete_id, limit=None) | Gets Q/KOMs/CRs for specified athlete.
KOMs are returned as `stravalib.model.SegmentEffort` objects.
http://strava.github.io/api/v3/athlete/#koms
:param athlete_id: The ID of the athlete.
:type athlete_id: int
:param limit: Maximum number of KOM segment efforts to return (default unlimited).
:type limit: int
:return: An iterator of :class:`stravalib.model.SegmentEffort` objects.
:rtype: :class:`BatchedResultsIterator` | 7.696207 | 5.345566 | 1.439737 |
if athlete_id is None:
athlete_id = self.get_athlete().id
raw = self.protocol.get('/athletes/{id}/stats', id=athlete_id)
# TODO: Better error handling - this will return a 401 if this athlete
# is not the authenticated athlete.
return model.AthleteStats.deserialize(raw) | def get_athlete_stats(self, athlete_id=None) | Returns Statistics for the athlete.
athlete_id must be the id of the authenticated athlete or left blank.
If it is left blank two requests will be made - first to get the
authenticated athlete's id and second to get the Stats.
http://strava.github.io/api/v3/athlete/#stats
:return: A model containing the Stats
:rtype: :py:class:`stravalib.model.AthleteStats` | 4.591566 | 4.327211 | 1.061091 |
club_structs = self.protocol.get('/athlete/clubs')
return [model.Club.deserialize(raw, bind_client=self) for raw in club_structs] | def get_athlete_clubs(self) | List the clubs for the currently authenticated athlete.
http://strava.github.io/api/v3/clubs/#get-athletes
:return: A list of :class:`stravalib.model.Club`
:rtype: :py:class:`list` | 10.388772 | 10.995307 | 0.944837 |
raw = self.protocol.get("/clubs/{id}", id=club_id)
return model.Club.deserialize(raw, bind_client=self) | def get_club(self, club_id) | Return a specific club object.
http://strava.github.io/api/v3/clubs/#get-details
:param club_id: The ID of the club to fetch.
:type club_id: int
:rtype: :class:`stravalib.model.Club` | 9.976069 | 9.68963 | 1.029561 |
result_fetcher = functools.partial(self.protocol.get,
'/clubs/{id}/members',
id=club_id)
return BatchedResultsIterator(entity=model.Athlete, bind_client=self,
result_fetcher=result_fetcher, limit=limit) | def get_club_members(self, club_id, limit=None) | Gets the member objects for specified club ID.
http://strava.github.io/api/v3/clubs/#get-members
:param club_id: The numeric ID for the club.
:type club_id: int
:param limit: Maximum number of athletes to return. (default unlimited)
:type limit: int
:return: An iterator of :class:`stravalib.model.Athlete` objects.
:rtype: :class:`BatchedResultsIterator` | 8.407222 | 6.389548 | 1.315777 |
result_fetcher = functools.partial(self.protocol.get,
'/clubs/{id}/activities',
id=club_id)
return BatchedResultsIterator(entity=model.Activity, bind_client=self,
result_fetcher=result_fetcher, limit=limit) | def get_club_activities(self, club_id, limit=None) | Gets the activities associated with specified club.
http://strava.github.io/api/v3/clubs/#get-activities
:param club_id: The numeric ID for the club.
:type club_id: int
:param limit: Maximum number of activities to return. (default unlimited)
:type limit: int
:return: An iterator of :class:`stravalib.model.Activity` objects.
:rtype: :class:`BatchedResultsIterator` | 7.996587 | 6.731323 | 1.187967 |
raw = self.protocol.get('/activities/{id}', id=activity_id,
include_all_efforts=include_all_efforts)
return model.Activity.deserialize(raw, bind_client=self) | def get_activity(self, activity_id, include_all_efforts=False) | Gets specified activity.
Will be detail-level if owned by authenticated user; otherwise summary-level.
http://strava.github.io/api/v3/activities/#get-details
:param activity_id: The ID of activity to fetch.
:type activity_id: int
:param inclue_all_efforts: Whether to include segment efforts - only
available to the owner of the activty.
:type include_all_efforts: bool
:rtype: :class:`stravalib.model.Activity` | 4.852388 | 5.015071 | 0.967561 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.