code
string
signature
string
docstring
string
loss_without_docstring
float64
loss_with_docstring
float64
factor
float64
def encrypt_and_hash(self, plaintext: bytes) -> bytes:
    ciphertext = self.cipher_state.encrypt_with_ad(self.h, plaintext)
    self.mix_hash(ciphertext)
    return ciphertext
Sets ciphertext = EncryptWithAd(h, plaintext), calls MixHash(ciphertext), and returns ciphertext.
Note that if k is empty, the EncryptWithAd() call will set ciphertext equal to plaintext.
:param plaintext: bytes sequence
:return: ciphertext bytes sequence
null
null
null
def decrypt_and_hash(self, ciphertext: bytes) -> bytes:
    plaintext = self.cipher_state.decrypt_with_ad(self.h, ciphertext)
    self.mix_hash(ciphertext)
    return plaintext
Sets plaintext = DecryptWithAd(h, ciphertext), calls MixHash(ciphertext), and returns plaintext.
Note that if k is empty, the DecryptWithAd() call will set plaintext equal to ciphertext.
:param ciphertext: bytes sequence
:return: plaintext bytes sequence
null
null
null
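For reference, the MixHash() operation both methods rely on is defined by the Noise spec as h = HASH(h || data). A minimal sketch, with SHA-256 standing in for the protocol's configured hash function:

import hashlib

def mix_hash(h: bytes, data: bytes) -> bytes:
    # Noise spec section 5.2: h = HASH(h || data)
    return hashlib.sha256(h + data).digest()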
def split(self):
    # Sets temp_k1, temp_k2 = HKDF(ck, b'', 2).
    temp_k1, temp_k2 = self.noise_protocol.hkdf(self.ck, b'', 2)

    # If HASHLEN is 64, then truncates temp_k1 and temp_k2 to 32 bytes.
    if self.noise_protocol.hash_fn.hashlen == 64:
        temp_k1 = temp_k1[:32]
        temp_k2 = temp_k2[:32]

    # Creates two new CipherState objects c1 and c2.
    # Calls c1.InitializeKey(temp_k1) and c2.InitializeKey(temp_k2).
    c1, c2 = CipherState(self.noise_protocol), CipherState(self.noise_protocol)
    c1.initialize_key(temp_k1)
    c2.initialize_key(temp_k2)

    if self.noise_protocol.handshake_state.initiator:
        self.noise_protocol.cipher_state_encrypt = c1
        self.noise_protocol.cipher_state_decrypt = c2
    else:
        self.noise_protocol.cipher_state_encrypt = c2
        self.noise_protocol.cipher_state_decrypt = c1

    self.noise_protocol.handshake_done()

    # Returns the pair (c1, c2).
    return c1, c2
Returns a pair of CipherState objects for encrypting/decrypting transport messages.
:return: tuple (CipherState, CipherState)
null
null
null
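The HKDF called by split() is the HMAC-based construction from the Noise spec (section 4.3), invoked here with a zero-length input. A minimal sketch with SHA-256, for orientation:

import hashlib
import hmac

def noise_hkdf(chaining_key: bytes, ikm: bytes = b''):
    # temp_key = HMAC-HASH(chaining_key, input_key_material)
    temp_key = hmac.new(chaining_key, ikm, hashlib.sha256).digest()
    # output1 = HMAC-HASH(temp_key, 0x01)
    output1 = hmac.new(temp_key, b'\x01', hashlib.sha256).digest()
    # output2 = HMAC-HASH(temp_key, output1 || 0x02)
    output2 = hmac.new(temp_key, output1 + b'\x02', hashlib.sha256).digest()
    return output1, output2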
def initialize(cls, noise_protocol: 'NoiseProtocol', initiator: bool, prologue: bytes = b'',
               s: '_KeyPair' = None, e: '_KeyPair' = None,
               rs: '_KeyPair' = None, re: '_KeyPair' = None) -> 'HandshakeState':
    # Create HandshakeState
    instance = cls()
    instance.noise_protocol = noise_protocol

    # Originally in specification:
    # "Derives a protocol_name byte sequence by combining the names for
    # the handshake pattern and crypto functions, as specified in Section 8."
    # Instead, we supply the NoiseProtocol to the function. The protocol name should already be validated.

    # Calls InitializeSymmetric(noise_protocol)
    instance.symmetric_state = SymmetricState.initialize_symmetric(noise_protocol)

    # Calls MixHash(prologue)
    instance.symmetric_state.mix_hash(prologue)

    # Sets the initiator, s, e, rs, and re variables to the corresponding arguments
    instance.initiator = initiator
    instance.s = s if s is not None else Empty()
    instance.e = e if e is not None else Empty()
    instance.rs = rs if rs is not None else Empty()
    instance.re = re if re is not None else Empty()

    # Calls MixHash() once for each public key listed in the pre-messages from handshake_pattern,
    # with the specified public key as input (...). If both initiator and responder have
    # pre-messages, the initiator's public keys are hashed first.
    initiator_keypair_getter = instance._get_local_keypair if initiator else instance._get_remote_keypair
    responder_keypair_getter = instance._get_remote_keypair if initiator else instance._get_local_keypair
    for keypair in map(initiator_keypair_getter, noise_protocol.pattern.get_initiator_pre_messages()):
        instance.symmetric_state.mix_hash(keypair.public_bytes)
    for keypair in map(responder_keypair_getter, noise_protocol.pattern.get_responder_pre_messages()):
        instance.symmetric_state.mix_hash(keypair.public_bytes)

    # Sets message_patterns to the message patterns from handshake_pattern
    instance.message_patterns = noise_protocol.pattern.tokens.copy()

    return instance
Constructor method. Comments below are mostly copied from specification.
Instead of taking handshake_pattern as an argument, we take the full NoiseProtocol object; that way we have access to the protocol name and crypto functions.
:param noise_protocol: a valid NoiseProtocol instance
:param initiator: boolean indicating the initiator or responder role
:param prologue: byte sequence which may be zero-length, or which may contain context information that both parties want to confirm is identical
:param s: local static key pair
:param e: local ephemeral key pair
:param rs: remote party's static public key
:param re: remote party's ephemeral public key
:return: initialized HandshakeState instance
null
null
null
def write_message(self, payload: Union[bytes, bytearray], message_buffer: bytearray):
    # Fetches and deletes the next message pattern from message_patterns, then sequentially
    # processes each token from the message pattern
    message_pattern = self.message_patterns.pop(0)
    for token in message_pattern:
        if token == TOKEN_E:
            # Sets e = GENERATE_KEYPAIR(). Appends e.public_key to the buffer. Calls MixHash(e.public_key)
            self.e = self.noise_protocol.dh_fn.generate_keypair() if isinstance(self.e, Empty) else self.e
            message_buffer += self.e.public_bytes
            self.symmetric_state.mix_hash(self.e.public_bytes)
            if self.noise_protocol.is_psk_handshake:
                self.symmetric_state.mix_key(self.e.public_bytes)
        elif token == TOKEN_S:
            # Appends EncryptAndHash(s.public_key) to the buffer
            message_buffer += self.symmetric_state.encrypt_and_hash(self.s.public_bytes)
        elif token == TOKEN_EE:
            # Calls MixKey(DH(e, re))
            self.symmetric_state.mix_key(self.noise_protocol.dh_fn.dh(self.e.private, self.re.public))
        elif token == TOKEN_ES:
            # Calls MixKey(DH(e, rs)) if initiator, MixKey(DH(s, re)) if responder
            if self.initiator:
                self.symmetric_state.mix_key(self.noise_protocol.dh_fn.dh(self.e.private, self.rs.public))
            else:
                self.symmetric_state.mix_key(self.noise_protocol.dh_fn.dh(self.s.private, self.re.public))
        elif token == TOKEN_SE:
            # Calls MixKey(DH(s, re)) if initiator, MixKey(DH(e, rs)) if responder
            if self.initiator:
                self.symmetric_state.mix_key(self.noise_protocol.dh_fn.dh(self.s.private, self.re.public))
            else:
                self.symmetric_state.mix_key(self.noise_protocol.dh_fn.dh(self.e.private, self.rs.public))
        elif token == TOKEN_SS:
            # Calls MixKey(DH(s, rs))
            self.symmetric_state.mix_key(self.noise_protocol.dh_fn.dh(self.s.private, self.rs.public))
        elif token == TOKEN_PSK:
            self.symmetric_state.mix_key_and_hash(self.noise_protocol.psks.pop(0))
        else:
            raise NotImplementedError('Pattern token: {}'.format(token))

    # Appends EncryptAndHash(payload) to the buffer
    message_buffer += self.symmetric_state.encrypt_and_hash(payload)

    # If there are no more message patterns returns two new CipherState objects by calling Split()
    if len(self.message_patterns) == 0:
        return self.symmetric_state.split()
Comments below are mostly copied from specification.
:param payload: byte sequence which may be zero-length
:param message_buffer: buffer-like object
:return: None or result of SymmetricState.split() - tuple (CipherState, CipherState)
null
null
null
def read_message(self, message: Union[bytes, bytearray], payload_buffer: bytearray):
    # Fetches and deletes the next message pattern from message_patterns, then sequentially
    # processes each token from the message pattern
    dhlen = self.noise_protocol.dh_fn.dhlen
    message_pattern = self.message_patterns.pop(0)
    for token in message_pattern:
        if token == TOKEN_E:
            # Sets re to the next DHLEN bytes from the message. Calls MixHash(re.public_key).
            self.re = self.noise_protocol.keypair_class.from_public_bytes(bytes(message[:dhlen]))
            message = message[dhlen:]
            self.symmetric_state.mix_hash(self.re.public_bytes)
            if self.noise_protocol.is_psk_handshake:
                self.symmetric_state.mix_key(self.re.public_bytes)
        elif token == TOKEN_S:
            # Sets temp to the next DHLEN + 16 bytes of the message if HasKey() == True, or to
            # the next DHLEN bytes otherwise. Sets rs to DecryptAndHash(temp).
            if self.noise_protocol.cipher_state_handshake.has_key():
                temp = bytes(message[:dhlen + 16])
                message = message[dhlen + 16:]
            else:
                temp = bytes(message[:dhlen])
                message = message[dhlen:]
            self.rs = self.noise_protocol.keypair_class.from_public_bytes(
                self.symmetric_state.decrypt_and_hash(temp)
            )
        elif token == TOKEN_EE:
            # Calls MixKey(DH(e, re)).
            self.symmetric_state.mix_key(self.noise_protocol.dh_fn.dh(self.e.private, self.re.public))
        elif token == TOKEN_ES:
            # Calls MixKey(DH(e, rs)) if initiator, MixKey(DH(s, re)) if responder
            if self.initiator:
                self.symmetric_state.mix_key(self.noise_protocol.dh_fn.dh(self.e.private, self.rs.public))
            else:
                self.symmetric_state.mix_key(self.noise_protocol.dh_fn.dh(self.s.private, self.re.public))
        elif token == TOKEN_SE:
            # Calls MixKey(DH(s, re)) if initiator, MixKey(DH(e, rs)) if responder
            if self.initiator:
                self.symmetric_state.mix_key(self.noise_protocol.dh_fn.dh(self.s.private, self.re.public))
            else:
                self.symmetric_state.mix_key(self.noise_protocol.dh_fn.dh(self.e.private, self.rs.public))
        elif token == TOKEN_SS:
            # Calls MixKey(DH(s, rs))
            self.symmetric_state.mix_key(self.noise_protocol.dh_fn.dh(self.s.private, self.rs.public))
        elif token == TOKEN_PSK:
            self.symmetric_state.mix_key_and_hash(self.noise_protocol.psks.pop(0))
        else:
            raise NotImplementedError('Pattern token: {}'.format(token))

    # Calls DecryptAndHash() on the remaining bytes of the message and stores the output into payload_buffer.
    payload_buffer += self.symmetric_state.decrypt_and_hash(bytes(message))

    # If there are no more message patterns returns two new CipherState objects by calling Split()
    if len(self.message_patterns) == 0:
        return self.symmetric_state.split()
Comments below are mostly copied from specification.
:param message: byte sequence containing a Noise handshake message
:param payload_buffer: buffer-like object
:return: None or result of SymmetricState.split() - tuple (CipherState, CipherState)
null
null
null
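To put write_message()/read_message() in context, here is a hedged sketch of one round trip from a single party's perspective; send_over_network and receive_from_network are hypothetical transport helpers, not part of this module:

outgoing = bytearray()
result = handshake_state.write_message(b'', outgoing)
send_over_network(outgoing)           # hypothetical transport helper

incoming = receive_from_network()     # hypothetical transport helper
payload = bytearray()
result = handshake_state.read_message(incoming, payload)

# A non-None result is the (CipherState, CipherState) pair from split(),
# meaning the handshake is complete and transport messages can flow.
if result is not None:
    cipher_encrypt, cipher_decrypt = result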
words = path.split('_')
return words[0] + ''.join(word.title() for word in words[1:])
def mixedcase(path)
Removes underscores and capitalizes the neighbouring character
3.033369
2.888278
1.050234
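For example:

>>> mixedcase('foo_bar_baz')
'fooBarBaz'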
display_log = self.debug
if debug is not None:
    display_log = debug
if display_log:
    print(message.format(**kwargs))
def _log(self, message, debug=None, **kwargs)
Outputs a formatted message in the console if the debug mode is activated.
:param message: The message that will be printed
:param debug: (optional) Override of `Client.debug`
:param kwargs: (optional) Arguments that will be passed to the `str.format()` method
4.011471
3.724406
1.077077
try:
    return self.session.request(*args, **kwargs)
except ConnectionError:
    self.session.close()
    return self.session.request(*args, **kwargs)
def send_request(self, *args, **kwargs)
Wrapper for session.request. Handles connection reset errors, even those raised by pyopenssl.
2.4492
2.281574
1.07347
if len(parts) != 0:
    # the chain will be extended with the parts and finally a
    # request will be triggered
    return self.__call__(*parts).request(method=method, **options)
else:
    if 'url' not in options:
        # the last part constructs the URL
        options['url'] = self.url()

    for key, value in six.iteritems(self.config):
        # set the defaults in the options
        if value is not None:
            if isinstance(value, dict):
                # prevents overwriting default values in dicts
                copy = value.copy()
                if options.get(key):
                    copy.update(options[key])
                options[key] = copy
            else:
                options.setdefault(key, value)

    # at this point, we're ready to completely go down the chain
    return self._parent.request(method=method, **options)
def request(self, method, *parts, **options)
Requests a URL and returns a *Bunched* response.

This method basically wraps the request method of the requests module and
adds a `path` and `debug` option.

:param method: The request method, e.g. 'get', 'post', etc.
:param parts: (optional) Additional path parts to append to the URL
:param options: (optional) Arguments that will be passed to the `requests.request` method
:return: :class:`Bunch` object from JSON-parsed response
5.17371
5.346091
0.967756
path = urlsplit(self.target).path
suffix = '/' if not path or path.endswith('/') else ''
return '%s%s/%s%s' % (self._ui_address[:-1], self._proxy_prefix, self.route, suffix)
def address(self)
The full proxied address to this page
8.144258
6.886326
1.182671
req = proto.Proxy(route=route, target=target, link_name=link_name)
self._client._call('AddProxy', req)
return ProxiedPage(route, target, link_name, self.address, self.proxy_prefix)
def add_page(self, route, target, link_name=None)
Add a new proxied page to the Web UI.

Parameters
----------
route : str
    The route for the proxied page. Must be a valid path *segment* in a
    url (e.g. ``foo`` in ``/foo/bar/baz``). Routes must be unique across
    the application.
target : str
    The target address to be proxied to this page. Must be a valid url.
link_name : str, optional
    If provided, will be the link text used in the Web UI. If not
    provided, the page will still be proxied, but no link will be added
    to the Web UI. Link names must be unique across the application.

Returns
-------
ProxiedPage
7.041178
6.997691
1.006214
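A hedged usage sketch, assuming ``ui`` is the Web UI object exposing these methods:

page = ui.add_page('metrics', 'http://worker.example.com:8000', link_name='Metrics')
# ... later, when the page is no longer needed:
ui.remove_page('metrics')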
req = proto.RemoveProxyRequest(route=route)
self._client._call('RemoveProxy', req)
def remove_page(self, route)
Remove a proxied page from the Web UI.

Parameters
----------
route : str
    The route for the proxied page. Must be a valid path *segment* in a
    url (e.g. ``foo`` in ``/foo/bar/baz``). Routes must be unique across
    the application.
14.178597
17.097296
0.829289
resp = self._client._call('GetProxies', proto.GetProxiesRequest())
return {i.route: ProxiedPage(i.route, i.target,
                             i.link_name if i.link_name else None,
                             self.address, self.proxy_prefix)
        for i in resp.proxy}
def get_pages(self)
Get all registered pages.

Returns
-------
pages : dict
    A ``dict`` of ``route`` to ``ProxiedPage`` for all pages.
8.579238
6.666468
1.286924
print("Connecting to server at %s:%d" % (host, port)) reader, writer = await asyncio.open_connection(host, port, loop=loop) writer.write(message.encode()) print('Sent: %r' % message) data = await reader.read(100) print('Received: %r' % data.decode()) writer.close()
async def tcp_echo_client(message, loop, host, port)
Generic python tcp echo client
1.952459
1.81842
1.073712
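A hedged usage sketch, run against a locally started echo server (the ``loop`` argument reflects the older asyncio API this code targets):

import asyncio

loop = asyncio.get_event_loop()
loop.run_until_complete(tcp_echo_client('ping', loop, '127.0.0.1', 8888))
loop.close()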
# Get the event loop; an assumption here, since the original context for
# `loop` isn't shown in this snippet.
loop = asyncio.get_event_loop()

# Loop through all registered server addresses
for address in app.kv.get_prefix('address.').values():
    # Parse the host and port from the stored address
    host, port = address.decode().split(':')
    port = int(port)

    # Send the message to the echo server
    await tcp_echo_client(message, loop, host, port)
async def echo_all(app, message)
Send and receive a message from all running echo servers
5.683684
5.120407
1.110006
address, _ = _read_driver()
if address is None:
    raise DriverNotRunningError("No driver currently running")
security = Security.from_default()
return Client(address=address, security=security)
def from_global_driver(self)
Connect to the global driver.
10.213496
8.800984
1.160495
address, pid = _read_driver()
if address is not None:
    try:
        Client(address=address)
        return address
    except ConnectionError:
        if pid_exists(pid):
            # PID exists, but we can't connect, reraise
            raise
        # PID doesn't exist, warn and continue as normal
        context.warn("Previous driver at %s, PID %d has died. Restarting."
                     % (address, pid))
address, _ = _start_driver(set_global=True,
                           keytab=keytab,
                           principal=principal,
                           log=log,
                           log_level=log_level,
                           java_options=java_options)
return address
def start_global_driver(keytab=None, principal=None, log=None, log_level=None, java_options=None)
Start the global driver.

No-op if the global driver is already running.

Parameters
----------
keytab : str, optional
    Path to a keytab file to use when starting the driver. If not
    provided, the driver will login using the ticket cache instead.
principal : str, optional
    The principal to use when starting the driver with a keytab.
log : str, bool, or None, optional
    Sets the logging behavior for the driver. Values may be a path for
    logs to be written to, ``None`` to log to stdout/stderr, or ``False``
    to turn off logging completely. Default is ``None``.
log_level : str or skein.model.LogLevel, optional
    The driver log level. Sets the ``skein.log.level`` system property.
    One of {'ALL', 'TRACE', 'DEBUG', 'INFO', 'WARN', 'ERROR', 'FATAL',
    'OFF'} (from most to least verbose). Default is 'INFO'.
java_options : str or list of str, optional
    Additional Java options to forward to the driver. Can also be
    configured by setting the environment variable
    ``SKEIN_DRIVER_JAVA_OPTIONS``.

Returns
-------
address : str
    The address of the driver
4.441958
4.649654
0.955331
address, pid = _read_driver()
if address is None:
    return

if not force:
    # Attempt to connect first, errors on failure
    try:
        Client(address=address)
    except ConnectionError:
        if pid_exists(pid):
            # PID exists, but we can't connect, reraise
            raise
        # PID doesn't exist, continue cleanup as normal

try:
    os.kill(pid, signal.SIGTERM)
except OSError as exc:
    # If we're forcing a kill, ignore EPERM as well, as we're not sure
    # if the process is a driver.
    ignore = (errno.ESRCH, errno.EPERM) if force else (errno.ESRCH,)
    if exc.errno not in ignore:  # pragma: no cover
        raise

try:
    os.remove(os.path.join(properties.config_dir, 'driver'))
except OSError:  # pragma: no cover
    pass
def stop_global_driver(force=False)
Stops the global driver if running.

No-op if no global driver is running.

Parameters
----------
force : bool, optional
    By default skein will check that the process associated with the
    driver PID is actually a skein driver. Setting ``force`` to ``True``
    will kill the process in all cases.
4.433283
4.434514
0.999722
if self._proc is not None:
    self._proc.stdin.close()
    self._proc.wait()
def close(self)
Closes the java driver if started by this client. No-op otherwise.
3.73725
3.076779
1.214663
spec = ApplicationSpec._from_any(spec)
resp = self._call('submit', spec.to_protobuf())
return resp.id
def submit(self, spec)
Submit a new skein application.

Parameters
----------
spec : ApplicationSpec, str, or dict
    A description of the application to run. Can be an
    ``ApplicationSpec`` object, a path to a yaml/json file, or a
    dictionary description of an application specification.

Returns
-------
app_id : str
    The id of the submitted application.
10.923965
12.322774
0.886486
spec = ApplicationSpec._from_any(spec)
app_id = self.submit(spec)
try:
    return self.connect(app_id, security=spec.master.security)
except BaseException:
    self.kill_application(app_id)
    raise
def submit_and_connect(self, spec)
Submit a new skein application, and wait to connect to it.

If an error occurs before the application connects, the application is
killed.

Parameters
----------
spec : ApplicationSpec, str, or dict
    A description of the application to run. Can be an
    ``ApplicationSpec`` object, a path to a yaml/json file, or a
    dictionary description of an application specification.

Returns
-------
app_client : ApplicationClient
5.218
4.723303
1.104736
if wait:
    resp = self._call('waitForStart', proto.Application(id=app_id))
else:
    resp = self._call('getStatus', proto.Application(id=app_id))

report = ApplicationReport.from_protobuf(resp)
if report.state is not ApplicationState.RUNNING:
    raise ApplicationNotRunningError(
        "%s is not running. Application state: %s" % (app_id, report.state))

if security is None:
    security = self.security

return ApplicationClient('%s:%d' % (report.host, report.port),
                         app_id,
                         security=security)
def connect(self, app_id, wait=True, security=None)
Connect to a running application.

Parameters
----------
app_id : str
    The id of the application.
wait : bool, optional
    If true [default], blocks until the application starts. If False,
    will raise a ``ApplicationNotRunningError`` immediately if the
    application isn't running.
security : Security, optional
    The security configuration to use to communicate with the
    application master. Defaults to the global configuration.

Returns
-------
app_client : ApplicationClient

Raises
------
ApplicationNotRunningError
    If the application isn't running.
3.432454
3.323045
1.032924
if states is not None:
    states = tuple(ApplicationState(s) for s in states)
else:
    states = (ApplicationState.SUBMITTED,
              ApplicationState.ACCEPTED,
              ApplicationState.RUNNING)

started_begin = self._parse_datetime(started_begin, 'started_begin')
started_end = self._parse_datetime(started_end, 'started_end')
finished_begin = self._parse_datetime(finished_begin, 'finished_begin')
finished_end = self._parse_datetime(finished_end, 'finished_end')

req = proto.ApplicationsRequest(
    states=[str(s) for s in states],
    name=name,
    user=user,
    queue=queue,
    started_begin=datetime_to_millis(started_begin),
    started_end=datetime_to_millis(started_end),
    finished_begin=datetime_to_millis(finished_begin),
    finished_end=datetime_to_millis(finished_end)
)
resp = self._call('getApplications', req)
return sorted((ApplicationReport.from_protobuf(r) for r in resp.reports),
              key=lambda x: x.id)
def get_applications(self, states=None, name=None, user=None, queue=None,
                     started_begin=None, started_end=None,
                     finished_begin=None, finished_end=None)
Get the status of current skein applications.

Parameters
----------
states : sequence of ApplicationState, optional
    If provided, applications will be filtered to these application
    states. Default is ``['SUBMITTED', 'ACCEPTED', 'RUNNING']``.
name : str, optional
    Only select applications with this name.
user : str, optional
    Only select applications with this user.
queue : str, optional
    Only select applications in this queue.
started_begin : datetime or str, optional
    Only select applications that started after this time (inclusive).
    Can be either a datetime or a string representation of one. String
    representations can use any of the following formats:

    - ``YYYY-M-D H:M:S`` (e.g. 2019-4-10 14:50:20)
    - ``YYYY-M-D H:M`` (e.g. 2019-4-10 14:50)
    - ``YYYY-M-D`` (e.g. 2019-4-10)
    - ``H:M:S`` (e.g. 14:50:20, today is used for date)
    - ``H:M`` (e.g. 14:50, today is used for date)
started_end : datetime or str, optional
    Only select applications that started before this time (inclusive).
    Can be either a datetime or a string representation of one.
finished_begin : datetime or str, optional
    Only select applications that finished after this time (inclusive).
    Can be either a datetime or a string representation of one.
finished_end : datetime or str, optional
    Only select applications that finished before this time (inclusive).
    Can be either a datetime or a string representation of one.

Returns
-------
reports : list of ApplicationReport

Examples
--------
Get all the finished and failed applications

>>> client.get_applications(states=['FINISHED', 'FAILED'])
[ApplicationReport<name='demo'>,
 ApplicationReport<name='dask'>,
 ApplicationReport<name='demo'>]

Get all applications named 'demo' started after 2019-4-10:

>>> client.get_applications(name='demo', started_begin='2019-4-10')
[ApplicationReport<name='demo'>,
 ApplicationReport<name='demo'>]
1.870192
1.985252
0.942043
if states is not None:
    states = tuple(NodeState(s) for s in states)
else:
    states = ()

req = proto.NodesRequest(states=[str(s) for s in states])
resp = self._call('getNodes', req)
return sorted((NodeReport.from_protobuf(r) for r in resp.reports),
              key=lambda x: x.id)
def get_nodes(self, states=None)
Get the status of nodes in the cluster.

Parameters
----------
states : sequence of NodeState, optional
    If provided, nodes will be filtered to these node states. Default
    is all states.

Returns
-------
reports : list of NodeReport

Examples
--------
Get all the running nodes

>>> client.get_nodes(states=['RUNNING'])
[NodeReport<id='worker1.example.com:34721'>,
 NodeReport<id='worker2.example.com:34721'>]
4.106796
4.339851
0.946299
req = proto.QueueRequest(name=name)
resp = self._call('getQueue', req)
return Queue.from_protobuf(resp)
def get_queue(self, name)
Get information about a queue.

Parameters
----------
name : str
    The queue name.

Returns
-------
queue : Queue

Examples
--------
>>> client.get_queue('myqueue')
Queue<name='myqueue', percent_used=5.00>
5.57489
7.640421
0.729657
req = proto.QueueRequest(name=name)
resp = self._call('getChildQueues', req)
return [Queue.from_protobuf(q) for q in resp.queues]
def get_child_queues(self, name)
Get information about all children of a parent queue.

Parameters
----------
name : str
    The parent queue name.

Returns
-------
queues : list of Queue

Examples
--------
>>> client.get_child_queues('myqueue')
[Queue<name='child1', percent_used=10.00>,
 Queue<name='child2', percent_used=0.00>]
4.647468
6.380975
0.728332
resp = self._call('getAllQueues', proto.Empty())
return [Queue.from_protobuf(q) for q in resp.queues]
def get_all_queues(self)
Get information about all queues in the cluster.

Returns
-------
queues : list of Queue

Examples
--------
>>> client.get_all_queues()
[Queue<name='default', percent_used=0.00>,
 Queue<name='myqueue', percent_used=5.00>,
 Queue<name='child1', percent_used=10.00>,
 Queue<name='child2', percent_used=0.00>]
8.135624
11.738522
0.693071
resp = self._call('getStatus', proto.Application(id=app_id))
return ApplicationReport.from_protobuf(resp)
def application_report(self, app_id)
Get a report on the status of a skein application.

Parameters
----------
app_id : str
    The id of the application.

Returns
-------
report : ApplicationReport

Examples
--------
>>> client.application_report('application_1526134340424_0012')
ApplicationReport<name='demo'>
9.971592
14.745194
0.67626
self._call('moveApplication', proto.MoveRequest(id=app_id, queue=queue))
def move_application(self, app_id, queue)
Move an application to a different queue.

Parameters
----------
app_id : str
    The id of the application to move.
queue : str
    The queue to move the application to.
8.726703
9.357815
0.932558
self._call('kill', proto.KillRequest(id=app_id, user=user))
def kill_application(self, app_id, user="")
Kill an application.

Parameters
----------
app_id : str
    The id of the application to kill.
user : str, optional
    The user to kill the application as. Requires the current user to
    have permissions to proxy as ``user``. Default is the current user.
8.820317
8.985387
0.981629
req = proto.ShutdownRequest(final_status=str(FinalStatus(status)),
                            diagnostics=diagnostics)
self._call('shutdown', req)
def shutdown(self, status='SUCCEEDED', diagnostics=None)
Shutdown the application.

Stop all running containers and shutdown the application.

Parameters
----------
status : FinalStatus, optional
    The final application status. Default is 'SUCCEEDED'.
diagnostics : str, optional
    The application exit message, usually used for diagnosing failures.
    Can be seen in the YARN Web UI for completed applications under
    "diagnostics", as well as the ``diagnostic`` field of
    ``ApplicationReport`` objects. If not provided, a default will be used.
8.093008
8.24467
0.981605
resp = self._call('getApplicationSpec', proto.Empty())
return ApplicationSpec.from_protobuf(resp)
def get_specification(self)
Get the specification for the running application.

Returns
-------
spec : ApplicationSpec
19.130173
11.670379
1.639208
if 'instances' in kwargs:
    count = kwargs.pop('instances')
    warnings.warn("instances is deprecated, use count instead")
assert not kwargs
if count is not None and delta is not None:
    raise context.ValueError("cannot specify both `count` and `delta`")
elif count is None and delta is None:
    raise context.ValueError("must specify either `count` or `delta`")
if count and count < 0:
    raise context.ValueError("count must be >= 0")
req = proto.ScaleRequest(service_name=service, count=count, delta=delta)
resp = self._call('scale', req)
return [Container.from_protobuf(c) for c in resp.containers]
def scale(self, service, count=None, delta=None, **kwargs)
Scale a service to a requested number of instances.

Adds or removes containers to match the requested number of instances.
The number of instances for the service can be specified either as a
total count or a delta in that count.

When choosing which containers to remove, containers are removed in
order of state (``WAITING``, ``REQUESTED``, ``RUNNING``) followed by
age (oldest to newest).

When specified as a negative ``delta``, if the number of removed
containers is greater than the number of existing containers, all
containers are removed rather than throwing an error. This means that
``app.scale(delta=-1)`` will remove a container if one exists,
otherwise it will do nothing.

Parameters
----------
service : str
    The service to scale.
count : int, optional
    The number of instances to scale to.
delta : int, optional
    The change in number of instances.

Returns
-------
containers : list of Container
    A list of containers that were started or stopped.
2.789537
2.844409
0.980709
if not (0 <= progress <= 1.0):
    raise ValueError("progress must be between 0 and 1, got %.3f" % progress)
self._call('SetProgress', proto.SetProgressRequest(progress=progress))
def set_progress(self, progress)
Update the progress for this application.

For applications processing a fixed set of work it may be useful for
diagnostics to set the progress as the application processes.

Progress indicates job progression, and must be a float between 0 and 1.
By default the progress is set at 0.1 for its duration, which is a good
default value for applications that don't know their progress (e.g.
interactive applications).

Parameters
----------
progress : float
    The application progress, must be a value between 0 and 1.
4.599558
6.114134
0.752283
if properties.application_id is None:
    raise context.ValueError("Not running inside a container")
return cls(properties.appmaster_address,
           properties.application_id,
           security=Security.from_default())
def from_current(cls)
Create an application client from within a running container.

Useful for connecting to the application master from a running container
in an application.
13.938646
10.046567
1.387404
if services is not None:
    services = set(services)
if states is not None:
    states = [str(ContainerState(s)) for s in states]
req = proto.ContainersRequest(services=services, states=states)
resp = self._call('getContainers', req)
return sorted((Container.from_protobuf(c) for c in resp.containers),
              key=lambda x: (x.service_name, x.instance))
def get_containers(self, services=None, states=None)
Get information on containers in this application.

Parameters
----------
services : sequence of str, optional
    If provided, containers will be filtered to these services. Default
    is all services.
states : sequence of ContainerState, optional
    If provided, containers will be filtered by these container states.
    Default is ``['WAITING', 'REQUESTED', 'RUNNING']``.

Returns
-------
containers : list of Container
3.163641
3.32746
0.950768
if not isinstance(msg, cls._protobuf_cls):
    raise TypeError("Expected message of type "
                    "%r" % cls._protobuf_cls.__name__)
kwargs = {k: getattr(msg, k) for k in cls._get_params()}
return cls(**kwargs)
def from_protobuf(cls, msg)
Create an instance from a protobuf message.
3.45282
3.2508
1.062145
self._validate()
kwargs = {k: _convert(getattr(self, k), 'to_protobuf')
          for k in self._get_params()}
return self._protobuf_cls(**kwargs)
def to_protobuf(self)
Convert object to a protobuf message
6.109772
5.246572
1.164526
self._validate()
out = {}
for k in self._get_params():
    val = getattr(self, k)
    if not skip_nulls or val is not None:
        out[k] = _convert(val, 'to_dict', skip_nulls)
return out
def to_dict(self, skip_nulls=True)
Convert object to a dict
3.214175
3.040621
1.057078
return json.dumps(self.to_dict(skip_nulls=skip_nulls))
def to_json(self, skip_nulls=True)
Convert object to a json string
2.68047
2.398128
1.117734
return yaml.safe_dump(self.to_dict(skip_nulls=skip_nulls), default_flow_style=False)
def to_yaml(self, skip_nulls=True)
Convert object to a yaml string
2.32149
2.164818
1.072372
source = AjaxDataSource(data_url='./data', polling_interval=INTERVAL, method='GET')

# OHLC plot
p = figure(plot_height=400, title='OHLC', sizing_mode='scale_width',
           tools="xpan,xwheel_zoom,xbox_zoom,reset",
           x_axis_type=None, y_axis_location="right",
           y_axis_label="Price ($)")
p.x_range.follow = "end"
p.x_range.follow_interval = 100
p.x_range.range_padding = 0
p.line(x='time', y='average', alpha=0.25, line_width=3, color='black', source=source)
p.line(x='time', y='ma', alpha=0.8, line_width=2, color='steelblue', source=source)
p.segment(x0='time', y0='low', x1='time', y1='high', line_width=2, color='black', source=source)
p.segment(x0='time', y0='open', x1='time', y1='close', line_width=8, color='color', source=source, alpha=0.8)

# MACD plot
p2 = figure(plot_height=200, title='MACD', sizing_mode='scale_width',
            x_range=p.x_range, x_axis_label='Time (s)',
            tools="xpan,xwheel_zoom,xbox_zoom,reset",
            y_axis_location="right")
p2.line(x='time', y='macd', color='darkred', line_width=2, source=source)
p2.line(x='time', y='macd9', color='navy', line_width=2, source=source)
p2.segment(x0='time', y0=0, x1='time', y1='macdh', line_width=6,
           color='steelblue', alpha=0.5, source=source)

# Combine plots together
plot = gridplot([[p], [p2]], toolbar_location="left", plot_width=1000)

# Compose html from plots and template
script, div = components(plot, theme=theme)
html = template.render(resources=CDN.render(), script=script, div=div)
return html
def build_html()
Build the html, to be served by IndexHandler
2.26982
2.270463
0.999717
# Update the simulated pricing data
self.t += 1000 / INTERVAL
self.average *= np.random.lognormal(0, 0.04)
high = self.average * np.exp(np.abs(np.random.gamma(1, 0.03)))
low = self.average / np.exp(np.abs(np.random.gamma(1, 0.03)))
delta = high - low
open = low + delta * np.random.uniform(0.05, 0.95)
close = low + delta * np.random.uniform(0.05, 0.95)
color = "darkgreen" if open < close else "darkred"

for k, point in [('time', self.t), ('average', self.average),
                 ('open', open), ('high', high), ('low', low),
                 ('close', close), ('color', color)]:
    self.data[k].append(point)

ema12 = self._ema(self.data['close'], self.kernel12)
ema26 = self._ema(self.data['close'], self.kernel26)
macd = ema12 - ema26

self.data['ma'].append(ema12)
self.data['macd'].append(macd)

macd9 = self._ema(self.data['macd'], self.kernel9)

self.data['macd9'].append(macd9)
self.data['macdh'].append(macd - macd9)
def update(self)
Compute the next element in the stream, and update the plot data
2.720609
2.658941
1.023193
try:
    service, instance = id.rsplit('_', 1)
    instance = int(instance)
except (TypeError, ValueError):
    raise context.ValueError("Invalid container id %r" % id)
return _proto.ContainerInstance(service_name=service, instance=instance)
def container_instance_from_string(id)
Create a ContainerInstance from an id string
4.594527
4.682654
0.98118
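For example, splitting on the last underscore:

container_instance_from_string('worker_3')
# -> ContainerInstance with service_name='worker', instance=3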
if isinstance(s, integer):
    out = s
elif isinstance(s, float):
    out = math_ceil(s)
elif isinstance(s, string):
    s = s.replace(' ', '')
    if not s:
        raise context.ValueError("Could not interpret %r as a byte unit" % s)

    if s[0].isdigit():
        for i, c in enumerate(reversed(s)):
            if not c.isalpha():
                break
        index = len(s) - i
        prefix = s[:index]
        suffix = s[index:]

        try:
            n = float(prefix)
        except ValueError:
            raise context.ValueError("Could not interpret %r as a number" % prefix)
    else:
        n = 1
        suffix = s

    try:
        multiplier = _byte_sizes[suffix.lower()]
    except KeyError:
        raise context.ValueError("Could not interpret %r as a byte unit" % suffix)

    out = math_ceil(n * multiplier / (2 ** 20))
else:
    raise context.TypeError("memory must be an integer, got %r"
                            % type(s).__name__)

if out < 0:
    raise context.ValueError("memory must be positive")
return out
def parse_memory(s)
Converts bytes expression to number of mebibytes. If no unit is specified, ``MiB`` is used.
2.682465
2.606272
1.029234
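A few worked examples, assuming ``_byte_sizes`` maps the usual suffixes (e.g. ``'gib'`` to ``2**30``):

parse_memory('2 GiB')   # ceil(2 * 2**30 / 2**20) -> 2048
parse_memory('512KiB')  # ceil(512 * 2**10 / 2**20) -> 1
parse_memory(1.5)       # bare numbers are taken as MiB -> 2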
from .core import properties

# Are we in a container started by skein?
if properties.application_id is not None:
    if properties.container_dir is not None:
        cert_path = os.path.join(properties.container_dir, '.skein.crt')
        key_path = os.path.join(properties.container_dir, '.skein.pem')
        if os.path.exists(cert_path) and os.path.exists(key_path):
            return Security(cert_file=cert_path, key_file=key_path)
    raise context.FileNotFoundError(
        "Failed to resolve .skein.{crt,pem} in 'LOCAL_DIRS'")

# Try to load from config_dir, and fallback to minting new credentials
try:
    return cls.from_directory(properties.config_dir)
except FileNotFoundError:
    pass

new = cls.new_credentials()
try:
    out = new.to_directory(properties.config_dir)
    context.warn("Skein global security credentials not found, "
                 "writing now to %r." % properties.config_dir)
except FileExistsError:
    # Race condition between competing processes, use the credentials
    # written by the other process.
    out = cls.from_directory(properties.config_dir)
return out
def from_default(cls)
The default security configuration. Usually this loads the credentials stored in the configuration directory (``~/.skein`` by default). If these credentials don't already exist, new ones will be created. When run in a YARN container started by Skein, this loads the same security credentials as used for the current application.
4.389029
3.906806
1.123431
cert_path = os.path.join(directory, 'skein.crt')
key_path = os.path.join(directory, 'skein.pem')
for path, name in [(cert_path, 'cert'), (key_path, 'key')]:
    if not os.path.exists(path):
        raise context.FileNotFoundError(
            "Security %s file not found at %r" % (name, path)
        )
return Security(cert_file=cert_path, key_file=key_path)
def from_directory(cls, directory)
Create a security object from a directory. Relies on standard names for each file (``skein.crt`` and ``skein.pem``).
2.76791
2.217401
1.248268
self._validate()

# Create directory if it doesn't exist
makedirs(directory, exist_ok=True)

cert_path = os.path.join(directory, 'skein.crt')
key_path = os.path.join(directory, 'skein.pem')
cert_bytes = self._get_bytes('cert')
key_bytes = self._get_bytes('key')

lock_path = os.path.join(directory, 'skein.lock')
with lock_file(lock_path):
    for path, name in [(cert_path, 'skein.crt'), (key_path, 'skein.pem')]:
        if os.path.exists(path):
            if force:
                os.unlink(path)
            else:
                msg = ("%r file already exists, use `%s` to overwrite"
                       % (name, '--force' if context.is_cli else 'force'))
                raise context.FileExistsError(msg)

    flags = os.O_WRONLY | os.O_CREAT | os.O_EXCL
    for path, data in [(cert_path, cert_bytes), (key_path, key_bytes)]:
        with os.fdopen(os.open(path, flags, 0o600), 'wb') as fil:
            fil.write(data)

return Security(cert_file=cert_path, key_file=key_path)
def to_directory(self, directory, force=False)
Write this security object to a directory.

Parameters
----------
directory : str
    The directory to write the configuration to.
force : bool, optional
    If security credentials already exist at this location, an error
    will be raised by default. Set to True to overwrite existing files.

Returns
-------
security : Security
    A new security object backed by the written files.
2.286139
2.244578
1.018516
from cryptography import x509
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.x509.oid import NameOID

key = rsa.generate_private_key(public_exponent=65537,
                               key_size=2048,
                               backend=default_backend())
key_bytes = key.private_bytes(
    encoding=serialization.Encoding.PEM,
    format=serialization.PrivateFormat.PKCS8,
    encryption_algorithm=serialization.NoEncryption())
subject = issuer = x509.Name(
    [x509.NameAttribute(NameOID.COMMON_NAME, u'skein-internal')])
now = datetime.utcnow()
cert = (x509.CertificateBuilder()
        .subject_name(subject)
        .issuer_name(issuer)
        .public_key(key.public_key())
        .serial_number(x509.random_serial_number())
        .not_valid_before(now)
        .not_valid_after(now + timedelta(days=365))
        .sign(key, hashes.SHA256(), default_backend()))
cert_bytes = cert.public_bytes(serialization.Encoding.PEM)
return cls(cert_bytes=cert_bytes, key_bytes=key_bytes)
def new_credentials(cls)
Create a new Security object with a new certificate/key pair.
1.489094
1.426739
1.043704
_origin = _pop_origin(kwargs)

if isinstance(obj, string):
    obj = {'source': obj}

cls._check_keys(obj)

if _origin:
    if 'source' not in obj:
        raise context.TypeError("parameter 'source' is required but "
                                "wasn't provided")
    obj = dict(obj)
    obj['source'] = cls._normpath(obj['source'], _origin)

return cls(**obj)
def from_dict(cls, obj, **kwargs)
Create an instance from a dict. Keys in the dict should match parameter names
5.330721
5.256688
1.014084
if isinstance(spec, str):
    spec = cls.from_file(spec)
elif isinstance(spec, dict):
    spec = cls.from_dict(spec)
elif not isinstance(spec, cls):
    raise context.TypeError("spec must be either an ApplicationSpec, "
                            "path, or dict, got %s" % type(spec).__name__)
return spec
def _from_any(cls, spec)
Generic creation method for all types accepted as ``spec``
3.466144
3.581379
0.967824
format = _infer_format(path, format=format)
origin = os.path.abspath(os.path.dirname(path))

with open(path) as f:
    data = f.read()

if format == 'json':
    obj = json.loads(data)
else:
    obj = yaml.safe_load(data)

return cls.from_dict(obj, _origin=origin)
def from_file(cls, path, format='infer')
Create an instance from a json or yaml file.

Parameters
----------
path : str
    The path to the file to load.
format : {'infer', 'json', 'yaml'}, optional
    The file format. By default the format is inferred from the file
    extension.
2.50913
3.040721
0.825176
format = _infer_format(path, format=format)
data = getattr(self, 'to_' + format)(skip_nulls=skip_nulls)
with open(path, mode='w') as f:
    f.write(data)
def to_file(self, path, format='infer', skip_nulls=True)
Write object to a file.

Parameters
----------
path : str
    The path to the file to write.
format : {'infer', 'json', 'yaml'}, optional
    The file format. By default the format is inferred from the file
    extension.
skip_nulls : bool, optional
    By default null values are skipped in the output. Set to ``False``
    to output all fields.
2.69445
3.527479
0.763846
with _paths_lock:
    lock = _paths_to_locks.get(path)
    if lock is None:
        _paths_to_locks[path] = lock = _FileLock(path)
return lock
def lock_file(path)
File based lock on ``path``. Creates a file based lock. When acquired, other processes or threads are prevented from acquiring the same lock until it is released.
2.998017
3.792768
0.790456
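A hedged usage sketch, mirroring how ``to_directory`` uses it above; ``write_credentials`` is a hypothetical stand-in for the guarded work:

lock_path = os.path.join(directory, 'skein.lock')
with lock_file(lock_path):
    write_credentials(directory)  # hypothetical helper; runs under the lock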
if LooseVersion(GRPC_VERSION) < '1.18.0':
    key = 'GRPC_ENABLE_FORK_SUPPORT'
    try:
        os.environ[key] = '0'
        yield
    finally:
        del os.environ[key]
else:
    yield
def grpc_fork_support_disabled()
Temporarily disable fork support in gRPC. Fork + exec has always been supported, but the recent fork handling code in gRPC (>= 1.15) results in extraneous error logs currently. For now we explicitly disable fork support for gRPC clients we create.
3.056603
3.233634
0.945253
secs = int(td.total_seconds())
hours, secs = divmod(secs, 60 * 60)
mins, secs = divmod(secs, 60)
if hours:
    return '%dh %dm' % (hours, mins)
if mins:
    return '%dm' % mins
return '%ds' % secs
def humanize_timedelta(td)
Pretty-print a timedelta in a human readable format.
1.886485
1.897089
0.994411
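For example:

from datetime import timedelta

humanize_timedelta(timedelta(hours=3, minutes=20))    # '3h 20m'
humanize_timedelta(timedelta(minutes=5, seconds=30))  # '5m' (seconds dropped)
humanize_timedelta(timedelta(seconds=42))             # '42s'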
if x is None:
    return None
if hasattr(x, 'timestamp'):
    # Python >= 3.3
    secs = x.timestamp()
elif x.tzinfo is None:
    # Timezone naive
    secs = (time.mktime((x.year, x.month, x.day,
                         x.hour, x.minute, x.second,
                         -1, -1, -1)) +
            x.microsecond / 1e6)
else:
    # Timezone aware
    secs = (x - _EPOCH).total_seconds()
return int(secs * 1000)
def datetime_to_millis(x)
Convert a `datetime.datetime` to milliseconds since the epoch
2.096676
2.05025
1.022644
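For example, assuming ``_EPOCH`` is the UTC epoch:

from datetime import datetime, timezone

datetime_to_millis(datetime(1970, 1, 1, tzinfo=timezone.utc))            # 0
datetime_to_millis(datetime(1970, 1, 1, 0, 0, 1, tzinfo=timezone.utc))   # 1000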
rows = [tuple(str(i) for i in r) for r in rows]
columns = tuple(str(i).upper() for i in columns)
if rows:
    widths = tuple(max(max(map(len, x)), len(c))
                   for x, c in zip(zip(*rows), columns))
else:
    widths = tuple(map(len, columns))
row_template = (' '.join('%%-%ds' for _ in columns)) % widths
header = (row_template % tuple(columns)).strip()
if rows:
    data = '\n'.join((row_template % r).strip() for r in rows)
    return '\n'.join([header, data])
else:
    return header
def format_table(columns, rows)
Formats an ascii table for given columns and rows.

Parameters
----------
columns : list
    The column names
rows : list of tuples
    The rows in the table. Each tuple must be the same length as
    ``columns``.
2.773843
2.918869
0.950314
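For example:

print(format_table(['name', 'state'],
                   [('demo', 'RUNNING'), ('dask', 'FINISHED')]))
# NAME STATE
# demo RUNNING
# dask FINISHED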
attrs = super(Select2Mixin, self).build_attrs(*args, **kwargs)
if self.is_required:
    attrs.setdefault('data-allow-clear', 'false')
else:
    attrs.setdefault('data-allow-clear', 'true')
    attrs.setdefault('data-placeholder', '')

attrs.setdefault('data-minimum-input-length', 0)
if 'class' in attrs:
    attrs['class'] += ' django-select2'
else:
    attrs['class'] = 'django-select2'
return attrs
def build_attrs(self, *args, **kwargs)
Add select2 data attributes.
2.024241
1.824514
1.109469
if not self.is_required and not self.allow_multiple_selected:
    self.choices = list(chain([('', '')], self.choices))
return super(Select2Mixin, self).optgroups(name, value, attrs=attrs)
def optgroups(self, name, value, attrs=None)
Add empty option for clearable selects.
4.23398
3.384666
1.25093
lang = get_language()
select2_js = (settings.SELECT2_JS,) if settings.SELECT2_JS else ()
select2_css = (settings.SELECT2_CSS,) if settings.SELECT2_CSS else ()

i18n_name = SELECT2_TRANSLATIONS.get(lang)
if i18n_name not in settings.SELECT2_I18N_AVAILABLE_LANGUAGES:
    i18n_name = None
i18n_file = ('%s/%s.js' % (settings.SELECT2_I18N_PATH, i18n_name),) if i18n_name else ()

return forms.Media(
    js=select2_js + i18n_file + ('django_select2/django_select2.js',),
    css={'screen': select2_css}
)
def _get_media(self)
Construct Media as a dynamic property.

.. Note:: For more information visit
    https://docs.djangoproject.com/en/stable/topics/forms/media/#media-as-a-dynamic-property
2.558922
2.472602
1.034911
self.attrs.setdefault('data-minimum-input-length', 1)
self.attrs.setdefault('data-tags', 'true')
self.attrs.setdefault('data-token-separators', '[",", " "]')
return super(Select2TagMixin, self).build_attrs(*args, **kwargs)
def build_attrs(self, *args, **kwargs)
Add select2's tag attributes.
4.187578
2.646438
1.582345
attrs = super(HeavySelect2Mixin, self).build_attrs(*args, **kwargs)

# encrypt instance Id
self.widget_id = signing.dumps(id(self))

attrs['data-field_id'] = self.widget_id
attrs.setdefault('data-ajax--url', self.get_url())
attrs.setdefault('data-ajax--cache', "true")
attrs.setdefault('data-ajax--type', "GET")
attrs.setdefault('data-minimum-input-length', 2)
if self.dependent_fields:
    attrs.setdefault('data-select2-dependent-fields',
                     " ".join(self.dependent_fields))

attrs['class'] += ' django-select2-heavy'
return attrs
def build_attrs(self, *args, **kwargs)
Set select2's AJAX attributes.
3.590972
3.250586
1.104716
output = super(HeavySelect2Mixin, self).render(*args, **kwargs)
self.set_to_cache()
return output
def render(self, *args, **kwargs)
Render widget and register it in Django's cache.
8.14749
5.342226
1.525111
try:
    cache.set(self._get_cache_key(), {
        'widget': self,
        'url': self.get_url(),
    })
except (PicklingError, AttributeError):
    msg = "You need to overwrite \"set_to_cache\" or ensure that %s is serialisable."
    raise NotImplementedError(msg % self.__class__.__name__)
def set_to_cache(self)
Add widget object to Django's cache. You may need to overwrite this method, to pickle all information that is required to serve your JSON response view.
5.262645
4.43639
1.186245
queryset = self.get_queryset()
cache.set(self._get_cache_key(), {
    'queryset': [
        queryset.none(),
        queryset.query,
    ],
    'cls': self.__class__,
    'search_fields': tuple(self.search_fields),
    'max_results': int(self.max_results),
    'url': str(self.get_url()),
    'dependent_fields': dict(self.dependent_fields),
})
def set_to_cache(self)
Add widget's attributes to Django's cache. Split the QuerySet, to not pickle the result set.
4.613831
4.174831
1.105154
if queryset is None:
    queryset = self.get_queryset()
search_fields = self.get_search_fields()
select = Q()
term = term.replace('\t', ' ')
term = term.replace('\n', ' ')
for t in [t for t in term.split(' ') if not t == '']:
    select &= reduce(lambda x, y: x | Q(**{y: t}), search_fields,
                     Q(**{search_fields[0]: t}))
if dependent_fields:
    select &= Q(**dependent_fields)

return queryset.filter(select).distinct()
def filter_queryset(self, request, term, queryset=None, **dependent_fields)
Return QuerySet filtered by search_fields matching the passed term.

Args:
    request (django.http.request.HttpRequest): The request is being passed
        from the JSON view and can be used to dynamically alter the
        response queryset.
    term (str): Search term
    queryset (django.db.models.query.QuerySet): QuerySet to select choices from.
    **dependent_fields: Dependent fields and their values. If you want to
        inherit from ModelSelect2Mixin and later call to this method, be
        sure to pop everything from keyword arguments that is not a
        dependent field.

Returns:
    QuerySet: Filtered QuerySet
2.599154
2.811117
0.924599
if self.search_fields:
    return self.search_fields
raise NotImplementedError('%s, must implement "search_fields".' % self.__class__.__name__)
def get_search_fields(self)
Return list of lookup names.
4.918773
3.912687
1.257134
default = (None, [], 0)
groups = [default]
has_selected = False
selected_choices = {str(v) for v in value}
if not self.is_required and not self.allow_multiple_selected:
    default[1].append(self.create_option(name, '', '', False, 0))
if not isinstance(self.choices, ModelChoiceIterator):
    return super(ModelSelect2Mixin, self).optgroups(name, value, attrs=attrs)
selected_choices = {
    c for c in selected_choices
    if c not in self.choices.field.empty_values
}
field_name = self.choices.field.to_field_name or 'pk'
query = Q(**{'%s__in' % field_name: selected_choices})
for obj in self.choices.queryset.filter(query):
    option_value = self.choices.choice(obj)[0]
    option_label = self.label_from_instance(obj)

    selected = (
        str(option_value) in value and
        (has_selected is False or self.allow_multiple_selected)
    )
    if selected is True and has_selected is False:
        has_selected = True
    index = len(default[1])
    subgroup = default[1]
    subgroup.append(self.create_option(name, option_value, option_label,
                                       selected_choices, index))
return groups
def optgroups(self, name, value, attrs=None)
Return only selected options and set QuerySet from `ModelChoiceIterator`.
2.838971
2.766497
1.026197
self.widget = self.get_widget_or_404()
self.term = kwargs.get('term', request.GET.get('term', ''))
self.object_list = self.get_queryset()
context = self.get_context_data()
return JsonResponse({
    'results': [
        {
            'text': self.widget.label_from_instance(obj),
            'id': obj.pk,
        }
        for obj in context['object_list']
    ],
    'more': context['page_obj'].has_next()
})
def get(self, request, *args, **kwargs)
Return a :class:`.django.http.JsonResponse`.

Example::

    {
        'results': [
            {
                'text': "foo",
                'id': 123
            }
        ],
        'more': true
    }
2.85787
2.573622
1.110447
kwargs = {
    model_field_name: self.request.GET.get(form_field_name)
    for form_field_name, model_field_name in self.widget.dependent_fields.items()
    if form_field_name in self.request.GET and
    self.request.GET.get(form_field_name, '') != ''
}
return self.widget.filter_queryset(self.request, self.term, self.queryset, **kwargs)
def get_queryset(self)
Get QuerySet from cached widget.
2.855512
2.55098
1.119379
field_id = self.kwargs.get('field_id', self.request.GET.get('field_id', None))
if not field_id:
    raise Http404('No "field_id" provided.')
try:
    key = signing.loads(field_id)
except BadSignature:
    raise Http404('Invalid "field_id".')
else:
    cache_key = '%s%s' % (settings.SELECT2_CACHE_PREFIX, key)
    widget_dict = cache.get(cache_key)
    if widget_dict is None:
        raise Http404('field_id not found')
    if widget_dict.pop('url') != self.request.path:
        raise Http404('field_id was issued for the view.')

qs, qs.query = widget_dict.pop('queryset')
self.queryset = qs.all()
widget_dict['queryset'] = self.queryset
widget_cls = widget_dict.pop('cls')
return widget_cls(**widget_dict)
def get_widget_or_404(self)
Get and return widget from cache.

Raises:
    Http404: If the widget cannot be found or no id is provided.

Returns:
    ModelSelect2Mixin: Widget from cache.
3.131185
3.05498
1.024944
i = path.rfind('.')
module, attr = path[:i], path[i + 1:]
try:
    mod = import_module(module)
except (ImportError, ValueError) as e:
    error_message = 'Error importing widget for BleachField %s: "%s"'
    raise ImproperlyConfigured(error_message % (path, e))
try:
    cls = getattr(mod, attr)
except AttributeError:
    raise ImproperlyConfigured(
        'Module "%s" does not define a "%s" widget' % (module, attr)
    )
return cls
def load_widget(path)
Load custom widget for the form field
2.438365
2.22598
1.095412
default_widget = forms.Textarea
if hasattr(settings, 'BLEACH_DEFAULT_WIDGET'):
    default_widget = load_widget(settings.BLEACH_DEFAULT_WIDGET)
return default_widget
def get_default_widget()
Get the default widget or the widget defined in settings
3.567223
3.711368
0.961161
if value in self.empty_values:
    try:
        return self.empty_value
    except AttributeError:
        # CharField.empty_value was introduced in Django 1.11; in prior
        # versions a unicode string was returned for empty values in
        # all cases.
        return u''
return bleach.clean(value, **self.bleach_options)
def to_python(self, value)
Strips any dodgy HTML tags from the input
6.028568
5.927064
1.017126
if config.get('name'):
    client = self.get_client(actor)
    try:
        repo = client.get_repo(config['name'])
    except Exception as e:
        self.raise_error(e)
    else:
        config['external_id'] = six.text_type(repo['id'])
return config
def validate_config(self, organization, config, actor=None)
```
if config['foo'] and not config['bar']:
    raise PluginError('You cannot configure foo with bar')
return config
```
3.730002
4.206147
0.886798
schema = field_meta['schema']

# set up some defaults for form fields
fieldtype = 'text'
fkwargs = {
    'label': field_meta['name'],
    'required': field_meta['required'],
}
# override defaults based on field configuration
if (schema['type'] in ['securitylevel', 'priority'] or
        schema.get('custom') == JIRA_CUSTOM_FIELD_TYPES['select']):
    fieldtype = 'select'
    fkwargs['choices'] = self.make_choices(field_meta.get('allowedValues'))
elif field_meta.get('autoCompleteUrl') and \
        (schema.get('items') == 'user' or schema['type'] == 'user'):
    fieldtype = 'select'
    sentry_url = '/api/0/issues/%s/plugins/%s/autocomplete' % (group.id, self.slug)
    fkwargs['url'] = '%s?jira_url=%s' % (
        sentry_url, quote_plus(field_meta['autoCompleteUrl']),
    )
    fkwargs['has_autocomplete'] = True
    fkwargs['placeholder'] = 'Start typing to search for a user'
elif schema['type'] in ['timetracking']:
    # TODO: Implement timetracking (currently unsupported altogether)
    return None
elif schema.get('items') in ['worklog', 'attachment']:
    # TODO: Implement worklogs and attachments someday
    return None
elif schema['type'] == 'array' and schema['items'] != 'string':
    fieldtype = 'select'
    fkwargs.update(
        {
            'multiple': True,
            'choices': self.make_choices(field_meta.get('allowedValues')),
            'default': []
        }
    )

# break this out, since multiple field types could additionally
# be configured to use a custom property instead of a default.
if schema.get('custom'):
    if schema['custom'] == JIRA_CUSTOM_FIELD_TYPES['textarea']:
        fieldtype = 'textarea'

fkwargs['type'] = fieldtype
return fkwargs
def build_dynamic_field(self, group, field_meta)
Builds a field based on JIRA's meta field information
3.808418
3.64178
1.045757
client = JiraClient(config['instance_url'], config['username'], config['password'])
try:
    client.get_projects_list()
except ApiError as e:
    self.raise_error(e)
return config
def validate_config(self, project, config, actor=None)
```
if config['foo'] and not config['bar']:
    raise PluginError('You cannot configure foo with bar')
return config
```
5.312288
5.134959
1.034534
instance = self.get_option('instance', group.project)
project = (
    form_data.get('project') or
    self.get_option('default_project', group.project)
)
client = self.get_client(request.user)

title = form_data['title']
description = form_data['description']
link = absolute_uri(group.get_absolute_url(params={'referrer': 'vsts_plugin'}))
try:
    created_item = client.create_work_item(
        instance=instance,
        project=project,
        title=title,
        comment=markdown(description),
        link=link,
    )
except Exception as e:
    self.raise_error(e, identity=client.auth)

return {
    'id': created_item['id'],
    'url': created_item['_links']['html']['href'],
    'title': title,
}
def create_issue(self, request, group, form_data, **kwargs)
Creates the issue on the remote service and returns an issue ID.
3.763572
3.659888
1.02833
key = 'sentry-jira:' + md5(full_url, self.base_url).hexdigest()
cached_result = cache.get(key)
if not cached_result:
    cached_result = self.get(full_url)
    cache.set(key, cached_result, 60)
return cached_result
def get_cached(self, full_url)
Basic caching mechanism for requests and responses. It only caches
responses based on URL.
TODO: Implement GET attr in cache as well. (see self.create_meta for example)
3.39906
3.375372
1.007018
# IP address to be anonymized address_packed = ip_address(six.text_type(address)).packed address_len = len(address_packed) if address_len == 4: # IPv4 ipv4_mask_packed = ip_address(ipv4_mask).packed __validate_ipv4_mask(ipv4_mask_packed) return __apply_mask(address_packed, ipv4_mask_packed, 4) elif address_len == 16: # IPv6 ipv6_mask_packed = ip_address(ipv6_mask).packed __validate_ipv6_mask(ipv6_mask_packed) return __apply_mask(address_packed, ipv6_mask_packed, 16) else: # Invalid address raise ValueError("Address does not consist of 4 (IPv4) or 16 (IPv6) " "octets")
def anonymize_ip( address, ipv4_mask=u"255.255.255.0", ipv6_mask=u"ffff:ffff:ffff:0000:0000:0000:0000:0000" )
Anonymize the provided IPv4 or IPv6 address by setting parts of the
address to 0

:param str|int address: IP address to be anonymized
:param str ipv4_mask: Mask that defines which parts of an IPv4 address are
    set to 0 (default: "255.255.255.0")
:param str ipv6_mask: Mask that defines which parts of an IPv6 address are
    set to 0 (default: "ffff:ffff:ffff:0000:0000:0000:0000:0000")
:return: Anonymized IP address
:rtype: str
2.017529
2.131089
0.946713
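Usage sketch, assuming `anonymize_ip` is importable from this module; the outputs follow directly from the default masks:
```
print(anonymize_ip("192.168.1.55"))
# -> "192.168.1.0" (last octet zeroed by the default IPv4 mask)

print(anonymize_ip("2a02:1234:5678:90ab:cdef:1234:5678:9abc"))
# -> "2a02:1234:5678::" (default IPv6 mask keeps only the first 48 bits)
```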
anon_packed = bytearray()
for i in range(0, nr_bytes):
    # six.indexbytes returns an int on both Python 2 and 3; plain indexing
    # plus ord() would raise TypeError on Python 3, where bytes indexing
    # already yields ints
    anon_packed.append(six.indexbytes(mask_packed, i) &
                       six.indexbytes(address_packed, i))
return six.text_type(ip_address(six.binary_type(anon_packed)))
def __apply_mask(address_packed, mask_packed, nr_bytes)
Perform a bitwise AND operation on all corresponding bytes between the
mask and the provided address. Mask parts set to 0 will become 0 in the
anonymized IP address as well

:param bytes address_packed: Binary representation of the IP address to
    be anonymized
:param bytes mask_packed: Binary representation of the corresponding IP
    address mask
:param int nr_bytes: Number of bytes in the address (4 for IPv4, 16 for
    IPv6)
:return: Anonymized IP address
:rtype: str
3.290736
3.116785
1.055811
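The byte-wise AND is easy to check by hand; a standalone sketch of the same operation:
```
address = bytearray([192, 168, 1, 55])
mask = bytearray([255, 255, 255, 0])
# Each mask byte of 255 keeps the address byte; a mask byte of 0 zeroes it:
anon = bytearray(m & a for m, a in zip(mask, address))
print(list(anon))  # [192, 168, 1, 0], i.e. "192.168.1.0"
```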
if key in params:
    param = params[key]
    if hasattr(param, "strftime"):
        params[key] = param.strftime(format)
def _format_date_param(params, key, format="%Y-%m-%d %H:%M:%S")
Utility function to convert datetime values to strings.

If the value is already a str, or is not in the dict, no change is made.

:param params: A `dict` of params that may contain a `datetime` value.
:param key: The datetime value to be converted to a `str`
:param format: The `strftime` format to be used to format the date.
    The default value is '%Y-%m-%d %H:%M:%S'
2.647527
4.756946
0.55656
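Usage sketch, assuming `_format_date_param` is in scope:
```
from datetime import datetime

params = {"timestamp": datetime(2019, 3, 1, 12, 30, 0)}
_format_date_param(params, "timestamp")
print(params)  # {'timestamp': '2019-03-01 12:30:00'}

# A value that is already a str (no strftime attribute) is left untouched.
```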
params = {
    "message-id": message_id,
    "delivered": delivered,
    "timestamp": timestamp or datetime.now(pytz.utc),
}
# Ensure timestamp is a string:
_format_date_param(params, "timestamp")
return self.post(self.api_host, "/conversions/sms", params)
def submit_sms_conversion(self, message_id, delivered=True, timestamp=None)
Notify Nexmo that an SMS was successfully received.

:param message_id: The `message-id` str returned by the send_message call.
:param delivered: A `bool` indicating that the message was or was not
    successfully delivered.
:param timestamp: A `datetime` object containing the time the SMS arrived.
:return: The parsed response from the server. On success, the bytestring b'OK'
4.669402
5.045056
0.92554
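A hedged usage sketch with placeholder credentials, assuming a standard `nexmo.Client`; the `message-id` comes from a prior `send_message` response:
```
import nexmo

client = nexmo.Client(key="API_KEY", secret="API_SECRET")  # placeholders
response = client.send_message({"from": "Acme", "to": "447700900000", "text": "hi"})
message_id = response["messages"][0]["message-id"]

# Report the conversion once delivery is confirmed:
client.submit_sms_conversion(message_id, delivered=True)
```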
lines = []

def write(line):
    lines.append(line)

if self._defaults:
    write("[%s]\n" % DEFAULTSECT)
    for (key, value) in self._defaults.items():
        write("%s = %s\n" % (key, str(value).replace('\n', '\n\t')))
    write("\n")
for section in self._sections:
    write("[%s]\n" % section)
    for (key, value) in self._sections[section].items():
        if key == "__name__":
            continue
        if (value is not None) or (self._optcre == self.OPTCRE):
            key = " = ".join((key, str(value).replace('\n', '\n\t')))
        write("%s\n" % (key))
    write("\n")
return "".join(lines)
def fmt(self)
Return an .ini-format representation of the configuration state as a string.
2.768784
2.487876
1.112911
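The only non-obvious detail is the newline escaping: continuation lines of a multi-line value are prefixed with a tab so the .ini parser can read them back. A standalone check of that transform:
```
value = "line1\nline2"
print("%s = %s" % ("key", str(value).replace('\n', '\n\t')))
# key = line1
# 	line2
```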
# figure out precise method name, specific to this use
name = 'configure_%s_app' % self.parent.name
# call generic set up method
getattr(self.method, 'configure_app', self._no_op_setup)(self, self.parser)
# call specific set up method
getattr(self.method, name, self._no_op_setup)(self, self.parser)
def setup_application(self)
Allows the `method` object, injected earlier as a dependency, to set up the argparser before autocompletion/running the app.
7.537529
6.811041
1.106663
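A minimal sketch of the injected `method` object this hook expects; the class and argument names are invented for illustration (only the `configure_app`/`configure_<name>_app` naming convention comes from the code above):
```
class GitMethod(object):
    # generic hook: looked up as 'configure_app' and called for every app
    def configure_app(self, app, parser):
        parser.add_argument('--verbose', action='store_true')

    # specific hook: only found via getattr when parent.name == 'git'
    def configure_git_app(self, app, parser):
        parser.add_argument('--branch', default='master')
```
Either hook may be missing; `getattr` then falls back to `self._no_op_setup`, so plugins only implement the levels they need.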
a_y = radians(a.y)
b_y = radians(b.y)
delta_x = radians(a.x - b.x)
cos_x = (
    sin(a_y) * sin(b_y) +
    cos(a_y) * cos(b_y) * cos(delta_x)
)
return acos(cos_x) * earth_radius_km
def geo_distance(a, b)
Distance between two geo points in km. (p.x = long, p.y = lat)
2.191933
2.101909
1.04283
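A worked example, assuming `earth_radius_km` is the usual mean Earth radius (about 6371 km) and that any object with `.x`/`.y` attributes works as a point:
```
from collections import namedtuple

Point = namedtuple('Point', ['x', 'y'])  # x = longitude, y = latitude

london = Point(x=-0.1278, y=51.5074)
paris = Point(x=2.3522, y=48.8566)
print(geo_distance(london, paris))  # ~343 km
```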
try:
    lines = open(filename).readlines()
except FileNotFoundError:
    fatal("Could not open file: {!r}".format(filename))
return [line.strip() for line in lines]
def get_stripped_file_lines(filename)
Return lines of a file with whitespace removed
3.444984
3.450097
0.998518
if nameservers:
    resolver.nameservers = nameservers
elif nameserver_filename:
    nameservers = get_stripped_file_lines(nameserver_filename)
    resolver.nameservers = nameservers
else:
    # Use original nameservers
    pass
return resolver
def update_resolver_nameservers(resolver, nameservers, nameserver_filename)
Update a resolver's nameservers. The following priority is taken:
1. Nameservers list provided as an argument
2. A filename containing a list of nameservers
3. The original nameservers associated with the resolver
3.263777
3.11022
1.049372
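Usage sketch, assuming the resolver is a dnspython `dns.resolver.Resolver` (whose `nameservers` attribute this function assigns); the filename is a placeholder:
```
import dns.resolver

resolver = dns.resolver.Resolver()
# An explicit list takes priority over the file and the resolver's defaults:
resolver = update_resolver_nameservers(resolver, ['1.1.1.1', '8.8.8.8'], 'ns.txt')
print(resolver.nameservers)  # ['1.1.1.1', '8.8.8.8']
```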
settings = type('', (), {})()

settings.from_environment = from_environment
settings.locustfile = locustfile
settings.classes = classes
settings.host = host
settings.num_clients = num_clients
settings.hatch_rate = hatch_rate
settings.reset_stats = reset_stats
settings.run_time = run_time

# Default settings that are not to be changed
settings.no_web = True
settings.master = False
settings.show_task_ratio_json = False
settings.list_commands = False
settings.loglevel = 'INFO'
settings.slave = False
settings.only_summary = True
settings.logfile = None
settings.show_task_ratio = False
settings.print_stats = False

if from_environment:
    for attribute in ['locustfile', 'classes', 'host', 'run_time',
                      'num_clients', 'hatch_rate']:
        var_name = 'LOCUST_{0}'.format(attribute.upper())
        var_value = os.environ.get(var_name)
        if var_value:
            setattr(settings, attribute, var_value)

if settings.locustfile is None and settings.classes is None:
    raise Exception('One of locustfile or classes must be specified')

if settings.locustfile and settings.classes:
    raise Exception('Only one of locustfile or classes can be specified')

if settings.locustfile:
    docstring, classes = load_locustfile(settings.locustfile)
    settings.classes = [classes[n] for n in classes]
else:
    if isinstance(settings.classes, str):
        settings.classes = settings.classes.split(',')
    for idx, val in enumerate(settings.classes):
        # This needs fixing
        settings.classes[idx] = eval(val)

for attribute in ['classes', 'host', 'num_clients', 'hatch_rate']:
    val = getattr(settings, attribute, None)
    if not val:
        raise Exception('configuration error, attribute not set: {0}'.format(attribute))

    if isinstance(val, str) and val.isdigit():
        setattr(settings, attribute, int(val))

return settings
def create_settings(from_environment=False, locustfile=None, classes=None, host=None, num_clients=None, hatch_rate=None, reset_stats=False, run_time="3m")
Returns a settings object to be used by a LocalLocustRunner.

Arguments

    from_environment: get settings from environment variables
    locustfile: locustfile to use for loadtest
    classes: locust classes to use for load test
    host: host for load testing
    num_clients: number of clients to simulate in load test
    hatch_rate: number of clients per second to start
    reset_stats: Whether to reset stats after all clients are hatched
    run_time: The length of time to run the test for. Cannot exceed the
        duration limit set by lambda

If from_environment is set to True then this function will attempt to set
the attributes from environment variables. The environment variables are
named LOCUST_ + attribute name in upper case.
2.887942
1.943237
1.48615
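Configuration via environment variables, per the `LOCUST_` + upper-cased attribute naming described in the docstring; the locustfile path is a placeholder and must exist on disk:
```
import os

os.environ['LOCUST_LOCUSTFILE'] = 'locustfile.py'  # placeholder path
os.environ['LOCUST_HOST'] = 'https://example.com'
os.environ['LOCUST_NUM_CLIENTS'] = '10'
os.environ['LOCUST_HATCH_RATE'] = '2'

settings = create_settings(from_environment=True)
```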
runtime_info = {
    'remaining_time': context.get_remaining_time_in_millis(),
    'function_name': context.function_name,
    'function_version': context.function_version,
    'invoked_function_arn': context.invoked_function_arn,
    'memory_limit': context.memory_limit_in_mb,
    'aws_request_id': context.aws_request_id,
    'log_group_name': context.log_group_name,
    'log_stream_name': context.log_stream_name,
}
return runtime_info
def get_lambda_runtime_info(context)
Returns a dictionary of information about the AWS Lambda function invocation.

Arguments

    context: The context object from AWS Lambda.
2.001306
1.552777
1.288856
return pyfftw.byte_align(array, n=n, dtype=dtype)
def pyfftw_byte_aligned(array, dtype=None, n=None)
Construct a byte-aligned array for efficient use by :mod:`pyfftw`.
This function is a wrapper for :func:`pyfftw.byte_align`.

Parameters
----------
array : ndarray
    Input array
dtype : dtype, optional (default None)
    Output array dtype
n : int, optional (default None)
    Output array should be aligned to n-byte boundary

Returns
-------
a : ndarray
    Array with required byte-alignment
4.294971
5.465246
0.78587
return pyfftw.empty_aligned(shape, dtype, order, n)
def pyfftw_empty_aligned(shape, dtype, order='C', n=None)
Construct an empty byte-aligned array for efficient use by :mod:`pyfftw`.
This function is a wrapper for :func:`pyfftw.empty_aligned`.

Parameters
----------
shape : sequence of ints
    Output array shape
dtype : dtype
    Output array dtype
order : {'C', 'F'}, optional (default 'C')
    Specify whether arrays should be stored in row-major (C-style) or
    column-major (Fortran-style) order
n : int, optional (default None)
    Output array should be aligned to n-byte boundary

Returns
-------
a : ndarray
    Empty array with required byte-alignment
3.055282
5.680506
0.537854
ashp = list(shape)
raxis = axes[-1]
ashp[raxis] = ashp[raxis] // 2 + 1
cdtype = complex_dtype(dtype)
return pyfftw.empty_aligned(ashp, cdtype, order, n)
def pyfftw_rfftn_empty_aligned(shape, axes, dtype, order='C', n=None)
Construct an empty byte-aligned array for efficient use by :mod:`pyfftw`
functions :func:`pyfftw.interfaces.numpy_fft.rfftn` and
:func:`pyfftw.interfaces.numpy_fft.irfftn`. The shape of the empty array
is appropriate for the output of :func:`pyfftw.interfaces.numpy_fft.rfftn`
applied to an array of the shape specified by parameter `shape`, and for
the input of the corresponding :func:`pyfftw.interfaces.numpy_fft.irfftn`
call that reverses this operation.

Parameters
----------
shape : sequence of ints
    Output array shape
axes : sequence of ints
    Axes on which the FFT will be computed
dtype : dtype
    Real dtype from which the complex dtype of the output array is derived
order : {'C', 'F'}, optional (default 'C')
    Specify whether arrays should be stored in row-major (C-style) or
    column-major (Fortran-style) order
n : int, optional (default None)
    Output array should be aligned to n-byte boundary

Returns
-------
a : ndarray
    Empty array with required byte-alignment
4.325761
5.232877
0.826651
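The shape arithmetic mirrors `numpy.fft.rfftn`: only the last transformed axis shrinks, to `n // 2 + 1`. A quick check with the NumPy equivalent:
```
import numpy as np

x = np.zeros((8, 8))
print(np.fft.rfftn(x, axes=(0, 1)).shape)  # (8, 5): last axis becomes 8 // 2 + 1
```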
return pyfftw.interfaces.numpy_fft.irfftn(
    a, s=s, axes=axes, overwrite_input=False,
    planner_effort='FFTW_MEASURE', threads=pyfftw_threads)
def irfftn(a, s, axes=None)
Compute the inverse of the multi-dimensional discrete Fourier transform
for real input. This function is a wrapper for
:func:`pyfftw.interfaces.numpy_fft.irfftn`, with an interface similar to
that of :func:`numpy.fft.irfftn`.

Parameters
----------
a : array_like
    Input array
s : sequence of ints
    Shape of the output along each transformed axis (input is cropped or
    zero-padded to match). This parameter is not optional because, unlike
    :func:`ifftn`, the output shape cannot be uniquely determined from
    the input shape.
axes : sequence of ints, optional (default None)
    Axes over which to compute the inverse DFT.

Returns
-------
af : ndarray
    Inverse DFT of input array
2.722327
3.549263
0.767012
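Why `s` is not optional: even and odd input lengths produce rfftn outputs of the same shape, so the inverse cannot infer the original length. Demonstrated with the NumPy equivalents:
```
import numpy as np

for n in (6, 7):
    x = np.random.rand(n)
    xf = np.fft.rfftn(x)           # shape (4,) for both n = 6 and n = 7
    y = np.fft.irfftn(xf, s=(n,))  # s disambiguates the original length
    print(n, xf.shape, np.allclose(x, y))
```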
if axes is None:
    axes = list(range(x.ndim))
for ax in axes:
    x = fftpack.dct(x, type=2, axis=ax, norm='ortho')
return x
def dctii(x, axes=None)
Compute a multi-dimensional DCT-II over specified array axes. This
function is implemented by calling the one-dimensional DCT-II
:func:`scipy.fftpack.dct` with normalization mode 'ortho' for each of the
specified axes.

Parameters
----------
x : array_like
    Input array
axes : sequence of ints, optional (default None)
    Axes over which to compute the DCT-II.

Returns
-------
y : ndarray
    DCT-II of input array
2.50012
2.980174
0.838917
if axes is None:
    axes = list(range(x.ndim))
for ax in axes[::-1]:
    x = fftpack.idct(x, type=2, axis=ax, norm='ortho')
return x
def idctii(x, axes=None)
Compute a multi-dimensional inverse DCT-II over specified array axes. This
function is implemented by calling the one-dimensional inverse DCT-II
:func:`scipy.fftpack.idct` with normalization mode 'ortho' for each of the
specified axes.

Parameters
----------
x : array_like
    Input array
axes : sequence of ints, optional (default None)
    Axes over which to compute the inverse DCT-II.

Returns
-------
y : ndarray
    Inverse DCT-II of input array
2.468647
2.944952
0.838264
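With norm='ortho' each 1-D transform is orthonormal, so applying `idctii` after `dctii` recovers the input up to floating-point error. A round-trip check, assuming both functions above are importable:
```
import numpy as np

x = np.random.rand(4, 5)
print(np.allclose(idctii(dctii(x)), x))  # True
```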
if np.isrealobj(a) and np.isrealobj(b):
    fft = rfftn
    ifft = irfftn
else:
    fft = fftn
    ifft = ifftn
dims = np.maximum([a.shape[i] for i in axes], [b.shape[i] for i in axes])
af = fft(a, dims, axes)
bf = fft(b, dims, axes)
return ifft(af * bf, dims, axes)
def fftconv(a, b, axes=(0, 1))
Compute a multi-dimensional convolution via the Discrete Fourier
Transform. Note that the output has a phase shift relative to the output
of :func:`scipy.ndimage.convolve` with the default ``origin`` parameter.

Parameters
----------
a : array_like
    Input array
b : array_like
    Input array
axes : sequence of ints, optional (default (0, 1))
    Axes on which to perform convolution

Returns
-------
ab : ndarray
    Convolution of input arrays, a and b, along specified axes
2.33497
2.399453
0.973126
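Because `dims` is just the elementwise maximum of the input shapes (no zero-padding to the full linear size), equal-sized inputs give a circular convolution. A 1-D check of that behaviour with plain NumPy, mirroring what `fftconv(a, delta, axes=(0,))` computes:
```
import numpy as np

a = np.array([1., 2., 3., 4.])
delta = np.array([0., 1., 0., 0.])  # unit impulse at index 1

# Circular convolution with a shifted impulse rotates the signal:
out = np.fft.irfft(np.fft.rfft(a) * np.fft.rfft(delta), n=4)
print(np.allclose(out, np.roll(a, 1)))  # True: circular shift by one
```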
# Convert negative axis to positive
if axis < 0:
    axis = x.ndim + axis

# If sum not on axis 0, roll specified axis to 0 position
if axis == 0:
    xr = x
    yr = y
else:
    xr = np.rollaxis(x, axis, 0)
    yr = np.rollaxis(y, axis, 0)

# Efficient inner product on axis 0
if np.__version__ == '1.14.0':
    # Setting of optimize flag due to
    # https://github.com/numpy/numpy/issues/10343
    ip = np.einsum(xr, [0, Ellipsis], yr, [0, Ellipsis],
                   optimize=False)[np.newaxis, ...]
else:
    ip = np.einsum(xr, [0, Ellipsis], yr, [0, Ellipsis])[np.newaxis, ...]

# Roll axis back to original position if necessary
if axis != 0:
    ip = np.rollaxis(ip, 0, axis + 1)

return ip
def inner(x, y, axis=-1)
Compute inner product of x and y on specified axis, equivalent to
:code:`np.sum(x * y, axis=axis, keepdims=True)`.

Parameters
----------
x : array_like
    Input array x
y : array_like
    Input array y
axis : int, optional (default -1)
    Axis over which to compute the sum

Returns
-------
ip : ndarray
    Inner product array equivalent to summing x*y over the specified axis
3.253307
3.313281
0.981899
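A check of the documented equivalence, assuming `inner` above is importable:
```
import numpy as np

x = np.random.rand(3, 4, 5)
y = np.random.rand(3, 4, 5)
print(np.allclose(inner(x, y, axis=1),
                  np.sum(x * y, axis=1, keepdims=True)))  # True
```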