code : string
signature : string
docstring : string
loss_without_docstring : float64
loss_with_docstring : float64
factor : float64
# Output is the same size as ts1
Ns1 = np.size(ts1)
Ns2 = np.size(ts2)
ts_out = np.zeros((Ns1, 1), dtype='float64')
ishift = int(np.floor(Ns2 / 2))  # origin of ts2
t1m = np.mean(ts1)
t2m = np.mean(ts2)
for k in range(0, Ns1):
    lstart = int(ishift - k)  # np.int was removed in NumPy 1.20; use the builtin
    if lstart < 0:
        lstart = 0
    lend = int(ishift - k + Ns2)
    imax = int(np.min([Ns2, Ns1 - k + ishift]))
    if lend > imax:
        lend = imax
    csum = 0
    ts1sum = 0
    ts1sum2 = 0
    ts2sum = 0
    ts2sum2 = 0
    Nterms = lend - lstart
    for l in range(lstart, lend):
        csum += ts1[k + l - ishift] * ts2[l]
        ts1sum += ts1[k + l - ishift]
        ts1sum2 += ts1[k + l - ishift] * ts1[k + l - ishift]
        ts2sum += ts2[l]
        ts2sum2 += ts2[l] * ts2[l]
    ts1sum2 = np.max([t1m * t1m * 100, ts1sum2]) - ts1sum * ts1sum / Nterms
    ts2sum2 = np.max([t2m * t2m * 100, ts2sum2]) - ts2sum * ts2sum / Nterms
    # ts_out[k] = csum / np.sqrt(ts1sum2 * ts2sum2)
    ts_out[k] = (csum - 2.0 * ts1sum * ts2sum / Nterms
                 + ts1sum * ts2sum / Nterms / Nterms) / np.sqrt(ts1sum2 * ts2sum2)
best_shift = np.argmax(ts_out) - ishift
return best_shift, ts_out
def zncc(ts1,ts2)
Zero mean normalised cross-correlation (ZNCC)

This function does ZNCC of two signals, ts1 and ts2. Normalisation by very small values is avoided by doing max(nmin, nvalue).

Parameters
--------------
ts1 : ndarray
    Input signal 1, to be aligned with ts2
ts2 : ndarray
    Input signal 2

Returns
--------------
best_shift : int
    The best shift of *ts1* to align it with *ts2*
ts_out : ndarray
    The correlation result
2.273479
2.166867
1.049201
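A quick usage sketch for the record above (hedged: assumes `zncc` as defined and NumPy available; the exact sign of the result follows the function's own convention). Correlating a signal against a rolled copy of itself should recover the displacement.

import numpy as np

# Sketch only: align a signal with a shifted copy of itself.
rng = np.random.default_rng(0)
ts2 = rng.standard_normal(256)
ts1 = np.roll(ts2, 5)          # ts1 is ts2 displaced by 5 samples
best_shift, corr = zncc(ts1, ts2)
print(best_shift)              # magnitude should be 5; sign per the function's convention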
pyr1 = create_pyramid(ts1, nlevels)
pyr2 = create_pyramid(ts2, nlevels)
logger.debug("pyramid size = %d" % len(pyr1))
logger.debug("size of first element %d " % np.size(pyr1[0]))
logger.debug("size of last element %d " % np.size(pyr1[-1]))
ishift, corrfn = zncc(pyr1[-1], pyr2[-1])
for k in range(1, nlevels + 1):
    ishift, corrfn = refine_correlation(pyr1[-k-1], pyr2[-k-1], ishift * 2)
return ishift
def find_shift_pyr(ts1,ts2,nlevels)
Find shift that best aligns two time series

The shift that aligns the timeseries ts1 with ts2 is sought using zero mean normalized cross correlation (ZNCC) in a coarse-to-fine search with an octave pyramid on nlevels levels.

Parameters
----------------
ts1 : list_like
    The first timeseries
ts2 : list_like
    The second timeseries
nlevels : int
    Number of levels in pyramid

Returns
----------------
ts1_shift : float
    How many samples to shift ts1 to align with ts2
2.892496
3.078788
0.939492
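A hedged end-to-end sketch for the pyramid search (assumes `find_shift_pyr` plus its helpers `create_pyramid` and `refine_correlation`, which are not shown in this record):

import numpy as np

# Sketch only: recover a known displacement with a 3-level pyramid search.
t = np.linspace(0, 10, 1024)
ts2 = np.sin(2 * np.pi * t) + 0.01 * np.random.randn(1024)
ts1 = np.roll(ts2, 32)
shift = find_shift_pyr(ts1, ts2, nlevels=3)
print(shift)  # expected magnitude: about 32 samples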
"Load gyro data collected by the arduino version of the L3G logging platform, and return the data (in rad/s), a time vector, and the sample rate (seconds)" file_data = open(filename, 'rb').read() parser = L3GArduinoParser() parser.parse(file_data[7:]) # Skip first "GYROLOG" header in file data = parser.data if parser.actual_data_rate: T = 1. / parser.actual_data_rate print("Found measured data rate %.3f ms (%.3f Hz)" % (1000*T, 1. / T)) else: T = 1. / parser.data_rate print("Using data rate provided by gyro (probably off by a few percent!) %.3f ms (%.3f Hz)" % (1000*T, 1. / T)) N = parser.data.shape[1] t = np.linspace(0, T*N, num=data.shape[1]) print(t.shape, data.shape) print("Loaded %d samples (%.2f seconds) with expected sample rate %.3f ms (%.3f Hz)" % (N, t[-1], T*1000.0, 1./T)) try: print("Actual sample rate is %.3f ms (%.3f Hz)" % (1000. / parser.actual_data_rate, parser.actual_data_rate, )) except TypeError: pass if remove_begin_spurious: to_remove = int(0.3/T) # Remove first three tenth of second data[:,:to_remove] = 0.0 if return_parser: return np.deg2rad(data), t, T, parser else: return np.deg2rad(data), t, T
def load_L3G_arduino(filename, remove_begin_spurious=False, return_parser=False)
Load gyro data collected by the arduino version of the L3G logging platform, and return the data (in rad/s), a time vector, and the sample rate (seconds)
4.139589
3.267858
1.266759
"Convert combined axis angle vector to rotation matrix" theta = np.linalg.norm(r) v = r/theta R = crisp.rotations.axis_angle_to_rotation_matrix(v, theta) return R
def to_rot_matrix(r)
Convert combined axis angle vector to rotation matrix
6.483169
5.216104
1.242914
assert num_sequences >= 2

# Create optical flow for user to select parts in
logger.info("Calculating optical flow")
flow = tracking.optical_flow_magnitude(image_sequence)

# Prompt user for sync slices
logger.debug("Prompting user for %d sequences" % num_sequences)
imu_fake_timestamps = np.linspace(0, 1, num=imu_gyro.shape[1])
sync_sequences = [timesync.manual_sync_pick(flow, imu_fake_timestamps, imu_gyro)
                  for i in range(num_sequences)]

return sync_sequences
def pick_manual(image_sequence, imu_gyro, num_sequences=2)
Select N matching sequences and return data indices. Parameters --------------- image_sequence : list_like A list, or generator, of image data imu_gyro : (3, N) ndarray Gyroscope data (angular velocities) num_sequences : int The number of matching sequences to pick Returns ---------------- sync_sequences : list List of (frame_pair, gyro_pair) tuples where each pair contains (a, b) which are indices of the (inclusive) range [a, b] that was chosen
7.692691
7.752953
0.992227
N = np.zeros((3, 3))
for x in gyro_data.T:  # Transpose because samples are stored as columns
    y = x.reshape(3, 1)
    N += y.dot(y.T)

(eig_val, eig_vec) = np.linalg.eig(N)
i = np.argmax(eig_val)
v = eig_vec[:, i]

# Make sure v has correct sign
s = 0
for x in gyro_data.T:  # Transpose because samples are stored as columns
    s += v.T.dot(x.reshape(3, 1))
v *= np.sign(s)

return v
def principal_rotation_axis(gyro_data)
Get the principal rotation axis of angular velocity measurements. Parameters ------------- gyro_data : (3, N) ndarray Angular velocity measurements Returns ------------- v : (3,1) ndarray The principal rotation axis for the chosen sequence
3.199638
3.460771
0.924545
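The function takes the dominant eigenvector of the scatter matrix built from the angular-velocity samples. A hedged sanity check with synthetic data rotating mostly about z (assumes `principal_rotation_axis` as defined above):

import numpy as np

# Sketch only: 500 gyro samples dominated by rotation about the z-axis.
rng = np.random.default_rng(1)
gyro = np.vstack([
    0.01 * rng.standard_normal(500),         # x: noise
    0.01 * rng.standard_normal(500),         # y: noise
    1.0 + 0.01 * rng.standard_normal(500),   # z: dominant rate
])
v = principal_rotation_axis(gyro)
print(v)  # expected to be close to [0, 0, 1]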
uri = node.attributes['uri']
doc_folder = os.path.dirname(self.builder.current_docname)
if uri.startswith(doc_folder):
    # drop docname prefix
    uri = uri[len(doc_folder):]
    if uri.startswith("/"):
        uri = "." + uri
self.add('\n\n![image](%s)\n\n' % uri)
def visit_image(self, node)
Image directive
4.226684
3.89903
1.084035
def dec(cls):
    # Need _make_method to ensure new variable picked up for each iteration
    # of the loop. The defined method picks up this new variable in its
    # scope.
    for key, (prefix, suffix) in pref_suff_map.items():
        setattr(cls, 'visit_' + key, _make_method(prefix))
        setattr(cls, 'depart_' + key, _make_method(suffix))
    return cls
return dec
def add_pref_suff(pref_suff_map)
Decorator adds visit, depart methods for prefix/suffix pairs
7.004432
5.521523
1.268569
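`_make_method` is referenced in the comment but not shown in this record; a minimal factory consistent with that comment (closing over its argument so each loop iteration keeps its own value) might look like the sketch below. The `Translator` class here is purely hypothetical.

def _make_method(text):
    # Factory: each call creates a method that writes its own captured text.
    def meth(self, node):
        self.add(text)
    return meth

@add_pref_suff({'emphasis': ('*', '*'), 'strong': ('**', '**')})
class Translator:
    def add(self, text):
        print(text, end='')

# Translator now has visit_emphasis/depart_emphasis and
# visit_strong/depart_strong methods emitting the prefix/suffix pairs.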
def meth(self, node):
    pass

def dec(cls):
    for element_name in pass_thrus:
        for meth_prefix in ('visit_', 'depart_'):
            meth_name = meth_prefix + element_name
            if hasattr(cls, meth_name):
                raise ValueError('method name {} already defined'
                                 .format(meth_name))
            setattr(cls, meth_name, meth)
    return cls
return dec
def add_pass_thru(pass_thrus)
Decorator adds explicit pass-through visit and depart methods
4.093315
3.43395
1.192013
string = ''.join(self.content)
lines = string.splitlines(True)
if len(lines) == 0:
    return
texts = [self.first_prefix + lines[0]]
for line in lines[1:]:
    if line.strip() == '':  # avoid prefix for empty lines
        texts.append('\n')
    else:
        texts.append(self.prefix + line)
self.base.append(''.join(texts))
def write(self)
Add ``self.content`` with current ``prefix`` and ``first_prefix`` Add processed ``self.content`` to ``self.base``. The first line has ``first_prefix`` prepended, further lines have ``prefix`` prepended. Empty (all whitespace) lines get written as bare newlines, to avoid ugly extra whitespace.
3.788306
2.97355
1.274001
key = self.get_public_key_hex()
return ensure_bytes(hexlify(hash160(unhexlify(ensure_bytes(key)))))
def identifier(self)
Get the identifier for this node. Extended keys can be identified by the Hash160 (RIPEMD160 after SHA256) of the public key's `key`. This corresponds exactly to the data used in traditional Bitcoin addresses. It is not advised to represent this data in base58 format though, as it may be interpreted as an address that way (and wallet software is not required to accept payment to the chain key itself).
7.73345
6.368337
1.214359
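Hash160 as described in the docstring is RIPEMD-160 applied to a SHA-256 digest. A standalone sketch using only hashlib (hedged: the 'ripemd160' algorithm is only available if the interpreter's OpenSSL build exposes it):

import hashlib

def hash160(data: bytes) -> bytes:
    # SHA-256 first, then RIPEMD-160 of the digest.
    sha = hashlib.sha256(data).digest()
    return hashlib.new('ripemd160', sha).digest()

print(hash160(b'\x02' + b'\x11' * 32).hex())  # 20-byte identifier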
if self.private_key:
    raise AssertionError("You already know the private key")
if child_private_key.parent_fingerprint != self.fingerprint:
    raise ValueError("This is not a valid child")
if child_private_key.child_number >= 0x80000000:
    raise ValueError(
        "Cannot crack private keys from private derivation")

# Duplicate the public child derivation
child_number_hex = long_to_hex(child_private_key.child_number, 8)
data = self.get_public_key_hex() + child_number_hex
I = hmac.new(
    unhexlify(ensure_bytes(self.chain_code)),
    msg=unhexlify(ensure_bytes(data)),
    digestmod=sha512).digest()
I_L, I_R = I[:32], I[32:]
# Public derivation is the same as private derivation plus some offset;
# knowing the child's private key allows us to find this offset just
# by subtracting the child's private key from the parent I_L data
privkey = PrivateKey(long_or_int(hexlify(I_L), 16), network=self.network)
parent_private_key = child_private_key.private_key - privkey
return self.__class__(
    chain_code=self.chain_code,
    depth=self.depth,
    parent_fingerprint=self.parent_fingerprint,
    child_number=self.child_number,
    private_key=parent_private_key,
    network=self.network)
def crack_private_key(self, child_private_key)
Crack the parent private key given a child private key. BIP32 has a vulnerability/feature that allows you to recover the master private key if you're given a master public key and any of its publicly-derived child private keys. This is a pretty serious security vulnerability that looks as innocuous as this: >>> w = Wallet.new_random_wallet() >>> child = w.get_child(0, is_prime=False) >>> w_pub = w.public_copy() >>> assert w_pub.private_key is None >>> master_public_key = w_pub.serialize_b58(private=False) >>> # Now you put master_public_key on your website >>> # and give somebody a private key >>> public_master = Wallet.deserialize(master_public_key) >>> cracked_private_master = public_master.crack_private_key(child) >>> assert w == cracked_private_master # :( Implementation details from http://bitcoinmagazine.com/8396/deterministic-wallets-advantages-flaw/ # nopep8
3.638722
3.519689
1.033819
# Add the network byte, creating the "extended key"
extended_key_hex = self.private_key.get_extended_key()
# BIP32 wallets have a trailing \01 byte
extended_key_bytes = unhexlify(ensure_bytes(extended_key_hex)) + b'\01'
# And return the base58-encoded result with a checksum
return base58.b58encode_check(extended_key_bytes)
def export_to_wif(self)
Export a key to WIF. See https://en.bitcoin.it/wiki/Wallet_import_format for a full description.
5.974219
6.139271
0.973115
# Register all distributions and wheels with PyPI. We have to list the dist
# directory and register each file individually because `twine` doesn't
# handle globs.
for filename in os.listdir(dist):
    full_path = os.path.join(dist, filename)
    if os.path.isfile(full_path):
        # This will fail if the project has never been uploaded, so use check=False
        _shell('twine register ' + shlex.quote(full_path), check=False)

_shell('twine upload ' + shlex.quote(dist + '/*'))
def _pypi_push(dist)
Push created package to PyPI. Requires the following defined environment variables: - TWINE_USERNAME: The PyPI username to upload this package under - TWINE_PASSWORD: The password to the user's account Args: dist (str): The distribution to push. Must be a valid directory; shell globs are NOT allowed.
5.522558
5.49228
1.005513
# Ensure proper environment
if not os.getenv(CIRCLECI_ENV_VAR):  # pragma: no cover
    raise EnvironmentError('Must be on CircleCI to run this script')

current_branch = os.getenv('CIRCLE_BRANCH')
if (target == 'PROD') and (current_branch != 'master'):
    raise EnvironmentError(
        f'Refusing to deploy to production from branch {current_branch!r}. '
        f'Production deploys can only be made from master.')

if target in ('PROD', 'TEST'):
    pypi_username = os.getenv(f'{target}_PYPI_USERNAME')
    pypi_password = os.getenv(f'{target}_PYPI_PASSWORD')
else:
    raise ValueError(f"Deploy target must be 'PROD' or 'TEST', got {target!r}.")

if not (pypi_username and pypi_password):  # pragma: no cover
    raise EnvironmentError(
        f"Missing '{target}_PYPI_USERNAME' and/or '{target}_PYPI_PASSWORD' "
        f"environment variables. These are required to push to PyPI.")

# Twine requires these environment variables to be set. Subprocesses will
# inherit these when we invoke them, so no need to pass them on the command
# line. We want to avoid that in case something's logging each command run.
os.environ['TWINE_USERNAME'] = pypi_username
os.environ['TWINE_PASSWORD'] = pypi_password

# Set up git on circle to push to the current branch
_shell('git config --global user.email "[email protected]"')
_shell('git config --global user.name "Circle CI"')
_shell('git config push.default current')

# Obtain the version to deploy
ret = _shell('make version', stdout=subprocess.PIPE)
version = ret.stdout.decode('utf-8').strip()
print(f'Deploying version {version!r}...')

# Tag the version
_shell(f'git tag -f -a {version} -m "Version {version}"')

# Update the version
_shell(
    f'sed -i.bak "s/^__version__ = .*/__version__ = {version!r}/" */version.py')

# Create a standard distribution and a wheel
_shell('python setup.py sdist bdist_wheel')

# Add the updated ChangeLog and AUTHORS
_shell('git add ChangeLog AUTHORS */version.py')

# Start the commit message with "Merge" so that PBR will ignore it in the
# ChangeLog. Use [skip ci] to ensure CircleCI doesn't recursively deploy.
_shell('git commit --no-verify -m "Merge autogenerated files [skip ci]"')

# Push the distributions to PyPI.
_pypi_push('dist')

# Push the tag and AUTHORS / ChangeLog after successful PyPI deploy
_shell('git push --follow-tags')

print(f'Deployment complete. Latest version is {version}.')
def deploy(target)
Deploys the package and documentation. Proceeds in the following steps: 1. Ensures proper environment variables are set and checks that we are on Circle CI 2. Tags the repository with the new version 3. Creates a standard distribution and a wheel 4. Updates version.py to have the proper version 5. Commits the ChangeLog, AUTHORS, and version.py file 6. Pushes to PyPI 7. Pushes the tags and newly committed files Raises: `EnvironmentError`: - Not running on CircleCI - `*_PYPI_USERNAME` and/or `*_PYPI_PASSWORD` environment variables are missing - Attempting to deploy to production from a branch that isn't master
4.428761
3.910103
1.132645
return _s(dd['q']), _s(dd.get('r', NoResponse)), _s(dd.get('e', NoResponse))
def _get_triplet(dd)
Return a triplet from a dialogue dictionary. :param dd: Dialogue dictionary. :type dd: Dict[str, str] :return: (query, response, error response) :rtype: (str, str | NoResponse, str | NoResponse)
10.187814
5.40942
1.883347
try:
    data = yaml.load(content_or_fp, Loader=yaml.loader.BaseLoader)
except Exception as e:
    raise type(e)('Malformed yaml file:\n%r' % format_exc())

try:
    ver = data['spec']
except KeyError:
    raise ValueError('The file does not specify a spec version')

try:
    ver = tuple(map(int, ver.split(".")))
except (AttributeError, ValueError):
    raise ValueError("Invalid spec version format. Expect 'X.Y'"
                     " (X and Y integers), found %s" % ver)

if ver > SPEC_VERSION_TUPLE:
    raise ValueError('The spec version of the file is '
                     '%s but the parser is %s. '
                     'Please update pyvisa-sim.' % (ver, SPEC_VERSION))

return data
def _load(content_or_fp)
YAML Parse a file or str and check version.
4.456128
4.204485
1.059851
with closing(pkg_resources.resource_stream(__name__, name)) as fp:
    rbytes = fp.read()
return _load(StringIO(rbytes.decode('utf-8')))
def parse_resource(name)
Parse a resource file
4.930798
4.501225
1.095435
for dia in component_dict.get('dialogues', ()):
    try:
        comp.add_dialogue(*_get_pair(dia))
    except Exception as e:
        msg = 'In device %s, malformed dialogue %s\n%r'
        raise Exception(msg % (name, dia, e))

for prop_name, prop_dict in component_dict.get('properties', {}).items():
    try:
        getter = (_get_pair(prop_dict['getter'])
                  if 'getter' in prop_dict else None)
        setter = (_get_triplet(prop_dict['setter'])
                  if 'setter' in prop_dict else None)
        comp.add_property(prop_name, prop_dict.get('default', ''),
                          getter, setter, prop_dict.get('specs', {}))
    except Exception as e:
        msg = 'In device %s, malformed property %s\n%r'
        raise type(e)(msg % (name, prop_name, format_exc()))
def update_component(name, comp, component_dict)
Update a component using a component dict.
2.903284
2.938359
0.988063
bases = definition_dict.get('bases', ())
if bases:
    bases = (loader.get_comp_dict(required_version=SPEC_VERSION_TUPLE[0], **b)
             for b in bases)
    return SimpleChainmap(definition_dict, *bases)
else:
    return definition_dict
def get_bases(definition_dict, loader)
Collect dependencies.
7.010238
6.772827
1.035053
channel_dict = get_bases(channel_dict, loader)

r_ids = resource_dict.get('channel_ids', {}).get(ch_name, [])
ids = r_ids if r_ids else channel_dict.get('ids', {})

can_select = False if channel_dict.get('can_select') == 'False' else True
channels = Channels(device, ids, can_select)

update_component(ch_name, channels, channel_dict)

return channels
def get_channel(device, ch_name, channel_dict, loader, resource_dict)
Get a Channels object from a channels dictionary. :param ch_name: name of the channel :param channel_dict: channel dictionary :rtype: Channels
4.342382
5.048952
0.860056
device = Device(name, device_dict.get('delimiter', ';').encode('utf-8'))

device_dict = get_bases(device_dict, loader)

err = device_dict.get('error', {})
device.add_error_handler(err)

for itype, eom_dict in device_dict.get('eom', {}).items():
    device.add_eom(itype, *_get_pair(eom_dict))

update_component(name, device, device_dict)

for ch_name, ch_dict in device_dict.get('channels', {}).items():
    device.add_channels(ch_name, get_channel(device, ch_name, ch_dict,
                                             loader, resource_dict))

return device
def get_device(name, device_dict, loader, resource_dict)
Get a device from a device dictionary. :param name: name of the device :param device_dict: device dictionary :rtype: Device
4.064773
4.40455
0.922858
loader = Loader(filename, bundled)
data = loader.data

devices = Devices()

# Iterate through the resources and generate each individual device
# on demand.
for resource_name, resource_dict in data.get('resources', {}).items():
    device_name = resource_dict['device']
    dd = loader.get_device_dict(device_name,
                                resource_dict.get('filename', None),
                                resource_dict.get('bundled', False),
                                SPEC_VERSION_TUPLE[0])
    devices.add_device(resource_name,
                       get_device(device_name, dd, loader, resource_dict))

return devices
def get_devices(filename, bundled)
Get a Devices object from a file. :param filename: full path of the file to parse or name of the resource. :param bundled: boolean indicating if it is a bundled resource. :rtype: Devices
4.748345
4.665359
1.017788
value = self.validate_value(string_value)
self._value = defaultdict(lambda: value)
def init_value(self, string_value)
Create an empty defaultdict holding the default value.
6.203954
4.605684
1.347021
value = self.validate_value(string_value)
self._value[self._channel._selected] = value
def set_value(self, string_value)
Set the current value for a channel.
9.777198
7.539616
1.296777
self._dialogues['__default__'][to_bytes(query)] = to_bytes(response)
def add_dialogue(self, query, response)
Add dialogue to channel. :param query: query string :param response: response string
8.827997
12.356613
0.714435
self._properties[name] = ChannelProperty(self, name, default_value, specs)

if getter_pair:
    query, response = getter_pair
    self._getters['__default__'][to_bytes(query)] = name, response

if setter_triplet:
    query, response, error = setter_triplet
    self._setters.append((name,
                          stringparser.Parser(query),
                          to_bytes(response),
                          to_bytes(error)))
def add_property(self, name, default_value, getter_pair, setter_triplet, specs)
Add property to channel :param name: property name :param default_value: default value as string :param getter_pair: (query, response) :param setter_triplet: (query, response, error) :param specs: specification of the Property
4.902797
4.232637
1.158332
if not self.can_select:
    ch_id = self._device._properties['selected_channel'].get_value()
    if ch_id in self._ids:
        self._selected = ch_id
    else:
        return

    response = self._match_dialog(query, self._dialogues['__default__'])
    if response is not None:
        return response

    response = self._match_getters(query, self._getters['__default__'])
    if response is not None:
        return response

else:
    for ch_id in self._ids:
        self._selected = ch_id
        response = self._match_dialog(query, self._dialogues[ch_id])
        if response is not None:
            return response

        response = self._match_getters(query, self._getters[ch_id])
        if response is not None:
            return response

return self._match_setters(query)
def match(self, query)
Try to find a match for a query in the channel commands.
3.054541
2.950619
1.03522
q = query.decode('utf-8')
for name, parser, response, error_response in self._setters:
    try:
        parsed = parser(q)
        logger.debug('Found response in setter of %s' % name)
    except ValueError:
        continue

    try:
        if isinstance(parsed, dict) and 'ch_id' in parsed:
            self._selected = parsed['ch_id']
            self._properties[name].set_value(parsed['0'])
        else:
            self._properties[name].set_value(parsed)
        return response
    except ValueError:
        if isinstance(error_response, bytes):
            return error_response
        return self._device.error_response('command_error')

return None
def _match_setters(self, query)
Try to find a match among the setters.
4.73665
4.668021
1.014702
from . import __version__
from .parser import SPEC_VERSION

d = OrderedDict()
d['Version'] = '%s' % __version__
d['Spec version'] = SPEC_VERSION

return d
def get_debug_info()
Return an ordered dictionary with backend info.
6.104523
5.657997
1.07892
try:
    open_timeout = int(open_timeout)
except ValueError:
    raise ValueError('open_timeout (%r) must be an integer (or compatible type)'
                     % open_timeout)

try:
    parsed = rname.parse_resource_name(resource_name)
except rname.InvalidResourceName:
    return 0, constants.StatusCode.error_invalid_resource_name

# Loops through all session types, tries to parse the resource name and
# if ok, open it.
cls = sessions.Session.get_session_class(parsed.interface_type_const,
                                         parsed.resource_class)

sess = cls(session, resource_name, parsed)
try:
    sess.device = self.devices[sess.attrs[constants.VI_ATTR_RSRC_NAME]]
except KeyError:
    return 0, constants.StatusCode.error_resource_not_found

return self._register(sess), constants.StatusCode.success
def open(self, session, resource_name, access_mode=constants.AccessModes.no_lock, open_timeout=constants.VI_TMO_IMMEDIATE)
Opens a session to the specified resource. Corresponds to viOpen function of the VISA library. :param session: Resource Manager session (should always be a session returned from open_default_resource_manager()). :param resource_name: Unique symbolic name of a resource. :param access_mode: Specifies the mode by which the resource is to be accessed. (constants.AccessModes) :param open_timeout: Specifies the maximum time period (in milliseconds) that this operation waits before returning an error. :return: Unique logical identifier reference to a session, return value of the library call. :rtype: session, :class:`pyvisa.constants.StatusCode`
4.489754
4.371113
1.027142
try:
    del self.sessions[session]
    return constants.StatusCode.success
except KeyError:
    return constants.StatusCode.error_invalid_object
def close(self, session)
Closes the specified session, event, or find list. Corresponds to viClose function of the VISA library. :param session: Unique logical identifier to a session, event, or find list. :return: return value of the library call. :rtype: :class:`pyvisa.constants.StatusCode`
6.693085
5.155105
1.298341
# For each session type, ask for the list of connected resources and
# merge them into a single list.
resources = self.devices.list_resources()

resources = rname.filter(resources, query)

if resources:
    return resources

raise errors.VisaIOError(errors.StatusCode.error_resource_not_found.value)
def list_resources(self, session, query='?*::INSTR')
Returns a tuple of all connected devices matching query. :param query: regular expression used to match devices.
9.370044
9.623475
0.973665
try:
    sess = self.sessions[session]
except KeyError:
    return b'', constants.StatusCode.error_invalid_object

try:
    chunk, status = sess.read(count)
    if status == constants.StatusCode.error_timeout:
        raise errors.VisaIOError(constants.VI_ERROR_TMO)
    return chunk, status
except AttributeError:
    return b'', constants.StatusCode.error_nonsupported_operation
def read(self, session, count)
Reads data from device or interface synchronously. Corresponds to viRead function of the VISA library. :param session: Unique logical identifier to a session. :param count: Number of bytes to be read. :return: data read, return value of the library call. :rtype: bytes, :class:`pyvisa.constants.StatusCode`
4.823313
3.288494
1.466724
try:
    sess = self.sessions[session]
except KeyError:
    return constants.StatusCode.error_invalid_object

try:
    return sess.write(data)
except AttributeError:
    return constants.StatusCode.error_nonsupported_operation
def write(self, session, data)
Writes data to device or interface synchronously. Corresponds to viWrite function of the VISA library. :param session: Unique logical identifier to a session. :param data: data to be written. :type data: str :return: Number of bytes actually transferred, return value of the library call. :rtype: int, :class:`pyvisa.constants.StatusCode`
4.786569
3.668857
1.304649
try:
    return cls._session_classes[(interface_type, resource_class)]
except KeyError:
    raise ValueError('No class registered for %s, %s'
                     % (interface_type, resource_class))
def get_session_class(cls, interface_type, resource_class)
Return the session class for a given interface type and resource class. :type interface_type: constants.InterfaceType :type resource_class: str :return: Session
2.62733
3.181608
0.825787
def _internal(python_class):
    if (interface_type, resource_class) in cls._session_classes:
        logger.warning('%s is already registered in the ResourceManager. '
                       'Overwriting with %s'
                       % ((interface_type, resource_class), python_class))

    python_class.session_type = (interface_type, resource_class)
    cls._session_classes[(interface_type, resource_class)] = python_class
    return python_class
return _internal
def register(cls, interface_type, resource_class)
Register a session class for a given interface type and resource class. :type interface_type: constants.InterfaceType :type resource_class: str
3.208968
3.150506
1.018557
# Check that the attribute exists.
try:
    attr = attributes.AttributesByID[attribute]
except KeyError:
    return 0, constants.StatusCode.error_nonsupported_attribute

# Check that the attribute is valid for this session type.
if not attr.in_resource(self.session_type):
    return 0, constants.StatusCode.error_nonsupported_attribute

# Check that the attribute is readable.
if not attr.read:
    raise Exception('Do not know how to handle write only attributes.')

# Return the current value or the default according to the VISA spec
return self.attrs.setdefault(attribute, attr.default), constants.StatusCode.success
def get_attribute(self, attribute)
Get an attribute from the session. :param attribute: :return: attribute value, status code :rtype: object, constants.StatusCode
5.762593
5.235809
1.100612
# Check that the attribute exists.
try:
    attr = attributes.AttributesByID[attribute]
except KeyError:
    return constants.StatusCode.error_nonsupported_attribute

# Check that the attribute is valid for this session type.
if not attr.in_resource(self.session_type):
    return constants.StatusCode.error_nonsupported_attribute

# Check that the attribute is writable.
if not attr.write:
    return constants.StatusCode.error_attribute_read_only

try:
    self.attrs[attribute] = attribute_state
except ValueError:
    return constants.StatusCode.error_nonsupported_attribute_state

return constants.StatusCode.success
def set_attribute(self, attribute, attribute_state)
Set an attribute on the session. :param attribute: :param attribute_state: :return: status code :rtype: constants.StatusCode
3.108644
2.909631
1.068398
if val is NoResponse:
    return val

val = val.replace('\\r', '\r').replace('\\n', '\n')
return val.encode()
def to_bytes(val)
Take a text message, expand escaped '\\r' and '\\n' sequences, and return it encoded as bytes. NoResponse is passed through unchanged.
5.899492
5.874098
1.004323
specs = self.specs
if 'type' in specs:
    value = specs['type'](string_value)
else:
    value = string_value
if 'min' in specs and value < specs['min']:
    raise ValueError
if 'max' in specs and value > specs['max']:
    raise ValueError
if 'valid' in specs and value not in specs['valid']:
    raise ValueError
return value
def validate_value(self, string_value)
Validate that a value matches the Property specs.
2.13781
1.953074
1.094588
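For illustration, a standalone mirror of the validation logic above with a hypothetical `specs` dictionary; the keys shown ('type', 'min', 'max', 'valid') are exactly the ones the method checks:

def validate(specs, string_value):
    # Standalone demo mirroring ChannelProperty.validate_value.
    value = specs['type'](string_value) if 'type' in specs else string_value
    if 'min' in specs and value < specs['min']:
        raise ValueError
    if 'max' in specs and value > specs['max']:
        raise ValueError
    if 'valid' in specs and value not in specs['valid']:
        raise ValueError
    return value

print(validate({'type': float, 'min': 0.0, 'max': 10.0}, '3.5'))  # 3.5
# validate({'type': float, 'min': 0.0, 'max': 10.0}, '42.0') raises ValueError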
if dialogues is None:
    dialogues = self._dialogues

# Try to match in the queries
if query in dialogues:
    response = dialogues[query]
    logger.debug('Found response in queries: %s' % repr(response))

    return response
def _match_dialog(self, query, dialogues=None)
Tries to match in dialogues :param query: message tuple :type query: Tuple[bytes] :return: response if found or None :rtype: Tuple[bytes] | None
4.305189
4.222422
1.019602
if getters is None:
    getters = self._getters

if query in getters:
    name, response = getters[query]
    logger.debug('Found response in getter of %s' % name)
    response = response.format(self._properties[name].get_value())
    return response.encode('utf-8')
def _match_getters(self, query, getters=None)
Tries to match in getters :param query: message tuple :type query: Tuple[bytes] :return: response if found or None :rtype: Tuple[bytes] | None
5.239911
4.989408
1.050207
q = query.decode('utf-8')
for name, parser, response, error_response in self._setters:
    try:
        value = parser(q)
        logger.debug('Found response in setter of %s' % name)
    except ValueError:
        continue

    try:
        self._properties[name].set_value(value)
        return response
    except ValueError:
        if isinstance(error_response, bytes):
            return error_response
        return self.error_response('command_error')

return None
def _match_setters(self, query)
Tries to match in setters :param query: message tuple :type query: Tuple[bytes] :return: response if found or None :rtype: Tuple[bytes] | None
4.99394
4.524316
1.1038
if isinstance(error_input, dict):
    error_response = error_input.get('response', {})
    cerr = error_response.get('command_error', NoResponse)
    qerr = error_response.get('query_error', NoResponse)
    response_dict = {'command_error': cerr,
                     'query_error': qerr}

    register_list = error_input.get('status_register', [])
    for register_dict in register_list:
        query = register_dict['q']
        register = StatusRegister(register_dict)
        self._status_registers[to_bytes(query)] = register
        for key in register.keys():
            self._error_map[key] = register

    queue_list = error_input.get('error_queue', [])
    for queue_dict in queue_list:
        query = queue_dict['q']
        err_queue = ErrorQueue(queue_dict)
        self._error_queues[to_bytes(query)] = err_queue
else:
    response_dict = {'command_error': error_input,
                     'query_error': error_input}

for key, value in response_dict.items():
    self._error_response[key] = to_bytes(value)
def add_error_handler(self, error_input)
Add error handler to the device
2.77181
2.743768
1.01022
interface_type, resource_class = type_class.split(' ')
interface_type = getattr(constants.InterfaceType, interface_type.lower())
self._eoms[(interface_type, resource_class)] = (
    to_bytes(query_termination),
    to_bytes(response_termination))
def add_eom(self, type_class, query_termination, response_termination)
Add default end of message for a given interface type and resource class. :param type_class: interface type and resource class as strings joined by space :param query_termination: end of message used in queries. :param response_termination: end of message used in responses.
4.773901
3.573305
1.33599
logger.debug('Writing into device input buffer: %r' % data)
if not isinstance(data, bytes):
    raise TypeError('data must be an instance of bytes')

if len(data) != 1:
    msg = 'data must have a length of 1, not %d'
    raise ValueError(msg % len(data))

self._input_buffer.extend(data)

l = len(self._query_eom)
if not self._input_buffer.endswith(self._query_eom):
    return

try:
    message = bytes(self._input_buffer[:-l])
    queries = (message.split(self.delimiter) if self.delimiter
               else [message])
    for query in queries:
        response = self._match(query)
        eom = self._response_eom

        if response is None:
            response = self.error_response('command_error')

        if response is not NoResponse:
            self._output_buffer.extend(response)
            self._output_buffer.extend(eom)
finally:
    self._input_buffer = bytearray()
def write(self, data)
Write data into the device input buffer. :param data: single element byte :type data: bytes
3.467506
3.293747
1.052754
if self._output_buffer:
    b, self._output_buffer = (self._output_buffer[0:1],
                              self._output_buffer[1:])
    return b

return b''
def read(self)
Return a single byte from the output buffer
4.944784
3.672714
1.346357
response = self._match_dialog(query)
if response is not None:
    return response

response = self._match_getters(query)
if response is not None:
    return response

response = self._match_registers(query)
if response is not None:
    return response

response = self._match_errors_queues(query)
if response is not None:
    return response

response = self._match_setters(query)
if response is not None:
    return response

if response is None:
    for channel in self._channels.values():
        response = channel.match(query)
        if response:
            return response
    return None
def _match(self, query)
Tries to match in dialogues, getters and setters and subcomponents :param query: message tuple :type query: Tuple[bytes] :return: response if found or None :rtype: Tuple[bytes] | None
2.539156
2.202471
1.152867
if query in self._status_registers:
    register = self._status_registers[query]
    response = register.value
    logger.debug('Found response in status register: %s',
                 repr(response))
    register.clear()

    return response
def _match_registers(self, query)
Tries to match in status registers :param query: message tuple :type query: Tuple[bytes] :return: response if found or None :rtype: Tuple[bytes] | None
6.71451
4.900095
1.370281
if query in self._error_queues:
    queue = self._error_queues[query]
    response = queue.value
    logger.debug('Found response in error queue: %s',
                 repr(response))

    return response
def _match_errors_queues(self, query)
Tries to match in error queues :param query: message tuple :type query: Tuple[bytes] :return: response if found or None :rtype: Tuple[bytes] | None
5.196151
4.665156
1.113821
if device.resource_name is not None:
    msg = 'The device %r is already assigned to %s'
    raise ValueError(msg % (device, device.resource_name))

device.resource_name = resource_name
self._internal[device.resource_name] = device
def add_device(self, resource_name, device)
Bind device to resource name
3.745707
3.333735
1.123577
result = super(SequenceCursorPagination, self).get_ordering(*args, **kwargs)

# Because paginate_queryset sets self.ordering after reading it...we
# need to only modify it sometimes. (This allows re-use of the
# paginator, which probably only happens in tests.)
if result[0] != '#':
    result = ('#', ) + result

return result
def get_ordering(self, *args, **kwargs)
Take whatever the expected ordering is and prepend ordering by QuerySet number ('#').
12.158984
11.243523
1.081421
# Get the QuerySet number of the current instance.
qs_order = getattr(instance, '#')

# Strip the '#' and call the standard _get_position_from_instance.
result = super(SequenceCursorPagination, self)._get_position_from_instance(
    instance, ordering[1:])

# Return a tuple of these two elements.
return (qs_order, result)
def _get_position_from_instance(self, instance, ordering)
The position will be a tuple of values: The QuerySet number inside of the QuerySetSequence. Whatever the normal value taken from the ordering property gives.
7.343938
5.828568
1.25999
# Determine if we have a cursor, and if so then decode it.
encoded = request.query_params.get(self.cursor_query_param)
if encoded is None:
    return None

try:
    querystring = b64decode(encoded.encode('ascii')).decode('ascii')
    tokens = urlparse.parse_qs(querystring, keep_blank_values=True)

    offset = tokens.get('o', ['0'])[0]
    offset = _positive_int(offset, cutoff=self.offset_cutoff)

    reverse = tokens.get('r', ['0'])[0]
    reverse = bool(int(reverse))

    # The difference. Don't get just the 0th entry: get all entries.
    position = tokens.get('p', None)
except (TypeError, ValueError):
    raise NotFound(self.invalid_cursor_message)

return Cursor(offset=offset, reverse=reverse, position=position)
def decode_cursor(self, request)
Given a request with a cursor, return a `Cursor` instance. Differs from the standard CursorPagination to handle a tuple in the position field.
3.719568
3.621684
1.027027
assert len(it1) == len(it2), \
    "Can not element-wise multiply iterables of different length."
return list(map(mul, it1, it2))
def multiply_iterables(it1, it2)
Element-wise multiplication of two iterables.
3.9805
3.464877
1.148814
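A one-line usage check (assuming `multiply_iterables` as defined above):

# Elements are paired positionally and multiplied.
assert multiply_iterables([1, -1, 2], [3, 4, 5]) == [3, -4, 10]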
if isinstance(value1, Model) and isinstance(value2, Model):
    field_names = value1._meta.ordering

    # Assert that the ordering is the same between different models.
    if field_names != value2._meta.ordering:
        valid_field_names = (set(cls._get_field_names(value1)) &
                             set(cls._get_field_names(value2)))
        raise FieldError(
            "Ordering differs between models. Choices are: %s" %
            ', '.join(valid_field_names))

    # By default, order by the pk.
    if not field_names:
        field_names = ['pk']

    # TODO Figure out if we don't need to generate this comparator every
    # time.
    return cls._generate_comparator(field_names)(value1, value2)

return cmp(value1, value2)
def _cmp(cls, value1, value2)
Comparison method that takes into account Django's special rules when ordering by a field that is a model: 1. Try following the default ordering on the related model. 2. Order by the model's primary key, if there is no Meta.ordering.
3.524255
3.527294
0.999139
# Ensure that field names is a list and not a tuple.
field_names = list(field_names)

# For fields that start with a '-', reverse the ordering of the
# comparison.
reverses = [1] * len(field_names)
for i, field_name in enumerate(field_names):
    if field_name[0] == '-':
        reverses[i] = -1
        field_names[i] = field_name[1:]

field_names = [f.replace(LOOKUP_SEP, '.') for f in field_names]

def comparator(i1, i2):
    # Get a tuple of values for comparison.
    v1 = attrgetter(*field_names)(i1)
    v2 = attrgetter(*field_names)(i2)

    # If there's only one arg supplied, attrgetter returns a single
    # item, directly return the result in this case.
    if len(field_names) == 1:
        return cls._cmp(v1, v2) * reverses[0]

    # Compare each field for the two items, reversing if necessary.
    order = multiply_iterables(list(map(cls._cmp, v1, v2)), reverses)

    try:
        # The first non-zero element.
        return next(dropwhile(__not__, order))
    except StopIteration:
        # Everything was equivalent.
        return 0

return comparator
def _generate_comparator(cls, field_names)
Construct a comparator function based on the field names. The comparator returns the first non-zero comparison value. Inputs: field_names (iterable of strings): The field names to sort on. Returns: A comparator function.
3.554152
3.686978
0.963974
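A hedged sketch of the comparator outside Django, using plain objects (SimpleNamespace) so `cls._cmp` falls through to the scalar `cmp` branch; `QuerySetSequence` is assumed here as the host class:

from types import SimpleNamespace

a = SimpleNamespace(age=30, name='alice')
b = SimpleNamespace(age=25, name='bob')

# '-age' sorts descending, so a (age 30) should come first.
cmp_fn = QuerySetSequence._generate_comparator(['-age', 'name'])
assert cmp_fn(a, b) < 0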
# A list of tuples, each with:
# * The iterable
# * The QuerySet number
# * The next value
#
# (Remember that each QuerySet is already sorted.)
iterables = []
for i, qs in zip(self._queryset_idxs, self._querysets):
    it = iter(qs)
    try:
        value = next(it)
    except StopIteration:
        # If this is already empty, just skip it.
        continue
    # Set the QuerySet number so that the comparison works properly.
    setattr(value, '#', i)
    iterables.append((it, i, value))

# The offset of items returned.
index = 0

# Create a comparison function based on the requested ordering.
_comparator = self._generate_comparator(self._order_by)

def comparator(tuple_1, tuple_2):
    # The last element in each tuple is the actual item to compare.
    return _comparator(tuple_1[2], tuple_2[2])

comparator = functools.cmp_to_key(comparator)

# If in reverse mode, get the last value instead of the first value from
# ordered_values below.
if self._standard_ordering:
    next_value_ind = 0
else:
    next_value_ind = -1

# Continue until all iterables are empty.
while iterables:
    # If there's only one iterator left, don't bother sorting.
    if len(iterables) > 1:
        # Sort the current values for each iterable.
        iterables = sorted(iterables, key=comparator)

        # The next ordering item is in the first position, unless we're
        # in reverse mode.
        it, i, value = iterables[next_value_ind]
    else:
        it, i, value = iterables[0]

    # Return the next value if we're within the slice of interest.
    if self._low_mark <= index:
        yield value
    index += 1

    # We've left the slice of interest, we're done.
    if index == self._high_mark:
        return

    # Iterate the iterable that just lost a value.
    try:
        value = next(it)
        # Set the QuerySet number so that the comparison works properly.
        setattr(value, '#', i)
        iterables[next_value_ind] = it, i, value
    except StopIteration:
        # This iterator is done, remove it.
        del iterables[next_value_ind]
def _ordered_iterator(self)
Interleave the values of each QuerySet in order to handle the requested ordering. Also adds the '#' property to each returned item.
3.88498
3.764018
1.032136
for i, qs in zip(self._queryset_idxs, self._querysets):
    for item in qs:
        setattr(item, '#', i)
        yield item
def _unordered_iterator(self)
Return the value of each QuerySet, but also add the '#' property to each returned item.
10.121241
5.013074
2.018969
# Ensure negate is a boolean.
negate = bool(negate)

for kwarg, value in kwargs.items():
    parts = kwarg.split(LOOKUP_SEP)

    # Ensure this is being used to filter QuerySets.
    if parts[0] != '#':
        raise ValueError("Keyword '%s' is not a valid keyword to filter over, "
                         "it must begin with '#'." % kwarg)

    # Don't allow __ multiple times.
    if len(parts) > 2:
        raise ValueError("Keyword '%s' must not contain multiple "
                         "lookup separators." % kwarg)

    # The actual lookup is the second part.
    try:
        lookup = parts[1]
    except IndexError:
        lookup = 'exact'

    # Math operators that all have the same logic.
    LOOKUP_TO_OPERATOR = {
        'exact': eq,
        'iexact': eq,
        'gt': gt,
        'gte': ge,
        'lt': lt,
        'lte': le,
    }
    try:
        operator = LOOKUP_TO_OPERATOR[lookup]

        # These expect integers, this matches the logic in
        # IntegerField.get_prep_value(). (Essentially treat the '#'
        # field as an IntegerField.)
        if value is not None:
            value = int(value)

        self._queryset_idxs = filter(
            lambda i: operator(i, value) != negate, self._queryset_idxs)
        continue
    except KeyError:
        # It wasn't one of the above operators, keep trying.
        pass

    # Some of these seem to get handled as bytes.
    if lookup in ('contains', 'icontains'):
        value = six.text_type(value)
        self._queryset_idxs = filter(
            lambda i: (value in six.text_type(i)) != negate,
            self._queryset_idxs)

    elif lookup == 'in':
        self._queryset_idxs = filter(
            lambda i: (i in value) != negate, self._queryset_idxs)

    elif lookup in ('startswith', 'istartswith'):
        value = six.text_type(value)
        self._queryset_idxs = filter(
            lambda i: six.text_type(i).startswith(value) != negate,
            self._queryset_idxs)

    elif lookup in ('endswith', 'iendswith'):
        value = six.text_type(value)
        self._queryset_idxs = filter(
            lambda i: six.text_type(i).endswith(value) != negate,
            self._queryset_idxs)

    elif lookup == 'range':
        # Inclusive include.
        start, end = value
        self._queryset_idxs = filter(
            lambda i: (start <= i <= end) != negate, self._queryset_idxs)

    else:
        # Any other field lookup is not supported, e.g. date, year, month,
        # day, week_day, hour, minute, second, isnull, search, regex, and
        # iregex.
        raise ValueError("Unsupported lookup '%s'" % lookup)

# Convert back to a list on Python 3.
self._queryset_idxs = list(self._queryset_idxs)

# Finally, keep only the QuerySets we care about!
self._querysets = [self._querysets[i] for i in self._queryset_idxs]
def _filter_or_exclude_querysets(self, negate, **kwargs)
Similar to QuerySet._filter_or_exclude, but run over the QuerySets in the QuerySetSequence instead of over each QuerySet's fields.
2.898537
2.877826
1.007197
self.reader.decoder.register(key_or_tag, f_val)
def register(self, key_or_tag, f_val)
Register a custom transit tag and decoder/parser function for use during reads.
6.955163
5.183164
1.341876
for o in self.reader.loadeach(stream):
    yield o
def readeach(self, stream, **kwargs)
Temporary hook for API while streaming reads are in experimental phase. Read each object from stream as available with generator. JSON blocks indefinitely waiting on JSON entities to arrive. MsgPack requires unpacker property to be fed stream using unpacker.feed() method.
18.121798
11.820041
1.533142
if is_cache_key(name) and (name in self.key_to_value):
    return self.key_to_value[name]
return self.encache(name) if is_cacheable(name, as_map_key) else name
def decode(self, name, as_map_key=False)
Return the cached value when name is a cache key; otherwise return the name, caching it if cacheable.
4.63131
4.481937
1.033328
if name in self.key_to_value:
    return self.key_to_value[name]
return self.encache(name) if is_cacheable(name, as_map_key) else name
def encode(self, name, as_map_key=False)
Returns the name the first time and the key after that
4.280663
4.034328
1.06106
chunk = stream.read(1)
while chunk in SKIP:
    chunk = stream.read(1)
if chunk == "\"":
    chunk += stream.read(1)
    while not chunk.endswith("\""):
        if chunk[-1] == ESCAPE:
            chunk += stream.read(2)
        else:
            chunk += stream.read(1)
return chunk
def read_chunk(stream)
Ignore whitespace outside of strings. If we hit a string, read it in its entirety.
3.009464
2.85209
1.055178
for s in yield_json(stream):
    yield json.loads(s, **kwargs)
def items(stream, **kwargs)
External facing items. Will return item from stream as available. Currently waits in loop waiting for next item. Can pass keywords that json.loads accepts (such as object_pairs_hook)
8.745399
8.741602
1.000434
buff = u"" arr_count = 0 obj_count = 0 while True: buff += read_chunk(stream) # If we finish parsing all objs or arrays, yield a finished JSON # entity. if buff.endswith('{'): obj_count += 1 if buff.endswith('['): arr_count += 1 if buff.endswith(']'): arr_count -= 1 if obj_count == arr_count == 0: json_item = copy(buff) buff = u"" yield json_item if buff.endswith('}'): obj_count -= 1 if obj_count == arr_count == 0: json_item = copy(buff) buff = u"" yield json_item
def yield_json(stream)
Uses array and object delimiter counts for balancing.
3.110245
2.861623
1.086881
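A hedged round-trip through the streaming helpers (`read_chunk`, `yield_json`, `items` as above). Note the docstring's caveat: the generator waits for more input at end of stream, so pull items explicitly rather than exhausting it:

from io import StringIO

stream = StringIO('{"a": 1} [2, 3]')
gen = items(stream)           # items -> yield_json -> read_chunk
print(next(gen))              # {'a': 1}
print(next(gen))              # [2, 3]
# A further next(gen) would wait for more JSON to arrive, per the docstring.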
for x in m.keys():
    if len(self.handlers[x].tag(x)) != 1:
        return False
return True
def are_stringable_keys(self, m)
Test whether the keys within a map are stringable: a simple map that can be optimized and whose keys can be cached.
7.444247
8.01079
0.929278
handler = self.handlers[obj]
tag = handler.tag(obj)
f = marshal_dispatch.get(tag)

if f:
    f(self, obj,
      handler.string_rep(obj) if as_map_key else handler.rep(obj),
      as_map_key, cache)
else:
    self.emit_encoded(tag, handler, obj, as_map_key, cache)
def marshal(self, obj, as_map_key, cache)
Marshal an individual obj, potentially as part of another container object (like a list/dictionary/etc). Specify if this object is a key to a map/dict, and pass in the current cache being used. This method should only be called by a top-level marshalling call and should not be considered an entry-point for integration.
4.173525
4.242138
0.983826
if not cache:
    cache = RollingCache()

handler = self.handlers[obj]

tag = handler.tag(obj)

if tag:
    if len(tag) == 1:
        self.marshal(TaggedValue(QUOTE, obj), False, cache)
    else:
        self.marshal(obj, False, cache)
    self.flush()
else:
    raise AssertionError("Handler must provide a non-nil tag: " + str(handler))
def marshal_top(self, obj, cache=None)
Given a complete object that needs to be marshaled into Transit data, and optionally a cache, dispatch accordingly, and flush the data directly into the IO stream.
6.21953
5.82721
1.067325
if self.are_stringable_keys(rep):
    return self.emit_map(rep, as_map_key, cache)
return self.emit_cmap(rep, as_map_key, cache)
def dispatch_map(self, rep, as_map_key, cache)
Used to determine and dispatch the writing of a map - a simple map with strings as keys, or a complex map, whose keys are also compound types.
4.821033
4.500688
1.071177
self.emit_array_start(None)
self.marshal(MAP_AS_ARR, False, cache)
for k, v in m.items():
    self.marshal(k, True, cache)
    self.marshal(v, False, cache)
self.emit_array_end()
def emit_map(self, m, _, cache)
Emits a map as an array, as per the default JSON spec.
4.221272
3.610813
1.169064
if not cache:
    cache = RollingCache()
return self._decode(node, cache, as_map_key)
def decode(self, node, cache=None, as_map_key=False)
Given a node of data (any supported decodeable obj - string, dict, list), return the decoded object. Optionally set the current decode cache [None]. If None, a new RollingCache is instantiated and used. You may also hint to the decoder that this node is to be treated as a map key [False]. This is used internally.
4.891914
2.816541
1.736852
if node:
    if node[0] == MAP_AS_ARR:
        # key must be decoded before value for caching to work.
        returned_dict = {}
        for k, v in pairs(node[1:]):
            key = self._decode(k, cache, True)
            val = self._decode(v, cache, as_map_key)
            returned_dict[key] = val
        return transit_types.frozendict(returned_dict)

    decoded = self._decode(node[0], cache, as_map_key)
    if isinstance(decoded, Tag):
        return self.decode_tag(decoded.tag,
                               self._decode(node[1], cache, as_map_key))
return tuple(self._decode(x, cache, as_map_key) for x in node)
def decode_list(self, node, cache, as_map_key)
Special case decodes map-as-array. Otherwise lists are treated as Python lists. Arguments follow the same convention as the top-level 'decode' function.
3.899259
3.676497
1.060591
if is_cache_key(string):
    return self.parse_string(cache.decode(string, as_map_key),
                             cache, as_map_key)
if is_cacheable(string, as_map_key):
    cache.encode(string, as_map_key)
return self.parse_string(string, cache, as_map_key)
def decode_string(self, string, cache, as_map_key)
Decode a string - arguments follow the same convention as the top-level 'decode' function.
2.812388
2.903454
0.968635
if key_or_tag == "default_decoder":
    self.options["default_decoder"] = obj
else:
    self.decoders[key_or_tag] = obj
def register(self, key_or_tag, obj)
Register a custom Transit tag and new parsing function with the decoder. Also, you can optionally set the 'default_decoder' with this function. Your new tag and parse/decode function will be added to the internal dictionary of decoders for this Decoder object.
4.017104
2.625591
1.529981
if isinstance(u, pyversion.string_types):
    return uuid.UUID(u)

# hack to remove signs
a = ctypes.c_ulong(u[0])
b = ctypes.c_ulong(u[1])
combined = a.value << 64 | b.value
return uuid.UUID(int=combined)
def from_rep(u)
Given a string (or a pair of 64-bit integers), return a UUID object.
5.395499
4.896626
1.101881
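The two-integer branch reassembles a UUID from (hi, lo) 64-bit halves; the ctypes.c_ulong cast strips sign bits, though note that c_ulong is only 32 bits wide on some platforms (c_uint64 would be the safer spelling). A hedged round-trip in plain Python:

import uuid

u = uuid.UUID('12345678-1234-5678-1234-567812345678')
hi = u.int >> 64                 # top 64 bits
lo = u.int & ((1 << 64) - 1)     # bottom 64 bits
assert uuid.UUID(int=(hi << 64) | lo) == u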
in_port = self.match.get_field(OxmOfbMatchField.OFPXMT_OFB_IN_PORT)
return int.from_bytes(in_port, 'big')
def in_port(self)
Retrieve the 'in_port' that generated the PacketIn. This method will look for the OXM_TLV with type OFPXMT_OFB_IN_PORT on the `oxm_match_fields` field from `match` field and return its value, if the OXM exists. Returns: The integer number of the 'in_port' that generated the PacketIn if it exists. Otherwise return None.
3.311373
3.509081
0.943658
begin = offset
for attribute_name, class_attribute in self.get_class_attributes():
    if type(class_attribute).__name__ != "Header":
        attribute = deepcopy(class_attribute)
        if attribute_name == 'actions':
            length = self.actions_len.value
            attribute.unpack(buff[begin:begin+length])
        else:
            attribute.unpack(buff, begin)
        setattr(self, attribute_name, attribute)
        begin += attribute.get_size()
def unpack(self, buff, offset=0)
Unpack a binary message into this object's attributes. Unpack the binary value *buff* and update this object attributes based on the results. It is an inplace method and it receives the binary data of the message **without the header**. This class' unpack method is like the :meth:`.GenericMessage.unpack` one, except for the ``actions`` attribute which has a length determined by the ``actions_len`` attribute. Args: buff (bytes): Binary data package to be unpacked, without the header. offset (int): Where to begin unpacking.
4.135
3.591106
1.151456
if isinstance(self.actions, ListOfActions):
    self.actions_len = self.actions.get_size()
else:
    self.actions_len = ListOfActions(self.actions).get_size()
def _update_actions_len(self)
Update the actions_len field based on actions value.
3.215125
2.703334
1.189319
is_valid_range = self.in_port > 0 and self.in_port <= Port.OFPP_MAX
is_valid_virtual_in_ports = self.in_port in _VIRT_IN_PORTS

if (is_valid_range or is_valid_virtual_in_ports) is False:
    raise ValidationError(f'{self.in_port} is not a valid input port.')
def _validate_in_port(self)
Validate in_port attribute. A valid port is either: * Greater than 0 and less than or equals to Port.OFPP_MAX * One of the valid virtual ports: Port.OFPP_LOCAL, Port.OFPP_CONTROLLER or Port.OFPP_NONE Raises: ValidationError: If in_port is an invalid port.
3.413191
2.902176
1.17608
backup = self.body
if not value:
    value = self.body
if hasattr(value, 'pack'):
    self.body = value.pack()
stats_request_packed = super().pack()

self.body = backup
return stats_request_packed
def pack(self, value=None)
Pack according to :attr:`body_type`. Make `body` a binary pack before packing this object. Then, restore body.
5.541819
5.137377
1.078725
super().unpack(buff)
class_name = self._get_body_class()
buff = self.body.value
self.body = FixedTypeList(pyof_class=class_name)
self.body.unpack(buff)
def unpack(self, buff, offset=0)
Unpack according to :attr:`body_type`.
8.697592
7.293689
1.192482
if self.value <= 1:
    return InstructionsProperty
elif self.value <= 3:
    return NextTablesProperty
elif self.value <= 7:
    return ActionsProperty
return OxmProperty
def find_class(self)
Return a class related to this type.
13.050096
11.625028
1.122586
property_type = UBInt16(enum_ref=TableFeaturePropType)
property_type.unpack(buff, offset)
self.__class__ = TableFeaturePropType(property_type.value).find_class()

length = UBInt16()
length.unpack(buff, offset=offset+2)

super().unpack(buff[:offset+length.value], offset=offset)
def unpack(self, buff=None, offset=0)
Unpack *buff* into this object. This method will convert a binary data into a readable value according to the attribute format. Args: buff (bytes): Binary buffer. offset (int): Where to begin unpacking. Raises: :exc:`~.exceptions.UnpackException`: If unpack fails.
5.249997
6.78482
0.773786
length = UBInt16()
length.unpack(buff, offset)
super().unpack(buff[:offset+length.value], offset)
def unpack(self, buff=None, offset=0)
Unpack *buff* into this object. This method will convert a binary data into a readable value according to the attribute format. Args: buff (bytes): Binary buffer. offset (int): Where to begin unpacking. Raises: :exc:`~.exceptions.UnpackException`: If unpack fails.
5.09879
8.35833
0.610025
message_type = str(message_type)

if message_type not in MESSAGE_TYPES:
    raise ValueError('"{}" is not known.'.format(message_type))

message_class = MESSAGE_TYPES.get(message_type)
message_instance = message_class()

return message_instance
def new_message_from_message_type(message_type)
Given an OpenFlow Message Type, return an empty message of that type. Args: message_type (:class:`~pyof.v0x01.common.header.Type`): Python-openflow message. Returns: Empty OpenFlow message of the requested message type. Raises: KytosUndefinedMessageType: Unknown Message_Type.
2.723369
3.665001
0.743075
message_type = header.message_type
if not isinstance(message_type, Type):
    try:
        if isinstance(message_type, str):
            message_type = Type[message_type]
        elif isinstance(message_type, int):
            message_type = Type(message_type)
    except ValueError:
        raise ValueError

message = new_message_from_message_type(message_type)
message.header.xid = header.xid
message.header.length = header.length

return message
def new_message_from_header(header)
Given an OF Header, return an empty message of header's message_type. Args: header (~pyof.v0x01.common.header.Header): Unpacked OpenFlow Header. Returns: Empty OpenFlow message of the same type of message_type attribute from the given header. The header attribute of the message will be populated. Raises: KytosUndefinedMessageType: Unknown Message_Type.
2.415435
2.42661
0.995395
hdr_size = Header().get_size()
hdr_buff, msg_buff = buffer[:hdr_size], buffer[hdr_size:]
header = Header()
header.unpack(hdr_buff)
message = new_message_from_header(header)
message.unpack(msg_buff)
return message
def unpack_message(buffer)
Unpack the whole buffer, including header pack. Args: buffer (bytes): Bytes representation of an OpenFlow message. Returns: object: Instance of OpenFlow message.
2.901048
3.761763
0.771194
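A hedged usage sketch, assuming python-openflow's v0x01 Hello message is importable; pack a message, then feed the raw bytes back through `unpack_message`:

from pyof.v0x01.symmetric.hello import Hello

msg = Hello(xid=42)
wire_bytes = msg.pack()              # header + (empty) body
same = unpack_message(wire_bytes)    # header is split off and parsed first
assert same.header.xid == 42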
band_type = UBInt16(enum_ref=MeterBandType)
band_type.unpack(buff, offset)
self.__class__ = MeterBandType(band_type.value).find_class()

length = UBInt16()
length.unpack(buff, offset=offset+2)

super().unpack(buff[:offset+length.value], offset)
def unpack(self, buff=None, offset=0)
Unpack *buff* into this object. This method will convert a binary data into a readable value according to the attribute format. Args: buff (bytes): Binary buffer. offset (int): Where to begin unpacking. Raises: :exc:`~.exceptions.UnpackException`: If unpack fails.
5.377796
6.651429
0.808517
buff = self.body
if not value:
    value = self.body

if value:
    if isinstance(value, (list, FixedTypeList)):
        obj = self._get_body_instance()
        obj.extend(value)
    elif hasattr(value, 'pack'):
        obj = value

    self.body = obj.pack()

multipart_packed = super().pack()
self.body = buff

return multipart_packed
def pack(self, value=None)
Pack a StatsReply using the object's attributes. This method packs the attributes body and multipart_type before packing the StatsReply object, and then returns the struct as binary data. Returns: stats_reply_packed (bytes): Binary data with StatsReply packed.
5.802858
5.345149
1.085631
super().unpack(buff[offset:])
self._unpack_body()
def unpack(self, buff, offset=0)
Unpack a binary message into this object's attributes. Unpack the binary value *buff* and update this object attributes based on the results. It is an inplace method and it receives the binary data of the message **without the header**. This class' unpack method is like the :meth:`.GenericMessage.unpack` one, except for the ``body`` attribute which has its type determined by the ``multipart_type`` attribute. Args: buff (bytes): Binary data package to be unpacked, without the header.
13.941381
16.626034
0.838527
obj = self._get_body_instance()
obj.unpack(self.body.value)
self.body = obj
def _unpack_body(self)
Unpack `body` and replace it with the result.
6.626363
5.473217
1.210689
exp_header = ExperimenterMultipartHeader
simple_body = {MultipartType.OFPMP_DESC: Desc,
               MultipartType.OFPMP_GROUP_FEATURES: GroupFeatures,
               MultipartType.OFPMP_METER_FEATURES: MeterFeatures,
               MultipartType.OFPMP_EXPERIMENTER: exp_header}

array_of_bodies = {MultipartType.OFPMP_FLOW: FlowStats,
                   MultipartType.OFPMP_AGGREGATE: AggregateStatsReply,
                   MultipartType.OFPMP_TABLE: TableStats,
                   MultipartType.OFPMP_PORT_STATS: PortStats,
                   MultipartType.OFPMP_QUEUE: QueueStats,
                   MultipartType.OFPMP_GROUP: GroupStats,
                   MultipartType.OFPMP_GROUP_DESC: GroupDescStats,
                   MultipartType.OFPMP_METER: MeterStats,
                   MultipartType.OFPMP_METER_CONFIG: MeterConfig,
                   MultipartType.OFPMP_TABLE_FEATURES: TableFeatures,
                   MultipartType.OFPMP_PORT_DESC: Port}

if isinstance(self.multipart_type, (int, UBInt16)):
    self.multipart_type = self.multipart_type.enum_ref(
        self.multipart_type.value)

pyof_class = simple_body.get(self.multipart_type, None)
if pyof_class:
    return pyof_class()

array_of_class = array_of_bodies.get(self.multipart_type, None)
if array_of_class:
    return FixedTypeList(pyof_class=array_of_class)

return BinaryData(b'')
def _get_body_instance(self)
Return the body instance.
2.42622
2.406788
1.008074
unpack_length = UBInt16()
unpack_length.unpack(buff, offset)
# .value gives the plain int needed as a slice bound.
super().unpack(buff[:offset+unpack_length.value], offset)
def unpack(self, buff, offset=0)
Unpack a binary message into this object's attributes. Pass the correct length for list unpacking. Args: buff (bytes): Binary data package to be unpacked. offset (int): Where to begin unpacking.
5.987797
7.714731
0.776151
length = UBInt16()
length.unpack(buff, offset)
length.unpack(buff, offset=offset+MeterStats.meter_id.get_size())
super().unpack(buff[:offset+length.value], offset=offset)
def unpack(self, buff=None, offset=0)
Unpack *buff* into this object. This method will convert a binary data into a readable value according to the attribute format. Args: buff (bytes): Binary buffer. offset (int): Where to begin unpacking. Raises: :exc:`~.exceptions.UnpackException`: If unpack fails.
7.673166
10.671535
0.719031
classes = {1: InstructionGotoTable,
           2: InstructionWriteMetadata,
           3: InstructionWriteAction,
           4: InstructionApplyAction,
           5: InstructionClearAction,
           6: InstructionMeter}
return classes.get(self.value, None)
def find_class(self)
Return a class related to this type.
8.286574
8.202942
1.010195
instruction_type = UBInt16(enum_ref=InstructionType)
instruction_type.unpack(buff, offset)
self.__class__ = InstructionType(instruction_type.value).find_class()

length = UBInt16()
length.unpack(buff, offset=offset+2)

super().unpack(buff[:offset+length.value], offset)
def unpack(self, buff=None, offset=0)
Unpack *buff* into this object. This method will convert a binary data into a readable value according to the attribute format. Args: buff (bytes): Binary buffer. offset (int): Where to begin unpacking. Raises: :exc:`~.exceptions.UnpackException`: If unpack fails.
4.868726
5.87558
0.828637
super().unpack(buff, offset)

# Recover field from field_and_hasmask.
try:
    self.oxm_field = self._unpack_oxm_field()
except ValueError as exception:
    raise UnpackException(exception)

# The last bit of field_and_mask is oxm_hasmask
self.oxm_hasmask = (self.oxm_field_and_mask & 1) == 1  # as boolean

# Unpack oxm_value that has oxm_length bytes
start = offset + 4  # 4 bytes: class, field_and_mask and length
end = start + self.oxm_length
self.oxm_value = buff[start:end]
def unpack(self, buff, offset=0)
Unpack the buffer into a OxmTLV. Args: buff (bytes): The binary data to be unpacked. offset (int): If we need to shift the beginning of the data.
5.329489
4.950391
1.076579
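The `oxm_field_and_mask` byte packs the 7-bit OXM field into the high bits and the hasmask flag into the least-significant bit, which is what the unpack above decodes. A small standalone sketch of both directions (values hypothetical):

# Pack: 7-bit field in the high bits, hasmask flag in bit 0.
oxm_field, oxm_hasmask = 10, True
field_and_mask = (oxm_field << 1) | int(oxm_hasmask)

# Unpack: mirror of the logic in OxmTLV.unpack.
assert field_and_mask >> 1 == 10
assert (field_and_mask & 1) == 1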