code: string
signature: string
docstring: string
loss_without_docstring: float64
loss_with_docstring: float64
factor: float64
m = Map(features=self._features, width=self._width, height=self._height, **self._attrs) m._folium_map = self._folium_map return m
def copy(self)
Copies the current Map into a new one and returns it.
7.53409
5.977267
1.260457
bounds = self._autobounds() attrs = {} midpoint = lambda a, b: (a + b)/2 attrs['location'] = ( midpoint(bounds['min_lat'], bounds['max_lat']), midpoint(bounds['min_lon'], bounds['max_lon']) ) # self._folium_map.fit_bounds( # [bounds['min_long'], bounds['min_lat']], # [bounds['max_long'], bounds['max_lat']] # ) # remove the following with new Folium release # rough approximation, assuming max_zoom is 18 import math try: lat_diff = bounds['max_lat'] - bounds['min_lat'] lon_diff = bounds['max_lon'] - bounds['min_lon'] area, max_area = lat_diff*lon_diff, 180*360 if area: factor = 1 + max(0, 1 - self._width/1000)/2 + max(0, 1-area**0.5)/2 zoom = math.log(area/max_area)/-factor else: zoom = self._default_zoom zoom = max(1, min(18, round(zoom))) attrs['zoom_start'] = zoom except ValueError as e: raise Exception('Check that your locations are lat-lon pairs', e) return attrs
def _autozoom(self)
Calculate zoom and location.
4.076408
3.988733
1.021981
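To make the zoom heuristic above concrete, here is a standalone sketch; the helper name approximate_zoom and the default width of 960 pixels are assumptions for illustration, not part of the library:

import math

def approximate_zoom(bounds, width=960, default_zoom=12, max_zoom=18):
    # Rough zoom estimate from lat/lon bounds, mirroring the heuristic in _autozoom.
    lat_diff = bounds['max_lat'] - bounds['min_lat']
    lon_diff = bounds['max_lon'] - bounds['min_lon']
    area, max_area = lat_diff * lon_diff, 180 * 360
    if area:
        factor = 1 + max(0, 1 - width / 1000) / 2 + max(0, 1 - area ** 0.5) / 2
        zoom = math.log(area / max_area) / -factor
    else:
        zoom = default_zoom
    return max(1, min(max_zoom, round(zoom)))

# Example: a bounding box roughly covering the San Francisco Bay Area -> zoom 11.
print(approximate_zoom({'min_lat': 37.2, 'max_lat': 38.2,
                        'min_lon': -122.8, 'max_lon': -121.6}))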
bounds = {} def check(prop, compare, extreme, val): opp = min if compare is max else max bounds.setdefault(prop, val) bounds[prop] = opp(compare(bounds[prop], val), extreme) def bound_check(lat_lon): lat, lon = lat_lon check('max_lat', max, 90, lat) check('min_lat', min, -90, lat) check('max_lon', max, 180, lon) check('min_lon', min, -180, lon) lat_lons = [lat_lon for feature in self._features.values() for lat_lon in feature.lat_lons] if not lat_lons: lat_lons.append(self._default_lat_lon) for lat_lon in lat_lons: bound_check(lat_lon) return bounds
def _autobounds(self)
Simple calculation for bounds.
2.856877
2.773575
1.030034
attrs = self._attrs.copy() attrs.update({'width': self._width, 'height': self._height}) attrs.update(kwargs) return Map(self._features, **attrs)
def format(self, **kwargs)
Apply formatting.
4.971794
4.421539
1.124449
return { "type": "FeatureCollection", "features": [f.geojson(i) for i, f in self._features.items()] }
def geojson(self)
Render features as a FeatureCollection.
3.329526
2.749312
1.211039
# Set values and ids to both be simple sequences by inspecting values id_name, value_name = 'IDs', 'values' if isinstance(values, collections.abc.Mapping): assert not ids, 'IDs and a map cannot both be used together' if hasattr(values, 'columns') and len(values.columns) == 2: table = values ids, values = table.columns id_name, value_name = table.labels else: dictionary = values ids, values = list(dictionary.keys()), list(dictionary.values()) if len(ids) != len(values): assert len(ids) == 0 # Use indices as IDs ids = list(range(len(values))) m = self._create_map() data = pandas.DataFrame({id_name: ids, value_name: values}) attrs = { 'geo_str': json.dumps(self.geojson()), 'data': data, 'columns': [id_name, value_name], 'key_on': key_on, 'fill_color': palette, } kwargs.update(attrs) m.geo_json(**kwargs) colored = self.format() colored._folium_map = m return colored
def color(self, values, ids=(), key_on='feature.id', palette='YlOrBr', **kwargs)
Color map features by binning values. values -- a sequence of values or a table of keys and values ids -- an ID for each value; if none are provided, indices are used key_on -- attribute of each feature to match to ids palette -- one of the following color brewer palettes: 'BuGn', 'BuPu', 'GnBu', 'OrRd', 'PuBu', 'PuBuGn', 'PuRd', 'RdPu', 'YlGn', 'YlGnBu', 'YlOrBr', and 'YlOrRd'. Defaults from Folium: threshold_scale: list, default None Data range for D3 threshold scale. Defaults to the following range of quantiles: [0, 0.5, 0.75, 0.85, 0.9], rounded to the nearest order-of-magnitude integer. Ex: 270 rounds to 200, 5600 to 6000. fill_opacity: float, default 0.6 Area fill opacity, range 0-1. line_color: string, default 'black' GeoJSON geopath line color. line_weight: int, default 1 GeoJSON geopath line weight. line_opacity: float, default 1 GeoJSON geopath line opacity, range 0-1. legend_name: string, default None Title for data legend. If not passed, defaults to columns[1].
4.638939
4.380137
1.059085
result = self.copy() if type(feature) == Table: # if table of features e.g. Table.from_records(taz_map.features) if 'feature' in feature: feature = feature['feature'] # if marker table e.g. table with columns: latitudes,longitudes,popup,color,radius else: feature = Circle.map_table(feature) if type(feature) in [list, np.ndarray]: for f in feature: f._attrs['fill_color'] = color f._attrs['fill_opacity'] = opacity f.draw_on(result._folium_map) elif type(feature) == Map: for i in range(len(feature._features)): f = feature._features[i] f._attrs['fill_color'] = color f._attrs['fill_opacity'] = opacity f.draw_on(result._folium_map) elif type(feature) == Region: feature._attrs['fill_color'] = color feature._attrs['fill_opacity'] = opacity feature.draw_on(result._folium_map) return result
def overlay(self, feature, color='Blue', opacity=0.6)
Overlays ``feature`` on the map. Returns a new Map. Args: ``feature``: a ``Table`` of map features, a list of map features, a Map, a Region, or a circle marker map table. The features will be overlaid on the Map with the specified ``color``. ``color`` (``str``): Color of feature. Defaults to 'Blue'. ``opacity`` (``float``): Opacity of the overlaid feature. Defaults to 0.6. Returns: A new ``Map`` with the overlaid ``feature``.
3.416663
3.084132
1.10782
assert path_or_json_or_string data = None if isinstance(path_or_json_or_string, (dict, list)): data = path_or_json_or_string try: data = json.loads(path_or_json_or_string) except ValueError: pass try: path = path_or_json_or_string if path.endswith('.gz') or path.endswith('.gzip'): import gzip contents = gzip.open(path, 'r').read().decode('utf-8') else: contents = open(path, 'r').read() data = json.loads(contents) except FileNotFoundError: pass # TODO web address assert data, 'MapData accepts a valid geoJSON object, geoJSON string, or path to a geoJSON file' return cls(cls._read_geojson_features(data))
def read_geojson(cls, path_or_json_or_string)
Read a geoJSON string, object, or file. Return a dict of features keyed by ID.
2.6692
2.508376
1.064115
if features is None: features = collections.OrderedDict() for i, feature in enumerate(data['features']): key = feature.get('id', prefix + str(i)) feature_type = feature['geometry']['type'] if feature_type == 'FeatureCollection': _read_geojson_features(feature, features, prefix + '.' + key) elif feature_type == 'Point': value = Circle._convert_point(feature) elif feature_type in ['Polygon', 'MultiPolygon']: value = Region(feature) else: # TODO Support all http://geojson.org/geojson-spec.html#geometry-objects value = None features[key] = value return features
def _read_geojson_features(data, features=None, prefix="")
Return a dict of features keyed by ID.
2.896204
2.715335
1.066611
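For reference, a minimal GeoJSON FeatureCollection of the shape the reader above expects (illustrative sample data); 'Point' geometries become circle markers, '(Multi)Polygon' geometries become regions, and anything else maps to None:

sample = {
    "type": "FeatureCollection",
    "features": [
        {"type": "Feature", "id": "marker-1",
         "geometry": {"type": "Point", "coordinates": [-122.4, 37.8]}},
        {"type": "Feature", "id": "region-1",
         "geometry": {"type": "Polygon",
                      "coordinates": [[[-122.5, 37.7], [-122.3, 37.7],
                                       [-122.3, 37.9], [-122.5, 37.7]]]}},
    ],
}

Note that GeoJSON stores coordinates as (lon, lat), which is why _convert_point and the region helpers flip them into (lat, lon) pairs.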
f = getattr(folium_map, self._map_method_name) f(**self._folium_kwargs)
def draw_on(self, folium_map)
Add feature to Folium map object.
5.850292
5.062441
1.155627
m = Map(features=[self], width=self._width, height=self._height) self._folium_map = m.draw()
def _set_folium_map(self)
A map containing only the feature.
6.931531
5.357558
1.293786
lat, lon = self.lat_lon return { 'type': 'Feature', 'id': feature_id, 'geometry': { 'type': 'Point', 'coordinates': (lon, lat), }, }
def geojson(self, feature_id)
GeoJSON representation of the marker as a point.
2.411968
2.17133
1.110825
attrs = self._attrs.copy() attrs.update(kwargs) lat, lon = self.lat_lon return type(self)(lat, lon, **attrs)
def format(self, **kwargs)
Apply formatting.
4.930475
4.59205
1.073698
lon, lat = feature['geometry']['coordinates'] popup = feature['properties'].get('name', '') return cls(lat, lon)
def _convert_point(cls, feature)
Convert a GeoJSON point to a Marker.
5.692873
4.14677
1.372845
assert len(latitudes) == len(longitudes) assert areas is None or hasattr(cls, '_has_radius'), "A " + cls.__name__ + " has no radius" inputs = [latitudes, longitudes] if labels is not None: assert len(labels) == len(latitudes) inputs.append(labels) else: inputs.append(("",) * len(latitudes)) if colors is not None: assert len(colors) == len(latitudes) inputs.append(colors) if areas is not None: assert len(areas) == len(latitudes) inputs.append(np.array(areas) ** 0.5 / math.pi) ms = [cls(*args, **kwargs) for args in zip(*inputs)] return Map(ms)
def map(cls, latitudes, longitudes, labels=None, colors=None, areas=None, **kwargs)
Return markers from columns of coordinates, labels, & colors. The areas column is not applicable to markers, but sets circle areas.
2.659317
2.623686
1.01358
if self.type == 'Polygon': polygons = [self._geojson['geometry']['coordinates']] elif self.type == 'MultiPolygon': polygons = self._geojson['geometry']['coordinates'] return [ [ [_lat_lons_from_geojson(s) for s in ring ] for ring in polygon] for polygon in polygons]
def polygons(self)
Return a list of polygons describing the region. - Each polygon is a list of linear rings, where the first describes the exterior and the rest describe interior holes. - Each linear ring is a list of positions where the last is a repeat of the first. - Each position is a (lat, lon) pair.
5.583274
5.133382
1.08764
if self._geojson.get('id', feature_id) == feature_id: return self._geojson else: geo = self._geojson.copy() geo['id'] = feature_id return geo
def geojson(self, feature_id)
Return GeoJSON with ID substituted.
3.099638
2.774537
1.117173
attrs = self._attrs.copy() attrs.update(kwargs) return Region(self._geojson, **attrs)
def format(self, **kwargs)
Apply formatting.
7.453293
6.627429
1.124613
return _combinable(lambda x: (y <= x < z) or _equal_or_float_equal(x, y))
def between(y, z)
Greater than or equal to y and less than z.
16.292664
15.244952
1.068725
return _combinable(lambda x: (y <= x <= z) or _equal_or_float_equal(x, y) or _equal_or_float_equal(x, z))
def between_or_equal_to(y, z)
Greater than or equal to y and less than or equal to z.
6.623032
6.232934
1.062586
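A self-contained sketch of the predicate idea above; _combinable and _equal_or_float_equal are internal helpers not shown in this section, so stand-ins are used here (assumption: the float check tolerates rounding error, e.g. via math.isclose):

import math

def _float_equal(x, y):
    # Stand-in for _equal_or_float_equal: exact match, or float match within tolerance.
    return x == y or (isinstance(x, float) and isinstance(y, float) and math.isclose(x, y))

def between(y, z):
    # Greater than or equal to y and less than z, with float tolerance at the lower bound.
    return lambda x: (y <= x < z) or _float_equal(x, y)

in_range = between(0.1, 0.3)
print(in_range(0.1), in_range(0.2), in_range(0.3))   # True True False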
if len(column) == 0: val_width = 0 else: val_width = max(len(self.format_value(v)) for v in column) val_width = min(val_width, self.max_width) width = max(val_width, len(str(label)), self.min_width, len(self.etc)) def pad(value, label=False): if label: raw = value else: raw = self.format_value(value) if len(raw) > width: prefix = raw[:width-len(self.etc)] + self.etc else: prefix = raw return prefix.ljust(width) return pad
def format_column(self, label, column)
Return a formatting function that pads & truncates values.
2.883749
2.64179
1.091589
if isinstance(value, (bool, np.bool_)): return str(value) elif isinstance(value, (int, np.integer)): return '{:n}'.format(value) elif isinstance(value, (float, np.floating)): return '{:g}'.format(value) else: return str(value)
def format_value(value)
Pretty-print an arbitrary value.
2.03102
1.922031
1.056705
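The two format specs doing the work above behave as follows: '{:n}' is locale-aware for integers, while '{:g}' trims trailing zeros and switches to scientific notation for extreme floats. A quick check:

print('{:n}'.format(1200000))   # '1200000' under the default C locale
print('{:g}'.format(3.1400))    # '3.14'
print('{:g}'.format(0.00001))   # '1e-05'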
return self.convert_value is not Formatter.convert_value or \ self.convert_column is not Formatter.convert_column
def converts_values(self)
Whether this Formatter also converts values.
9.866375
5.77763
1.707686
if isinstance(value, str): value = value.replace(self.separator, '') if self.decimal_point not in value: return int(value) else: return float(value.replace(self.decimal_point, '.')) elif self.int_to_float: return float(value) else: return value
def convert_value(self, value)
Convert string 93,000.00 to float 93000.0.
2.7673
2.407076
1.149652
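A minimal stand-alone version of the same parsing idea, assuming ',' as the thousands separator and '.' as the decimal point (the formatter above reads these from its own configuration); parse_number is a hypothetical name:

def parse_number(text, separator=',', decimal_point='.'):
    # Strip the thousands separator, then pick int or float based on the decimal point.
    text = text.replace(separator, '')
    if decimal_point not in text:
        return int(text)
    return float(text.replace(decimal_point, '.'))

print(parse_number('93,000.00'))   # 93000.0
print(parse_number('1,234'))       # 1234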
if isinstance(value, str): assert value.startswith(self.symbol), "Currency does not start with " + self.symbol value = value.lstrip(self.symbol) return super().convert_value(value)
def convert_value(self, value)
Convert value to float. If value is a string, ensure that the first character is the same as the symbol, i.e. the value is in the currency this formatter represents.
4.097363
3.013367
1.359729
assert all(values >= 0), 'Cannot normalize a column with negatives' total = sum(values) if total > 0: return values / total else: return values
def convert_column(self, values)
Normalize values.
4.661914
3.842098
1.213377
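The normalisation above relies on vectorised division, so values is presumably a NumPy array; a tiny illustration:

import numpy as np

values = np.array([2.0, 3.0, 5.0])
print(values / values.sum())   # [0.2 0.3 0.5]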
if value > MAX_VALUE: raise ValueError('value {!r} exceeds MAC address range'.format(value)) if value < 0: raise ValueError('value must not be negative') # todo: convert to the right byte order. the resulting # mac address is reversed on my machine compared to the # mac address displayed by the hello-myo SDK sample. # See issue #7 string = ('%x' % value).rjust(12, '0') assert len(string) == 12 result = ':'.join(''.join(pair) for pair in zip(*[iter(string)]*2)) return result.upper()
def encode(value)
Encodes the number *value* to a MAC address ASCII string in binary form. Raises a #ValueError if *value* is a negative number or exceeds the MAC address range.
7.284379
6.505877
1.119661
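A stdlib-only round trip showing the string handling in encode() and decode() above; the address value is made up for illustration:

value = 0x0123456789AB

# encode: zero-pad to 12 hex digits, then join consecutive pairs with ':'
digits = ('%x' % value).rjust(12, '0')
encoded = ':'.join(digits[i:i + 2] for i in range(0, 12, 2)).upper()
print(encoded)                          # 01:23:45:67:89:AB

# decode: strip the colons and parse the remainder as base-16
decoded = int(encoded.replace(':', ''), 16)
print(decoded == value)                 # True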
bstr = bstr.replace(b':', b'') if len(bstr) != 12: raise ValueError('not a valid MAC address: {!r}'.format(bstr)) try: return int(bstr, 16) except ValueError: raise ValueError('not a valid MAC address: {!r}'.format(bstr))
def decode(bstr)
Decodes an ASCII-encoded binary MAC address string into a number.
2.655046
2.067373
1.284261
if sum(bool(x) for x in [lib_name, bin_path, sdk_path]) > 1: raise ValueError('expected zero or one arguments') if sdk_path: if sys.platform.startswith('win32'): bin_path = os.path.join(sdk_path, 'bin') elif sys.platform.startswith('darwin'): bin_path = os.path.join(sdk_path, 'myo.framework') else: raise RuntimeError('unsupported platform: {!r}'.format(sys.platform)) if bin_path: lib_name = os.path.join(bin_path, _getdlname()) if not lib_name: lib_name = _getdlname() global libmyo libmyo = ffi.dlopen(lib_name)
def init(lib_name=None, bin_path=None, sdk_path=None)
Initialize the Myo SDK by loading the libmyo shared library. With no arguments, libmyo must be on your `PATH` or `LD_LIBRARY_PATH`. You can specify the exact path to libmyo with *lib_name*. Alternatively, you can specify the binaries directory that contains libmyo with *bin_path*. Finally, you can also pass the path to the Myo SDK root directory and it will figure out the path to libmyo by itself.
2.67926
2.442482
1.096941
if not callable(handler): if hasattr(handler, 'on_event'): handler = handler.on_event else: raise TypeError('expected callable or DeviceListener') with self._lock: if self._running: raise RuntimeError('a handler is already running in the Hub') self._running = True self._stop_requested = False self._stopped = False exc_box = [] def callback_on_error(*exc_info): exc_box.append(exc_info) with self._lock: self._stopped = True return HandlerResult.stop def callback(_, event): with self._lock: if self._stop_requested: self._stopped = True return HandlerResult.stop result = handler(Event(event)) if result is None or result is True: result = HandlerResult.continue_ elif result is False: result = HandlerResult.stop else: result = HandlerResult(result) if result == HandlerResult.stop: with self._lock: self._stopped = True return result cdecl = 'libmyo_handler_result_t(void*, libmyo_event_t)' callback = ffi.callback(cdecl, callback, onerror=callback_on_error) try: error = ErrorDetails() libmyo.libmyo_run(self._handle[0], duration_ms, callback, ffi.NULL, error.handle) error.raise_for_kind() if exc_box: six.reraise(*exc_box[0]) finally: with self._lock: self._running = False result = not self._stopped return result
def run(self, handler, duration_ms)
Runs the *handler* function for *duration_ms* milliseconds. The function must accept exactly one argument which is an #Event object. The handler must return either a #HandlerResult value, #False, #True or #None, whereas #False represents #HandlerResult.stop and #True and #None represent #HandlerResult.continue_. If the run did not complete due to the handler returning #HandlerResult.stop or #False or the procedure was cancelled via #Hub.stop(), this function returns #False. If the full *duration_ms* completed, #True is returned. This function blocks the caller until either *duration_ms* passed, the handler returned #HandlerResult.stop or #False or #Hub.stop() was called.
3.487113
3.25565
1.071096
timer = TimeoutManager(timeout) with self._cond: # As long as there are no Myo's connected, wait until we # get notified about a change. while not timer.check(): # Check if we found a Myo that is connected. for device in self._devices.values(): if device.connected: return device self._cond.wait(timer.remainder(interval)) return None
def wait_for_single_device(self, timeout=None, interval=0.5)
Waits until a Myo has been paired **and** connected with the Hub and returns it. If the *timeout* is exceeded, returns None. This function will not return a Myo that is only paired but not connected. # Parameters timeout: The maximum time to wait for a device. interval: The interval at which the function should wake from sleeping. We cannot sleep endlessly, otherwise the main thread could never exit, e.g. through a KeyboardInterrupt.
6.434569
6.318596
1.018354
if self.value is None: return True return (time.clock() - self.start) >= self.value
def check(self)
Returns #True if the time interval has passed.
8.866979
4.894676
1.811556
if value is None: value = time.clock() self.start = value if self.value_on_reset: self.value = self.value_on_reset
def reset(self, value=None)
Resets the start time of the interval to now or the specified value.
4.668906
4.144704
1.126475
if self.check(): self.reset(value) return True return False
def check_and_reset(self, value=None)
Combination of #check() and #reset().
10.064936
5.3499
1.881331
if self.value is None: return False return (time.clock() - self.start) >= self.value
def check(self)
Returns #True if the timeout is exceeded.
8.574418
4.686256
1.829695
if self.value is None: return max_value remainder = self.value - (time.clock() - self.start) if remainder < 0.0: return 0.0 elif max_value is not None and remainder > max_value: return max_value else: return remainder
def remainder(self, max_value=None)
Returns the time remaining for the timeout, or *max_value* if that remainder is larger.
2.998147
2.383884
1.257673
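Note that the timer methods above call time.clock(), which was removed in Python 3.8; a minimal stand-in with the same check()/remainder() behaviour built on time.monotonic() might look like this (an illustrative sketch, not the library's class):

import time

class SimpleTimeout:
    def __init__(self, timeout):
        self.value = timeout              # None means "never times out"
        self.start = time.monotonic()

    def check(self):
        # True once the timeout is exceeded; never True for a None timeout.
        if self.value is None:
            return False
        return (time.monotonic() - self.start) >= self.value

    def remainder(self, max_value=None):
        # Time left before the deadline, clamped to [0, max_value].
        if self.value is None:
            return max_value
        remaining = max(0.0, self.value - (time.monotonic() - self.start))
        if max_value is not None and remaining > max_value:
            return max_value
        return remaining

t = SimpleTimeout(2.0)
print(t.check(), t.remainder(0.5))        # False 0.5 (well before the deadline)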
norm = self.magnitude() return Vector(self.x / norm, self.y / norm, self.z / norm)
def normalized(self)
Returns a normalized copy of this vector.
6.37509
3.481558
1.831102
return self.x * rhs.x + self.y * rhs.y + self.z * rhs.z
def dot(self, rhs)
Return the dot product of this vector and *rhs*.
3.841932
2.233499
1.72014
return Vector( self.y * rhs.z - self.z * rhs.y, self.z * rhs.x - self.x * rhs.z, self.x * rhs.y - self.y * rhs.x)
def cross(self, rhs)
Return the cross product of this vector and *rhs*.
2.304274
1.954728
1.178821
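A quick check of the cross product formula above with plain tuples: the x and y unit vectors give the z unit vector.

def cross(a, b):
    return (a[1] * b[2] - a[2] * b[1],
            a[2] * b[0] - a[0] * b[2],
            a[0] * b[1] - a[1] * b[0])

print(cross((1, 0, 0), (0, 1, 0)))   # (0, 0, 1)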
return math.acos(self.dot(rhs) / (self.magnitude() * rhs.magnitude()))
def angle_to(self, rhs)
Return the angle between this vector and *rhs* in radians.
4.56813
2.661349
1.716472
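And a sanity check on angle_to: perpendicular vectors give pi/2, a 45-degree pair gives pi/4 (plain tuples stand in for the Vector class here):

import math

def dot(a, b):
    return sum(x * y for x, y in zip(a, b))

def angle_to(a, b):
    return math.acos(dot(a, b) / (math.sqrt(dot(a, a)) * math.sqrt(dot(b, b))))

print(angle_to((1, 0, 0), (0, 1, 0)))                 # 1.5707963... (pi/2)
print(math.degrees(angle_to((1, 0, 0), (1, 1, 0))))   # ~45.0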
return math.sqrt(self.x ** 2 + self.y ** 2 + self.z ** 2 + self.w ** 2)
def magnitude(self)
Returns the magnitude of the quaternion.
3.669509
1.965991
1.866493
magnitude = self.magnitude() return Quaternion( self.x / magnitude, self.y / magnitude, self.z / magnitude, self.w / magnitude)
def normalized(self)
Returns the unit quaternion corresponding to the same rotation as this one.
6.310114
3.587089
1.759118
qvec = self * Quaternion(vec.x, vec.y, vec.z, 0) * ~self return type(vec)(qvec.x, qvec.y, qvec.z)
def rotate(self, vec)
Returns *vec* rotated by this #Quaternion. :param vec: A vector object. :return: an object of the same type as *vec*
5.578356
4.607411
1.210735
x, y, z, w = self.x, self.y, self.z, self.w return math.atan2(2*y*w - 2*x*z, 1 - 2*y*y - 2*z*z)
def roll(self)
Calculates the Roll of the Quaternion.
2.58737
1.819817
1.421775
x, y, z, w = self.x, self.y, self.z, self.w return math.atan2(2*x*w - 2*y*z, 1 - 2*x*x - 2*z*z)
def pitch(self)
Calculates the Pitch of the Quaternion.
2.691728
1.807141
1.489495
x, y, z, w = self.x, self.y, self.z, self.w return math.asin(2*x*y + 2*z*w)
def yaw(self)
Calculates the Yaw of the Quaternion.
4.305593
2.377416
1.811039
x, y, z, w = self.x, self.y, self.z, self.w roll = math.atan2(2*y*w - 2*x*z, 1 - 2*y*y - 2*z*z) pitch = math.atan2(2*x*w - 2*y*z, 1 - 2*x*x - 2*z*z) yaw = math.asin(2*x*y + 2*z*w) return (roll, pitch, yaw)
def rpy(self)
Calculates the Roll, Pitch and Yaw of the Quaternion.
1.630826
1.423018
1.146033
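Plugging concrete numbers into the roll/pitch/yaw formulas above: for a 60-degree rotation about the z axis (quaternion components chosen as sin/cos of the half angle), the formulas give roll = pitch = 0 and yaw = pi/3 under this convention. A sketch:

import math

# Quaternion (x, y, z, w) for a 60-degree rotation about the z axis:
# vector part = sin(30 deg) * axis, scalar part = cos(30 deg).
x, y, z, w = 0.0, 0.0, math.sin(math.radians(30)), math.cos(math.radians(30))

roll = math.atan2(2*y*w - 2*x*z, 1 - 2*y*y - 2*z*z)
pitch = math.atan2(2*x*w - 2*y*z, 1 - 2*x*x - 2*z*z)
yaw = math.asin(2*x*y + 2*z*w)

print(roll, pitch, math.degrees(yaw))   # 0.0 0.0 ~60.0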
source = Vector(source.x, source.y, source.z) dest = Vector(dest.x, dest.y, dest.z) cross = source.cross(dest) cos_theta = source.dot(dest) # Return identity if the vectors are the same direction. if cos_theta >= 1.0: return Quaternion.identity() # Product of the square of the magnitudes. k = math.sqrt(source.dot(source) * dest.dot(dest)) # Return identity in the degenerate case. if k <= 0.0: return Quaternion.identity() # Special handling for vectors facing opposite directions. if cos_theta / k <= -1: x_axis = Vector(1, 0, 0) y_axis = Vector(0, 1, 1) if abs(source.dot(x_axis)) < 1.0: cross = source.cross(x_axis) else: cross = source.cross(y_axis) return Quaternion(cross.x, cross.y, cross.z, k + cos_theta)
def rotation_of(source, dest)
Returns a #Quaternion that represents a rotation from vector *source* to *dest*.
3.303803
3.276489
1.008336
sincomp = math.sin(angle / 2.0) return Quaternion( axis.x * sincomp, axis.y * sincomp, axis.z * sincomp, math.cos(angle / 2.0))
def from_axis_angle(axis, angle)
Returns a #Quaternion that represents the right-handed rotation of *angle* radians about the given *axis*. :param axis: The unit vector representing the axis of rotation. :param angle: The angle of rotation, in radians.
3.204147
3.156461
1.015107
field_list = [] append = field_list.append for field in field_name_list: type_prefix = '' for char in field: if not char.islower(): break type_prefix += char append((field, _desc_type_dict[type_prefix])) result = type('some_descriptor', (LittleEndianStructure, ), {}) # Not using type()'s 3rd param to initialise class, as per ctypes # documentation: # _pack_ must already be defined when _fields_ is assigned, otherwise it # will have no effect. # pylint: disable=protected-access result._pack_ = 1 result._fields_ = field_list # pylint: enable=protected-access return result
def newStruct(field_name_list)
Create a ctypes structure class based on USB standard field naming (type-prefixed).
5.685439
5.488249
1.035929
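For readers unfamiliar with the ctypes pattern above: _pack_ must be assigned before _fields_ for packing to take effect, which is why the class is created empty and filled in afterwards. A minimal illustration (the field names here are invented):

from ctypes import LittleEndianStructure, c_uint8, c_uint16, sizeof

Desc = type('Desc', (LittleEndianStructure,), {})
Desc._pack_ = 1
Desc._fields_ = [('bLength', c_uint8), ('wTotalLength', c_uint16)]
print(sizeof(Desc))   # 3 -- no alignment padding thanks to _pack_ = 1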
transfer = transfer_p.contents offset = 0 result = [] append = result.append for iso_transfer in _get_iso_packet_list(transfer): length = iso_transfer.length append(_get_iso_packet_buffer(transfer, offset, length)) offset += length return result
def get_iso_packet_buffer_list(transfer_p)
Python-specific helper extracting a list of iso packet buffers.
3.469218
3.341553
1.038205
result = [] extra_length = descriptor.extra_length if extra_length: extra = buffer_at(descriptor.extra.value, extra_length) append = result.append while extra: length = _string_item_to_int(extra[0]) if not 0 < length <= len(extra): raise ValueError( 'Extra descriptor %i is incomplete/invalid' % ( len(result), ), ) append(extra[:length]) extra = extra[length:] return result
def get_extra(descriptor)
Python-specific helper to access "extra" field of descriptors, because it's not as straight-forward as in C. Returns a list, where each entry is an individual extra descriptor.
4.496371
4.236236
1.061407
# As per ctypes.create_string_buffer, as of python 2.7.10 at least: # - int or long is a length # - str or unicode is an initialiser # Testing the latter confuses 2to3, so test the former. if isinstance(init_or_size, (int, long)): init_or_size = bytearray(init_or_size) return create_initialised_buffer(init_or_size)
def create_binary_buffer(init_or_size)
ctypes.create_string_buffer variant which does not add a trailing null when init_or_size is not a size.
6.959795
6.54132
1.063974
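For context on why a custom buffer helper is needed: ctypes.create_string_buffer appends a trailing NUL when given bytes, which is rarely wanted for raw USB transfer buffers. A short stdlib-only comparison:

import ctypes

with_nul = ctypes.create_string_buffer(b'abc')
print(ctypes.sizeof(with_nul))            # 4 -- includes the trailing NUL

exact = (ctypes.c_char * 3).from_buffer_copy(b'abc')
print(ctypes.sizeof(exact), exact.raw)    # 3 b'abc'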
version = libusb1.libusb_get_version().contents return Version( version.major, version.minor, version.micro, version.nano, version.rc, version.describe, )
def getVersion()
Returns underlying libusb's version information as a 6-namedtuple (or 6-tuple if namedtuples are not available): - major - minor - micro - nano - rc - describe Returns (0, 0, 0, 0, '', '') if libusb doesn't have the required entry point.
4.75076
2.734876
1.737102
if self.__submitted: raise ValueError('Cannot close a submitted transfer') self.doom() self.__initialized = False # Break possible external reference cycles self.__callback = None self.__user_data = None # Break libusb_transfer reference cycles self.__ctypesCallbackWrapper = None # For some reason, overwriting callback is not enough to remove this # reference cycle - though sometimes it works: # self -> self.__dict__ -> libusb_transfer -> dict[x] -> dict[x] -> # CThunkObject -> __callbackWrapper -> self # So free transfer altogether. if self.__transfer is not None: self.__libusb_free_transfer(self.__transfer) self.__transfer = None self.__transfer_buffer = None # Break USBDeviceHandle reference cycle self.__before_submit = None self.__after_completion = None
def close(self)
Break reference cycles to allow instance to be garbage-collected. Raises if called on a submitted transfer.
11.220655
9.815034
1.143211
self.__submitted = False self.__after_completion(self) callback = self.__callback if callback is not None: callback(self) if self.__doomed: self.close()
def __callbackWrapper(self, transfer_p)
Makes it possible for user-provided callback to alter transfer when fired (i.e., mark transfer as not submitted upon call).
7.043765
6.305279
1.117122
if self.__submitted: raise ValueError('Cannot alter a submitted transfer') if self.__doomed: raise DoomedTransferError('Cannot reuse a doomed transfer') if isinstance(buffer_or_len, (int, long)): length = buffer_or_len # pylint: disable=undefined-variable string_buffer, transfer_py_buffer = create_binary_buffer( length + CONTROL_SETUP_SIZE, ) # pylint: enable=undefined-variable else: length = len(buffer_or_len) string_buffer, transfer_py_buffer = create_binary_buffer( CONTROL_SETUP + buffer_or_len, ) self.__initialized = False self.__transfer_buffer = string_buffer # pylint: disable=undefined-variable self.__transfer_py_buffer = integer_memoryview( transfer_py_buffer, )[CONTROL_SETUP_SIZE:] # pylint: enable=undefined-variable self.__user_data = user_data libusb1.libusb_fill_control_setup( string_buffer, request_type, request, value, index, length) libusb1.libusb_fill_control_transfer( self.__transfer, self.__handle, string_buffer, self.__ctypesCallbackWrapper, None, timeout) self.__callback = callback self.__initialized = True
def setControl( self, request_type, request, value, index, buffer_or_len, callback=None, user_data=None, timeout=0)
Setup transfer for control use. request_type, request, value, index See USBDeviceHandle.controlWrite. request_type defines transfer direction (see ENDPOINT_OUT and ENDPOINT_IN)). buffer_or_len Either a string (when sending data), or expected data length (when receiving data). callback Callback function to be invoked on transfer completion. Called with transfer as parameter, return value ignored. user_data User data to pass to callback function. timeout Transfer timeout in milliseconds. 0 to disable.
3.519901
3.519555
1.000098
if self.__submitted: raise ValueError('Cannot alter a submitted transfer') if self.__doomed: raise DoomedTransferError('Cannot reuse a doomed transfer') string_buffer, self.__transfer_py_buffer = create_binary_buffer( buffer_or_len ) self.__initialized = False self.__transfer_buffer = string_buffer self.__user_data = user_data libusb1.libusb_fill_interrupt_transfer( self.__transfer, self.__handle, endpoint, string_buffer, sizeof(string_buffer), self.__ctypesCallbackWrapper, None, timeout) self.__callback = callback self.__initialized = True
def setInterrupt( self, endpoint, buffer_or_len, callback=None, user_data=None, timeout=0)
Setup transfer for interrupt use. endpoint Endpoint to submit transfer to. Defines transfer direction (see ENDPOINT_OUT and ENDPOINT_IN)). buffer_or_len Either a string (when sending data), or expected data length (when receiving data) To avoid memory copies, use an object implementing the writeable buffer interface (ex: bytearray). callback Callback function to be invoked on transfer completion. Called with transfer as parameter, return value ignored. user_data User data to pass to callback function. timeout Transfer timeout in milliseconds. 0 to disable.
5.817943
5.972559
0.974112
if self.__submitted: raise ValueError('Cannot alter a submitted transfer') num_iso_packets = self.__num_iso_packets if num_iso_packets == 0: raise TypeError( 'This transfer cannot be used for isochronous I/O. ' 'You must get another one with a non-zero iso_packets ' 'parameter.' ) if self.__doomed: raise DoomedTransferError('Cannot reuse a doomed transfer') string_buffer, transfer_py_buffer = create_binary_buffer(buffer_or_len) buffer_length = sizeof(string_buffer) if iso_transfer_length_list is None: iso_length, remainder = divmod(buffer_length, num_iso_packets) if remainder: raise ValueError( 'Buffer size %i cannot be evenly distributed among %i ' 'transfers' % ( buffer_length, num_iso_packets, ) ) iso_transfer_length_list = [iso_length] * num_iso_packets configured_iso_packets = len(iso_transfer_length_list) if configured_iso_packets > num_iso_packets: raise ValueError( 'Too many ISO transfer lengths (%i), there are ' 'only %i ISO transfers available' % ( configured_iso_packets, num_iso_packets, ) ) if sum(iso_transfer_length_list) > buffer_length: raise ValueError( 'ISO transfers too long (%i), there are only ' '%i bytes available' % ( sum(iso_transfer_length_list), buffer_length, ) ) transfer_p = self.__transfer self.__initialized = False self.__transfer_buffer = string_buffer self.__transfer_py_buffer = transfer_py_buffer self.__user_data = user_data libusb1.libusb_fill_iso_transfer( transfer_p, self.__handle, endpoint, string_buffer, buffer_length, configured_iso_packets, self.__ctypesCallbackWrapper, None, timeout) for length, iso_packet_desc in zip( iso_transfer_length_list, libusb1.get_iso_packet_list(transfer_p)): if length <= 0: raise ValueError( 'Negative/null length transfers are not possible.' ) iso_packet_desc.length = length self.__callback = callback self.__initialized = True
def setIsochronous( self, endpoint, buffer_or_len, callback=None, user_data=None, timeout=0, iso_transfer_length_list=None)
Setup transfer for isochronous use. endpoint Endpoint to submit transfer to. Defines transfer direction (see ENDPOINT_OUT and ENDPOINT_IN)). buffer_or_len Either a string (when sending data), or expected data length (when receiving data) To avoid memory copies, use an object implementing the writeable buffer interface (ex: bytearray). callback Callback function to be invoked on transfer completion. Called with transfer as parameter, return value ignored. user_data User data to pass to callback function. timeout Transfer timeout in milliseconds. 0 to disable. iso_transfer_length_list List of individual transfer sizes. If not provided, buffer_or_len will be divided evenly among available transfers if possible, and raise ValueError otherwise.
3.12598
3.083035
1.01393
transfer_p = self.__transfer transfer = transfer_p.contents # pylint: disable=undefined-variable if transfer.type != TRANSFER_TYPE_ISOCHRONOUS: # pylint: enable=undefined-variable raise TypeError( 'This method cannot be called on non-iso transfers.' ) return libusb1.get_iso_packet_buffer_list(transfer_p)
def getISOBufferList(self)
Get individual ISO transfer's buffer. Returns a list with one item per ISO transfer, with their individually-configured sizes. Returned list is consistent with getISOSetupList return value. Should not be called on a submitted transfer. See also iterISO.
6.609962
5.693063
1.161056
transfer_p = self.__transfer transfer = transfer_p.contents # pylint: disable=undefined-variable if transfer.type != TRANSFER_TYPE_ISOCHRONOUS: # pylint: enable=undefined-variable raise TypeError( 'This method cannot be called on non-iso transfers.' ) return [ { 'length': x.length, 'actual_length': x.actual_length, 'status': x.status, } for x in libusb1.get_iso_packet_list(transfer_p) ]
def getISOSetupList(self)
Get individual ISO transfer's setup. Returns a list of dicts, each containing an individual ISO transfer parameters: - length - actual_length - status (see libusb1's API documentation for their signification) Returned list is consistent with getISOBufferList return value. Should not be called on a submitted transfer (except for 'length' values).
5.544175
3.482172
1.59216
transfer_p = self.__transfer transfer = transfer_p.contents # pylint: disable=undefined-variable if transfer.type != TRANSFER_TYPE_ISOCHRONOUS: # pylint: enable=undefined-variable raise TypeError( 'This method cannot be called on non-iso transfers.' ) buffer_position = transfer.buffer for iso_transfer in libusb1.get_iso_packet_list(transfer_p): yield ( iso_transfer.status, libusb1.buffer_at(buffer_position, iso_transfer.actual_length), ) buffer_position += iso_transfer.length
def iterISO(self)
Generator yielding (status, buffer) for each isochronous transfer. buffer is truncated to actual_length. This is more efficient than calling both getISOBufferList and getISOSetupList when receiving data. Should not be called on a submitted transfer.
5.929716
4.359985
1.360031
if self.__submitted: raise ValueError('Cannot alter a submitted transfer') transfer = self.__transfer.contents # pylint: disable=undefined-variable if transfer.type == TRANSFER_TYPE_CONTROL: # pylint: enable=undefined-variable raise ValueError( 'To alter control transfer buffer, use setControl' ) buff, transfer_py_buffer = create_binary_buffer(buffer_or_len) # pylint: disable=undefined-variable if transfer.type == TRANSFER_TYPE_ISOCHRONOUS and \ sizeof(buff) != transfer.length: # pylint: enable=undefined-variable raise ValueError( 'To alter isochronous transfer buffer length, use ' 'setIsochronous' ) self.__transfer_buffer = buff self.__transfer_py_buffer = transfer_py_buffer transfer.buffer = cast(buff, c_void_p) transfer.length = sizeof(buff)
def setBuffer(self, buffer_or_len)
Replace buffer with a new one. Allows resizing read buffer and replacing data sent. Note: resizing is not allowed for isochronous buffer (use setIsochronous). Note: disallowed on control transfers (use setControl).
3.510985
3.118184
1.125971
if self.__submitted: raise ValueError('Cannot submit a submitted transfer') if not self.__initialized: raise ValueError( 'Cannot submit a transfer until it has been initialized' ) if self.__doomed: raise DoomedTransferError('Cannot submit doomed transfer') self.__before_submit(self) self.__submitted = True result = libusb1.libusb_submit_transfer(self.__transfer) if result: self.__after_completion(self) self.__submitted = False raiseUSBError(result)
def submit(self)
Submit transfer for asynchronous handling.
4.345715
3.945899
1.101324
if not self.__submitted: # XXX: Workaround for a bug reported on libusb 1.0.8: calling # libusb_cancel_transfer on a non-submitted transfer might # trigger a segfault. raise self.__USBErrorNotFound self.__mayRaiseUSBError(self.__libusb_cancel_transfer(self.__transfer))
def cancel(self)
Cancel transfer. Note: cancellation happens asynchronously, so you must wait for TRANSFER_CANCELLED.
10.055736
9.188505
1.094382
if event not in EVENT_CALLBACK_SET: raise ValueError('Unknown event %r.' % (event, )) self.__event_callback_dict[event] = callback
def setEventCallback(self, event, callback)
Set a function to call for a given event. event must be one of: TRANSFER_COMPLETED TRANSFER_ERROR TRANSFER_TIMED_OUT TRANSFER_CANCELLED TRANSFER_STALL TRANSFER_NO_DEVICE TRANSFER_OVERFLOW
4.92134
4.811788
1.022767
next_usb_timeout = self.__context.getNextTimeout() if timeout is None or timeout < 0: usb_timeout = next_usb_timeout elif next_usb_timeout: usb_timeout = min(next_usb_timeout, timeout) else: usb_timeout = timeout event_list = self.__poller.poll(usb_timeout) if event_list: fd_set = self.__fd_set result = [(x, y) for x, y in event_list if x not in fd_set] if len(result) != len(event_list): self.__context.handleEventsTimeout() else: result = event_list self.__context.handleEventsTimeout() return result
def poll(self, timeout=None)
Poll for events. timeout can be a float in seconds, or None for no timeout. Returns a list of (descriptor, event) pairs.
3.141578
2.967379
1.058705
if fd in self.__fd_set: raise ValueError( 'This fd is a special USB event fd, it cannot be polled.' ) self.__poller.register(fd, events)
def register(self, fd, events)
Register an USB-unrelated fd to poller. Convenience method.
9.353114
6.389382
1.463853
if fd in self.__fd_set: raise ValueError( 'This fd is a special USB event fd, it must stay registered.' ) self.__poller.unregister(fd)
def unregister(self, fd)
Unregister an USB-unrelated fd from poller. Convenience method.
14.606015
10.060315
1.451845
handle = self.__handle if handle is None: return # Build a strong set from weak self.__transfer_set so we can doom # and close all contained transfers. # Because of backward compatibility, self.__transfer_set might be a # wrapper around WeakKeyDictionary. As it might be modified by gc, # we must pop until there is not key left instead of iterating over # it. weak_transfer_set = self.__transfer_set transfer_set = self.__set() while True: try: transfer = weak_transfer_set.pop() except self.__KeyError: break transfer_set.add(transfer) transfer.doom() inflight = self.__inflight for transfer in inflight: try: transfer.cancel() except (self.__USBErrorNotFound, self.__USBErrorNoDevice): pass while inflight: try: self.__context.handleEvents() except self.__USBErrorInterrupted: pass for transfer in transfer_set: transfer.close() self.__libusb_close(handle) self.__handle = None
def close(self)
Close this handle. If not called explicitly, will be called by the destructor. This method cancels any in-flight transfer when it is called. As cancellation is not immediate, this method needs to let libusb handle events until transfers are actually cancelled. In multi-threaded programs, this can lead to stalls. To avoid this, do not close nor let GC collect a USBDeviceHandle which has in-flight transfers.
6.459082
5.973833
1.081229
configuration = c_int() mayRaiseUSBError(libusb1.libusb_get_configuration( self.__handle, byref(configuration), )) return configuration.value
def getConfiguration(self)
Get the current configuration number for this device.
7.014629
5.166122
1.357813
mayRaiseUSBError( libusb1.libusb_claim_interface(self.__handle, interface), ) return _ReleaseInterface(self, interface)
def claimInterface(self, interface)
Claim (= get exclusive access to) given interface number. Required to receive/send data. Can be used as a context manager: with handle.claimInterface(0): # do stuff # handle.releaseInterface(0) gets automatically called
9.595828
10.122807
0.947941
mayRaiseUSBError(libusb1.libusb_set_interface_alt_setting( self.__handle, interface, alt_setting, ))
def setInterfaceAltSetting(self, interface, alt_setting)
Set interface's alternative setting (both parameters are integers).
8.353295
6.99028
1.194987
result = libusb1.libusb_kernel_driver_active(self.__handle, interface) if result == 0: return False elif result == 1: return True raiseUSBError(result)
def kernelDriverActive(self, interface)
Tell whether a kernel driver is active on given interface number.
3.281023
3.032872
1.08182
mayRaiseUSBError(libusb1.libusb_set_auto_detach_kernel_driver( self.__handle, bool(enable), ))
def setAutoDetachKernelDriver(self, enable)
Control automatic kernel driver detach. enable (bool) True to enable auto-detach, False to disable it.
7.644601
9.887437
0.773163
descriptor_string, _ = create_binary_buffer(STRING_LENGTH) result = libusb1.libusb_get_string_descriptor( self.__handle, 0, 0, descriptor_string, sizeof(descriptor_string), ) # pylint: disable=undefined-variable if result == ERROR_PIPE: # pylint: enable=undefined-variable # From libusb_control_transfer doc: # control request not supported by the device return [] mayRaiseUSBError(result) langid_list = cast(descriptor_string, POINTER(c_uint16)) return [ libusb1.libusb_le16_to_cpu(langid_list[offset]) for offset in xrange(1, cast(descriptor_string, POINTER(c_ubyte))[0] // 2) ]
def getSupportedLanguageList(self)
Return a list of USB language identifiers (as integers) supported by current device for its string descriptors. Note: language identifiers seem (I didn't check them all...) very similar to windows language identifiers, so you may want to use locales.windows_locale to get an rfc3066 representation. The 5 standard HID language codes are missing though.
5.994578
5.632467
1.06429
if descriptor == 0: return None descriptor_string = bytearray(STRING_LENGTH) try: received = mayRaiseUSBError(libusb1.libusb_get_string_descriptor( self.__handle, descriptor, lang_id, create_binary_buffer(descriptor_string)[0], STRING_LENGTH, )) # pylint: disable=undefined-variable except USBErrorNotFound: # pylint: enable=undefined-variable return None if received < 2 or descriptor_string[1] != DT_STRING: raise ValueError('Invalid string descriptor') return descriptor_string[2:min( received, descriptor_string[0], )].decode('UTF-16-LE', errors=errors)
def getStringDescriptor(self, descriptor, lang_id, errors='strict')
Fetch description string for given descriptor and in given language. Use getSupportedLanguageList to know which languages are available. Return value is a unicode string. Return None if there is no such descriptor on device.
5.399126
5.44792
0.991043
if descriptor == 0: return None descriptor_string = bytearray(STRING_LENGTH) try: received = mayRaiseUSBError(libusb1.libusb_get_string_descriptor_ascii( self.__handle, descriptor, create_binary_buffer(descriptor_string)[0], STRING_LENGTH, )) # pylint: disable=undefined-variable except USBErrorNotFound: # pylint: enable=undefined-variable return None return descriptor_string[:received].decode('ASCII', errors=errors)
def getASCIIStringDescriptor(self, descriptor, errors='strict')
Fetch description string for given descriptor in first available language. Return value is a unicode string. Return None if there is no such descriptor on device.
5.845347
5.883859
0.993455
# pylint: disable=undefined-variable request_type = (request_type & ~ENDPOINT_DIR_MASK) | ENDPOINT_OUT # pylint: enable=undefined-variable data, _ = create_initialised_buffer(data) return self._controlTransfer(request_type, request, value, index, data, sizeof(data), timeout)
def controlWrite( self, request_type, request, value, index, data, timeout=0)
Synchronous control write. request_type: request type bitmask (bmRequestType), see constants TYPE_* and RECIPIENT_*. request: request id (some values are standard). value, index, data: meaning is request-dependent. timeout: in milliseconds, how long to wait for device acknowledgement. Set to 0 to disable. To avoid memory copies, use an object implementing the writeable buffer interface (ex: bytearray) for the "data" parameter. Returns the number of bytes actually sent.
5.215849
5.134244
1.015894
# pylint: disable=undefined-variable request_type = (request_type & ~ENDPOINT_DIR_MASK) | ENDPOINT_IN # pylint: enable=undefined-variable data, data_buffer = create_binary_buffer(length) transferred = self._controlTransfer( request_type, request, value, index, data, length, timeout, ) return data_buffer[:transferred]
def controlRead( self, request_type, request, value, index, length, timeout=0)
Synchronous control read. timeout: in milliseconds, how long to wait for data. Set to 0 to disable. See controlWrite for other parameters description. To avoid memory copies, use an object implementing the writeable buffer interface (ex: bytearray) for the "data" parameter. Returns received data.
4.42962
4.921632
0.900031
# pylint: disable=undefined-variable endpoint = (endpoint & ~ENDPOINT_DIR_MASK) | ENDPOINT_OUT # pylint: enable=undefined-variable data, _ = create_initialised_buffer(data) return self._bulkTransfer(endpoint, data, sizeof(data), timeout)
def bulkWrite(self, endpoint, data, timeout=0)
Synchronous bulk write. endpoint: endpoint to send data to. data: data to send. timeout: in milliseconds, how long to wait for device acknowledgement. Set to 0 to disable. To avoid memory copies, use an object implementing the writeable buffer interface (ex: bytearray) for the "data" parameter. Returns the number of bytes actually sent. May raise an exception from the USBError family. USBErrorTimeout exception has a "transferred" property giving the number of bytes sent up to the timeout.
7.5772
6.656876
1.138252
# pylint: disable=undefined-variable endpoint = (endpoint & ~ENDPOINT_DIR_MASK) | ENDPOINT_IN # pylint: enable=undefined-variable data, data_buffer = create_binary_buffer(length) try: transferred = self._bulkTransfer(endpoint, data, length, timeout) except USBErrorTimeout as exception: exception.received = data_buffer[:exception.transferred] raise return data_buffer[:transferred]
def bulkRead(self, endpoint, length, timeout=0)
Synchronous bulk read. timeout: in milliseconds, how long to wait for data. Set to 0 to disable. See bulkWrite for other parameters description. To avoid memory copies, use an object implementing the writeable buffer interface (ex: bytearray) for the "data" parameter. Returns received data. May raise an exception from the USBError family. USBErrorTimeout exception has a "received" property giving the bytes received up to the timeout.
4.853123
4.569557
1.062055
# pylint: disable=undefined-variable endpoint = (endpoint & ~ENDPOINT_DIR_MASK) | ENDPOINT_OUT # pylint: enable=undefined-variable data, _ = create_initialised_buffer(data) return self._interruptTransfer(endpoint, data, sizeof(data), timeout)
def interruptWrite(self, endpoint, data, timeout=0)
Synchronous interrupt write. endpoint: endpoint to send data to. data: data to send. timeout: in milliseconds, how long to wait for device acknowledgement. Set to 0 to disable. To avoid memory copies, use an object implementing the writeable buffer interface (ex: bytearray) for the "data" parameter. Returns the number of bytes actually sent. May raise an exception from the USBError family. USBErrorTimeout exception has a "transferred" property giving the number of bytes sent up to the timeout.
6.193925
7.142055
0.867247
# pylint: disable=undefined-variable endpoint = (endpoint & ~ENDPOINT_DIR_MASK) | ENDPOINT_IN # pylint: enable=undefined-variable data, data_buffer = create_binary_buffer(length) try: transferred = self._interruptTransfer( endpoint, data, length, timeout, ) except USBErrorTimeout as exception: exception.received = data_buffer[:exception.transferred] raise return data_buffer[:transferred]
def interruptRead(self, endpoint, length, timeout=0)
Synchronous interrupt read. timeout: in milliseconds, how long to wait for data. Set to 0 to disable. See interruptWrite for other parameters description. To avoid memory copies, use an object implementing the writeable buffer interface (ex: bytearray) for the "data" parameter. Returns received data. May raise an exception from the USBError family. USBErrorTimeout exception has a "received" property giving the bytes received up to the timeout.
4.681789
4.650245
1.006783
result = USBTransfer( self.__handle, iso_packets, self.__inflight_add, self.__inflight_remove, ) self.__transfer_set.add(result) return result
def getTransfer(self, iso_packets=0)
Get an USBTransfer instance for asynchronous use. iso_packets: the number of isochronous transfer descriptors to allocate.
7.502648
6.675685
1.123877
port_list = (c_uint8 * PATH_MAX_DEPTH)() result = libusb1.libusb_get_port_numbers( self.device_p, port_list, len(port_list)) mayRaiseUSBError(result) return list(port_list[:result])
def getPortNumberList(self)
Get the port number of each hub toward device.
5.500513
5.180278
1.061818
result = libusb1.libusb_get_max_packet_size(self.device_p, endpoint) mayRaiseUSBError(result) return result
def getMaxPacketSize(self, endpoint)
Get device's max packet size for given endpoint. Warning: this function will not always give you the expected result. See https://libusb.org/ticket/77 . You should instead consult the endpoint descriptor of current configuration and alternate setting.
6.111666
5.706667
1.070969
result = libusb1.libusb_get_max_iso_packet_size(self.device_p, endpoint) mayRaiseUSBError(result) return result
def getMaxISOPacketSize(self, endpoint)
Get the maximum size for a single isochronous packet for given endpoint. Warning: this function will not always give you the expected result. See https://libusb.org/ticket/77 . You should instead consult the endpoint descriptor of current configuration and alternate setting.
5.8564
5.801775
1.009415
handle = libusb1.libusb_device_handle_p() mayRaiseUSBError(libusb1.libusb_open(self.device_p, byref(handle))) result = USBDeviceHandle(self.__context, handle, self) self.__close_set.add(result) return result
def open(self)
Open device. Returns an USBDeviceHandle instance.
5.883771
4.468984
1.316579
assert self.__context_refcount == 0 mayRaiseUSBError(libusb1.libusb_init(byref(self.__context_p))) return self
def open(self)
Finish context initialisation, as is normally done in __enter__ . This happens automatically on the first method call needing access to the uninitialised properties, but with a warning. Call this method ONLY if your usage pattern prevents you from using the with USBContext() as context: form: this means there are ways to avoid calling close(), which can cause issues particularly hard to debug (ex: interpreter hangs on exit).
16.203451
11.489764
1.410251
self.__auto_open = False self.__context_cond.acquire() try: while self.__context_refcount and self.__context_p: self.__context_cond.wait() self._exit() finally: self.__context_cond.notifyAll() self.__context_cond.release()
def close(self)
Close (destroy) this USB context, and all related instances. When this method has been called, methods on its instance will become mostly no-ops, returning None until explicitly re-opened (by calling open() or __enter__()). Note: "exit" is a deprecated alias of "close".
4.920887
4.518615
1.089025
device_p_p = libusb1.libusb_device_p_p() libusb_device_p = libusb1.libusb_device_p device_list_len = libusb1.libusb_get_device_list(self.__context_p, byref(device_p_p)) mayRaiseUSBError(device_list_len) try: for device_p in device_p_p[:device_list_len]: try: # Instantiate our own libusb_device_p object so we can free # libusb-provided device list. Is this a bug in ctypes that # it doesn't copy pointer value (=pointed memory address) ? # At least, it's not so convenient and forces using such # weird code. device = USBDevice(self, libusb_device_p(device_p.contents)) except USBError: if not skip_on_error: raise else: self.__close_set.add(device) yield device finally: libusb1.libusb_free_device_list(device_p_p, 1)
def getDeviceIterator(self, skip_on_error=False)
Return an iterator over all USB devices currently plugged in, as USBDevice instances. skip_on_error (bool) If True, ignore devices which raise USBError.
5.053764
5.152528
0.980832
return list( self.getDeviceIterator( skip_on_error=skip_on_access_error or skip_on_error, ), )
def getDeviceList(self, skip_on_access_error=False, skip_on_error=False)
Return a list of all USB devices currently plugged in, as USBDevice instances. skip_on_error (bool) If True, ignore devices which raise USBError. skip_on_access_error (bool) DEPRECATED. Alias for skip_on_error.
4.963563
4.864328
1.020401
for device in self.getDeviceIterator( skip_on_error=skip_on_access_error or skip_on_error, ): if device.getVendorID() == vendor_id and \ device.getProductID() == product_id: return device
def getByVendorIDAndProductID( self, vendor_id, product_id, skip_on_access_error=False, skip_on_error=False)
Get the first USB device matching given vendor and product ids. Returns an USBDevice instance, or None if no present device matches. skip_on_error (bool) (see getDeviceList) skip_on_access_error (bool) (see getDeviceList)
2.706419
2.537776
1.066453
result = self.getByVendorIDAndProductID( vendor_id, product_id, skip_on_access_error=skip_on_access_error, skip_on_error=skip_on_error) if result is not None: return result.open()
def openByVendorIDAndProductID( self, vendor_id, product_id, skip_on_access_error=False, skip_on_error=False)
Get the first USB device matching given vendor and product ids. Returns an USBDeviceHandle instance, or None if no present device matches. skip_on_error (bool) (see getDeviceList) skip_on_access_error (bool) (see getDeviceList)
1.899483
2.026415
0.937361
pollfd_p_p = libusb1.libusb_get_pollfds(self.__context_p) if not pollfd_p_p: errno = get_errno() if errno: raise OSError(errno) else: # Assume not implemented raise NotImplementedError( 'Your libusb does not seem to implement pollable FDs') try: result = [] append = result.append fd_index = 0 while pollfd_p_p[fd_index]: append(( pollfd_p_p[fd_index].contents.fd, pollfd_p_p[fd_index].contents.events, )) fd_index += 1 finally: _free(pollfd_p_p) return result
def getPollFDList(self)
Return file descriptors to be used to poll USB events. You should not have to call this method, unless you are integrating this class with a polling mechanism.
3.260379
3.049298
1.069223
if tv is None: tv = 0 tv_s = int(tv) real_tv = libusb1.timeval(tv_s, int((tv - tv_s) * 1000000)) mayRaiseUSBError( libusb1.libusb_handle_events_timeout( self.__context_p, byref(real_tv), ), )
def handleEventsTimeout(self, tv=0)
Handle any pending event. If tv is 0, will return immediately after handling already-pending events. Otherwise, defines the maximum amount of time to wait for events, in seconds.
4.851507
4.494545
1.079421
if added_cb is None: added_cb = self.__null_pointer else: added_cb = libusb1.libusb_pollfd_added_cb_p(added_cb) if removed_cb is None: removed_cb = self.__null_pointer else: removed_cb = libusb1.libusb_pollfd_removed_cb_p(removed_cb) if user_data is None: user_data = self.__null_pointer self.__added_cb = added_cb self.__removed_cb = removed_cb self.__poll_cb_user_data = user_data self.__libusb_set_pollfd_notifiers( self.__context_p, self.__cast(added_cb, libusb1.libusb_pollfd_added_cb_p), self.__cast(removed_cb, libusb1.libusb_pollfd_removed_cb_p), user_data, )
def setPollFDNotifiers( self, added_cb=None, removed_cb=None, user_data=None)
Give libusb1 methods to call when it should add/remove file descriptor for polling. You should not have to call this method, unless you are integrating this class with a polling mechanism.
1.902077
1.862452
1.021276
timeval = libusb1.timeval() result = libusb1.libusb_get_next_timeout( self.__context_p, byref(timeval)) if result == 0: return None elif result == 1: return timeval.tv_sec + (timeval.tv_usec * 0.000001) raiseUSBError(result)
def getNextTimeout(self)
Returns the next internal timeout that libusb needs to handle, in seconds, or None if no timeout is needed. You should not have to call this method, unless you are integrating this class with a polling mechanism.
2.986083
2.822882
1.057814
if tv is None: tv = 0 tv_s = int(tv) real_tv = libusb1.timeval(tv_s, int((tv - tv_s) * 1000000)) libusb1.libusb_wait_for_event(self.__context_p, byref(real_tv))
def waitForEvent(self, tv=0)
See libusb_wait_for_event doc.
3.880817
3.347007
1.159489