code
string
signature
string
docstring
string
loss_without_docstring
float64
loss_with_docstring
float64
factor
float64
if not position: position = (2. / 5 + offset, 3. / 5, 0) self._labels["right"] = (label, position, rotation, kwargs)
def right_axis_label(self, label, position=None, rotation=-60, offset=0.08, **kwargs)
Sets the label on the right axis. Parameters ---------- label: String The axis label position: 3-Tuple of floats, None The position of the text label rotation: float, -60 The angle of rotation of the label offset: float, 0.08 Used to compute the distance of the label from the axis kwargs: Any kwargs to pass through to matplotlib.
5.013289
6.144981
0.815835
if not position: position = (0.5, -offset / 2., 0.5) self._labels["bottom"] = (label, position, rotation, kwargs)
def bottom_axis_label(self, label, position=None, rotation=0, offset=0.02, **kwargs)
Sets the label on the bottom axis. Parameters ---------- label: String The axis label position: 3-Tuple of floats, None The position of the text label rotation: float, 0 The angle of rotation of the label offset: float, 0.02 Used to compute the distance of the label from the axis kwargs: Any kwargs to pass through to matplotlib.
4.981016
6.563703
0.758873
if not position: position = (1, offset / 2, 0) self._corner_labels["right"] = (label, position, rotation, kwargs)
def right_corner_label(self, label, position=None, rotation=0, offset=0.08, **kwargs)
Sets the label on the right corner (complements left axis). Parameters ---------- label: String The axis label position: 3-Tuple of floats, None The position of the text label rotation: float, 0 The angle of rotation of the label offset: float, 0.08 Used to compute the distance of the label from the axis kwargs: Any kwargs to pass through to matplotlib.
5.23683
6.743186
0.776611
if not position: position = (-offset / 2, offset / 2, 0) self._corner_labels["left"] = (label, position, rotation, kwargs)
def left_corner_label(self, label, position=None, rotation=0, offset=0.08, **kwargs)
Sets the label on the left corner (complements right axis). Parameters ---------- label: String The axis label position: 3-Tuple of floats, None The position of the text label rotation: float, 0 The angle of rotation of the label offset: float, 0.08 Used to compute the distance of the label from the axis kwargs: Any kwargs to pass through to matplotlib.
4.34592
5.552965
0.782631
if not position: position = (-offset / 2, 1 + offset, 0) self._corner_labels["top"] = (label, position, rotation, kwargs)
def top_corner_label(self, label, position=None, rotation=0, offset=0.2, **kwargs)
Sets the label on the top corner. Parameters ---------- label: String The axis label position: 3-Tuple of floats, None The position of the text label rotation: float, 0 The angle of rotation of the label offset: float, 0.2 Used to compute the distance of the label from the axis kwargs: Any kwargs to pass through to matplotlib.
4.642789
5.769115
0.804766
ax = self.get_axes() plotting.clear_matplotlib_ticks(ax=ax, axis=axis)
def clear_matplotlib_ticks(self, axis="both")
Clears the default matplotlib ticks.
4.701805
4.075795
1.153592
for k in ['b', 'l', 'r']: self._ticks[k] = numpy.linspace( self._axis_limits[k][0], self._axis_limits[k][1], int(self._boundary_scale / float(multiple)) + 1 ).tolist()
def get_ticks_from_axis_limits(self, multiple=1)
Taking self._axis_limits and self._boundary_scale get the scaled ticks for all three axes and store them in self._ticks under the keys 'b' for bottom, 'l' for left and 'r' for right axes.
4.473866
2.42248
1.846812
for k in ['b', 'l', 'r']: self.ticks(ticks=self._ticks[k], locations=locations, axis=k, clockwise=clockwise, multiple=multiple, axes_colors=axes_colors, tick_formats=tick_formats, **kwargs)
def set_custom_ticks(self, locations=None, clockwise=False, multiple=1, axes_colors=None, tick_formats=None, **kwargs)
Having called get_ticks_from_axis_limits, set the custom ticks on the plot.
3.097377
3.226574
0.959958
ax = self.get_axes() # Remove any previous labels for mpl_object in self._to_remove: mpl_object.remove() self._to_remove = [] # Redraw the labels with the appropriate angles label_data = list(self._labels.values()) label_data.extend(self._corner_labels.values()) for (label, position, rotation, kwargs) in label_data: transform = ax.transAxes x, y = project_point(position) # Calculate the new angle. position = numpy.array([x, y]) new_rotation = ax.transData.transform_angles( numpy.array((rotation,)), position.reshape((1, 2)))[0] text = ax.text(x, y, label, rotation=new_rotation, transform=transform, horizontalalignment="center", **kwargs) text.set_rotation_mode("anchor") self._to_remove.append(text)
def _redraw_labels(self)
Redraw axis labels, typically after draw or resize events.
3.513057
3.42688
1.025147
return convert_coordinates_sequence(points, self._boundary_scale, self._axis_limits, axisorder)
def convert_coordinates(self, points, axisorder='blr')
Convert data coordinates to simplex coordinates for plotting in the case that axis limits have been applied.
15.999428
16.233688
0.985569
if isinstance(cmap, matplotlib.colors.Colormap): return cmap if isinstance(cmap, str): cmap_name = cmap else: cmap_name = DEFAULT_COLOR_MAP_NAME return plt.get_cmap(cmap_name)
def get_cmap(cmap=None)
Loads a matplotlib colormap if specified or supplies the default. Parameters ---------- cmap: string or matplotlib.colors.Colormap instance The name of the Matplotlib colormap to look up. Returns ------- The desired Matplotlib colormap Raises ------ ValueError if colormap name is not recognized by Matplotlib
2.632056
3.003437
0.876348
cmap = get_cmap(cmap) if upper - lower == 0: rgba = cmap(0) else: rgba = cmap((value - lower) / float(upper - lower)) hex_ = rgb2hex(rgba) return hex_
def colormapper(value, lower=0, upper=1, cmap=None)
Maps values to colors by normalizing within [lower, upper], obtaining rgba from the given matplotlib color map for heatmap polygon coloring. Parameters ---------- value: float The value to be colormapped lower: float Lower bound of colors upper: float Upper bound of colors cmap: String or matplotlib.colors.Colormap (optional) Colormap object to prevent repeated lookup Returns ------- hex_: string The value mapped to an appropriate hex color value
3.175906
3.466409
0.916195
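A minimal usage sketch for colormapper, assuming it and get_cmap above are in scope (in python-ternary they live in the colormapping helpers); the range and colormap name are illustrative:

# Map a few values in [0, 10] to hex colors; passing the cmap object
# up front avoids a repeated lookup inside a plotting loop.
cmap = get_cmap("viridis")
for v in [0.0, 2.5, 10.0]:
    print(v, colormapper(v, lower=0, upper=10, cmap=cmap))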
# http://stackoverflow.com/questions/8342549/matplotlib-add-colorbar-to-a-sequence-of-line-plots norm = plt.Normalize(vmin=vmin, vmax=vmax) sm = plt.cm.ScalarMappable(cmap=cmap, norm=norm) sm._A = [] cb = plt.colorbar(sm, ax=ax, **kwargs) if cbarlabel is not None: cb.set_label(cbarlabel) if scientific: cb.locator = matplotlib.ticker.LinearLocator(numticks=7) cb.formatter = matplotlib.ticker.ScalarFormatter() cb.formatter.set_powerlimits((0, 0)) cb.update_ticks()
def colorbar_hack(ax, vmin, vmax, cmap, scientific=False, cbarlabel=None, **kwargs)
Colorbar hack to insert colorbar on ternary plot. Called by heatmap, not intended for direct usage. Parameters ---------- ax: Matplotlib AxesSubplot The axis to attach the colorbar to vmin: float Minimum value to portray in colorbar vmax: float Maximum value to portray in colorbar cmap: Matplotlib colormap Matplotlib colormap to use
1.912824
2.093921
0.913513
key_size = len(list(data.keys())[0]) if not keys: keys = triangle_coordinates(i, j, k) # Reduce key from (i, j, k) to (i, j) if necessary keys = [tuple(key[:key_size]) for key in keys] # Sum over the values of the points to blend try: s = sum(data[key] for key in keys) value = s / 3. except KeyError: value = None return value
def blend_value(data, i, j, k, keys=None)
Computes the average value of the three vertices of a triangle in the simplex triangulation, where two of the vertices are on the lower horizontal.
4.926064
4.625726
1.064928
keys = alt_triangle_coordinates(i, j, k) return blend_value(data, i, j, k, keys=keys)
def alt_blend_value(data, i, j, k)
Computes the average value of the three vertices of a triangle in the simplex triangulation, where two of the vertices are on the upper horizontal.
6.012591
6.920155
0.868852
return [(i, j, k), (i + 1, j, k - 1), (i, j + 1, k - 1)]
def triangle_coordinates(i, j, k)
Computes coordinates of the constituent triangles of a triangulation for the simplex. These triangles are parallel to the lower axis on the lower side. Parameters ---------- i,j,k: enumeration of the desired triangle Returns ------- A list of 3-tuples of coordinates of the triangle (unprojected)
2.773155
4.531641
0.611954
return [(i, j + 1, k - 1), (i + 1, j, k - 1), (i + 1, j + 1, k - 2)]
def alt_triangle_coordinates(i, j, k)
Computes coordinates of the constituent triangles of a triangulation for the simplex. These triangles are parallel to the lower axis on the upper side. Parameters ---------- i,j,k: enumeration of the desired triangle Returns ------- A list of 3-tuples of coordinates of the triangle (unprojected)
2.75353
3.648213
0.754761
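A quick sketch of how the two orientations fit together, assuming both triangle functions above are in scope; the lattice point (1, 1, 2) is arbitrary:

# The upright triangle at (i, j, k) and the upside-down triangle
# built from the same lattice point share two vertices.
print(triangle_coordinates(1, 1, 2))      # [(1, 1, 2), (2, 1, 1), (1, 2, 1)]
print(alt_triangle_coordinates(1, 1, 2))  # [(1, 2, 1), (2, 1, 1), (2, 2, 0)]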
zero = numpy.array([0, 0, 0]) alpha = numpy.array([-1./3, 2./3, 0]) deltaup = numpy.array([1./3, 1./3, 0]) deltadown = numpy.array([2./3, -1./3, 0]) i_vec = numpy.array([0, 1./2, -1./2]) i_vec_down = numpy.array([1./2, -1./2, 0]) deltaX_vec = numpy.array([1./2, 0, -1./2]) d = dict() # Corner Points d["100"] = [zero, -deltaX_vec, -deltadown, -i_vec_down] d["010"] = [zero, i_vec_down, -alpha, -i_vec] d["001"] = [zero, i_vec, deltaup, deltaX_vec] # On the Edges d["011"] = [i_vec, deltaup, deltadown, -alpha, -i_vec] d["101"] = [-deltaX_vec, -deltadown, alpha, deltaup, deltaX_vec] d["110"] = [i_vec_down, -alpha, -deltaup, -deltadown, -i_vec_down] # Interior point d["111"] = [alpha, deltaup, deltadown, -alpha, -deltaup, -deltadown] return d
def generate_hexagon_deltas()
Generates a dictionary of the necessary additive vectors to generate the hexagon points for the hexagonal heatmap.
2.562848
2.46174
1.041072
signature = "" for x in [i, j, k]: if x == 0: signature += "0" else: signature += "1" deltas = hexagon_deltas[signature] center = numpy.array([i, j, k]) return numpy.array([center + x for x in deltas])
def hexagon_coordinates(i, j, k)
Computes coordinates of the constituent hexagons of a hexagonal heatmap. Parameters ---------- i, j, k: enumeration of the desired hexagon Returns ------- A numpy array of coordinates of the hexagon (unprojected)
3.5945
4.248951
0.845973
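hexagon_coordinates reads a module-level hexagon_deltas dict; a sketch of wiring that up from generate_hexagon_deltas above, with an arbitrary interior point:

hexagon_deltas = generate_hexagon_deltas()
# An interior point (all of i, j, k nonzero) selects signature "111",
# so the full six-vertex polygon comes back.
print(hexagon_coordinates(2, 3, 1).shape)  # (6, 3)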
# We'll project the coordinates inside this function to prevent # passing around permutation more than necessary project = functools.partial(project_point, permutation=permutation) if isinstance(data, dict): data_gen = data.items() else: # Only works with style == 'h' data_gen = data for key, value in data_gen: if value is None: continue i = key[0] j = key[1] k = scale - i - j if style == 'h': vertices = hexagon_coordinates(i, j, k) yield (map(project, vertices), value) elif style == 'd': # Upright triangles if (i <= scale) and (j <= scale) and (k >= 0): vertices = triangle_coordinates(i, j, k) yield (map(project, vertices), value) # Upside-down triangles if (i < scale) and (j < scale) and (k >= 1): vertices = alt_triangle_coordinates(i, j, k) value = blend_value(data, i, j, k) yield (map(project, vertices), value) elif style == 't': # Upright triangles if (i < scale) and (j < scale) and (k > 0): vertices = triangle_coordinates(i, j, k) value = blend_value(data, i, j, k) yield (map(project, vertices), value) # If not on the boundary add the upside-down triangle if (i < scale) and (j < scale) and (k > 1): vertices = alt_triangle_coordinates(i, j, k) value = alt_blend_value(data, i, j, k) yield (map(project, vertices), value)
def polygon_generator(data, scale, style, permutation=None)
Generator for the vertices of the polygon to be colored and its color, depending on style. Called by heatmap.
2.893037
2.857077
1.012586
if not ax: fig, ax = pyplot.subplots() # If use_rgba, make the RGBA values numpy arrays so that they can # be averaged. if use_rgba: for k, v in data.items(): data[k] = numpy.array(v) else: cmap = get_cmap(cmap) if vmin is None: vmin = min(data.values()) if vmax is None: vmax = max(data.values()) style = style.lower()[0] if style not in ["t", "h", 'd']: raise ValueError("Heatmap style must be 'triangular', 'dual-triangular', or 'hexagonal'") vertices_values = polygon_generator(data, scale, style, permutation=permutation) # Draw the polygons and color them for vertices, value in vertices_values: if value is None: continue if not use_rgba: color = colormapper(value, vmin, vmax, cmap=cmap) else: color = value # rgba tuple (r,g,b,a) all in [0,1] # Matplotlib wants a list of xs and a list of ys xs, ys = unzip(vertices) ax.fill(xs, ys, facecolor=color, edgecolor=color) if not cb_kwargs: cb_kwargs = dict() if colorbar: colorbar_hack(ax, vmin, vmax, cmap, scientific=scientific, cbarlabel=cbarlabel, **cb_kwargs) return ax
def heatmap(data, scale, vmin=None, vmax=None, cmap=None, ax=None, scientific=False, style='triangular', colorbar=True, permutation=None, use_rgba=False, cbarlabel=None, cb_kwargs=None)
Plots heatmap of given color values. Parameters ---------- data: dictionary A dictionary mapping the i, j polygon to the heatmap color, where i + j + k = scale. scale: Integer The scale used to partition the simplex. vmin: float, None The minimum color value, used to normalize colors. Computed if absent. vmax: float, None The maximum color value, used to normalize colors. Computed if absent. cmap: String or matplotlib.colors.Colormap, None The name of the Matplotlib colormap to use. ax: Matplotlib AxesSubplot, None The subplot to draw on. scientific: Bool, False Whether to use scientific notation for colorbar numbers. style: String, "triangular" The style of the heatmap, "triangular", "dual-triangular" or "hexagonal" colorbar: bool, True Show colorbar. permutation: string, None A permutation of the coordinates use_rgba: bool, False Use rgba color values cbarlabel: string, None Text label for the colorbar cb_kwargs: dict dict of kwargs to pass to colorbar Returns ------- ax: The matplotlib axis
3.477262
3.288613
1.057364
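A small end-to-end sketch for heatmap, assuming the functions above are importable (for example from python-ternary's heatmapping module); the data values are made up:

import matplotlib.pyplot as plt
scale = 3
# Keys are (i, j) with i + j + k = scale implied.
data = {(i, j): i + 2 * j for i in range(scale + 1) for j in range(scale + 1 - i)}
ax = heatmap(data, scale, style="hexagonal", cbarlabel="value")
plt.show()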
# Apply the function to a simplex partition data = dict() for i, j, k in simplex_iterator(scale=scale, boundary=boundary): data[(i, j)] = func(normalize([i, j, k])) # Pass everything to the heatmapper ax = heatmap(data, scale, cmap=cmap, ax=ax, style=style, scientific=scientific, colorbar=colorbar, permutation=permutation, vmin=vmin, vmax=vmax, cbarlabel=cbarlabel, cb_kwargs=cb_kwargs) return ax
def heatmapf(func, scale=10, boundary=True, cmap=None, ax=None, scientific=False, style='triangular', colorbar=True, permutation=None, vmin=None, vmax=None, cbarlabel=None, cb_kwargs=None)
Computes func on heatmap partition coordinates and plots heatmap. In other words, computes the function on lattice points of the simplex (normalized points) and creates a heatmap from the values. Parameters ---------- func: Function A function of 3-tuples to be heatmapped scale: Integer The scale used to partition the simplex boundary: Bool, True Include the boundary points or not cmap: String, None The name of the Matplotlib colormap to use ax: Matplotlib axis object, None The axis to draw the colormap on style: String, "triangular" The style of the heatmap, "triangular", "dual-triangular" or "hexagonal" scientific: Bool, False Whether to use scientific notation for colorbar numbers. colorbar: bool, True Show colorbar. permutation: string, None A permutation of the coordinates vmin: float The minimum color value, used to normalize colors. vmax: float The maximum color value, used to normalize colors. cbarlabel: string, None Text label for the colorbar cb_kwargs: dict dict of kwargs to pass to colorbar Returns ------- ax, The matplotlib axis
3.456018
3.429278
1.007798
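And a companion sketch for heatmapf, which evaluates a function of normalized 3-tuples over the lattice; Shannon entropy is just an illustrative choice:

import math
def shannon_entropy(p):
    # Entropy of a probability 3-tuple; skip zero components.
    return -sum(x * math.log(x) for x in p if x > 0)
ax = heatmapf(shannon_entropy, scale=20, boundary=True, style="triangular", cbarlabel="entropy")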
coord_str = [] for c in coordinates: coord_str.append(",".join(map(str, c))) coord_str = " ".join(coord_str) polygon = '<polygon points="%s" style="fill:%s;stroke:%s;stroke-width:0"/>\n' % (coord_str, color, color) return polygon
def svg_polygon(coordinates, color)
Create an SVG polygon for the stationary heatmap. Parameters ---------- coordinates: list The coordinates defining the polygon color: string RGB color value e.g. #26ffd1 Returns ------- string, the svg string for the polygon
2.317237
2.380141
0.973571
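svg_polygon is a plain string builder, so it can be sanity-checked standalone; the coordinates below are an already-projected triangle:

print(svg_polygon([(0, 0), (100, 0), (50, 86.6)], "#26ffd1"))
# <polygon points="0,0 100,0 50,86.6" style="fill:#26ffd1;stroke:#26ffd1;stroke-width:0"/>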
style = style.lower()[0] if style not in ["t", "h", 'd']: raise ValueError("Heatmap style must be 'triangular', 'dual-triangular', or 'hexagonal'") if not isinstance(data, dict): if not style == 'h': raise ValueError("Data can only be given as a generator for hexagonal style heatmaps because of blending for adjacent polygons.") elif vmax is None or vmin is None: raise ValueError("vmax and vmin must be supplied for data given as a generator.") cmap = get_cmap(cmap) if vmin is None: vmin = min(data.values()) if vmax is None: vmax = max(data.values()) height = scale * numpy.sqrt(3) / 2 + 2 output_file = open(filename, 'w') output_file.write('<svg height="%s" width="%s">\n' % (height, scale)) vertices_values = polygon_generator(data, scale, style, permutation=permutation) # Draw the polygons and color them for vertices, value in vertices_values: color = colormapper(value, vmin, vmax, cmap=cmap) output_file.write(svg_polygon(vertices, color)) output_file.write('</svg>\n') output_file.close()
def svg_heatmap(data, scale, filename, vmax=None, vmin=None, style='h', permutation=None, cmap=None)
Create a heatmap in SVG format. Intended for use with very large datasets, which would require large amounts of RAM using matplotlib. You can convert the image to another format with e.g. ImageMagick: convert -density 1200 -resize 1000x1000 -rotate 180 your.svg your.png Parameters ---------- data: dictionary or k, v generator A dictionary mapping the i, j polygon to the heatmap color, where i + j + k = scale. If using a generator, style must be 'h'. scale: Integer The scale used to partition the simplex. filename: string The filename to write the SVG data to. vmin: float The minimum color value, used to normalize colors. vmax: float The maximum color value, used to normalize colors. cmap: String or matplotlib.colors.Colormap, None The name of the Matplotlib colormap to use. style: String, "h" The style of the heatmap, "triangular", "dual-triangular" or "hexagonal" permutation: string, None A permutation of the coordinates
3.726393
3.344691
1.114122
pp1 = project_point(p1, permutation=permutation) pp2 = project_point(p2, permutation=permutation) ax.add_line(Line2D((pp1[0], pp2[0]), (pp1[1], pp2[1]), **kwargs))
def line(ax, p1, p2, permutation=None, **kwargs)
Draws a line on `ax` from p1 to p2. Parameters ---------- ax: Matplotlib AxesSubplot, None The subplot to draw on. p1: 3-tuple The (i,j,k) starting coordinates, projected to (x,y) before drawing p2: 3-tuple The (i,j,k) ending coordinates, projected to (x,y) before drawing kwargs: Any kwargs to pass through to Matplotlib.
2.168669
2.361858
0.918205
p1 = (0, i, scale - i) p2 = (scale - i, i, 0) line(ax, p1, p2, **kwargs)
def horizontal_line(ax, scale, i, **kwargs)
Draws the i-th horizontal line parallel to the lower axis. Parameters ---------- ax: Matplotlib AxesSubplot The subplot to draw on. scale: float Simplex scale size. i: float The index of the line to draw kwargs: Dictionary Any kwargs to pass through to Matplotlib.
3.955693
4.303259
0.919232
p1 = (i, scale - i, 0) p2 = (i, 0, scale - i) line(ax, p1, p2, **kwargs)
def left_parallel_line(ax, scale, i, **kwargs)
Draws the i-th line parallel to the left axis. Parameters ---------- ax: Matplotlib AxesSubplot The subplot to draw on. scale: float Simplex scale size. i: float The index of the line to draw kwargs: Dictionary Any kwargs to pass through to Matplotlib.
3.861351
4.240463
0.910597
p1 = (0, scale - i, i) p2 = (scale - i, 0, i) line(ax, p1, p2, **kwargs)
def right_parallel_line(ax, scale, i, **kwargs)
Draws the i-th line parallel to the right axis. Parameters ---------- ax: Matplotlib AxesSubplot The subplot to draw on. scale: float Simplex scale size. i: float The index of the line to draw kwargs: Dictionary Any kwargs to pass through to Matplotlib.
4.089645
4.33649
0.943077
# Set default color as black. if axes_colors is None: axes_colors = dict() for _axis in ['l', 'r', 'b']: if _axis not in axes_colors.keys(): axes_colors[_axis] = 'black' horizontal_line(ax, scale, 0, color=axes_colors['b'], **kwargs) left_parallel_line(ax, scale, 0, color=axes_colors['l'], **kwargs) right_parallel_line(ax, scale, 0, color=axes_colors['r'], **kwargs) return ax
def boundary(ax, scale, axes_colors=None, **kwargs)
Plots the boundary of the simplex on the given matplotlib axis and returns the axis. Parameters ---------- ax: Matplotlib AxesSubplot, None The subplot to draw on. scale: float Simplex scale size. kwargs: Any kwargs to pass through to matplotlib. axes_colors: dict Option for coloring boundaries different colors. e.g. {'l': 'g'} for coloring the left axis boundary green
2.476812
2.553336
0.97003
if not base: base = dict() if not updates: updates = dict() z = base.copy() z.update(updates) return z
def merge_dicts(base, updates)
Given two dicts, merge them into a new dict as a shallow copy. Parameters ---------- base: dict The base dictionary. updates: dict Secondary dictionary whose values override the base.
2.922514
3.111161
0.939365
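merge_dicts backs the per-direction kwargs in gridlines below; a two-line sketch of its shallow-copy semantics:

base = {"linewidth": 0.5, "linestyle": ":"}
print(merge_dicts(base, {"linewidth": 2}))  # {'linewidth': 2, 'linestyle': ':'}
print(base)  # the base dict itself is left untouched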
if 'linewidth' not in kwargs: kwargs["linewidth"] = 0.5 if 'linestyle' not in kwargs: kwargs["linestyle"] = ':' horizontal_kwargs = merge_dicts(kwargs, horizontal_kwargs) left_kwargs = merge_dicts(kwargs, left_kwargs) right_kwargs = merge_dicts(kwargs, right_kwargs) if not multiple: multiple = 1. ## Draw grid-lines # Parallel to horizontal axis for i in arange(0, scale, multiple): horizontal_line(ax, scale, i, **horizontal_kwargs) # Parallel to left and right axes for i in arange(0, scale + multiple, multiple): left_parallel_line(ax, scale, i, **left_kwargs) right_parallel_line(ax, scale, i, **right_kwargs) return ax
def gridlines(ax, scale, multiple=None, horizontal_kwargs=None, left_kwargs=None, right_kwargs=None, **kwargs)
Plots grid lines excluding boundary. Parameters ---------- ax: Matplotlib AxesSubplot, None The subplot to draw on. scale: float Simplex scale size. multiple: float, None Specifies which inner gridlines to draw. For example, if scale=30 and multiple=6, only 5 inner gridlines will be drawn. horizontal_kwargs: dict, None Any kwargs to pass through to matplotlib for horizontal gridlines left_kwargs: dict, None Any kwargs to pass through to matplotlib for left parallel gridlines right_kwargs: dict, None Any kwargs to pass through to matplotlib for right parallel gridlines kwargs: Any kwargs to pass through to matplotlib, if not using horizontal_kwargs, left_kwargs, or right_kwargs
2.442732
2.378164
1.02715
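A sketch combining boundary and gridlines on a bare matplotlib axis, assuming both functions above are in scope; the scale and multiple values are arbitrary:

import matplotlib.pyplot as plt
fig, ax = plt.subplots()
boundary(ax, scale=30, linewidth=1.5)
gridlines(ax, scale=30, multiple=6, color="grey")
ax.set_aspect("equal")
ax.axis("off")
plt.show()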
# Solve the items. item_set = set() for candidate in prev_candidates: for item in candidate: item_set.add(item) items = sorted(item_set) # Create the temporary candidates. These will be filtered below. tmp_next_candidates = (frozenset(x) for x in combinations(items, length)) # Return all the candidates if the length of the next candidates is 2 # because their subsets are the same as items. if length < 3: return list(tmp_next_candidates) # Filter candidates that all of their subsets are # in the previous candidates. next_candidates = [ candidate for candidate in tmp_next_candidates if all( True if frozenset(x) in prev_candidates else False for x in combinations(candidate, length - 1)) ] return next_candidates
def create_next_candidates(prev_candidates, length)
Returns the apriori candidates as a list. Arguments: prev_candidates -- Previous candidates as a list. length -- The length of the next candidates.
4.070376
4.005435
1.016213
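A sketch of one candidate-generation step, assuming create_next_candidates is in scope; because every 2-item subset appears among the previous candidates, the single 3-item candidate survives the filter:

prev = [frozenset({"A", "B"}), frozenset({"A", "C"}), frozenset({"B", "C"})]
print(create_next_candidates(prev, 3))  # [frozenset({'A', 'B', 'C'})]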
# Parse arguments. max_length = kwargs.get('max_length') # For testing. _create_next_candidates = kwargs.get( '_create_next_candidates', create_next_candidates) # Process. candidates = transaction_manager.initial_candidates() length = 1 while candidates: relations = set() for relation_candidate in candidates: support = transaction_manager.calc_support(relation_candidate) if support < min_support: continue candidate_set = frozenset(relation_candidate) relations.add(candidate_set) yield SupportRecord(candidate_set, support) length += 1 if max_length and length > max_length: break candidates = _create_next_candidates(relations, length)
def gen_support_records(transaction_manager, min_support, **kwargs)
Returns a generator of support records with given transactions. Arguments: transaction_manager -- Transactions as a TransactionManager instance. min_support -- A minimum support (float). Keyword arguments: max_length -- The maximum length of relations (integer).
3.317315
3.244557
1.022424
items = record.items for combination_set in combinations(sorted(items), len(items) - 1): items_base = frozenset(combination_set) items_add = frozenset(items.difference(items_base)) confidence = ( record.support / transaction_manager.calc_support(items_base)) lift = confidence / transaction_manager.calc_support(items_add) yield OrderedStatistic( frozenset(items_base), frozenset(items_add), confidence, lift)
def gen_ordered_statistics(transaction_manager, record)
Returns a generator of ordered statistics as OrderedStatistic instances. Arguments: transaction_manager -- Transactions as a TransactionManager instance. record -- A support record as a SupportRecord instance.
4.171836
4.117819
1.013118
min_confidence = kwargs.get('min_confidence', 0.0) min_lift = kwargs.get('min_lift', 0.0) for ordered_statistic in ordered_statistics: if ordered_statistic.confidence < min_confidence: continue if ordered_statistic.lift < min_lift: continue yield ordered_statistic
def filter_ordered_statistics(ordered_statistics, **kwargs)
Filter OrderedStatistic objects. Arguments: ordered_statistics -- An OrderedStatistic iterable object. Keyword arguments: min_confidence -- The minimum confidence of relations (float). min_lift -- The minimum lift of relations (float).
2.075826
2.113262
0.982285
# Parse the arguments. min_support = kwargs.get('min_support', 0.1) min_confidence = kwargs.get('min_confidence', 0.0) min_lift = kwargs.get('min_lift', 0.0) max_length = kwargs.get('max_length', None) # Check arguments. if min_support <= 0: raise ValueError('minimum support must be > 0') # For testing. _gen_support_records = kwargs.get( '_gen_support_records', gen_support_records) _gen_ordered_statistics = kwargs.get( '_gen_ordered_statistics', gen_ordered_statistics) _filter_ordered_statistics = kwargs.get( '_filter_ordered_statistics', filter_ordered_statistics) # Calculate supports. transaction_manager = TransactionManager.create(transactions) support_records = _gen_support_records( transaction_manager, min_support, max_length=max_length) # Calculate ordered stats. for support_record in support_records: ordered_statistics = list( _filter_ordered_statistics( _gen_ordered_statistics(transaction_manager, support_record), min_confidence=min_confidence, min_lift=min_lift, ) ) if not ordered_statistics: continue yield RelationRecord( support_record.items, support_record.support, ordered_statistics)
def apriori(transactions, **kwargs)
Executes Apriori algorithm and returns a RelationRecord generator. Arguments: transactions -- A transaction iterable object (eg. [['A', 'B'], ['B', 'C']]). Keyword arguments: min_support -- The minimum support of relations (float). min_confidence -- The minimum confidence of relations (float). min_lift -- The minimum lift of relations (float). max_length -- The maximum length of the relation (integer).
2.261325
2.187632
1.033686
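An end-to-end usage sketch for the apriori generator in the apyori style; the transactions and thresholds are illustrative:

transactions = [["beer", "nuts"], ["beer", "cheese"], ["beer", "nuts"]]
for record in apriori(transactions, min_support=0.5, min_confidence=0.5):
    print(sorted(record.items), record.support)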
output_funcs = { 'json': dump_as_json, 'tsv': dump_as_two_item_tsv, } default_output_func_key = 'json' parser = argparse.ArgumentParser() parser.add_argument( '-v', '--version', action='version', version='%(prog)s {0}'.format(__version__)) parser.add_argument( 'input', metavar='inpath', nargs='*', help='Input transaction file (default: stdin).', type=argparse.FileType('r'), default=[sys.stdin]) parser.add_argument( '-o', '--output', metavar='outpath', help='Output file (default: stdout).', type=argparse.FileType('w'), default=sys.stdout) parser.add_argument( '-l', '--max-length', metavar='int', help='Max length of relations (default: infinite).', type=int, default=None) parser.add_argument( '-s', '--min-support', metavar='float', help='Minimum support ratio (must be > 0, default: 0.1).', type=float, default=0.1) parser.add_argument( '-c', '--min-confidence', metavar='float', help='Minimum confidence (default: 0.5).', type=float, default=0.5) parser.add_argument( '-t', '--min-lift', metavar='float', help='Minimum lift (default: 0.0).', type=float, default=0.0) parser.add_argument( '-d', '--delimiter', metavar='str', help='Delimiter for items of transactions (default: tab).', type=str, default='\t') parser.add_argument( '-f', '--out-format', metavar='str', help='Output format ({0}; default: {1}).'.format( ', '.join(output_funcs.keys()), default_output_func_key), type=str, choices=output_funcs.keys(), default=default_output_func_key) args = parser.parse_args(argv) args.output_func = output_funcs[args.out_format] return args
def parse_args(argv)
Parse commandline arguments. Arguments: argv -- An argument list without the program name.
1.919409
1.947576
0.985537
delimiter = kwargs.get('delimiter', '\t') for transaction in csv.reader(input_file, delimiter=delimiter): yield transaction if transaction else ['']
def load_transactions(input_file, **kwargs)
Load transactions and return a generator of transactions. Arguments: input_file -- An input file. Keyword arguments: delimiter -- The delimiter of the transaction.
4.53423
6.019134
0.753303
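load_transactions accepts any file-like object; a sketch with an in-memory buffer and the default tab delimiter:

import io
buf = io.StringIO("beer\tnuts\nbeer\tcheese\n")
print(list(load_transactions(buf)))  # [['beer', 'nuts'], ['beer', 'cheese']]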
def default_func(value): if isinstance(value, frozenset): return sorted(value) raise TypeError(repr(value) + " is not JSON serializable") converted_record = record._replace( ordered_statistics=[x._asdict() for x in record.ordered_statistics]) json.dump( converted_record._asdict(), output_file, default=default_func, ensure_ascii=False) output_file.write(os.linesep)
def dump_as_json(record, output_file)
Dump a relation record as a JSON value. Arguments: record -- A RelationRecord instance to dump. output_file -- A file to output.
3.720722
3.792789
0.980999
for ordered_stats in record.ordered_statistics: if len(ordered_stats.items_base) != 1: continue if len(ordered_stats.items_add) != 1: continue output_file.write('{0}\t{1}\t{2:.8f}\t{3:.8f}\t{4:.8f}{5}'.format( list(ordered_stats.items_base)[0], list(ordered_stats.items_add)[0], record.support, ordered_stats.confidence, ordered_stats.lift, os.linesep))
def dump_as_two_item_tsv(record, output_file)
Dump a relation record as TSV only for 2 item relations. Arguments: record -- A RelationRecord instance to dump. output_file -- A file to output.
2.938244
2.993121
0.981666
# For tests. _parse_args = kwargs.get('_parse_args', parse_args) _load_transactions = kwargs.get('_load_transactions', load_transactions) _apriori = kwargs.get('_apriori', apriori) args = _parse_args(sys.argv[1:]) transactions = _load_transactions( chain(*args.input), delimiter=args.delimiter) result = _apriori( transactions, max_length=args.max_length, min_support=args.min_support, min_confidence=args.min_confidence, min_lift=args.min_lift) for record in result: args.output_func(record, args.output)
def main(**kwargs)
Executes Apriori algorithm and prints its result.
3.10535
2.702076
1.149246
for item in transaction: if item not in self.__transaction_index_map: self.__items.append(item) self.__transaction_index_map[item] = set() self.__transaction_index_map[item].add(self.__num_transaction) self.__num_transaction += 1
def add_transaction(self, transaction)
Add a transaction. Arguments: transaction -- A transaction as an iterable object (eg. ['A', 'B']).
2.792526
3.022372
0.923951
# Empty items is supported by all transactions. if not items: return 1.0 # Empty transactions supports no items. if not self.num_transaction: return 0.0 # Create the transaction index intersection. sum_indexes = None for item in items: indexes = self.__transaction_index_map.get(item) if indexes is None: # No support for any set that contains a not existing item. return 0.0 if sum_indexes is None: # Assign the indexes on the first time. sum_indexes = indexes else: # Calculate the intersection on not the first time. sum_indexes = sum_indexes.intersection(indexes) # Calculate and return the support. return float(len(sum_indexes)) / self.__num_transaction
def calc_support(self, items)
Returns a support for items. Arguments: items -- Items as an iterable object (eg. ['A', 'B']).
4.498247
4.552814
0.988015
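A sketch of support computation, assuming TransactionManager exposes the create() factory that apriori calls above:

tm = TransactionManager.create([["A", "B"], ["B", "C"], ["A", "B", "C"]])
print(tm.calc_support(frozenset(["A", "B"])))  # in 2 of 3 transactions -> 0.666...
print(tm.calc_support(frozenset()))            # the empty itemset -> 1.0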
self.context[self.form_context_name] = self.payment_form_cls() return TemplateResponse(self.request, self.payment_template, self.context)
def render_payment_form(self)
Display the DirectPayment form for entering payment information.
4.063928
3.806342
1.067673
warn_untested() form = self.payment_form_cls(self.request.POST) if form.is_valid(): success = form.process(self.request, self.item) if success: return HttpResponseRedirect(self.success_url) else: self.context['errors'] = self.errors['processing'] self.context[self.form_context_name] = form self.context.setdefault("errors", self.errors['form']) return TemplateResponse(self.request, self.payment_template, self.context)
def validate_payment_form(self)
Try to validate and then process the DirectPayment form.
3.861161
3.635082
1.062194
wpp = PayPalWPP(self.request) try: nvp_obj = wpp.setExpressCheckout(self.item) except PayPalFailure: warn_untested() self.context['errors'] = self.errors['paypal'] return self.render_payment_form() else: return HttpResponseRedirect(express_endpoint_for_token(nvp_obj.token))
def redirect_to_express(self)
First step of ExpressCheckout. Redirect the request to PayPal using the data returned from setExpressCheckout.
10.078469
8.789568
1.14664
warn_untested() initial = dict(token=self.request.GET['token'], PayerID=self.request.GET['PayerID']) self.context[self.form_context_name] = self.confirm_form_cls(initial=initial) return TemplateResponse(self.request, self.confirm_template, self.context)
def render_confirm_form(self)
Second step of ExpressCheckout. Display an order confirmation form which contains hidden fields with the token / PayerID from PayPal.
4.780371
3.925839
1.217668
wpp = PayPalWPP(self.request) pp_data = dict(token=self.request.POST['token'], payerid=self.request.POST['PayerID']) self.item.update(pp_data) # @@@ This check and call could be moved into PayPalWPP. try: if self.is_recurring(): warn_untested() nvp = wpp.createRecurringPaymentsProfile(self.item) else: nvp = wpp.doExpressCheckoutPayment(self.item) self.handle_nvp(nvp) except PayPalFailure: self.context['errors'] = self.errors['processing'] return self.render_payment_form() else: return HttpResponseRedirect(self.success_url)
def validate_confirm_form(self)
Third and final step of ExpressCheckout. The user has pressed the confirmation button and we can send the final confirmation to PayPal using the data from the POSTed form.
5.675585
5.262178
1.078562
if request is not None: from paypal.pro.helpers import strip_ip_port self.ipaddress = strip_ip_port(request.META.get('REMOTE_ADDR', '')) if (hasattr(request, "user") and request.user.is_authenticated): self.user = request.user else: self.ipaddress = '' # No storing credit card info. query_data = dict((k, v) for k, v in paypal_request.items() if k not in self.RESTRICTED_FIELDS) self.query = urlencode(query_data) self.response = urlencode(paypal_response) # Was there a flag on the play? ack = paypal_response.get('ack', False) if ack != "Success": if ack == "SuccessWithWarning": warn_untested() self.flag_info = paypal_response.get('l_longmessage0', '') else: self.set_flag(paypal_response.get('l_longmessage0', ''), paypal_response.get('l_errorcode', ''))
def init(self, request, paypal_request, paypal_response)
Initialize a PayPalNVP instance from an HttpRequest.
4.396083
4.384484
1.002646
self.flag = True self.flag_info += info if code is not None: self.flag_code = code
def set_flag(self, info, code=None)
Flag this instance for investigation.
4.288593
3.329955
1.287883
warn_untested() from paypal.pro.helpers import PayPalWPP wpp = PayPalWPP(request) # Change the model information into a dict that PayPal can understand. params = model_to_dict(self, exclude=self.ADMIN_FIELDS) params['acct'] = self.acct params['creditcardtype'] = self.creditcardtype params['expdate'] = self.expdate params['cvv2'] = self.cvv2 params.update(item) # Create recurring payment: if 'billingperiod' in params: return wpp.createRecurringPaymentsProfile(params, direct=True) # Create single payment: else: return wpp.doDirectPayment(params)
def process(self, request, item)
Do a direct payment or, if billing parameters are present, create a recurring payments profile.
6.626845
6.158909
1.075977
if not self.query: return None from django.http import QueryDict roughdecode = dict(item.split('=', 1) for item in self.query.split('&')) encoding = roughdecode.get('charset', None) if encoding is None: encoding = DEFAULT_ENCODING query = self.query.encode('ascii') data = QueryDict(query, encoding=encoding) return data.dict()
def posted_data_dict(self)
All the data that PayPal posted to us, as a correctly parsed dictionary of values.
4.388509
4.186848
1.048165
self.response = self._postback().decode('ascii') self.clear_flag() self._verify_postback() if not self.flag: if self.is_transaction(): if self.payment_status not in self.PAYMENT_STATUS_CHOICES: self.set_flag("Invalid payment_status. (%s)" % self.payment_status) if duplicate_txn_id(self): self.set_flag("Duplicate txn_id. (%s)" % self.txn_id) self.save()
def verify(self)
Verifies an IPN and a PDT. Checks for obvious signs of weirdness in the payment and flags appropriately.
5.143481
4.40412
1.167879
warn_untested() if not check_secret(form_instance, secret): self.set_flag("Invalid secret. (%s)" % secret) self.save()
def verify_secret(self, form_instance, secret)
Verifies an IPN payment over SSL using EWP.
13.368071
12.934469
1.033523
if request.method == 'GET': # PDT only - this data is currently unused self.query = request.META.get('QUERY_STRING', '') elif request.method == 'POST': # The following works if paypal sends an ASCII bytestring, which it does. self.query = request.body.decode('ascii') self.ipaddress = request.META.get('REMOTE_ADDR', '')
def initialize(self, request)
Store the data we'll need to make the postback from the request object.
6.941413
6.507138
1.066738
from M2Crypto import BIO, SMIME, X509 # Iterate through the fields and pull out the ones that have a value. plaintext = 'cert_id=%s\n' % self.cert_id for name, field in self.fields.items(): value = None if name in self.initial: value = self.initial[name] elif field.initial is not None: value = field.initial if value is not None: plaintext += u'%s=%s\n' % (name, value) plaintext = plaintext.encode('utf-8') # Begin crypto weirdness. s = SMIME.SMIME() s.load_key_bio(BIO.openfile(self.private_cert), BIO.openfile(self.public_cert)) p7 = s.sign(BIO.MemoryBuffer(plaintext), flags=SMIME.PKCS7_BINARY) x509 = X509.load_cert_bio(BIO.openfile(self.paypal_cert)) sk = X509.X509_Stack() sk.push(x509) s.set_x509_stack(sk) s.set_cipher(SMIME.Cipher('des_ede3_cbc')) tmp = BIO.MemoryBuffer() p7.write_der(tmp) p7 = s.encrypt(tmp, flags=SMIME.PKCS7_BINARY) out = BIO.MemoryBuffer() p7.write(out) return out.read().decode()
def _encrypt(self)
Sign and encrypt the form fields with M2Crypto for PayPal Encrypted Website Payments (EWP).
2.558968
2.528347
1.012111
warn_untested() if time_obj is None: time_obj = time.gmtime() return time.strftime(PayPalNVP.TIMESTAMP_FORMAT, time_obj)
def paypal_time(time_obj=None)
Returns a time suitable for PayPal time fields.
5.945
5.128236
1.159268
naive = datetime.datetime.strptime(s, PayPalNVP.TIMESTAMP_FORMAT) if not settings.USE_TZ: return naive else: # TIMESTAMP_FORMAT is UTC return timezone.make_aware(naive, timezone.utc)
def paypaltime2datetime(s)
Convert a PayPal time string to a DateTime.
5.131896
4.92305
1.042422
pp_params = dict(token=token) if commit: pp_params['useraction'] = 'commit' return express_endpoint() % urlencode(pp_params)
def express_endpoint_for_token(token, commit=False)
Returns the PayPal Express Checkout endpoint for a token. Pass 'commit=True' if you will not prompt for confirmation when the user returns to your site.
7.614001
5.921741
1.285771
# IPv4 with or without port if '.' in ip_address: cleaned_ip = ip_address.split(':')[0] # IPv6 with port elif ']:' in ip_address: # Remove the port following last ':', and then strip first and last chars for []. cleaned_ip = ip_address.rpartition(':')[0][1:-1] # IPv6 without port else: cleaned_ip = ip_address return cleaned_ip
def strip_ip_port(ip_address)
Strips the port from an IPv4 or IPv6 address, returns a unicode object.
4.585656
4.482904
1.022921
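strip_ip_port is pure string handling, so it is easy to check directly; the addresses below come from the documentation ranges:

print(strip_ip_port("192.0.2.1:8080"))      # 192.0.2.1
print(strip_ip_port("[2001:db8::1]:8080"))  # 2001:db8::1
print(strip_ip_port("2001:db8::1"))         # 2001:db8::1 (no port, unchanged)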
defaults = {"method": "DoDirectPayment", "paymentaction": "Sale"} required = ["creditcardtype", "acct", "expdate", "cvv2", "ipaddress", "firstname", "lastname", "street", "city", "state", "countrycode", "zip", "amt", ] nvp_obj = self._fetch(params, required, defaults) if nvp_obj.flag: raise PayPalFailure(nvp_obj.flag_info, nvp=nvp_obj) # @@@ Could check cvv2match / avscode are both 'X' or '0' # qd = django.http.QueryDict(nvp_obj.response) # if qd.get('cvv2match') not in ['X', '0']: # nvp_obj.set_flag("Invalid cvv2match: %s" % qd.get('cvv2match') # if qd.get('avscode') not in ['X', '0']: # nvp_obj.set_flag("Invalid avscode: %s" % qd.get('avscode') return nvp_obj
def doDirectPayment(self, params)
Call PayPal DoDirectPayment method.
4.059707
3.853988
1.053378
if self._is_recurring(params): params = self._recurring_setExpressCheckout_adapter(params) defaults = {"method": "SetExpressCheckout", "noshipping": 1} required = ["returnurl", "cancelurl", "paymentrequest_0_amt"] nvp_obj = self._fetch(params, required, defaults) if nvp_obj.flag: raise PayPalFailure(nvp_obj.flag_info, nvp=nvp_obj) return nvp_obj
def setExpressCheckout(self, params)
Initiates an Express Checkout transaction. Optionally, the SetExpressCheckout API operation can set up billing agreements for reference transactions and recurring payments. Returns a NVP instance - check for token and payerid to continue!
7.326344
6.329762
1.157444
defaults = {"method": "CreateRecurringPaymentsProfile"} required = ["profilestartdate", "billingperiod", "billingfrequency", "amt"] # Direct payments require CC data if direct: required + ["creditcardtype", "acct", "expdate", "firstname", "lastname"] else: required + ["token", "payerid"] nvp_obj = self._fetch(params, required, defaults) # Flag if profile_type != ActiveProfile if nvp_obj.flag: raise PayPalFailure(nvp_obj.flag_info, nvp=nvp_obj) return nvp_obj
def createRecurringPaymentsProfile(self, params, direct=False)
Set direct to True to indicate that this is being called as a directPayment. Returns the NVP response if PayPal successfully creates the profile; raises PayPalFailure otherwise.
9.388157
8.728981
1.075516
defaults = {"method": "ManageRecurringPaymentsProfileStatus"} required = ["profileid", "action"] nvp_obj = self._fetch(params, required, defaults) # TODO: This fail silently check should be using the error code, but its not easy to access flag_info_test_string = 'Invalid profile status for cancel action; profile should be active or suspended' if nvp_obj.flag and not (fail_silently and nvp_obj.flag_info == flag_info_test_string): raise PayPalFailure(nvp_obj.flag_info, nvp=nvp_obj) return nvp_obj
def manangeRecurringPaymentsProfileStatus(self, params, fail_silently=False)
Requires `profileid` and `action` params. Action must be either "Cancel", "Suspend", or "Reactivate".
9.284051
7.763157
1.195912
params['l_billingtype0'] = "RecurringPayments" params['l_billingagreementdescription0'] = params['desc'] REMOVE = ["billingfrequency", "billingperiod", "profilestartdate", "desc"] for k in params.keys(): if k in REMOVE: del params[k] return params
def _recurring_setExpressCheckout_adapter(self, params)
The recurring payment interface to SEC is different than the recurring payment interface to ECP. This adapts a normal call to look like a SEC call.
9.644998
9.301783
1.036898
defaults.update(params) pp_params = self._check_and_update_params(required, defaults) pp_string = self.signature + urlencode(pp_params) response = self._request(pp_string) response_params = self._parse_response(response) log.debug('PayPal Request:\n%s\n', pprint.pformat(defaults)) log.debug('PayPal Response:\n%s\n', pprint.pformat(response_params)) # Gather all NVP parameters to pass to a new instance. nvp_params = {} tmpd = defaults.copy() tmpd.update(response_params) for k, v in tmpd.items(): if k in self.NVP_FIELDS: nvp_params[str(k)] = v # PayPal timestamp has to be formatted. if 'timestamp' in nvp_params: nvp_params['timestamp'] = paypaltime2datetime(nvp_params['timestamp']) nvp_obj = PayPalNVP(**nvp_params) nvp_obj.init(self.request, params, response_params) nvp_obj.save() return nvp_obj
def _fetch(self, params, required, defaults)
Make the NVP request and store the response.
3.548358
3.380221
1.049741
return requests.post(self.endpoint, data=data.encode("ascii")).content
def _request(self, data)
Moved out to make testing easier.
9.090006
7.339067
1.238578
for r in required: if r not in params: raise PayPalError("Missing required param: %s" % r) # Upper case all the parameters for PayPal. return (dict((k.upper(), v) for k, v in params.items()))
def _check_and_update_params(self, required, params)
Ensure all required parameters were passed to the API call and format them correctly.
4.459238
4.347729
1.025648
q = QueryDict(response, encoding='UTF-8').dict() return {k.lower(): v for k, v in q.items()}
def _parse_response(self, response)
Turn the PayPal response into a dict.
6.851712
4.892966
1.400319
return requests.post(self.get_endpoint(), data=dict(cmd="_notify-synch", at=IDENTITY_TOKEN, tx=self.tx)).content
def _postback(self)
Perform PayPal PDT Postback validation. Sends the transaction ID and business token to PayPal, which responds with SUCCESS or FAILED.
36.319828
21.130991
1.718794
# get latest similar transaction(s) similars = (ipn_obj.__class__._default_manager .filter(txn_id=ipn_obj.txn_id) .exclude(id=ipn_obj.id) .exclude(flag=True) .order_by('-created_at')[:1]) if len(similars) > 0: # we have a similar transaction, has the payment_status changed? return similars[0].payment_status == ipn_obj.payment_status return False
def duplicate_txn_id(ipn_obj)
Returns True if a record with this transaction id exists and its payment_status has not changed. This function has been completely changed from its previous implementation where it used to specifically only check for a Pending->Completed transition.
3.551808
3.252006
1.09219
warn_untested() # @@@ Moved here as temporary fix to avoid dependency on auth.models. # @@@ amount is mc_gross on the IPN - where should mapping logic go? # @@@ amount / mc_gross is not necessarily returned as it was sent - how to use it? 10.00 vs. 10.0 # @@@ the secret should be based on the invoice or custom fields as well - otherwise it's always the same. # Build the secret with fields available in both PaymentForm and the IPN. Order matters. if secret_fields is None: secret_fields = ['business', 'item_name'] data = "" for name in secret_fields: if hasattr(form_instance, 'cleaned_data'): if name in form_instance.cleaned_data: data += unicode(form_instance.cleaned_data[name]) else: # Initial data passed into the constructor overrides defaults. if name in form_instance.initial: data += unicode(form_instance.initial[name]) elif name in form_instance.fields and form_instance.fields[name].initial is not None: data += unicode(form_instance.fields[name].initial) secret = get_sha1_hexdigest(settings.SECRET_KEY, data) return secret
def make_secret(form_instance, secret_fields=None)
Returns a secret for use in an EWP form or an IPN verification based on a selection of variables in params. Should only be used with SSL.
7.592196
7.126224
1.065388
pdt_obj = None txn_id = request.GET.get('tx') failed = False if txn_id is not None: # If an existing transaction with the id tx exists: use it try: pdt_obj = PayPalPDT.objects.get(txn_id=txn_id) except PayPalPDT.DoesNotExist: # This is a new transaction so we continue processing PDT request pass if pdt_obj is None: form = PayPalPDTForm(request.GET) if form.is_valid(): try: pdt_obj = form.save(commit=False) except Exception as e: warn_untested() error = repr(e) failed = True else: warn_untested() error = form.errors failed = True if failed: warn_untested() pdt_obj = PayPalPDT() pdt_obj.set_flag("Invalid form. %s" % error) pdt_obj.initialize(request) if not failed: # The PDT object gets saved during verify pdt_obj.verify() else: pass # we ignore any PDT requests that don't have a transaction id return (pdt_obj, failed)
def process_pdt(request)
Payment data transfer implementation: https://developer.paypal.com/webapps/developer/docs/classic/products/payment-data-transfer/ This function returns a tuple of (pdt_obj, failed) pdt_obj is an object of type PayPalPDT failed is a flag that is True if the input data didn't pass basic validation. Note: even for failed=False you must still check that the pdt_obj is not flagged, i.e. pdt_obj.flag == False
3.823407
3.461971
1.104402
self.number = re.sub(r'[^\d]', '', self.number) return self.number.isdigit()
def is_number(self)
True if there is at least one digit in number.
4.886489
3.15454
1.549034
double = 0 total = 0 for i in range(len(self.number) - 1, -1, -1): for c in str((double + 1) * int(self.number[i])): total = total + int(c) double = (double + 1) % 2 return (total % 10) == 0
def is_mod10(self)
Returns True if number is valid according to mod10.
3.169503
2.973481
1.065923
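The same mod-10 (Luhn) walk as a standalone sketch; 4111111111111111 is the standard Visa test number, not real card data:

def luhn_ok(number):
    total, double = 0, 0
    for ch in reversed(number):
        # Alternate multipliers 1, 2, 1, 2, ... from the rightmost
        # digit, summing the digits of each product.
        for c in str((double + 1) * int(ch)):
            total += int(c)
        double = (double + 1) % 2
    return total % 10 == 0

print(luhn_ok("4111111111111111"))  # True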
for card, pattern in CARDS.items(): if pattern.match(self.number): return card return None
def get_type(self)
Return the type if it matches one of the cards.
9.961981
5.695068
1.74923
if self.is_number() and not self.is_test() and self.is_mod10(): return self.get_type() return None
def verify(self)
Returns the card type if valid else None.
8.179981
4.84149
1.689559
if value: value = value.replace('-', '').replace(' ', '') self.card_type = verify_credit_card(value) if self.card_type is None: raise forms.ValidationError("Invalid credit card number.") return value
def clean(self, value)
Raises a ValidationError if the card is not valid and stashes card type.
3.941182
3.154361
1.249439
# TODO: Clean up code so that we don't need to set None here and have a lot # of if checks just to determine if flag is set. flag = None ipn_obj = None # Avoid the RawPostDataException. See original issue for details: # https://github.com/spookylukey/django-paypal/issues/79 if not request.META.get('CONTENT_TYPE', '').startswith( 'application/x-www-form-urlencoded'): raise AssertionError(CONTENT_TYPE_ERROR) logger.debug("PayPal incoming POST data: %s", request.body) # Clean up the data as PayPal sends some weird values such as "N/A" # Also, need to cope with custom encoding, which is stored in the body (!). # Assuming the tolerant parsing of QueryDict and an ASCII-like encoding, # such as windows-1252, latin1 or UTF8, the following will work: encoding = request.POST.get('charset', None) encoding_missing = encoding is None if encoding_missing: encoding = DEFAULT_ENCODING try: data = QueryDict(request.body, encoding=encoding).copy() except LookupError: warn_untested() data = None flag = "Invalid form - invalid charset" if data is not None: if hasattr(PayPalIPN._meta, 'get_fields'): date_fields = [f.attname for f in PayPalIPN._meta.get_fields() if f.__class__.__name__ == 'DateTimeField'] else: date_fields = [f.attname for f, m in PayPalIPN._meta.get_fields_with_model() if f.__class__.__name__ == 'DateTimeField'] for date_field in date_fields: if data.get(date_field) == 'N/A': del data[date_field] form = PayPalIPNForm(data) if form.is_valid(): try: # When commit = False, object is returned without saving to DB. ipn_obj = form.save(commit=False) except Exception as e: flag = "Exception while processing. (%s)" % e else: formatted_form_errors = ["{0}: {1}".format(k, ", ".join(v)) for k, v in form.errors.items()] flag = "Invalid form. ({0})".format(", ".join(formatted_form_errors)) if ipn_obj is None: ipn_obj = PayPalIPN() # Set query params and sender's IP address ipn_obj.initialize(request) if flag is not None: # We save errors in the flag field ipn_obj.set_flag(flag) else: # Secrets should only be used over SSL. if request.is_secure() and 'secret' in request.GET: warn_untested() ipn_obj.verify_secret(form, request.GET['secret']) else: ipn_obj.verify() ipn_obj.save() ipn_obj.send_signals() if encoding_missing: # Wait until we have an ID to log warning logger.warning("No charset passed with PayPalIPN: %s. Guessing %s", ipn_obj.id, encoding) return HttpResponse("OKAY")
def ipn(request)
PayPal IPN endpoint (notify_url). Used by both PayPal Payments Pro and Payments Standard to confirm transactions. http://tinyurl.com/d9vu9d PayPal IPN Simulator: https://developer.paypal.com/cgi-bin/devscr?cmd=_ipn-link-session
4.400817
4.446222
0.989788
warn_untested() from paypal.pro.helpers import PayPalWPP wpp = PayPalWPP(request) params = self.cleaned_data params['creditcardtype'] = self.fields['acct'].card_type params['expdate'] = self.cleaned_data['expdate'].strftime("%m%Y") params['ipaddress'] = request.META.get("REMOTE_ADDR", "") params.update(item) try: # Create single payment: if 'billingperiod' not in params: wpp.doDirectPayment(params) # Create recurring payment: else: wpp.createRecurringPaymentsProfile(params, direct=True) except PayPalFailure: return False return True
def process(self, request, item)
Process a PayPal direct payment.
6.554373
6.018354
1.089064
return requests.post(self.get_endpoint(), data=b"cmd=_notify-validate&" + self.query.encode("ascii")).content
def _postback(self)
Perform PayPal Postback validation.
16.683668
8.915154
1.871383
if self.flag: invalid_ipn_received.send(sender=self) return else: valid_ipn_received.send(sender=self)
def send_signals(self)
Shout for the world to hear whether a txn was successful.
7.029922
5.833211
1.205155
if isinstance(root, (Delete, BinaryExpression, BooleanClauseList)): for child in root.get_children(): yc = gen_columns_from_children(child) for it in yc: yield it elif isinstance(root, sa.Column): yield root
def gen_columns_from_children(root)
Generates columns that are being used in child elements of the delete query; this will be used to determine tables for the USING clause. :param root: the delete query :return: a generator of columns
4.727725
4.655375
1.015541
# Set empty strings for the default where clause and using clause whereclause = '' usingclause = '' # determine if the delete query needs a ``USING`` injected # by inspecting the whereclause's children & their children... # first, the where clause text is built, if applicable # then, the using clause text is built, if applicable # note: # the tables in the using clause are sorted in the order in # which they first appear in the where clause. delete_stmt_table = compiler.process(element.table, asfrom=True, **kwargs) whereclause_tuple = element.get_children() if whereclause_tuple: usingclause_tables = [] whereclause = ' WHERE {clause}'.format( clause=compiler.process(*whereclause_tuple, **kwargs) ) whereclause_columns = gen_columns_from_children(element) for col in whereclause_columns: table = compiler.process(col.table, asfrom=True, **kwargs) if table != delete_stmt_table and table not in usingclause_tables: usingclause_tables.append(table) if usingclause_tables: usingclause = ' USING {clause}'.format( clause=', '.join(usingclause_tables) ) return 'DELETE FROM {table}{using}{where}'.format( table=delete_stmt_table, using=usingclause, where=whereclause)
def visit_delete_stmt(element, compiler, **kwargs)
Adds redshift-dialect specific compilation rule for the delete statement. Redshift DELETE syntax can be found here: https://docs.aws.amazon.com/redshift/latest/dg/r_DELETE.html .. :code-block: sql DELETE [ FROM ] table_name [ { USING } table_name, ...] [ WHERE condition ] By default, SqlAlchemy compiles DELETE statements with the syntax: .. :code-block: sql DELETE [ FROM ] table_name [ WHERE condition ] problem illustration: >>> from sqlalchemy import Table, Column, Integer, MetaData, delete >>> from sqlalchemy_redshift.dialect import RedshiftDialect >>> meta = MetaData() >>> table1 = Table( ... 'table_1', ... meta, ... Column('pk', Integer, primary_key=True) ... ) ... >>> table2 = Table( ... 'table_2', ... meta, ... Column('pk', Integer, primary_key=True) ... ) ... >>> del_stmt = delete(table1).where(table1.c.pk==table2.c.pk) >>> str(del_stmt.compile(dialect=RedshiftDialect())) 'DELETE FROM table_1 USING table_2 WHERE table_1.pk = table_2.pk' >>> str(del_stmt) 'DELETE FROM table_1 , table_2 WHERE table_1.pk = table_2.pk' >>> del_stmt2 = delete(table1) >>> str(del_stmt2) 'DELETE FROM table_1' >>> del_stmt3 = delete(table1).where(table1.c.pk > 1000) >>> str(del_stmt3) 'DELETE FROM table_1 WHERE table_1.pk > :pk_1' >>> str(del_stmt3.compile(dialect=RedshiftDialect())) 'DELETE FROM table_1 WHERE table_1.pk > %(pk_1)s'
4.394636
4.519215
0.972433
key = str(self) if key.startswith('"') and key.endswith('"'): return key[1:-1] return key
def unquoted(self)
Return *key* with one level of double quotes removed. Redshift stores some identifiers without quotes in internal tables, even though the name must be quoted elsewhere. In particular, this happens for tables named as a keyword.
3.624329
3.285135
1.103251
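A minimal illustration of the quote-stripping rule above, using a plain-string stand-in for the key object (the values are hypothetical):

def unquoted(key):
    # Strip exactly one surrounding pair of double quotes, if present.
    if key.startswith('"') and key.endswith('"'):
        return key[1:-1]
    return key

assert unquoted('"table"') == 'table'   # one quote level removed
assert unquoted('plain') == 'plain'     # unquoted names pass through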
cols = self._get_redshift_columns(connection, table_name, schema, **kw)
if not self._domains:
    self._domains = self._load_domains(connection)
domains = self._domains
columns = []
for col in cols:
    column_info = self._get_column_info(
        name=col.name, format_type=col.format_type,
        default=col.default, notnull=col.notnull, domains=domains,
        enums=[], schema=col.schema, encode=col.encode)
    columns.append(column_info)
return columns
def get_columns(self, connection, table_name, schema=None, **kw)
Return information about columns in `table_name`.

Overrides interface
:meth:`~sqlalchemy.engine.interfaces.Dialect.get_columns`.
3.548943
3.942072
0.900273
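These reflection overrides are normally reached through SQLAlchemy's inspector rather than called directly. A minimal sketch, with a hypothetical connection URL and a hypothetical 'users' table; the same inspector calls reach the primary-key, foreign-key, and name lookups in the records that follow:

# Drive the dialect's reflection methods via the public inspector API.
import sqlalchemy as sa

engine = sa.create_engine(
    'redshift+psycopg2://user:pass@cluster.example.com:5439/db'
)
insp = sa.inspect(engine)

print(insp.get_columns('users'))          # dispatches to get_columns above
print(insp.get_pk_constraint('users'))    # primary-key reflection
print(insp.get_foreign_keys('users'))     # foreign-key reflection
print(insp.get_table_names(schema='public'))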
constraints = self._get_redshift_constraints(connection, table_name,
                                             schema, **kw)
pk_constraints = [c for c in constraints if c.contype == 'p']
if not pk_constraints:
    return {'constrained_columns': [], 'name': ''}
pk_constraint = pk_constraints[0]
m = PRIMARY_KEY_RE.match(pk_constraint.condef)
colstring = m.group('columns')
constrained_columns = SQL_IDENTIFIER_RE.findall(colstring)
return {
    'constrained_columns': constrained_columns,
    'name': pk_constraint.conname,
}
def get_pk_constraint(self, connection, table_name, schema=None, **kw)
Return information about the primary key constraint on `table_name`.

Overrides interface
:meth:`~sqlalchemy.engine.interfaces.Dialect.get_pk_constraint`.
2.767156
2.895914
0.955538
constraints = self._get_redshift_constraints(connection, table_name,
                                             schema, **kw)
fk_constraints = [c for c in constraints if c.contype == 'f']
uniques = defaultdict(lambda: defaultdict(dict))
for con in fk_constraints:
    uniques[con.conname]["key"] = con.conkey
    uniques[con.conname]["condef"] = con.condef
fkeys = []
for conname, attrs in uniques.items():
    m = FOREIGN_KEY_RE.match(attrs['condef'])
    colstring = m.group('referred_columns')
    referred_columns = SQL_IDENTIFIER_RE.findall(colstring)
    referred_table = m.group('referred_table')
    referred_schema = m.group('referred_schema')
    colstring = m.group('columns')
    constrained_columns = SQL_IDENTIFIER_RE.findall(colstring)
    fkey_d = {
        'name': conname,
        'constrained_columns': constrained_columns,
        'referred_schema': referred_schema,
        'referred_table': referred_table,
        'referred_columns': referred_columns,
    }
    fkeys.append(fkey_d)
return fkeys
def get_foreign_keys(self, connection, table_name, schema=None, **kw)
Return information about foreign keys in `table_name`.

Overrides interface
:meth:`~sqlalchemy.engine.interfaces.Dialect.get_foreign_keys`.
2.54669
2.613147
0.974568
return self._get_table_or_view_names('r', connection, schema, **kw)
def get_table_names(self, connection, schema=None, **kw)
Return a list of table names for `schema`.

Overrides interface
:meth:`~sqlalchemy.engine.interfaces.Dialect.get_table_names`.
6.522583
8.054289
0.809827
return self._get_table_or_view_names('v', connection, schema, **kw)
def get_view_names(self, connection, schema=None, **kw)
Return a list of view names for `schema`.

Overrides interface
:meth:`~sqlalchemy.engine.interfaces.Dialect.get_view_names`.
5.210397
7.111248
0.732698
view = self._get_redshift_relation(connection, view_name, schema, **kw)
return sa.text(view.view_definition)
def get_view_definition(self, connection, view_name, schema=None, **kw)
Return view definition.

Given a :class:`.Connection`, a string `view_name`, and an optional
string `schema`, return the view definition.

Overrides interface
:meth:`~sqlalchemy.engine.interfaces.Dialect.get_view_definition`.
6.797515
9.387239
0.724123
constraints = self._get_redshift_constraints(connection, table_name,
                                             schema, **kw)
constraints = [c for c in constraints if c.contype == 'u']
uniques = defaultdict(lambda: defaultdict(dict))
for con in constraints:
    uniques[con.conname]["key"] = con.conkey
    uniques[con.conname]["cols"][con.attnum] = con.attname
return [
    {'name': None,
     'column_names': [uc["cols"][i] for i in uc["key"]]}
    for name, uc in uniques.items()
]
def get_unique_constraints(self, connection, table_name, schema=None, **kw)
Return information about unique constraints in `table_name`.

Overrides interface
:meth:`~sqlalchemy.engine.interfaces.Dialect.get_unique_constraints`.
3.755239
3.926428
0.956401
def keyfunc(column):
    num = int(column.sortkey)
    # If sortkey is interleaved, column numbers alternate
    # negative values, so take abs.
    return abs(num)

table = self._get_redshift_relation(connection, table_name, schema, **kw)
columns = self._get_redshift_columns(connection, table_name, schema, **kw)
sortkey_cols = sorted([col for col in columns if col.sortkey],
                      key=keyfunc)
interleaved = any([int(col.sortkey) < 0 for col in sortkey_cols])
sortkey = [col.name for col in sortkey_cols]
interleaved_sortkey = None
if interleaved:
    interleaved_sortkey = sortkey
    sortkey = None
distkeys = [col.name for col in columns if col.distkey]
distkey = distkeys[0] if distkeys else None
return {
    'redshift_diststyle': table.diststyle,
    'redshift_distkey': distkey,
    'redshift_sortkey': sortkey,
    'redshift_interleaved_sortkey': interleaved_sortkey,
}
def get_table_options(self, connection, table_name, schema, **kw)
Return a dictionary of options specified when the table of the
given name was created.

Overrides interface
:meth:`~sqlalchemy.engine.Inspector.get_table_options`.
2.970525
3.059241
0.971001
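A sketch of the options dict this method produces, reached through the inspector; the engine URL and the 'events' table (with DISTKEY(id) and a compound SORTKEY(id, ts)) are hypothetical:

import sqlalchemy as sa

engine = sa.create_engine(
    'redshift+psycopg2://user:pass@cluster.example.com:5439/db'
)
opts = sa.inspect(engine).get_table_options('events')
# Expected shape, per the return statement above (values illustrative):
# {'redshift_diststyle': 'KEY',
#  'redshift_distkey': 'id',
#  'redshift_sortkey': ['id', 'ts'],
#  'redshift_interleaved_sortkey': None}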
default_args = {
    'sslmode': 'verify-full',
    'sslrootcert': pkg_resources.resource_filename(
        __name__,
        'redshift-ca-bundle.crt'
    ),
}
cargs, cparams = super(RedshiftDialect, self).create_connect_args(
    *args, **kwargs
)
default_args.update(cparams)
return cargs, default_args
def create_connect_args(self, *args, **kwargs)
Build DB-API compatible connection arguments.

Overrides interface
:meth:`~sqlalchemy.engine.interfaces.Dialect.create_connect_args`.
2.911794
2.748935
1.059245
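Because user-supplied parameters are merged over the defaults (default_args.update(cparams)), SSL settings can be overridden from the URL. A minimal sketch with a hypothetical cluster URL:

import sqlalchemy as sa

# sslmode='verify-full' and the bundled CA certificate apply by default;
# a query-string argument overrides them.
engine = sa.create_engine(
    'redshift+psycopg2://user:pass@cluster.example.com:5439/db'
    '?sslmode=prefer'  # overrides the verify-full default set above
)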
# Template reconstructed from the format() call below; the placeholders
# follow Redshift's UNLOAD grammar.
template = """
   UNLOAD (:select) TO :unload_location
   CREDENTIALS :credentials
   {manifest}
   {header}
   {delimiter}
   {encrypted}
   {fixed_width}
   {gzip}
   {add_quotes}
   {escape}
   {null}
   {allow_overwrite}
   {parallel}
   {region}
   {max_file_size}
"""
el = element

qs = template.format(
    manifest='MANIFEST' if el.manifest else '',
    header='HEADER' if el.header else '',
    delimiter=(
        'DELIMITER AS :delimiter' if el.delimiter is not None else ''
    ),
    encrypted='ENCRYPTED' if el.encrypted else '',
    fixed_width='FIXEDWIDTH AS :fixed_width' if el.fixed_width else '',
    gzip='GZIP' if el.gzip else '',
    add_quotes='ADDQUOTES' if el.add_quotes else '',
    escape='ESCAPE' if el.escape else '',
    null='NULL AS :null_as' if el.null is not None else '',
    allow_overwrite='ALLOWOVERWRITE' if el.allow_overwrite else '',
    parallel='PARALLEL OFF' if not el.parallel else '',
    region='REGION :region' if el.region is not None else '',
    max_file_size=(
        'MAXFILESIZE :max_file_size MB'
        if el.max_file_size is not None else ''
    ),
)

query = sa.text(qs)

if el.delimiter is not None:
    query = query.bindparams(sa.bindparam(
        'delimiter', value=element.delimiter, type_=sa.String,
    ))

if el.fixed_width:
    query = query.bindparams(sa.bindparam(
        'fixed_width', value=_process_fixed_width(el.fixed_width),
        type_=sa.String,
    ))

if el.null is not None:
    query = query.bindparams(sa.bindparam(
        'null_as', value=el.null, type_=sa.String
    ))

if el.region is not None:
    query = query.bindparams(sa.bindparam(
        'region', value=el.region, type_=sa.String
    ))

if el.max_file_size is not None:
    max_file_size_mib = float(el.max_file_size) / 1024 / 1024
    query = query.bindparams(sa.bindparam(
        'max_file_size', value=max_file_size_mib, type_=sa.Float
    ))

return compiler.process(
    query.bindparams(
        sa.bindparam('credentials', value=el.credentials, type_=sa.String),
        sa.bindparam(
            'unload_location', value=el.unload_location, type_=sa.String,
        ),
        sa.bindparam(
            'select',
            value=compiler.process(
                el.select,
                literal_binds=True,
            ),
            type_=sa.String,
        ),
    ),
    **kw
)
def visit_unload_from_select(element, compiler, **kw)
Returns the actual sql query for the UnloadFromSelect class.
2.286668
2.181612
1.048155
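A hedged usage sketch. It assumes an UnloadFromSelect construct whose constructor accepts the attributes the compiler above reads (select, unload_location, credentials, and the optional flags); the actual constructor signature may differ between versions, and the table, S3 path, and role ARN are hypothetical:

import sqlalchemy as sa
from sqlalchemy_redshift.commands import UnloadFromSelect

unload = UnloadFromSelect(
    select=sa.select([users]),          # 'users' is a hypothetical Table
    unload_location='s3://bucket/prefix/',
    credentials='aws_iam_role=arn:aws:iam::0123456789:role/unload',
    gzip=True,
    parallel=False,                     # compiles to PARALLEL OFF
)
connection.execute(unload)              # 'connection' assumed bound to Redshift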
# Template reconstructed from the format() and bindparams() calls below;
# it follows Redshift's CREATE LIBRARY grammar.
query = """
    CREATE {or_replace} LIBRARY {name}
    LANGUAGE plpythonu
    FROM :location
    WITH CREDENTIALS AS :credentials
    {region}
"""
bindparams = [
    sa.bindparam(
        'location',
        value=element.location,
        type_=sa.String,
    ),
    sa.bindparam(
        'credentials',
        value=element.credentials,
        type_=sa.String,
    ),
]

if element.region is not None:
    bindparams.append(sa.bindparam(
        'region',
        value=element.region,
        type_=sa.String,
    ))

quoted_lib_name = compiler.preparer.quote_identifier(element.library_name)
query = query.format(name=quoted_lib_name,
                     or_replace='OR REPLACE' if element.replace else '',
                     region='REGION :region' if element.region else '')

return compiler.process(sa.text(query).bindparams(*bindparams), **kw)
def visit_create_library_command(element, compiler, **kw)
Returns the actual sql query for the CreateLibraryCommand class.
3.522597
3.203548
1.099592
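A hedged usage sketch, assuming a CreateLibraryCommand whose constructor matches the attributes the compiler reads (library_name, location, credentials, replace, region); the library name, S3 path, and role ARN are hypothetical:

from sqlalchemy_redshift.commands import CreateLibraryCommand

cmd = CreateLibraryCommand(
    'my_lib',                                  # library_name (hypothetical)
    location='s3://bucket/my_lib.zip',
    credentials='aws_iam_role=arn:aws:iam::0123456789:role/libs',
    replace=True,                              # emits CREATE OR REPLACE LIBRARY
)
connection.execute(cmd)                        # 'connection' assumed bound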
try:
    with open(pacfile) as f:
        pac_script = f.read()
        _pacparser.parse_pac_string(pac_script)
except IOError:
    raise IOError('Could not read the pacfile: {}'.format(pacfile))
def parse_pac_file(pacfile)
Reads the pacfile and evaluates it in the JavaScript engine created by init().
3.811482
3.639482
1.047259
if host is None:
    m = _URL_REGEX.match(url)
    if not m:
        raise URLError(url)
    if len(m.groups()) == 1:
        host = m.groups()[0]
    else:
        raise URLError(url)
return _pacparser.find_proxy(url, host)
def find_proxy(url, host=None)
Finds proxy string for the given url and host. If host is not defined, it's extracted from the url.
3.595976
3.319255
1.083368
if not os.path.isfile(pacfile):
    raise IOError('Pac file does not exist: {}'.format(pacfile))
init()
parse_pac(pacfile)
proxy = find_proxy(url, host)
cleanup()
return proxy
def just_find_proxy(pacfile, url, host=None)
This function is a wrapper around init, parse_pac, find_proxy
and cleanup. This is the function to call if you want to find the
proxy for just one url.
3.983006
2.881187
1.382418
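A minimal usage sketch; the PAC file path and URL are hypothetical, and the pacparser C bindings must be installed for this to run:

proxy = just_find_proxy('/etc/proxy.pac', 'http://www.example.com/')
print(proxy)  # e.g. 'PROXY proxy.example.com:8080; DIRECT'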
yes, no = [], []
for d in iterable:
    if pred(d):
        yes.append(d)
    else:
        no.append(d)
return yes, no
def split_data(iterable, pred)
Split data from ``iterable`` into two lists.

Each element is passed to function ``pred``; elements for which ``pred``
returns True are put into the ``yes`` list, other elements are put into
the ``no`` list.

>>> split_data(["foo", "Bar", "Spam", "egg"], lambda t: t.istitle())
(['Bar', 'Spam'], ['foo', 'egg'])
2.134564
3.081086
0.692796