code: string
signature: string
docstring: string
loss_without_docstring: float64
loss_with_docstring: float64
factor: float64
n = len(graph)
order = []
times_seen = [-1] * n
for start in range(n):
    if times_seen[start] == -1:
        times_seen[start] = 0
        to_visit = [start]
        while to_visit:
            node = to_visit[-1]
            children = graph[node]
            if times_seen[node] == len(children):
                to_visit.pop()
                order.append(node)
            else:
                child = children[times_seen[node]]
                times_seen[node] += 1
                if times_seen[child] == -1:
                    times_seen[child] = 0
                    to_visit.append(child)
return order[::-1]
def topological_order_dfs(graph)
Topological sorting by depth first search :param graph: directed graph in listlist format, cannot be listdict :returns: list of vertices in order :complexity: `O(|V|+|E|)`
1.805297
1.942807
0.929221
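A minimal usage sketch for the record above (the DAG below is illustrative; graph[u] lists the successors of u):
graph = [[1, 2], [3], [3], []]
print(topological_order_dfs(graph))  # [0, 2, 1, 3] -- one valid topological order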
V = range(len(graph))
indeg = [0 for _ in V]
for node in V:  # compute indegree
    for neighbor in graph[node]:
        indeg[neighbor] += 1
Q = [node for node in V if indeg[node] == 0]
order = []
while Q:
    node = Q.pop()  # node without incoming arrows
    order.append(node)
    for neighbor in graph[node]:
        indeg[neighbor] -= 1
        if indeg[neighbor] == 0:
            Q.append(neighbor)
return order
def topological_order(graph)
Topological sorting by maintaining indegree :param graph: directed graph in listlist format, cannot be listdict :returns: list of vertices in order :complexity: `O(|V|+|E|)`
2.131833
2.267618
0.94012
"starts from 0" if k >= len(self): raise IndexError k += 1 # self has index -1 h = len(self.next) - 1 x = self while k: while x.next[h] is None or x.count[h] > k: h -= 1 k -= x.count[h] x = x.next[h] return x.key
def getkth(self, k)
starts from 0
4.471197
4.062624
1.100569
ans = self.nextNode(key)
return (ans.key if ans is not None else None)
def nextKey(self, key)
nextKey(key) >= key
7.129643
7.398266
0.963691
update = self._updateList(key)[0]
return (update[0].key if update else None)
def lastKey(self, key)
lastKey(key) < key
12.586794
13.470239
0.934415
try:
    x = next(iter(self))
    self.remove(x)
    return x
except StopIteration:
    raise KeyError('pop from an empty set')
def pop(self)
Pops the first element
3.565758
3.832209
0.930471
n = len(p)
fact = 1  # compute (n-1) factorial
for i in range(2, n):
    fact *= i
r = 0  # compute rank of p
digits = list(range(n))  # all yet unused digits
for i in range(n - 1):  # for all digits except last one
    q = digits.index(p[i])
    r += fact * q
    del digits[q]  # remove this digit p[i]
    fact //= (n - 1 - i)  # weight of next digit
return r
def permutation_rank(p)
Given a permutation of {0,..,n-1} find its rank according to lexicographical order :param p: list of length n containing all integers from 0 to n-1 :returns: rank between 0 and n! -1 :beware: computation with big numbers :complexity: `O(n^2)`
4.781817
4.986103
0.959029
fact = 1  # compute (n-1) factorial
for i in range(2, n):
    fact *= i
digits = list(range(n))  # all yet unused digits
p = []  # build permutation
for i in range(n):
    q = r // fact  # by decomposing r = q * fact + rest
    r %= fact
    p.append(digits[q])
    del digits[q]  # remove digit at position q
    if i != n - 1:
        fact //= (n - 1 - i)  # weight of next digit
return p
def rank_permutation(r, n)
Given r and n find the permutation of {0,..,n-1} with rank according to lexicographical order equal to r :param r n: integers with 0 ≤ r < n! :returns: permutation p as a list of n integers :beware: computation with big numbers :complexity: `O(n^2)`
5.488504
5.643115
0.972602
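A round-trip check for the two permutation records above (assumes both functions are in scope; the permutation is illustrative):
p = [2, 0, 3, 1]
assert rank_permutation(permutation_rank(p), len(p)) == p  # rank is 13 for this p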
z = []
i = 0
j = 0
while i < len(x) or j < len(y):
    if j == len(y) or i < len(x) and x[i] <= y[j]:  # priority on x
        z.append(x[i])
        i += 1
    else:
        z.append(y[j])
        j += 1
return z
def merge(x, y)
Merge two ordered lists :param x: :param y: x,y are non decreasing ordered lists :returns: union of x and y in order :complexity: linear
1.867839
2.043458
0.914058
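A small worked example for merge (the non-decreasing inputs are illustrative):
assert merge([1, 3, 5], [2, 3, 6]) == [1, 2, 3, 3, 5, 6]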
if universe is None:
    universe = set()
    for S in sets:
        universe |= set(S)
tree = PQ_tree(universe)
try:
    for S in sets:
        tree.reduce(S)
    return tree.border()
except IsNotC1P:
    return None
def consecutive_ones_property(sets, universe=None)
Check the consecutive ones property. :param list sets: is a list of subsets of the ground set. :param universe: is the set of all elements, by default it is the union of the given sets :returns: returns a list of the ordered ground set where every given set is consecutive, or None if there is no solution. :complexity: O(len(universe) * len(sets)) :disclaimer: an optimal implementation would have complexity O(len(universe) + len(sets) + sum(map(len,sets))), and there are more recent easier algorithms for this problem.
6.547688
7.278131
0.899639
self.sons.append(node)
node.parent = self
def add(self, node)
Add one node as descendant
6.938138
5.875021
1.180956
if len(L) == 1:
    self.add(L[0])
elif len(L) >= 2:
    x = PQ_node(P_shape)
    x.add_all(L)
    self.add(x)
def add_group(self, L)
Add elements of L as descendants of the node. If there are several elements in L, group them in a P-node first
4.513529
4.034341
1.118777
if self.shape == L_shape:
    L.append(self.value)
else:
    for x in self.sons:
        x.border(L)
def border(self, L)
Append to L the border of the subtree.
6.02267
4.409382
1.365876
n = len(w)
f = [0] * n  # init f[0] = 0
k = 0  # current longest border length
for i in range(1, n):  # compute f[i]
    while w[k] != w[i] and k > 0:
        k = f[k - 1]  # try shorter lengths
    if w[k] == w[i]:  # last characters match
        k += 1  # we can increment the border length
    f[i] = k  # we found the maximal border of w[:i + 1]
return f
def maximum_border_length(w)
Maximum string borders by Knuth-Morris-Pratt :param w: string :returns: table f such that f[i] is the longest border length of w[:i + 1] :complexity: linear
3.856116
3.138442
1.228672
sep = '\x00'  # special unused character
assert sep not in t and sep not in s
f = maximum_border_length(t + sep + s)
n = len(t)
for i, fi in enumerate(f):
    if fi == n:  # found a border of the length of t
        return i - 2 * n  # beginning of the border in s
return -1
def knuth_morris_pratt(s, t)
Find a substring by Knuth-Morris-Pratt :param s: the haystack string :param t: the needle string :returns: index i such that s[i: i + len(t)] == t, or -1 :complexity: O(len(s) + len(t))
8.273166
8.742579
0.946307
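Illustrative calls, assuming maximum_border_length from the earlier record is in scope:
assert knuth_morris_pratt('xabcy', 'abc') == 1   # needle found at index 1
assert knuth_morris_pratt('xxxx', 'abc') == -1   # needle absent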
f = maximum_border_length(u)
n = len(u)
if n % (n - f[-1]) == 0:  # does the alignment shift divide n ?
    return n // (n - f[-1])  # we found a power decomposition
return 1
def powerstring_by_border(u)
Power string by Knuth-Morris-Pratt :param u: string :returns: largest k such that there is a string y with u = y^k :complexity: O(len(u))
12.777172
13.603207
0.939276
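Quick sanity checks with illustrative inputs:
assert powerstring_by_border('abab') == 2  # 'abab' is 'ab' repeated twice
assert powerstring_by_border('abc') == 1   # not a proper power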
n = len(M)
r = [len(Mi) for Mi in M]
c = [len(Mi[0]) for Mi in M]
opt = [[0 for j in range(n)] for i in range(n)]
arg = [[None for j in range(n)] for i in range(n)]
for j_i in range(1, n):  # loop on i, j of increasing j - i = j_i
    for i in range(n - j_i):
        j = i + j_i
        opt[i][j] = float('inf')
        for k in range(i, j):
            alt = opt[i][k] + opt[k + 1][j] + r[i] * c[k] * c[j]
            if opt[i][j] > alt:
                opt[i][j] = alt
                arg[i][j] = k
return opt, arg
def matrix_mult_opt_order(M)
Matrix chain multiplication optimal order :param M: list of matrices :returns: matrices opt, arg, such that opt[i][j] is the optimal number of operations to compute M[i] * ... * M[j] when done in the order (M[i] * ... * M[k]) * (M[k + 1] * ... * M[j]) for k = arg[i][j] :complexity: :math:`O(n^3)`
2.528222
2.35171
1.075057
opt, arg = matrix_mult_opt_order(M)
return _apply_order(M, arg, 0, len(M) - 1)
def matrix_chain_mult(M)
Matrix chain multiplication :param M: list of matrices :returns: M[0] * ... * M[-1], computed in time optimal order :complexity: whatever is needed by the multiplications
9.905443
9.466659
1.046351
n = len(x)
m = len(y)
# initializing row 0 and column 0
A = [[i + j for j in range(m + 1)] for i in range(n + 1)]
for i in range(n):
    for j in range(m):
        A[i + 1][j + 1] = min(A[i][j + 1] + 1,              # insert
                              A[i + 1][j] + 1,              # delete
                              A[i][j] + int(x[i] != y[j]))  # subst.
return A[n][m]
def levenshtein(x, y)
Levenshtein edit distance :param x: :param y: strings :returns: distance :complexity: `O(|x|*|y|)`
2.084745
2.124292
0.981384
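The classic worked example for this distance:
assert levenshtein('kitten', 'sitting') == 3  # substitute k->s, e->i, insert g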
n = len(men)
assert n == len(women)
current_suitor = [0] * n
spouse = [None] * n
rank = [[0] * n for j in range(n)]  # build rank
for j in range(n):
    for r in range(n):
        rank[j][women[j][r]] = r
singles = deque(range(n))  # all men are single and get in the queue
while singles:
    i = singles.popleft()
    j = men[i][current_suitor[i]]
    current_suitor[i] += 1
    if spouse[j] is None:
        spouse[j] = i
    elif rank[j][spouse[j]] < rank[j][i]:
        singles.append(i)
    else:
        singles.append(spouse[j])  # sorry for spouse[j]
        spouse[j] = i
return spouse
def gale_shapley(men, women)
Stable matching by Gale-Shapley :param men: table of size n, men[i] is the preference list of women for man i :param women: similar :returns: matching table, from women to men :complexity: :math:`O(n^2)`
3.299913
3.418447
0.965325
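A two-by-two instance (assumes `from collections import deque`, which the body above requires); here every man gets his first choice:
men = [[0, 1], [1, 0]]
women = [[1, 0], [0, 1]]
assert gale_shapley(men, women) == [0, 1]  # spouse[j] = man married to woman j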
P = []
Q = [0]
R = []
succ = [0] * len(graph)
seen = [set() for _ in graph]
while Q:
    node = Q.pop()
    P.append(node)
    while succ[node] < len(graph[node]):
        neighbor = graph[node][succ[node]]
        succ[node] += 1
        if neighbor not in seen[node]:
            seen[neighbor].add(node)
            R.append(neighbor)
            node = neighbor
    while R:
        Q.append(R.pop())
return P
def eulerian_tour_undirected(graph)
Eulerian tour on an undirected graph :param graph: undirected graph in listlist format, cannot be listdict :assumes: graph is eulerian :returns: eulerian cycle as a vertex list :complexity: `O(|V|+|E|)`
2.679855
2.86124
0.936606
P = []
Q = [0]
R = []
succ = [0] * len(graph)
while Q:
    node = Q.pop()
    P.append(node)
    while succ[node] < len(graph[node]):
        neighbor = graph[node][succ[node]]
        succ[node] += 1
        R.append(neighbor)
        node = neighbor
    while R:
        Q.append(R.pop())
return P
def eulerian_tour_directed(graph)
Eulerian tour on a directed graph :param graph: directed graph in listlist format, cannot be listdict :assumes: graph is eulerian :returns: eulerian cycle as a vertex list :complexity: `O(|V|+|E|)`
2.830174
3.075622
0.920196
n = len(graph)
weight = [[float('inf')] * n for _ in range(n)]
for r in range(1, len(cycle)):
    weight[cycle[r - 1]][cycle[r]] = r
    if not directed:
        weight[cycle[r]][cycle[r - 1]] = r
write_graph(filename, graph, arc_label=weight, directed=directed)
def write_cycle(filename, graph, cycle, directed)
Write an eulerian tour in DOT format :param filename: the file to be written in DOT format :param graph: graph in listlist format, cannot be listdict :param bool directed: describes the graph :param cycle: tour as a vertex list :returns: nothing :complexity: `O(|V|^2 + |E|)`
2.945975
3.255334
0.904969
graphe = [[] for _ in range(n)]
for v in range(n - 1):
    noeuds = random.sample(range(v + 1, n),
                           random.choice(range(0 if len(graphe[v]) % 2 == 0 else 1,
                                               (n - v), 2)))
    graphe[v].extend(noeuds)
    for w in graphe[v]:
        if w > v:
            graphe[w].append(v)
return graphe
def random_eulerien_graph(n)
Generates a random Eulerian graph :param int n: number of vertices :returns: undirected graph in listlist representation :complexity: linear
2.807786
2.932292
0.95754
m = len(tour) - 1
arcs = set((tour[i], tour[i + 1]) for i in range(m))
if len(arcs) != m:
    return False
for (u, v) in arcs:
    if v not in graph[u]:
        return False
return True
def is_eulerian_tour(graph, tour)
Test if a tour is Eulerian on an undirected graph :param graph: undirected graph in listlist format, cannot be listdict :param tour: vertex list :returns: True if tour is an Eulerian tour :complexity: `O(|V|*|E|)` under the assumption that set membership is in constant time
2.52903
2.838776
0.890888
n = len(x)
m = len(y)
# -- compute optimal length
A = [[0 for j in range(m + 1)] for i in range(n + 1)]
for i in range(n):
    for j in range(m):
        if x[i] == y[j]:
            A[i + 1][j + 1] = A[i][j] + 1
        else:
            A[i + 1][j + 1] = max(A[i][j + 1], A[i + 1][j])
# -- extract solution
sol = []
i, j = n, m
while A[i][j] > 0:
    if A[i][j] == A[i - 1][j]:
        i -= 1
    elif A[i][j] == A[i][j - 1]:
        j -= 1
    else:
        i -= 1
        j -= 1
        sol.append(x[i])
return ''.join(sol[::-1])
def longest_common_subsequence(x, y)
Longest common subsequence Dynamic programming :param x: :param y: x, y are lists or strings :returns: longest common subsequence in form of a string :complexity: `O(|x|*|y|)`
1.688726
1.733878
0.973959
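A short worked example with illustrative strings:
assert longest_common_subsequence('abcde', 'ace') == 'ace'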
n = len(x)
expr = [{} for _ in range(1 << n)]  # expr[S][val]
# = string solely composed of values in set S that evaluates to val
for i in range(n):
    expr[1 << i] = {x[i]: str(x[i])}  # store singletons
all_ = (1 << n) - 1
for S in range(3, all_ + 1):  # 3: first num that isn't a power of 2
    if expr[S] != {}:
        continue  # in that case S is a power of 2
    for L in range(1, S):  # decompose set S into non-empty sets L, R
        if L & S == L:
            R = S ^ L
            for vL in expr[L]:  # combine expressions from L
                for vR in expr[R]:  # with expressions from R
                    eL = expr[L][vL]
                    eR = expr[R][vR]
                    expr[S][vL] = eL
                    if vL > vR:  # difference cannot become negative
                        expr[S][vL - vR] = "(%s-%s)" % (eL, eR)
                    if L < R:  # break symmetry
                        expr[S][vL + vR] = "(%s+%s)" % (eL, eR)
                        expr[S][vL * vR] = "(%s*%s)" % (eL, eR)
                    if vR != 0 and vL % vR == 0:  # only integer div
                        expr[S][vL // vR] = "(%s/%s)" % (eL, eR)
# look for the closest expression from the target
for dist in range(target + 1):
    for sign in [-1, +1]:
        val = target + sign * dist
        if val in expr[all_]:
            return "%s=%i" % (expr[all_][val], val)
# never reaches here if x contains integers between 0 and target
pass
def arithm_expr_target(x, target)
Create arithmetic expression approaching target value :param x: allowed constants :param target: target value :returns: string in form 'expression=value' :complexity: huge
4.244063
4.089261
1.037856
assert len(G) == len(G[0])
n = len(G)
mu = [None] * n  # Empty matching
mv = [None] * n
lu = [max(row) for row in G]  # Trivial labels
lv = [0] * n
for u0 in range(n):
    if mu[u0] is None:  # Free node
        while True:
            au = [False] * n  # Empty alternating tree
            av = [False] * n
            if improve_matching(G, u0, mu, mv, au, av, lu, lv):
                break
            improve_labels(G, au, av, lu, lv)
return (mu, sum(lu) + sum(lv))
def kuhn_munkres(G)
Maximum profit perfect matching. For minimum cost perfect matching, just invert the weights. :param G: square weight matrix of a complete bipartite graph :complexity: :math:`O(n^4)`
6.00001
8.563044
0.700687
b = [False] * (R + 1)
b[0] = True
for xi in x:
    for s in range(R, xi - 1, -1):
        b[s] |= b[s - xi]
return b[R]
def subset_sum(x, R)
Subsetsum :param x: table of non negative values :param R: target value :returns bool: True if a subset of x sums to R :complexity: O(n*R)
2.378299
2.67522
0.889011
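Tiny illustrative instance of the dynamic program above:
assert subset_sum([3, 5, 7], 12)      # 5 + 7 == 12
assert not subset_sum([3, 5, 7], 11)  # no subset sums to 11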
nU = len(G)
U = range(nU)
nV = len(G[0])
V = range(nV)
assert nU <= nV
mu = [None] * nU  # empty matching
mv = [None] * nV
lu = [max(row) for row in G]  # trivial labels
lv = [0] * nV
for root in U:  # build an alternate tree
    au = [False] * nU  # au, av mark nodes...
    au[root] = True  # ... covered by the tree
    Av = [None] * nV  # Av[v] successor of v in the tree
    # for every vertex u, slack[u] := (val, v) such that
    # val is the smallest slack on the constraints (*)
    # with fixed u and v being the corresponding vertex
    slack = [(lu[root] + lv[v] - G[root][v], root) for v in V]
    while True:
        ((delta, u), v) = min((slack[v], v) for v in V if Av[v] is None)
        assert au[u]
        if delta > TOLERANCE:  # tree is full
            for u0 in U:  # improve labels
                if au[u0]:
                    lu[u0] -= delta
            for v0 in V:
                if Av[v0] is not None:
                    lv[v0] += delta
                else:
                    (val, arg) = slack[v0]
                    slack[v0] = (val - delta, arg)
        assert abs(lu[u] + lv[v] - G[u][v]) <= TOLERANCE  # equality
        Av[v] = u  # add (u, v) to A
        if mv[v] is None:
            break  # alternating path found
        u1 = mv[v]
        assert not au[u1]
        au[u1] = True  # add (u1, v) to A
        for v1 in V:
            if Av[v1] is None:  # update margins
                alt = (lu[u1] + lv[v1] - G[u1][v1], u1)
                if slack[v1] > alt:
                    slack[v1] = alt
    while v is not None:  # ... alternating path found
        u = Av[v]  # along path to root
        prec = mu[u]
        mv[v] = u  # augment matching
        mu[u] = v
        v = prec
return (mu, sum(lu) + sum(lv))
def kuhn_munkres(G, TOLERANCE=1e-6)
Maximum profit bipartite matching by Kuhn-Munkres :param G: weight matrix where G[u][v] is the weight of edge (u,v), :param TOLERANCE: a value with absolute value below tolerance is considered as being zero. If G consists of integer or fractional values then TOLERANCE can be chosen 0. :requires: graph (U,V,E) is complete bi-partite graph with len(U) <= len(V). float('-inf') or float('inf') entries in G are allowed but not None. :returns: matching table from U to V, value of matching :complexity: :math:`O(|U|^2 |V|)`
4.40912
4.423182
0.996821
S = []
for start, end in sorted(I, key=lambda v: v[1]):
    if not S or S[-1] < start:
        S.append(end)
return S
def interval_cover(I)
Minimum interval cover :param I: list of closed intervals :returns: minimum list of points covering all intervals :complexity: O(n log n)
3.887688
4.001775
0.971491
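Worked example with three closed intervals (illustrative): point 3 stabs the first two, point 8 the last:
assert interval_cover([(0, 3), (2, 5), (5, 8)]) == [3, 8]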
n = len(graph)
dist = [float('inf')] * n
prec = [None] * n
black = [False] * n
dist[source] = 0
gray = deque([source])
while gray:
    node = gray.pop()
    if black[node]:
        continue
    black[node] = True
    if node == target:
        break
    for neighbor in graph[node]:
        ell = dist[node] + weight[node][neighbor]
        if black[neighbor] or dist[neighbor] <= ell:
            continue
        dist[neighbor] = ell
        prec[neighbor] = node
        if weight[node][neighbor] == 0:
            gray.append(neighbor)
        else:
            gray.appendleft(neighbor)
return dist, prec
def dist01(graph, weight, source=0, target=None)
Shortest path in a 0,1 weighted graph :param graph: directed graph in listlist or listdict format :param weight: matrix or adjacency dictionary :param int source: vertex :param target: exploration stops once distance to target is found :returns: distance table, predecessor table :complexity: `O(|V|+|E|)`
2.139502
2.18776
0.977942
B = ([(left, +1) for left, right in S] +
     [(right, -1) for left, right in S])
B.sort()
c = 0
best = (c, None)
for x, d in B:
    c += d
    if best[0] < c:
        best = (c, x)
return best
def max_interval_intersec(S)
determine a value that is contained in a largest number of given intervals :param S: list of half open intervals :complexity: O(n log n), where n = len(S)
3.36376
4.180576
0.804616
rows = len(grid)
cols = len(grid[0])
dirs = [(0, +1, '>'), (0, -1, '<'), (+1, 0, 'v'), (-1, 0, '^')]
i, j = source
grid[i][j] = 's'
Q = deque()
Q.append(source)
while Q:
    i1, j1 = Q.popleft()
    for di, dj, symbol in dirs:  # explore all directions
        i2 = i1 + di
        j2 = j1 + dj
        if not (0 <= i2 and i2 < rows and 0 <= j2 and j2 < cols):
            continue  # reached the bounds of the grid
        if grid[i2][j2] != ' ':  # inaccessible or already visited
            continue
        grid[i2][j2] = symbol  # mark visit
        if (i2, j2) == target:
            grid[i2][j2] = 't'  # goal is reached
            return
        Q.append((i2, j2))
def dist_grid(grid, source, target=None)
Distances in a grid by BFS :param grid: matrix with 4-neighborhood :param (int,int) source: pair of row, column indices :param (int,int) target: exploration stops if target is reached :complexity: linear in grid size
2.531505
2.553702
0.991308
return self._range_min(1, 0, self.N, i, k)
def range_min(self, i, k)
:returns: min{ t[i], t[i + 1], ..., t[k - 1]} :complexity: O(log len(t))
7.576394
5.977329
1.267522
if start + span <= i or k <= start:  # disjoint intervals
    return self.INF
if i <= start and start + span <= k:  # included intervals
    return self.s[p]
left = self._range_min(2 * p, start, span // 2, i, k)
right = self._range_min(2 * p + 1, start + span // 2, span // 2, i, k)
return min(left, right)
def _range_min(self, p, start, span, i, k)
returns the minimum in t in the indexes [i, k) intersected with [start, start + span). p is the node associated to the latter interval.
3.365499
3.059967
1.099848
# requires node and its direct descendants to be clean
l = 2 * node
r = 2 * node + 1
assert self.lazyset[node] is None
assert self.lazyadd[node] == 0
assert self.lazyset[l] is None
assert self.lazyadd[l] == 0
assert self.lazyset[r] is None
assert self.lazyadd[r] == 0
self.maxval[node] = max(self.maxval[l], self.maxval[r])
self.minval[node] = min(self.minval[l], self.minval[r])
self.sumval[node] = self.sumval[l] + self.sumval[r]
def _maintain(self, node)
maintains the invariant for the given node :promise: the lazy values are None/0 for this node
2.684712
2.46606
1.088665
if self.lazyset[node] is not None:  # first do the pending set
    val = self.lazyset[node]
    self.minval[node] = val
    self.maxval[node] = val
    self.sumval[node] = val * (right - left)
    self.lazyset[node] = None
    if left < right - 1:  # not a leaf
        self.lazyset[2 * node] = val  # propagate to direct descendants
        self.lazyadd[2 * node] = 0
        self.lazyset[2 * node + 1] = val
        self.lazyadd[2 * node + 1] = 0
if self.lazyadd[node] != 0:  # then do the pending add
    val = self.lazyadd[node]
    self.minval[node] += val
    self.maxval[node] += val
    self.sumval[node] += val * (right - left)
    self.lazyadd[node] = 0
    if left < right - 1:  # not at a leaf
        self.lazyadd[2 * node] += val  # propagate to direct descendants
        self.lazyadd[2 * node + 1] += val
def _clear(self, node, left, right)
propagates the lazy updates for this node to the subtrees. As a result the maxval, minval, sumval values for the node are up to date.
2.401476
2.206962
1.088137
n = len(tab)
left = [0] * n
right = [0] * n
tmp = [None] * n  # temporary table
rank = list(range(n))
_merge_sort(tab, tmp, rank, left, right, 0, n)
return left, right
def left_right_inversions(tab)
Compute left and right inversions of each element of a table. :param tab: list with comparable elements :returns: lists left and right. left[j] = the number of i<j such that tab[i] > tab[j]. right[i] = the number of j>i such that tab[i] > tab[j]. :complexity: `O(n \log n)`
3.735635
4.241418
0.880752
if intervals == []:
    return None
center = intervals[len(intervals) // 2][0]
L = []
R = []
C = []
for I in intervals:
    if I[1] <= center:
        L.append(I)
    elif center < I[0]:
        R.append(I)
    else:
        C.append(I)
by_low = sorted((I[0], I) for I in C)
by_high = sorted((I[1], I) for I in C)
IL = interval_tree(L)
IR = interval_tree(R)
return _Node(center, by_low, by_high, IL, IR)
def interval_tree(intervals)
Construct an interval tree :param intervals: list of half-open intervals encoded as value pairs *[left, right)* :assumes: intervals are lexicographically ordered ``>>> assert intervals == sorted(intervals)`` :returns: the root of the interval tree :complexity: :math:`O(n)`
2.429275
2.535878
0.957962
INF = float('inf')
if t is None:
    return []
if p < t.center:
    retval = intervals_containing(t.left, p)
    j = bisect_right(t.by_low, (p, (INF, INF)))
    for i in range(j):
        retval.append(t.by_low[i][1])
else:
    retval = intervals_containing(t.right, p)
    i = bisect_right(t.by_high, (p, (INF, INF)))
    for j in range(i, len(t.by_high)):
        retval.append(t.by_high[j][1])
return retval
def intervals_containing(t, p)
Query the interval tree :param t: root of the interval tree :param p: value :returns: a list of intervals containing p :complexity: O(log n + m), where n is the number of intervals in t, and m the length of the returned list
2.403879
2.337945
1.028202
S.sort()
top = []
bot = []
for p in S:
    while len(top) >= 2 and not left_turn(p, top[-1], top[-2]):
        top.pop()
    top.append(p)
    while len(bot) >= 2 and not left_turn(bot[-2], bot[-1], p):
        bot.pop()
    bot.append(p)
return bot[:-1] + top[:0:-1]
def andrew(S)
Convex hull by Andrew :param S: list of points as coordinate pairs :requires: S has at least 2 points :returns: list of points of the convex hull :complexity: `O(n log n)`
2.733124
2.397619
1.139933
while lo < hi:
    mid = lo + (hi - lo) // 2
    if tab[mid]:
        hi = mid
    else:
        lo = mid + 1
return lo
def discrete_binary_search(tab, lo, hi)
Binary search in a table :param tab: boolean monotone table with tab[hi] = True :param int lo: :param int hi: with hi >= lo :returns: first index i in [lo,hi] such that tab[i] :complexity: `O(log(hi-lo))`
1.731574
2.146419
0.806727
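Illustrative monotone table; the first True sits at index 3:
tab = [False, False, False, True, True]
assert discrete_binary_search(tab, 0, len(tab) - 1) == 3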
while hi - lo > gap:
    # in other languages you can force floating division by using 2.0
    mid = (lo + hi) / 2.
    if f(mid):
        hi = mid
    else:
        lo = mid
return lo
def continuous_binary_search(f, lo, hi, gap=1e-4)
Binary search for a function :param f: boolean monotone function with f(hi) = True :param float lo: :param float hi: with hi >= lo :param float gap: :returns: first value x in [lo,hi] such that f(x), x is computed up to some precision :complexity: `O(log((hi-lo)/gap))`
5.039017
6.078148
0.829038
lo = 0
intervalsize = (1 << logsize) >> 1
while intervalsize > 0:
    if not tab[lo | intervalsize]:
        lo |= intervalsize
    intervalsize >>= 1
return lo
def optimized_binary_search_lower(tab, logsize)
Binary search in a table using bit operations :param tab: boolean monotone table of size :math:`2^\\textrm{logsize}` with tab[0] = False :param int logsize: :returns: last i such that not tab[i] :complexity: O(logsize)
4.442106
4.909307
0.904834
hi = (1 << logsize) - 1
intervalsize = (1 << logsize) >> 1
while intervalsize > 0:
    if tab[hi ^ intervalsize]:
        hi ^= intervalsize
    intervalsize >>= 1
return hi
def optimized_binary_search(tab, logsize)
Binary search in a table using bit operations :param tab: boolean monotone table of size :math:`2^\\textrm{logsize}` with tab[hi] = True :param int logsize: :returns: first i such that tab[i] :complexity: O(logsize)
4.361297
4.590028
0.950168
while hi - lo > gap:
    step = (hi - lo) / 3.
    if f(lo + step) < f(lo + 2 * step):
        lo += step
    else:
        hi -= step
return lo
def ternary_search(f, lo, hi, gap=1e-10)
Ternary maximum search for a bitonic function :param f: bitonic function (increasing then decreasing, not necessarily strictly) :param int lo: :param int hi: with hi >= lo :param float gap: :returns: value x in [lo,hi] maximizing f(x), x is computed up to some precision :complexity: `O(log((hi-lo)/gap))`
2.508875
3.14863
0.796815
w = list(set(w))  # remove duplicates
d = {}  # group words according to some signature
for i in range(len(w)):
    s = ''.join(sorted(w[i]))  # signature
    if s in d:
        d[s].append(i)
    else:
        d[s] = [i]
# -- extract anagrams
answer = []
for s in d:
    if len(d[s]) > 1:  # ignore words without anagram
        answer.append([w[i] for i in d[s]])
return answer
def anagrams(w)
group a list of words into anagrams :param w: list of strings :returns: list of lists :complexity: :math:`O(n k \log k)` in average, for n words of length at most k. :math:`O(n^2 k \log k)` in worst case due to the usage of a dictionary.
3.192343
3.236943
0.986222
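Small illustrative run; only 'et' and 'te' are anagrams of each other (inner ordering may vary, since duplicates are removed through a set):
groups = anagrams(['le', 'la', 'et', 'te'])
assert sorted(map(sorted, groups)) == [['et', 'te']]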
if i == len(x):
    yield 0
else:
    for s in part_sum(x, i + 1):
        yield s
        yield s + x[i]
def part_sum(x, i=0)
All subsetsums from x[i:] :param x: table of values :param int i: index defining suffix of x to be considered :iterates: over all values, in arbitrary order :complexity: :math:`O(2^{len(x)-i})`
2.769951
3.202576
0.864913
k = len(x) // 2  # divide input
Y = [v for v in part_sum(x[:k])]
Z = [R - v for v in part_sum(x[k:])]
Y.sort()  # test of intersection between Y and Z
Z.sort()
i = 0
j = 0
while i < len(Y) and j < len(Z):
    if Y[i] == Z[j]:
        return True
    elif Y[i] < Z[j]:  # increment index of smallest element
        i += 1
    else:
        j += 1
return False
def subset_sum(x, R)
Subsetsum by splitting :param x: table of values :param R: target value :returns bool: if there is a subsequence of x with total sum R :complexity: :math:`O(2^{\\lceil n/2 \\rceil})`
3.591938
3.826739
0.938642
global sccp, waiting, dfs_time, dfs_num
sccp = []
waiting = []
waits = [False] * len(graph)
dfs_time = 0
dfs_num = [None] * len(graph)

def dfs(node):
    global sccp, waiting, dfs_time, dfs_num
    waiting.append(node)  # new node is waiting
    waits[node] = True
    dfs_num[node] = dfs_time  # mark visit
    dfs_time += 1
    dfs_min = dfs_num[node]  # compute dfs_min
    for neighbor in graph[node]:
        if dfs_num[neighbor] is None:
            dfs_min = min(dfs_min, dfs(neighbor))
        elif waits[neighbor] and dfs_min > dfs_num[neighbor]:
            dfs_min = dfs_num[neighbor]
    if dfs_min == dfs_num[node]:  # representative of a component
        sccp.append([])  # make a component
        while True:  # add waiting nodes
            u = waiting.pop()
            waits[u] = False
            sccp[-1].append(u)
            if u == node:  # until representative
                break
    return dfs_min

for node in range(len(graph)):
    if dfs_num[node] is None:
        dfs(node)
return sccp
def tarjan_recursif(graph)
Strongly connected components by Tarjan, recursive implementation :param graph: directed graph in listlist format, cannot be listdict :returns: list of lists for each component :complexity: linear
2.712387
2.763486
0.981509
n = len(graph)
dfs_num = [None] * n
dfs_min = [n] * n
waiting = []
waits = [False] * n  # invariant: waits[v] iff v in waiting
sccp = []  # list of detected components
dfs_time = 0
times_seen = [-1] * n
for start in range(n):
    if times_seen[start] == -1:  # initiate path
        times_seen[start] = 0
        to_visit = [start]
        while to_visit:
            node = to_visit[-1]  # top of stack
            if times_seen[node] == 0:  # start process
                dfs_num[node] = dfs_time
                dfs_min[node] = dfs_time
                dfs_time += 1
                waiting.append(node)
                waits[node] = True
            children = graph[node]
            if times_seen[node] == len(children):  # end of process
                to_visit.pop()  # remove from stack
                dfs_min[node] = dfs_num[node]  # compute dfs_min
                for child in children:
                    if waits[child] and dfs_min[child] < dfs_min[node]:
                        dfs_min[node] = dfs_min[child]
                if dfs_min[node] == dfs_num[node]:  # representative
                    component = []  # make component
                    while True:  # add nodes
                        u = waiting.pop()
                        waits[u] = False
                        component.append(u)
                        if u == node:  # until repr.
                            break
                    sccp.append(component)
            else:
                child = children[times_seen[node]]
                times_seen[node] += 1
                if times_seen[child] == -1:  # not visited yet
                    times_seen[child] = 0
                    to_visit.append(child)
return sccp
def tarjan(graph)
Strongly connected components by Tarjan, iterative implementation :param graph: directed graph in listlist format, cannot be listdict :returns: list of lists for each component :complexity: linear
2.89765
2.957525
0.979755
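Illustrative run: vertices 0, 1, 2 form one strongly connected component and 3 its own; components come out in reverse topological order:
graph = [[1], [2], [0, 3], []]
assert tarjan(graph) == [[3], [2, 1, 0]]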
rev_graph = [[] for node in graph]
for node in range(len(graph)):
    for neighbor in graph[node]:
        rev_graph[neighbor].append(node)
return rev_graph
def reverse(graph)
replace all arcs (u, v) by arcs (v, u) in a graph
2.077225
2.19447
0.946573
n = len(graph)
order = []
sccp = []
kosaraju_dfs(graph, range(n), order, [])
kosaraju_dfs(reverse(graph), order[::-1], [], sccp)
return sccp[::-1]
def kosaraju(graph)
Strongly connected components by Kosaraju :param graph: directed graph in listlist format, cannot be listdict :returns: list of lists for each component :complexity: linear
3.764503
4.264605
0.882732
"Get the directory $level levels above $base_dir." while level > 0: base_dir = os.path.dirname(base_dir) level -= 1 return base_dir
def get_parent_dir(base_dir, level=1)
Get the directory $level levels above $base_dir.
3.343873
2.340399
1.428762
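Hedged example with a POSIX-style path (assumes `import os`, which the body above requires):
assert get_parent_dir('/a/b/c/d', 2) == '/a/b'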
with open(mustache_file, 'r') as file_:
    parsed = pystache.parse(file_.read())
return parsed
def get_pystache_parsed(mustache_file)
Return a ParsedTemplate instance based on the contents of $mustache_file.
2.543012
3.095569
0.821501
temp_glob = rel_to_cwd('templates', '**', 'templates', 'config.yaml')
temp_groups = glob(temp_glob)
temp_groups = [get_parent_dir(path, 2) for path in temp_groups]
return set(temp_groups)
def get_template_dirs()
Return a set of all template directories.
5.41863
5.256415
1.03086
scheme_glob = rel_to_cwd('schemes', '**', '*.yaml')
scheme_groups = glob(scheme_glob)
scheme_groups = [get_parent_dir(path) for path in scheme_groups]
return set(scheme_groups)
def get_scheme_dirs()
Return a set of all scheme directories.
4.567314
4.360534
1.047421
patterns = patterns or ['*']
pattern_list = ['{}.yaml'.format(pattern) for pattern in patterns]
scheme_files = []
for scheme_path in get_scheme_dirs():
    for pattern in pattern_list:
        file_paths = glob(os.path.join(scheme_path, pattern))
        scheme_files.extend(file_paths)
return scheme_files
def get_scheme_files(patterns=None)
Return a list of all (or those matching $pattern) yaml (scheme) files.
2.469312
2.474375
0.997954
scheme['scheme-name'] = scheme.pop('scheme')
scheme['scheme-author'] = scheme.pop('author')
scheme['scheme-slug'] = slug
bases = ['base{:02X}'.format(x) for x in range(0, 16)]
for base in bases:
    scheme['{}-hex'.format(base)] = scheme.pop(base)
    scheme['{}-hex-r'.format(base)] = scheme['{}-hex'.format(base)][0:2]
    scheme['{}-hex-g'.format(base)] = scheme['{}-hex'.format(base)][2:4]
    scheme['{}-hex-b'.format(base)] = scheme['{}-hex'.format(base)][4:6]
    scheme['{}-rgb-r'.format(base)] = str(
        int(scheme['{}-hex-r'.format(base)], 16))
    scheme['{}-rgb-g'.format(base)] = str(
        int(scheme['{}-hex-g'.format(base)], 16))
    scheme['{}-rgb-b'.format(base)] = str(
        int(scheme['{}-hex-b'.format(base)], 16))
    scheme['{}-dec-r'.format(base)] = str(
        int(scheme['{}-rgb-r'.format(base)]) / 255)
    scheme['{}-dec-g'.format(base)] = str(
        int(scheme['{}-rgb-g'.format(base)]) / 255)
    scheme['{}-dec-b'.format(base)] = str(
        int(scheme['{}-rgb-b'.format(base)]) / 255)
def format_scheme(scheme, slug)
Change $scheme so it can be applied to a template.
1.54395
1.529729
1.009296
scheme_file_name = os.path.basename(scheme_file)
if scheme_file_name.endswith('.yaml'):
    scheme_file_name = scheme_file_name[:-5]
return scheme_file_name.lower().replace(' ', '-')
def slugify(scheme_file)
Format $scheme_file_name to be used as a slug variable.
2.005061
1.93154
1.038063
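Illustrative call with a hypothetical scheme file name:
assert slugify('/path/to/Ocean Dark.yaml') == 'ocean-dark'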
scheme = get_yaml_dict(scheme_file)
scheme_slug = slugify(scheme_file)
format_scheme(scheme, scheme_slug)
scheme_name = scheme['scheme-name']
print('Building colorschemes for scheme "{}"…'.format(scheme_name))
for temp_group in templates:
    for _, sub in temp_group.templates.items():
        output_dir = os.path.join(base_output_dir,
                                  temp_group.name,
                                  sub['output'])
        try:
            os.makedirs(output_dir)
        except FileExistsError:
            pass
        if sub['extension'] is not None:
            filename = 'base16-{}{}'.format(scheme_slug, sub['extension'])
        else:
            filename = 'base16-{}'.format(scheme_slug)
        build_path = os.path.join(output_dir, filename)
        with open(build_path, 'w') as file_:
            file_content = pystache.render(sub['parsed'], scheme)
            file_.write(file_content)
print('Built colorschemes for scheme "{}".'.format(scheme_name))
def build_single(scheme_file, templates, base_output_dir)
Build colorscheme for a single $scheme_file using all TemplateGroup instances in $templates.
2.974888
2.837754
1.048325
while True:
    scheme_file = queue.get()
    if scheme_file is None:
        break
    build_single(scheme_file, templates, base_output_dir)
    queue.task_done()
def build_single_worker(queue, templates, base_output_dir)
Worker thread for picking up scheme files from $queue and building b16 templates using $templates until it receives None.
2.518744
1.914873
1.315358
queue = Queue()
for scheme in scheme_files:
    queue.put(scheme)
if len(scheme_files) < 40:
    thread_num = len(scheme_files)
else:
    thread_num = 40
threads = []
for _ in range(thread_num):
    thread = Thread(target=build_single_worker,
                    args=(queue, templates, base_output_dir))
    thread.start()
    threads.append(thread)
queue.join()
for _ in range(thread_num):
    queue.put(None)
for thread in threads:
    thread.join()
def build_from_job_list(scheme_files, templates, base_output_dir)
Use $scheme_files as a job list and build base16 templates using $templates (a list of TemplateGroup objects).
1.898458
2.043206
0.929156
template_dirs = templates or get_template_dirs()
scheme_files = get_scheme_files(schemes)
base_output_dir = base_output_dir or rel_to_cwd('output')
# raise LookupError if there is not at least one template or scheme
# to work with
if not template_dirs or not scheme_files:
    raise LookupError
# raise PermissionError if user has no write access for $base_output_dir
try:
    os.makedirs(base_output_dir)
except FileExistsError:
    pass
if not os.access(base_output_dir, os.W_OK):
    raise PermissionError
templates = [TemplateGroup(path) for path in template_dirs]
build_from_job_list(scheme_files, templates, base_output_dir)
print('Finished building process.')
def build(templates=None, schemes=None, base_output_dir=None)
Main build function to initiate building process.
3.622578
3.471297
1.043581
config_path = rel_to_cwd(self.base_path, 'templates', 'config.yaml')
templates = get_yaml_dict(config_path)
for temp, sub in templates.items():
    mustache_path = os.path.join(get_parent_dir(config_path),
                                 '{}.mustache'.format(temp))
    sub['parsed'] = get_pystache_parsed(mustache_path)
return templates
def get_templates(self)
Return a list of template_dicts based on the config.yaml in $self.base_path. Keys correspond to templates and values represent further settings regarding each template. A pystache object containing the parsed corresponding mustache file is added to the sub-dictionary.
4.846797
3.592808
1.349028
file_content = (
    'schemes: '
    'https://github.com/chriskempson/base16-schemes-source.git\n'
    'templates: '
    'https://github.com/chriskempson/base16-templates-source.git'
)
file_path = rel_to_cwd('sources.yaml')
with open(file_path, 'w') as file_:
    file_.write(file_content)
def write_sources_file()
Write a sources.yaml file to current working dir.
3.605914
3.218956
1.120212
yaml_dict = get_yaml_dict(yaml_file)
job_list = []
for key, value in yaml_dict.items():
    job_list.append((value, rel_to_cwd(base_dir, key)))
return job_list
def yaml_to_job_list(yaml_file, base_dir)
Return a job_list consisting of git repos from $yaml_file as well as their base target directory.
3.085155
3.082837
1.000752
if os.path.exists(os.path.join(path, '.git')):
    # get rid of local repo if it already exists
    shutil.rmtree(path)
os.makedirs(path, exist_ok=True)
print('Start cloning from {}…'.format(git_url))
git_proc = subprocess.Popen(['git', 'clone', git_url, path],
                            stderr=subprocess.PIPE,
                            stdout=subprocess.PIPE,
                            env={'GIT_TERMINAL_PROMPT': '0'})
try:
    stdoutmsg, stderrmsg = git_proc.communicate(timeout=120)
except subprocess.TimeoutExpired:
    git_proc.kill()
    stderrmsg = b'Timed out.'
if git_proc.returncode == 0:
    print('Cloned {}.'.format(git_url))
else:
    print('Error cloning from {}:\n{}'.format(git_url,
                                              stderrmsg.decode('utf-8')))
def git_clone(git_url, path)
Clone git repository at $git_url to $path.
2.224405
2.252299
0.987615
while True:
    job = queue.get()
    if job is None:
        break
    git_url, path = job
    git_clone(git_url, path)
    queue.task_done()
def git_clone_worker(queue)
Worker thread for picking up git clone jobs from $queue until it receives None.
2.323811
2.535206
0.916616
queue = Queue()
for job in job_list:
    queue.put(job)
if len(job_list) < 20:
    thread_num = len(job_list)
else:
    thread_num = 20
threads = []
for _ in range(thread_num):
    thread = Thread(target=git_clone_worker, args=(queue, ))
    thread.start()
    threads.append(thread)
queue.join()
for _ in range(thread_num):
    queue.put(None)
for thread in threads:
    thread.join()
def git_clone_job_list(job_list)
Deal with all git clone jobs in $job_list.
1.751083
1.77564
0.98617
if not shutil.which('git'):
    print('Git executable not found in $PATH.')
    sys.exit(1)
if not custom_sources:
    print('Creating sources.yaml…')
    write_sources_file()
    print('Cloning sources…')
    sources_file = rel_to_cwd('sources.yaml')
    jobs = yaml_to_job_list(sources_file, rel_to_cwd('sources'))
    git_clone_job_list(jobs)
print('Cloning templates…')
jobs = yaml_to_job_list(rel_to_cwd('sources', 'templates', 'list.yaml'),
                        rel_to_cwd('templates'))
print('Cloning schemes…')
jobs.extend(yaml_to_job_list(rel_to_cwd('sources', 'schemes', 'list.yaml'),
                             rel_to_cwd('schemes')))
git_clone_job_list(jobs)
print('Completed updating repositories.')
def update(custom_sources=False)
Update function to be called from cli.py
3.123017
3.056519
1.021756
try:
    with open(yaml_file, 'r') as file_:
        yaml_dict = yaml.safe_load(file_.read()) or {}
    return yaml_dict
except FileNotFoundError:
    return {}
def get_yaml_dict(yaml_file)
Return a yaml_dict from reading yaml_file. If yaml_file is empty or doesn't exist, return an empty dict instead.
2.67285
2.3753
1.125268
for file_ in files:
    rec = Recipient(file_)
    colorscheme = rec.get_colorscheme(scheme_file)
    rec.inject_scheme(colorscheme)
    rec.write()
def inject_into_files(scheme_file, files)
Inject $scheme_file into list $files.
5.558538
5.932989
0.936887
with open(path, 'r') as file_:
    content = file_.read()
return content
def _get_file_content(self, path)
Return a string representation of the file content at $path.
3.870622
3.472246
1.114732
temp = None
for line in content.splitlines():
    # make sure there's both start and end line
    if not temp:
        match = TEMP_NEEDLE.match(line)
        if match:
            temp = match.group(1).strip()
            continue
    else:
        match = TEMP_END_NEEDLE.match(line)
        if match:
            return temp
raise IndexError(self.path)
def _get_temp(self, content)
Get the string that points to a specific base16 scheme.
4.239062
4.265091
0.993897
scheme = get_yaml_dict(scheme_file)
scheme_slug = builder.slugify(scheme_file)
builder.format_scheme(scheme, scheme_slug)
try:
    temp_base, temp_sub = self.temp.split('##')
except ValueError:
    temp_base, temp_sub = (self.temp.strip('##'), 'default')
temp_path = rel_to_cwd('templates', temp_base)
temp_group = builder.TemplateGroup(temp_path)
try:
    single_temp = temp_group.templates[temp_sub]
except KeyError:
    raise FileNotFoundError(None, None, self.path + ' (sub-template)')
colorscheme = pystache.render(single_temp['parsed'], scheme)
return colorscheme
def get_colorscheme(self, scheme_file)
Return a string object with the colorscheme that is to be inserted.
5.211155
5.446757
0.956745
# correctly formatted start and end of block should have already been
# ascertained by _get_temp
content_lines = self.content.splitlines()
b16_scheme_lines = b16_scheme.splitlines()
start_line = None
for num, line in enumerate(content_lines):
    if not start_line:
        match = TEMP_NEEDLE.match(line)
        if match:
            start_line = num + 1
    else:
        match = TEMP_END_NEEDLE.match(line)
        if match:
            end_line = num
# put lines back together
new_content_lines = (content_lines[0:start_line] +
                     b16_scheme_lines +
                     content_lines[end_line:])
self.content = '\n'.join(new_content_lines)
def inject_scheme(self, b16_scheme)
Inject string $b16_scheme into self.content.
3.479367
3.229344
1.077422
with open(self.path, 'w') as file_:
    file_.write(self.content)
def write(self)
Write content back to file.
4.397763
3.511384
1.25243
custom_temps = arg_namespace.template or []
temp_paths = [rel_to_cwd('templates', temp) for temp in custom_temps]
try:
    builder.build(templates=temp_paths,
                  schemes=arg_namespace.scheme,
                  base_output_dir=arg_namespace.output)
except (LookupError, PermissionError) as exception:
    if isinstance(exception, LookupError):
        print('Necessary resources for building not found in current '
              'working directory.')
    if isinstance(exception, PermissionError):
        print("No write permission for output directory.")
def build_mode(arg_namespace)
Check command line arguments and run build function.
5.119319
4.912055
1.042195
try:
    injector.inject_into_files(arg_namespace.scheme, arg_namespace.file)
except (IndexError, FileNotFoundError, PermissionError,
        IsADirectoryError) as exception:
    if isinstance(exception, IndexError):
        print('"{}" has no valid injection marker lines.'.format(
            exception.args[0]))
    if isinstance(exception, FileNotFoundError):
        print('Lacking resource "{}" to complete operation.'.format(
            exception.filename))
    if isinstance(exception, PermissionError):
        print('No write permission for current working directory.')
    if isinstance(exception, IsADirectoryError):
        print('"{}" is a directory. Provide a *.yaml scheme file instead.'
              .format(exception.filename))
def inject_mode(arg_namespace)
Check command line arguments and run inject function.
4.903036
4.816524
1.017962
try:
    updater.update(custom_sources=arg_namespace.custom)
except (PermissionError, FileNotFoundError) as exception:
    if isinstance(exception, PermissionError):
        print('No write permission for current working directory.')
    if isinstance(exception, FileNotFoundError):
        print('Necessary resources for updating not found in current '
              'working directory.')
def update_mode(arg_namespace)
Check command line arguments and run update function.
4.993651
4.719418
1.058108
'''
Like script_to_address but supports altcoins

Copied 2015-10-02 from
https://github.com/mflaxman/pybitcointools/blob/faf56c53148989ea390238c3c4541a6ae1d601f5/bitcoin/transaction.py#L224-L236
'''
if re.match('^[0-9a-fA-F]*$', script):
    script = binascii.unhexlify(script)
if script[:3] == b'\x76\xa9\x14' and script[-2:] == b'\x88\xac' and len(script) == 25:
    return bin_to_b58check(script[3:-2], vbyte)  # pubkey hash addresses
else:
    if vbyte in [111, 196]:
        # Testnet
        scripthash_byte = 196
    else:
        scripthash_byte = vbyte
    # BIP0016 scripthash addresses
    return bin_to_b58check(script[2:-1], scripthash_byte)
def script_to_address(script, vbyte=0)
Like script_to_address but supports altcoins Copied 2015-10-02 from https://github.com/mflaxman/pybitcointools/blob/faf56c53148989ea390238c3c4541a6ae1d601f5/bitcoin/transaction.py#L224-L236
4.608303
2.177538
2.11629
'''
Pythonize a blockcypher API response
'''
confirmed_txrefs = []
for confirmed_txref in response_dict.get('txrefs', []):
    confirmed_txref['confirmed'] = parser.parse(confirmed_txref['confirmed'])
    confirmed_txrefs.append(confirmed_txref)
response_dict['txrefs'] = confirmed_txrefs
unconfirmed_txrefs = []
for unconfirmed_txref in response_dict.get('unconfirmed_txrefs', []):
    unconfirmed_txref['received'] = parser.parse(unconfirmed_txref['received'])
    unconfirmed_txrefs.append(unconfirmed_txref)
response_dict['unconfirmed_txrefs'] = unconfirmed_txrefs
return response_dict
def _clean_tx(response_dict)
Pythonize a blockcypher API response
2.04631
1.800613
1.136452
'''
Pythonize a blockcypher API response
'''
response_dict['received_time'] = parser.parse(response_dict['received_time'])
response_dict['time'] = parser.parse(response_dict['time'])
return response_dict
def _clean_block(response_dict)
Pythonize a blockcypher API response
4.713092
3.080612
1.52992
'''
Takes an address and coin_symbol and returns the address details

Optional:
- txn_limit: # transactions to include
- before_bh: filters response to only include transactions below before height in the blockchain.
- after_bh: filters response to only include transactions above after height in the blockchain.
- confirmations: returns the balance and TXRefs that have this number of confirmations
- unspent_only: filters response to only include unspent TXRefs.
- show_confidence: adds confidence information to unconfirmed TXRefs.

For batching a list of addresses, see get_addresses_details
'''
assert is_valid_address_for_coinsymbol(
    b58_address=address,
    coin_symbol=coin_symbol), address
assert isinstance(show_confidence, bool), show_confidence
url = make_url(coin_symbol, **dict(addrs=address))
params = {}
if txn_limit:
    params['limit'] = txn_limit
if api_key:
    params['token'] = api_key
if before_bh:
    params['before'] = before_bh
if after_bh:
    params['after'] = after_bh
if confirmations:
    params['confirmations'] = confirmations
if unspent_only:
    params['unspentOnly'] = 'true'
if show_confidence:
    params['includeConfidence'] = 'true'
if include_script:
    params['includeScript'] = 'true'
r = requests.get(url, params=params, verify=True, timeout=TIMEOUT_IN_SECONDS)
r = get_valid_json(r)
return _clean_tx(response_dict=r)
def get_address_details(address, coin_symbol='btc', txn_limit=None, api_key=None, before_bh=None, after_bh=None, unspent_only=False, show_confidence=False, confirmations=0, include_script=False)
Takes an address and coin_symbol and returns the address details Optional: - txn_limit: # transactions to include - before_bh: filters response to only include transactions below before height in the blockchain. - after_bh: filters response to only include transactions above after height in the blockchain. - confirmations: returns the balance and TXRefs that have this number of confirmations - unspent_only: filters response to only include unspent TXRefs. - show_confidence: adds confidence information to unconfirmed TXRefs. For batching a list of addresses, see get_addresses_details
3.332184
1.91303
1.741836
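A hedged usage sketch: this performs a live BlockCypher API call; the address comes from BlockCypher's documentation and 'YOUR-TOKEN' is a placeholder, not a real token:
details = get_address_details('1DEP8i3QJCsomS4BSMY2RpU1upv62aGvhD',
                              coin_symbol='btc', api_key='YOUR-TOKEN')
print(details['balance'])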
'''
Batch version of get_address_details method
'''
for address in address_list:
    assert is_valid_address_for_coinsymbol(
        b58_address=address,
        coin_symbol=coin_symbol), address
assert isinstance(show_confidence, bool), show_confidence
kwargs = dict(addrs=';'.join([str(addr) for addr in address_list]))
url = make_url(coin_symbol, **kwargs)
params = {}
if txn_limit:
    params['limit'] = txn_limit
if api_key:
    params['token'] = api_key
if before_bh:
    params['before'] = before_bh
if after_bh:
    params['after'] = after_bh
if confirmations:
    params['confirmations'] = confirmations
if unspent_only:
    params['unspentOnly'] = 'true'
if show_confidence:
    params['includeConfidence'] = 'true'
if include_script:
    params['includeScript'] = 'true'
r = requests.get(url, params=params, verify=True, timeout=TIMEOUT_IN_SECONDS)
r = get_valid_json(r)
return [_clean_tx(response_dict=d) for d in r]
def get_addresses_details(address_list, coin_symbol='btc', txn_limit=None, api_key=None, before_bh=None, after_bh=None, unspent_only=False, show_confidence=False, confirmations=0, include_script=False)
Batch version of get_address_details method
2.754647
2.617815
1.05227
'''
Takes a wallet, api_key, coin_symbol and returns the wallet's details

Optional:
- txn_limit: # transactions to include
- before_bh: filters response to only include transactions below before height in the blockchain.
- after_bh: filters response to only include transactions above after height in the blockchain.
- confirmations: returns the balance and TXRefs that have this number of confirmations
- unspent_only: filters response to only include unspent TXRefs.
- show_confidence: adds confidence information to unconfirmed TXRefs.
'''
assert len(wallet_name) <= 25, wallet_name
assert api_key
assert is_valid_coin_symbol(coin_symbol=coin_symbol)
assert isinstance(show_confidence, bool), show_confidence
assert isinstance(omit_addresses, bool), omit_addresses
url = make_url(coin_symbol, **dict(addrs=wallet_name))
params = {}
if txn_limit:
    params['limit'] = txn_limit
if api_key:
    params['token'] = api_key
if before_bh:
    params['before'] = before_bh
if after_bh:
    params['after'] = after_bh
if confirmations:
    params['confirmations'] = confirmations
if unspent_only:
    params['unspentOnly'] = 'true'
if show_confidence:
    params['includeConfidence'] = 'true'
if omit_addresses:
    params['omitWalletAddresses'] = 'true'
r = requests.get(url, params=params, verify=True, timeout=TIMEOUT_IN_SECONDS)
return _clean_tx(get_valid_json(r))
def get_wallet_transactions(wallet_name, api_key, coin_symbol='btc', before_bh=None, after_bh=None, txn_limit=None, omit_addresses=False, unspent_only=False, show_confidence=False, confirmations=0)
Takes a wallet, api_key, coin_symbol and returns the wallet's details Optional: - txn_limit: # transactions to include - before_bh: filters response to only include transactions below before height in the blockchain. - after_bh: filters response to only include transactions above after height in the blockchain. - confirmations: returns the balance and TXRefs that have this number of confirmations - unspent_only: filters response to only include unspent TXRefs. - show_confidence: adds confidence information to unconfirmed TXRefs.
3.13833
1.855669
1.691212
'''
Takes an address and coin_symbol and returns the address details
'''
assert is_valid_address_for_coinsymbol(b58_address=address,
                                       coin_symbol=coin_symbol)
url = make_url(coin_symbol, 'addrs', **{address: 'balance'})
params = {}
if api_key:
    params['token'] = api_key
r = requests.get(url, params=params, verify=True, timeout=TIMEOUT_IN_SECONDS)
return get_valid_json(r)
def get_address_overview(address, coin_symbol='btc', api_key=None)
Takes an address and coin_symbol and returns the address details
4.543277
3.918003
1.15959
'''
Takes a coin_symbol and returns a new address with its public and private keys.

This method will create the address server side, which is inherently insecure
and should only be used for testing.

If you want to create a secure address client-side using python, please check
out bitmerchant:

    from bitmerchant.wallet import Wallet
    Wallet.new_random_wallet()

https://github.com/sbuss/bitmerchant
'''
assert api_key, 'api_key required'
assert is_valid_coin_symbol(coin_symbol)
if coin_symbol not in ('btc-testnet', 'bcy'):
    WARNING_MSG = [
        'Generating private key details server-side.',
        'You really should do this client-side.',
        'See https://github.com/sbuss/bitmerchant for an example.',
    ]
    print(' '.join(WARNING_MSG))
url = make_url(coin_symbol, 'addrs')
params = {'token': api_key}
r = requests.post(url, params=params, verify=True, timeout=TIMEOUT_IN_SECONDS)
return get_valid_json(r)
def generate_new_address(coin_symbol='btc', api_key=None)
Takes a coin_symbol and returns a new address with its public and private keys. This method will create the address server side, which is inherently insecure and should only be used for testing. If you want to create a secure address client-side using python, please check out bitmerchant: from bitmerchant.wallet import Wallet Wallet.new_random_wallet() https://github.com/sbuss/bitmerchant
5.750864
2.697528
2.131902
'''
Returns a new address (without access to the private key) and adds it to
your HD wallet (previously created using create_hd_wallet).

This method will traverse/discover a new address server-side from your
previously supplied extended public key, the server will never see your
private key. It is therefore safe for production use.

You may also include a subchain_index directive if your wallet has multiple
subchain_indices and you'd like to specify which one should be traversed.
'''
assert is_valid_coin_symbol(coin_symbol)
assert api_key, 'api_key required'
assert wallet_name, wallet_name
assert isinstance(num_addresses, int), num_addresses
url = make_url(coin_symbol, 'wallets/hd', **{wallet_name: 'addresses/derive'})
params = {'token': api_key}
if subchain_index:
    params['subchain_index'] = subchain_index
if num_addresses > 1:
    params['count'] = num_addresses
r = requests.post(url, params=params, verify=True, timeout=TIMEOUT_IN_SECONDS)
return get_valid_json(r)
def derive_hd_address(api_key=None, wallet_name=None, num_addresses=1, subchain_index=None, coin_symbol='btc')
Returns a new address (without access to the private key) and adds it to your HD wallet (previously created using create_hd_wallet). This method will traverse/discover a new address server-side from your previously supplied extended public key, the server will never see your private key. It is therefore safe for production use. You may also include a subchain_index directive if your wallet has multiple subchain_indices and you'd like to specify which one should be traversed.
5.630356
2.035515
2.766059
assert is_valid_hash(tx_hash), tx_hash
assert is_valid_coin_symbol(coin_symbol), coin_symbol
added = 'txs/{}{}'.format(tx_hash, '/confidence' if confidence_only else '')
url = make_url(coin_symbol, added)
params = {}
if api_key:
    params['token'] = api_key
if limit:
    params['limit'] = limit
if tx_input_offset:
    params['inStart'] = tx_input_offset
if tx_output_offset:
    params['outStart'] = tx_output_offset
if include_hex:
    params['includeHex'] = 'true'
if show_confidence and not confidence_only:
    params['includeConfidence'] = 'true'
r = requests.get(url, params=params, verify=True, timeout=TIMEOUT_IN_SECONDS)
response_dict = get_valid_json(r)
if 'error' not in response_dict and not confidence_only:
    if response_dict['block_height'] > 0:
        response_dict['confirmed'] = parser.parse(response_dict['confirmed'])
    else:
        response_dict['block_height'] = None
        # Blockcypher reports fake times if it's not in a block
        response_dict['confirmed'] = None
    # format this string as a datetime object
    response_dict['received'] = parser.parse(response_dict['received'])
return response_dict
def get_transaction_details(tx_hash, coin_symbol='btc', limit=None, tx_input_offset=None, tx_output_offset=None, include_hex=False, show_confidence=False, confidence_only=False, api_key=None)
Takes a tx_hash, coin_symbol, and limit and returns the transaction details Optional: - limit: # inputs/ouputs to include (applies to both) - tx_input_offset: input offset - tx_output_offset: output offset - include_hex: include the raw TX hex - show_confidence: adds confidence information to unconfirmed TXRefs. - confidence_only: show only the confidence statistics and don't return the rest of the endpoint details (faster)
2.83339
2.849876
0.994215
for tx_hash in tx_hash_list:
    assert is_valid_hash(tx_hash)
assert is_valid_coin_symbol(coin_symbol)
if len(tx_hash_list) == 0:
    return []
elif len(tx_hash_list) == 1:
    return [get_transaction_details(tx_hash=tx_hash_list[0],
                                    coin_symbol=coin_symbol,
                                    limit=limit,
                                    api_key=api_key)]
url = make_url(coin_symbol, **dict(txs=';'.join(tx_hash_list)))
params = {}
if api_key:
    params['token'] = api_key
if limit:
    params['limit'] = limit
r = requests.get(url, params=params, verify=True, timeout=TIMEOUT_IN_SECONDS)
response_dict_list = get_valid_json(r)
cleaned_dict_list = []
for response_dict in response_dict_list:
    if 'error' not in response_dict:
        if response_dict['block_height'] > 0:
            response_dict['confirmed'] = parser.parse(response_dict['confirmed'])
        else:
            # Blockcypher reports fake times if it's not in a block
            response_dict['confirmed'] = None
            response_dict['block_height'] = None
        # format this string as a datetime object
        response_dict['received'] = parser.parse(response_dict['received'])
    cleaned_dict_list.append(response_dict)
return cleaned_dict_list
def get_transactions_details(tx_hash_list, coin_symbol='btc', limit=None, api_key=None)
Takes a list of tx_hashes, coin_symbol, and limit and returns the transaction details Limit applies to both num inputs and num outputs. TODO: add offsetting once supported
2.628461
2.662786
0.98711
'''
Given a tx_hash, return the number of confirmations that transaction has.
Answer is going to be from 0 - current_block_height.
'''
return get_transaction_details(tx_hash=tx_hash,
                               coin_symbol=coin_symbol,
                               limit=1,
                               api_key=api_key).get('confirmations')
def get_num_confirmations(tx_hash, coin_symbol='btc', api_key=None)
Given a tx_hash, return the number of confirmations that transaction has. Answer is going to be from 0 - current_block_height.
5.741413
2.396102
2.396147
url = make_url(coin_symbol, 'txs')
params = {}
if api_key:
    params['token'] = api_key
if limit:
    params['limit'] = limit
r = requests.get(url, params=params, verify=True, timeout=TIMEOUT_IN_SECONDS)
response_dict = get_valid_json(r)
unconfirmed_txs = []
for unconfirmed_tx in response_dict:
    unconfirmed_tx['received'] = parser.parse(unconfirmed_tx['received'])
    unconfirmed_txs.append(unconfirmed_tx)
return unconfirmed_txs
def get_broadcast_transactions(coin_symbol='btc', limit=10, api_key=None)
Get a list of broadcast but unconfirmed transactions Similar to bitcoind's getrawmempool method
2.45303
2.499284
0.981493
'''
Warning, slow!
'''
transactions = get_broadcast_transactions(
    coin_symbol=coin_symbol,
    api_key=api_key,
    limit=limit,
)
return [tx['hash'] for tx in transactions]
def get_broadcast_transaction_hashes(coin_symbol='btc', api_key=None, limit=10)
Warning, slow!
4.09563
3.044714
1.345161
assert is_valid_coin_symbol(coin_symbol)
assert is_valid_block_representation(
    block_representation=block_representation,
    coin_symbol=coin_symbol)
url = make_url(coin_symbol, **dict(blocks=block_representation))
params = {}
if api_key:
    params['token'] = api_key
if txn_limit:
    params['limit'] = txn_limit
if txn_offset:
    params['txstart'] = txn_offset
r = requests.get(url, params=params, verify=True, timeout=TIMEOUT_IN_SECONDS)
response_dict = get_valid_json(r)
if 'error' in response_dict:
    return response_dict
return _clean_block(response_dict=response_dict)
def get_block_overview(block_representation, coin_symbol='btc', txn_limit=None, txn_offset=None, api_key=None)
Takes a block_representation, coin_symbol and txn_limit and gets an overview of that block, including up to X transaction ids. Note that block_representation may be the block number or block hash
2.675255
2.833233
0.944241
'''
Batch request version of get_block_overview
'''
for block_representation in block_representation_list:
    assert is_valid_block_representation(
        block_representation=block_representation,
        coin_symbol=coin_symbol)
assert is_valid_coin_symbol(coin_symbol)
blocks = ';'.join([str(x) for x in block_representation_list])
url = make_url(coin_symbol, **dict(blocks=blocks))
logger.info(url)
params = {}
if api_key:
    params['token'] = api_key
if txn_limit:
    params['limit'] = txn_limit
r = requests.get(url, params=params, verify=True, timeout=TIMEOUT_IN_SECONDS)
r = get_valid_json(r)
return [_clean_tx(response_dict=d) for d in r]
def get_blocks_overview(block_representation_list, coin_symbol='btc', txn_limit=None, api_key=None)
Batch request version of get_block_overview
3.489003
3.34537
1.042935
'''
Takes a block_representation and returns the merkle root
'''
return get_block_overview(block_representation=block_representation,
                          coin_symbol=coin_symbol,
                          txn_limit=1,
                          api_key=api_key)['mrkl_root']
def get_merkle_root(block_representation, coin_symbol='btc', api_key=None)
Takes a block_representation and returns the merkle root
5.098839
4.469824
1.140725
'''
Takes a block_representation and returns the number of bits
'''
return get_block_overview(block_representation=block_representation,
                          coin_symbol=coin_symbol,
                          txn_limit=1,
                          api_key=api_key)['bits']
def get_bits(block_representation, coin_symbol='btc', api_key=None)
Takes a block_representation and returns the number of bits
5.296167
4.448543
1.19054