code
string
signature
string
docstring
string
loss_without_docstring
float64
loss_with_docstring
float64
factor
float64
def pay_cost(self, source, amount: int) -> int:
    """Make the player pay ``amount`` mana for ``source``.

    When ``spells_cost_health`` is set and ``source`` is a spell, the
    cost is paid in health instead of mana. Temporary mana (Coin,
    Innervate, ...) is consumed first. Returns the amount actually
    spent after temporary-mana adjustments.
    """
    if self.spells_cost_health and source.type == CardType.SPELL:
        self.log("%s spells cost %i health", self, amount)
        self.game.queue_actions(self, [Hit(self.hero, amount)])
        return amount
    # Consume temporary mana before real mana.
    temp_spent = min(self.temp_mana, amount) if self.temp_mana else 0
    amount -= temp_spent
    self.temp_mana -= temp_spent
    self.log("%s pays %i mana", self, amount)
    self.used_mana += amount
    return amount
5.949053
5.140236
1.15735
def summon(self, card):
    """Put ``card`` into the PLAY zone and return it.

    ``card`` may be a card ID string, in which case the actual card
    entity is created in play for this player first.
    """
    entity = self.card(card, zone=Zone.PLAY) if isinstance(card, str) else card
    self.game.cheat_action(self, [Summon(self, entity)])
    return entity
6.410696
5.49972
1.16564
def trigger_event(self, source, event, args):
    """Trigger an event on the Entity.

    * ``source``: the source of the event
    * ``event``: the event being triggered
    * ``args``: a list of arguments to pass to the callback

    Callable actions are evaluated once with ``(self, *args)``; falsy
    results are skipped, iterable results are extended into the queue.
    One-shot events are deregistered after firing.
    """
    actions = []
    for action in event.actions:
        if callable(action):
            ac = action(self, *args)
            if not ac:
                # Handle falsy returns
                continue
            if not hasattr(ac, "__iter__"):
                actions.append(ac)
            else:
                # BUGFIX: previously called action(self, *args) a second
                # time here, double-evaluating any side effects.
                actions += ac
        else:
            actions.append(action)
    ret = source.game.trigger(self, actions, args)
    if event.once:
        self._events.remove(event)
    return ret
3.427936
3.602504
0.951542
def get_damage(self, amount: int, target) -> int:
    """Return the damage ``target`` would take from this source.

    Immune targets take no damage. Override to modify the amount.
    """
    if not target.immune:
        return amount
    self.log("%r is immune to %s for %i damage", target, self, amount)
    return 0
5.840526
4.855356
1.202904
def _eval_card(source, card):
    """Resolve ``card`` into a list of Card instances.

    ``card`` can be:
    - a Card instance (returned as-is)
    - a card ID string (the card is created for source's controller)
    - a LazyValue or Action (evaluated/triggered first)
    - a list of any of the above
    """
    if isinstance(card, LazyValue):
        card = card.evaluate(source)
    if isinstance(card, Action):
        card = card.trigger(source)[0]
    cards = card if isinstance(card, list) else [card]
    return [
        source.controller.card(c, source) if isinstance(c, str) else c
        for c in cards
    ]
2.700201
2.699126
1.000398
def then(self, *args):
    """Return a copy of this action whose callback queue is ``args``.

    The callback is invoked on the action's trigger, with the action's
    own arguments available.
    """
    clone = self.__class__(*self._args, **self._kwargs)
    clone.callback = args
    clone.times = self.times
    return clone
5.998286
7.191614
0.834067
def random_draft(card_class: CardClass, exclude=()):
    """Return a deck of ``Deck.MAX_CARDS`` random card IDs for ``card_class``.

    Only collectible, non-hero cards belonging to ``card_class`` or the
    neutral class are considered; card IDs in ``exclude`` are skipped.

    Fixes: the default for ``exclude`` was a mutable list literal
    (shared across calls); now an immutable tuple. Also iterates the
    database items directly instead of keys + lookup, and drops dead
    commented-out code.
    """
    from . import cards
    from .deck import Deck

    deck = []
    collection = []
    for card_id, cls in cards.db.items():
        if card_id in exclude:
            continue
        if not cls.collectible:
            continue
        # Heroes are collectible but are not valid deck cards.
        if cls.type == CardType.HERO:
            continue
        if cls.card_class and cls.card_class not in (card_class, CardClass.NEUTRAL):
            continue
        collection.append(cls)

    while len(deck) < Deck.MAX_CARDS:
        card = random.choice(collection)
        if deck.count(card.id) < card.max_count_in_deck:
            deck.append(card.id)
    return deck
3.462339
3.319896
1.042906
def get_script_definition(id):
    """Find and return the script definition for card ``id``.

    Searches every card-set module in order; returns None implicitly
    when no module defines the card.
    """
    for cardset in CARD_SETS:
        module = import_module("fireplace.cards.%s" % (cardset))
        try:
            return getattr(module, id)
        except AttributeError:
            continue
5.48491
4.766399
1.150745
def weighted_card_choice(source, weights: List[int], card_sets: List[str], count: int):
    """Weighted random sample, without replacement, across card pools.

    ``weights[i]`` is the per-card weight of every card in
    ``card_sets[i]`` (one weight per set). Returns ``count`` Card
    instances created for source's controller.
    """
    chosen_cards = []
    # Build cumulative weights; each card contributes its set's weight.
    cum_weights = []
    totalweight = 0
    for weight, pool in zip(weights, card_sets):
        totalweight += weight * len(pool)
        cum_weights.append(totalweight)
    for _ in range(count):
        # Pick a set proportionally to its remaining total weight...
        set_idx = bisect(cum_weights, random.random() * totalweight)
        # ...then remove a uniformly random card from that set so it
        # cannot be drawn again.
        pool = card_sets[set_idx]
        chosen_cards.append(pool.pop(random.randint(0, len(pool) - 1)))
        # The removed card no longer contributes its weight.
        totalweight -= weights[set_idx]
        cum_weights[set_idx:] = [w - weights[set_idx] for w in cum_weights[set_idx:]]
    return [source.controller.card(card, source=source) for card in chosen_cards]
2.470834
2.421891
1.020208
def trigger(self, source, actions, event_args):
    """Perform ``actions`` from ``source`` as the result of an event
    listener, wrapped in a TRIGGER block.
    """
    # FIX: the local was previously named `type`, shadowing the builtin.
    block_type = BlockType.TRIGGER
    return self.action_block(source, actions, block_type, event_args=event_args)
6.843952
5.880142
1.163909
def check_for_end_game(self):
    """Check whether any player is currently losing and, if so, end
    the game: assign WON/LOST/TIED playstates and step the game to its
    final state.
    """
    # Concessions and disconnects count as losing.
    for player in self.players:
        if player.playstate in (PlayState.CONCEDED, PlayState.DISCONNECTED):
            player.playstate = PlayState.LOSING

    if not any(p.playstate == PlayState.LOSING for p in self.players):
        return

    if self.players[0].playstate == self.players[1].playstate:
        # Both players are losing: the game is a tie.
        for player in self.players:
            player.playstate = PlayState.TIED
    else:
        for player in self.players:
            if player.playstate == PlayState.LOSING:
                player.playstate = PlayState.LOST
            else:
                player.playstate = PlayState.WON

    self.state = State.COMPLETE
    self.manager.step(self.next_step, Step.FINAL_WRAPUP)
    self.manager.step(self.next_step, Step.FINAL_GAMEOVER)
    self.manager.step(self.next_step)
2.534754
2.474983
1.02415
def queue_actions(self, source, actions, event_args=None):
    """Queue a list of ``actions`` for processing from ``source``.

    ``event_args`` is temporarily attached to ``source`` for the
    duration of the processing, then cleared.
    (Docstring claims an aura refresh follows — not visible in this
    body; presumably handled by trigger_actions. TODO confirm.)
    """
    source.event_args = event_args
    result = self.trigger_actions(source, actions)
    source.event_args = None
    return result
3.50171
3.590485
0.975275
def trigger_actions(self, source, actions):
    """Perform a list of ``actions`` from ``source``.

    EventListener instances are registered as one-time events rather
    than triggered; everything else triggers immediately. This should
    seldom be called directly — use ``queue_actions`` instead.
    """
    results = []
    for action in actions:
        if not isinstance(action, EventListener):
            results.append(action.trigger(source))
            continue
        # Queuing an EventListener registers it as a one-time event.
        # This allows registering events from e.g. play actions.
        self.log("Registering event listener %r on %r", action, self)
        action.once = True
        # FIXME: Figure out a cleaner way to get the event listener target
        listener = source.controller if source.type == CardType.SPELL else source
        listener._events.append(action)
    return results
5.967788
5.702127
1.04659
def evaluate(self, source):
    """Evaluate the board state from ``source``.

    Runs this condition's check (optionally negated) and returns the
    "then" actions when it holds, otherwise the "else" actions (None
    when the corresponding branch is empty).
    """
    outcome = self.check(source)
    if self._neg:
        outcome = not outcome
    if outcome:
        return self._if or None
    return self._else or None
3.845356
4.741377
0.811021
def trigger(self, source):
    """Trigger all actions meant to fire on the board state from
    ``source``; a non-iterable evaluation result is wrapped in a tuple.
    """
    result = self.evaluate(source)
    if not result:
        return
    if not hasattr(result, "__iter__"):
        result = (result, )
    source.game.trigger_actions(source, result)
4.23994
3.837723
1.104806
def copy(self, source, entity):
    """Return a fresh copy of ``entity``, created for the controller
    of ``source``.
    """
    log.info("Creating a copy of %r", entity)
    controller = source.controller
    return controller.card(entity.id, source)
13.076235
13.127192
0.996118
def find_cards(self, source=None, **filters):
    """Generate a card pool of all card IDs matching the given filters.

    Falls back to this object's own filters when none are given;
    LazyValue filter values are evaluated against ``source`` first.
    """
    from .. import cards

    effective = (filters or self.filters).copy()
    for key, value in effective.items():
        if isinstance(value, LazyValue):
            effective[key] = value.evaluate(source)
    return cards.filter(**effective)
2.871117
2.819216
1.01841
def evaluate(self, source, cards=None) -> str:
    """Pick from a single combined card pool without replacement,
    weighting each filtered set of cards against the total.

    When ``cards`` is given it forms the single pool; otherwise the
    weighted filter sets (or, failing those, the global filters)
    define the pools. NOTE: mutates ``self.weights`` as a side effect.
    """
    from ..utils import weighted_card_choice

    if cards:
        # An explicit card list overrides any filters.
        self.weights = [1]
        pools = [list(cards)]
    elif not self.weightedfilters:
        # No weighted filter sets given: fall back to global filters.
        self.weights = [1]
        pools = [self.find_cards(source)]
    else:
        # Merge the global filters into every weighted filter set.
        merged = [{**wf, **self.filters} for wf in self.weightedfilters]
        pools = [self.find_cards(source, **f) for f in merged]
    # Weighted sample across the pools.
    return weighted_card_choice(source, self.weights, pools, self.count)
4.981942
4.569369
1.090291
def buff(self, target, buff, **kwargs):
    """Summon ``buff``, apply it to ``target`` and return the buff.

    Extra keyword arguments are set as attributes on the buff, e.g.
    ``player.buff(target, health=random.randint(1, 5))``.
    NOTE: any Card can buff any other Card; the controller of the
    buffing card becomes the controller of the buff.
    """
    entity = self.controller.card(buff, self)
    entity.source = self
    entity.apply(target)
    for attr, value in kwargs.items():
        setattr(entity, attr, value)
    return entity
4.76772
4.268164
1.117042
def powered_up(self):
    """Return True if the card is currently "powered up": it has at
    least one powered_up script and every script's check passes.
    """
    scripts = self.data.scripts.powered_up
    if not scripts:
        return False
    return all(script.check(self) for script in scripts)
3.6316
3.671818
0.989047
def zone_position(self):
    """Return the card's 1-indexed position in its zone, or 0 when not
    available (only the HAND zone is tracked here).
    """
    if self.zone != Zone.HAND:
        return 0
    return self.controller.hand.index(self) + 1
6.721403
3.644205
1.844408
def play(self, target=None, index=None, choose=None):
    """Queue a Play action on the card.

    ``target`` is the play target (validated against play_targets),
    ``index`` the board position and ``choose`` a Choose One card ID.
    Raises InvalidAction when the requested play is illegal.
    """
    if choose:
        if not self.must_choose_one:
            raise InvalidAction("%r cannot be played with choice %r" % (self, choose))
        choose = card = self.choose_cards.filter(id=choose)[0]
        self.log("%r: choosing %r", self, choose)
    else:
        if self.must_choose_one:
            raise InvalidAction("%r requires a choice (one of %r)" % (self, self.choose_cards))
        card = self

    if not self.is_playable():
        raise InvalidAction("%r isn't playable." % (self))

    if card.requires_target():
        if not target:
            raise InvalidAction("%r requires a target to play." % (self))
        elif target not in self.play_targets:
            raise InvalidAction("%r is not a valid target for %r." % (target, self))
    elif target:
        self.logger.warning("%r does not require a target, ignoring target %r", self, target)

    self.game.play_card(self, target, index, choose)
    return self
2.935394
2.813971
1.04315
def morph(self, into):
    """Morph the card into the card identified by ``into``."""
    morph_action = actions.Morph(self, into)
    return self.game.cheat_action(self, [morph_action])
15.761463
12.153837
1.29683
def shuffle_into_deck(self):
    """Shuffle the card into its controller's deck."""
    shuffle = actions.Shuffle(self.controller, self)
    return self.game.cheat_action(self, [shuffle])
20.854258
13.345178
1.562681
def battlecry_requires_target(self):
    """Return True if the play action of the card requires a target
    (combo targeting requirement or any targeting prerequisite).
    """
    requirements = self.requirements
    if self.has_combo and self.controller.combo:
        if PlayReq.REQ_TARGET_FOR_COMBO in requirements:
            return True
    return any(req in requirements for req in TARGETING_PREREQUISITES)
6.85433
6.365781
1.076746
def requires_target(self):
    """Return True if the card currently requires a target to play."""
    requirements = self.requirements
    if self.has_combo and PlayReq.REQ_TARGET_FOR_COMBO in requirements:
        if self.controller.combo:
            return True
    if PlayReq.REQ_TARGET_IF_AVAILABLE in requirements:
        return bool(self.play_targets)
    if PlayReq.REQ_TARGET_IF_AVAILABLE_AND_DRAGON_IN_HAND in requirements:
        if self.controller.hand.filter(race=Race.DRAGON):
            return bool(self.play_targets)
    # Conditional targeting based on board/secret thresholds; falls
    # through to the unconditional requirement when below threshold.
    min_minions = requirements.get(
        PlayReq.REQ_TARGET_IF_AVAILABLE_AND_MINIMUM_FRIENDLY_MINIONS)
    if min_minions is not None and len(self.controller.field) >= min_minions:
        return bool(self.play_targets)
    min_secrets = requirements.get(
        PlayReq.REQ_TARGET_IF_AVAILABLE_AND_MINIMUM_FRIENDLY_SECRETS)
    if min_secrets is not None and len(self.controller.secrets) >= min_secrets:
        return bool(self.play_targets)
    return PlayReq.REQ_TARGET_TO_PLAY in requirements
2.867103
2.687096
1.06699
def merge(id, card, cardscript=None):
    """Find the xmlcard and the card definition of card ``id``, then
    return a merged class of the two.

    Normalizes the script class so every known script hook exists and
    is iterable (or callable), avoiding runtime hasattr() checks.
    """
    if card is None:
        card = cardxml.CardXML(id)
    if cardscript is None:
        cardscript = get_script_definition(id)
    # Build the scripts class, inheriting the script definition if any.
    if cardscript:
        card.scripts = type(id, (cardscript, ), {})
    else:
        card.scripts = type(id, (), {})

    scriptnames = (
        "activate", "combo", "deathrattle", "draw", "inspire", "play",
        "enrage", "update", "powered_up"
    )

    for script in scriptnames:
        actions = getattr(card.scripts, script, None)
        if actions is None:
            # Set the action by default to avoid runtime hasattr() calls
            setattr(card.scripts, script, [])
        elif not callable(actions):
            if not hasattr(actions, "__iter__"):
                # Ensure the actions are always iterable
                setattr(card.scripts, script, (actions, ))

    for script in ("events", "secret"):
        events = getattr(card.scripts, script, None)
        if events is None:
            setattr(card.scripts, script, [])
        elif not hasattr(events, "__iter__"):
            setattr(card.scripts, script, [events])

    if not hasattr(card.scripts, "cost_mod"):
        card.scripts.cost_mod = None

    # Normalize the nested Hand scripts class the same way.
    if not hasattr(card.scripts, "Hand"):
        card.scripts.Hand = type("Hand", (), {})

    if not hasattr(card.scripts.Hand, "events"):
        card.scripts.Hand.events = []

    if not hasattr(card.scripts.Hand.events, "__iter__"):
        card.scripts.Hand.events = [card.scripts.Hand.events]

    if not hasattr(card.scripts.Hand, "update"):
        card.scripts.Hand.update = ()

    if not hasattr(card.scripts.Hand.update, "__iter__"):
        card.scripts.Hand.update = (card.scripts.Hand.update, )

    # Set choose one cards
    if hasattr(cardscript, "choose"):
        card.choose_cards = cardscript.choose[:]
    else:
        card.choose_cards = []

    if hasattr(cardscript, "tags"):
        for tag, value in cardscript.tags.items():
            card.tags[tag] = value

    # Set some additional events based on the base tags...
    if card.poisonous:
        card.scripts.events.append(POISONOUS)

    return card
2.877855
2.854027
1.008349
def filter(self, **kwargs):
    """Return a list of card IDs matching the given filters.

    Each filter, if not None, is matched against the registered card
    database. A list value matches any of its members. Example
    arguments: ``collectible``, ``type`` (defaults to spells, weapons
    and minions), ``race``, ``rarity``, ``cost``.
    """
    if not self.initialized:
        self.initialize()

    pool = self.values()

    if "type" not in kwargs:
        kwargs["type"] = [CardType.SPELL, CardType.WEAPON, CardType.MINION]

    for attr, wanted in kwargs.items():
        if wanted is None:
            continue
        pool = [
            card for card in pool
            if (isinstance(wanted, list) and getattr(card, attr) in wanted)
            or getattr(card, attr) == wanted
        ]

    return [card.id for card in pool]
3.341024
3.210359
1.040701
def load_css(css_url=None, version='5.2.0'):
    """Load Dropzone's css resources with given version.

    .. versionadded:: 1.4.4

    :param css_url: The CSS url for Dropzone.js; when given it is used
        as-is (local/CDN URL computation is skipped).
    :param version: The version of Dropzone.js.
    """
    # FIX: previously the local/CDN tag was always built (touching
    # current_app.config and url_for) and then discarded when css_url
    # was provided; short-circuit instead.
    if css_url:
        css = '<link rel="stylesheet" href="%s" type="text/css">\n' % css_url
        return Markup(css)
    css_filename = 'dropzone.min.css'
    if current_app.config['DROPZONE_SERVE_LOCAL']:
        css = '<link rel="stylesheet" href="%s" type="text/css">\n' % \
            url_for('dropzone.static', filename=css_filename)
    else:
        css = '<link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/dropzone@%s/dist/min/%s"' \
              ' type="text/css">\n' % (version, css_filename)
    return Markup(css)
2.261231
2.283579
0.990213
def load_js(js_url=None, version='5.2.0'):
    """Load Dropzone's js resources with given version.

    .. versionadded:: 1.4.4

    :param js_url: The JS url for Dropzone.js; when given it is used
        as-is (local/CDN URL computation is skipped).
    :param version: The version of Dropzone.js.
    """
    # FIX: previously the local/CDN tag was always built (touching
    # current_app.config and url_for) and then discarded when js_url
    # was provided; short-circuit instead.
    if js_url:
        return Markup('<script src="%s"></script>\n' % js_url)
    js_filename = 'dropzone.min.js'
    if current_app.config['DROPZONE_SERVE_LOCAL']:
        js = '<script src="%s"></script>\n' % url_for('dropzone.static', filename=js_filename)
    else:
        js = '<script src="https://cdn.jsdelivr.net/npm/dropzone@%s/dist/%s"></script>\n' % (version, js_filename)
    return Markup(js)
2.46297
2.488055
0.989917
def create(action='', csrf=False, action_view='', **kwargs):
    """Create a Dropzone form with given action.

    .. versionchanged:: 1.4.2
        Added ``csrf`` parameter to enable CSRF protect.
    .. versionchanged:: 1.4.3
        Added ``action`` parameter to replace ``action_view``;
        ``action_view`` is deprecated now.
    .. versionchanged:: 1.5.0
        If ``DROPZONE_IN_FORM`` is set to ``True``, create ``<div>``
        instead of ``<form>``.

    :param action: The action attribute in ``<form>``; pass the url
        which handles uploads.
    :param csrf: Enable CSRF protect or not, same as
        ``DROPZONE_ENABLE_CSRF``.
    :param action_view: The view which handles the post data,
        deprecated since 1.4.2.
    """
    # When the dropzone lives inside an existing form, only a div
    # placeholder is rendered.
    if current_app.config['DROPZONE_IN_FORM']:
        return Markup('<div class="dropzone" id="myDropzone"></div>')

    if action:
        action_url = get_url(action)
    else:
        # Deprecated path: resolve the endpoint name instead.
        action_url = url_for(action_view, **kwargs)

    if csrf or current_app.config['DROPZONE_ENABLE_CSRF']:
        # Requires Flask-WTF's CSRFProtect to have been initialized.
        if 'csrf' not in current_app.extensions:
            raise RuntimeError("CSRFProtect is not initialized. It's required to enable CSRF protect, \
                    see docs for more details.")
        csrf_field = render_template_string('<input type="hidden" name="csrf_token" value="{{ csrf_token() }}"/>')
    else:
        csrf_field = ''
    return Markup('''<form action="%s" method="post" class="dropzone" id="myDropzone"
            enctype="multipart/form-data">%s</form>''' % (action_url, csrf_field))
2.924144
2.735129
1.069106
def main():
    """Demonstrate generating pretty equations (PNG images) from the
    analytic expressions found in ``chempy.kinetics.integrated``.
    """
    t, kf, t0, major, minor, prod, beta = sympy.symbols(
        't k_f t0 Y Z X beta', negative=False)
    for f in funcs:
        args = [t, kf, prod, major, minor]
        # Reversible variants additionally take the backward rate k_f/beta.
        if f in (pseudo_rev, binary_rev):
            args.insert(2, kf/beta)
        expr = f(*args, backend='sympy')
        # Render the expression itself...
        with open(f.__name__ + '.png', 'wb') as ofh:
            sympy.printing.preview(expr, output='png', filename='out.png',
                                   viewer='BytesIO', outputbuffer=ofh)
        # ...and its time derivative evaluated at t0 = 0.
        with open(f.__name__ + '_diff.png', 'wb') as ofh:
            sympy.printing.preview(expr.diff(t).subs({t0: 0}).simplify(),
                                   output='png', filename='out.png',
                                   viewer='BytesIO', outputbuffer=ofh)
4.999835
4.790606
1.043675
pretty = rxn.unicode(self.substances, with_param=True, with_name=False) return '<a title="%d: %s">%s</a>' % (ori_idx, pretty, printer._print(rxn.name or rxn.param))
def _cell_label_html(self, printer, ori_idx, rxn)
Reaction formatting callback. (reaction index -> string)
8.758204
8.200473
1.068012
def dissolved(self, concs):
    """Return a copy of ``concs`` with every precipitate phase driven
    back into solution via its reaction stoichiometry.
    """
    result = concs.copy()
    for rxn in self.rxns:
        if not rxn.has_precipitates(self.substances):
            continue
        stoich = np.asarray(rxn.net_stoich(self.substances))
        _, precip_stoich, precip_idx = rxn.precipitate_stoich(self.substances)
        result -= result[precip_idx] / precip_stoich * stoich
    return result
4.201571
4.172484
1.006971
def roots(self, init_concs, varied_data, varied, x0=None,
          NumSys=NumSysLog, plot_kwargs=None,
          neqsys_type='chained_conditional', **kwargs):
    """Solve the equilibrium system over a series of varied parameter
    values, optionally plotting the result.

    Parameters
    ----------
    init_concs : array or dict
    varied_data : array
    varied : int or str
        Index/key of the substance whose initial value is varied.
    x0 : array
        Initial guess (defaults to ``init_concs``).
    NumSys : _NumSys subclass
        See :class:`NumSysLin`, :class:`NumSysLog`, etc.
    plot_kwargs : dict
        See py:meth:`pyneqsys.NeqSys.solve`. Two additional keys are
        intercepted here: latex_names (bool, default False) and
        conc_unit_str (str, default 'M'). Plotting occurs only when
        this is not None.
    neqsys_type : str
        Which NeqSys construction method to use (get_neqsys_*).
    \\*\\*kwargs :
        Keyword arguments passed on to
        py:meth:`pyneqsys.NeqSys.solve_series`.

    Returns
    -------
    (xvecs, info_dicts, sanity) — solutions, solver info and a
    per-solution sanity flag.
    """
    _plot = plot_kwargs is not None
    if _plot:
        # Intercept the two extra keys before passing plot_kwargs on.
        latex_names = plot_kwargs.pop('latex_names', False)
        conc_unit_str = plot_kwargs.pop('conc_unit_str', 'M')
        if 'ax' not in plot_kwargs:
            plot_kwargs['ax'] = self._get_default_plot_ax()

    init_concs = self.as_per_substance_array(init_concs)
    neqsys = self.get_neqsys(
        neqsys_type, NumSys=NumSys,
        rref_equil=kwargs.pop('rref_equil', False),
        rref_preserv=kwargs.pop('rref_preserv', False),
        precipitates=kwargs.pop('precipitates', None))
    if x0 is None:
        x0 = init_concs
    if _plot:
        cb = neqsys.solve_and_plot_series
        if 'plot_kwargs' not in kwargs:
            kwargs['plot_kwargs'] = plot_kwargs
        if 'labels' not in kwargs['plot_kwargs']:
            kwargs['plot_kwargs']['labels'] = (
                self.substance_labels(latex_names))
        if 'substances' in plot_kwargs:
            if 'indices' in plot_kwargs:
                raise ValueError("Now I am confused..")
            kwargs['plot_kwargs']['indices'] = map(
                self.as_substance_index, plot_kwargs.pop('substances'))
            # NOTE(review): leftover debug print — also note this prints a
            # map object, and the map is single-use; confirm intent.
            print(kwargs['plot_kwargs']['indices'])
    else:
        cb = neqsys.solve_series
    # Parameters are the initial concentrations followed by the
    # equilibrium constants.
    params = np.concatenate((init_concs, self.eq_constants()))
    xvecs, info_dicts = cb(
        x0, params, varied_data, self.as_substance_index(varied),
        propagate=False, **kwargs)
    sanity = [self._result_is_sane(init_concs, x) for x in xvecs]

    if _plot:
        import matplotlib.pyplot as plt
        from pyneqsys.plotting import mpl_outside_legend
        mpl_outside_legend(plt.gca())
        varied_subst = self.substances[varied]
        xlbl = ('$[' + varied_subst.latex_name + ']_0$' if latex_names
                else str(varied_subst))
        plt.gca().set_xlabel(xlbl + ' / ' + conc_unit_str)
        plt.gca().set_ylabel('Concentration / ' + conc_unit_str)

    return xvecs, info_dicts, sanity
3.448796
3.159425
1.09159
def lg_solubility_ratio(electrolytes, gas, units=None, warn=True):
    """Return the log10 value of the solubility ratio.

    Implements equation 16, p 156, from Schumpe (1993).

    Parameters
    ----------
    electrolytes : dict
        Mapping substance key (one in ``p_ion_rM``) to concentration.
    gas : str
        Substance key for the gas (one in ``p_gas_rM``).
    units : object (optional)
        Object with attribute ``molar``.
    warn : bool (default: True)
        Emit UserWarning when 'F-' is among the electrolytes.
    """
    M = 1 if units is None else units.molar
    if warn and 'F-' in electrolytes:
        warnings.warn("In Schumpe 1993: data for fluoride uncertain.")
    total = 0
    for ion, conc in electrolytes.items():
        total += (p_gas_rM[gas]/M + p_ion_rM[ion]/M) * conc
    return total
9.206757
4.641036
1.983772
def water_density(T=None, T0=None, units=None, a=None,
                  just_return_a=False, warn=True):
    """Density of water (kg/m3) as function of temperature (K).

    VSMOW model between 0 and 40 degrees Celsius, fitted using
    Thiesen's equation (Tanaka et al., Metrologia, 2001, 38, 301-309,
    doi:10.1088/0026-1394/38/4/3).

    Parameters
    ----------
    T : float
        Temperature in Kelvin (default: 298.15).
    T0 : float
        Value of T at 0 degrees Celsius (default: 273.15).
    units : object (optional)
        Object with attributes Kelvin, meter, kilogram.
    a : array_like (optional)
        The 5 fitted parameters of the equation.
    just_return_a : bool (default: False)
        Return the parameters ``a`` without computing the density.
    warn : bool (default: True)
        Emit UserWarning when outside the 0-40 degC fit range.

    Examples
    --------
    >>> print('%.2f' % water_density(277.13))
    999.97
    """
    if units is None:
        K = m = kg = 1
    else:
        K = units.Kelvin
        m = units.meter
        kg = units.kilogram
    if T is None:
        T = 298.15*K
    m3 = m**3
    if a is None:
        # Fitted parameters from Tanaka et al. (2001).
        a = (-3.983035*K,       # C
             301.797*K,         # C
             522528.9*K*K,      # C**2
             69.34881*K,        # C
             999.974950*kg/m3)
    if just_return_a:
        return a
    if T0 is None:
        T0 = 273.15*K
    t = T - T0
    if warn and (_any(t < 0*K) or _any(t > 40*K)):
        warnings.warn("Temperature is outside range (0-40 degC)")
    return a[4]*(1 - ((t + a[0])**2*(t + a[1]))/(a[2]*(t + a[3])))
3.852219
3.459534
1.113508
def Henry_H_at_T(T, H, Tderiv, T0=None, units=None, backend=None):
    """Evaluate Henry's constant H at temperature T.

    Parameters
    ----------
    T : float
        Temperature (assumed Kelvin when ``units`` is None).
    H : float
        Henry's constant at the reference temperature.
    Tderiv : float
        dln(H)/d(1/T), assumed Kelvin when ``units`` is None.
    T0 : float
        Reference temperature (default: 298.15 K).
    units : object (optional)
        Object with attribute ``Kelvin``.
    backend : module (optional)
        Module providing ``exp`` (default: numpy/math via get_backend).
    """
    be = get_backend(backend)
    K = 1 if units is None else units.Kelvin
    if T0 is None:
        T0 = 298.15*K
    # van 't Hoff-style temperature correction.
    return H * be.exp(Tderiv*(1/T - 1/T0))
3.202737
3.37499
0.948962
def mass_from_composition(composition):
    """Calculate molecular mass from atomic weights.

    Parameters
    ----------
    composition : dict
        Dictionary mapping int (atomic number) to int (coefficient).

    Returns
    -------
    float
        Molecular weight in atomic mass units.

    Notes
    -----
    Atomic number 0 denotes charge or "net electron deficiency".

    Examples
    --------
    >>> '%.2f' % mass_from_composition({0: -1, 1: 1, 8: 1})
    '17.01'
    """
    total = 0.0
    for atomic_number, coeff in composition.items():
        if atomic_number == 0:
            # Net charge: each missing electron subtracts (and each
            # extra electron adds) the electron mass.
            total -= coeff * 5.489e-4
        else:
            total += coeff * relative_atomic_masses[atomic_number - 1]
    return total
4.054856
4.987124
0.813065
def water_viscosity(T=None, eta20=None, units=None, warn=True):
    """Viscosity of water (cP) as function of temperature (K).

    Parameters
    ----------
    T : float
        Temperature in Kelvin (default: 298.15 K).
    eta20 : float
        Viscosity of water at 20 degrees Celsius.
    units : object (optional)
        Object with attributes ``kelvin`` and ``centipoise``.
    warn : bool
        Emit UserWarning when outside the 0-100 degC range.

    Returns
    -------
    Water viscosity at temperature ``T``.
    """
    if units is None:
        cP = K = 1
    else:
        cP = units.centipoise
        K = units.kelvin
    if T is None:
        T = 298.15*K
    if eta20 is None:
        eta20 = eta20_cP*cP
    t = T - 273.15*K
    if warn and (_any(t < 0*K) or _any(t > 100*K)):
        warnings.warn("Temperature is outside range (0-100 degC)")
    # Equation (5) in the paper says "log" but, comparing against their
    # Table II, they mean "log10".
    return eta20 * 10**((A*(20 - t) - B*(t - 20)**2)/(t + C))
4.508091
4.398677
1.024874
def water_self_diffusion_coefficient(T=None, units=None, warn=True, err_mult=None):
    """Temperature-dependent self-diffusion coefficient of water.

    Parameters
    ----------
    T : float
        Temperature (default: in Kelvin, 298.15).
    units : object (optional)
        Object with attributes Kelvin, meter, second.
    warn : bool (default: True)
        Emit UserWarning when outside the fitted temperature range.
    err_mult : length-2 array_like (default: None)
        Perturb parameters D0 and TS by err_mult[0]*dD0 and
        err_mult[1]*dTS respectively (the reported uncertainties of the
        fit) — useful for error estimation.

    References
    ----------
    Holz, Heil, Sacco; Phys. Chem. Chem. Phys., 2000, 2, 4740-4742.
    DOI: 10.1039/B005319H
    """
    if units is None:
        K = m = s = 1
    else:
        K = units.Kelvin
        m = units.meter
        s = units.second
    if T is None:
        T = 298.15*K
    _D0 = D0 * m**2 * s**-1
    _TS = TS * K
    if err_mult is not None:
        # Shift the fitted parameters by a multiple of their
        # reported uncertainties.
        _D0 += err_mult[0] * dD0 * m**2 * s**-1
        _TS += err_mult[1] * dTS * K
    if warn and (_any(T < low_t_bound*K) or _any(T > high_t_bound*K)):
        warnings.warn("Temperature is outside range (0-100 degC)")
    return _D0*((T/_TS) - 1)**gamma
3.223092
2.745548
1.173934
def rsys2dot(rsys, tex=False, rprefix='r', rref0=1,
             nodeparams='[label="{}",shape=diamond]',
             colors=('maroon', 'darkgreen'), penwidths=None,
             include_inactive=True):
    """Return list of lines of DOT (graph description language)
    formatted graph.

    Parameters
    ==========
    rsys: ReactionSystem
    tex: bool (default False)
        If True, output is LaTeX formatted (Substances need a
        latex_name attribute).
    rprefix: string
        Reaction enumeration prefix, default: r
    rref0: integer
        Reaction enumeration initial counter value, default: 1
    nodeparams: string
        DOT formatted param list, default: [label="{}",shape=diamond]
    colors: pair of strings
        Edge/font colors for reactant and product edges respectively.
    penwidths: list of floats (optional)
        Per-reaction edge pen widths (default: all 1.0).
    include_inactive: bool
        Include inactive reagents in the stoichiometries.

    Returns
    =======
    list of lines of the DOT representation of the graph.
    """
    lines = ['digraph "' + str(rsys.name) + '" {\n']
    ind = '  '  # indentation
    if penwidths is None:
        penwidths = [1.0]*rsys.nr
    categories = rsys.categorize_substances(checks=())

    def add_substance(key):
        # Color substances by their net behavior in the system:
        # depleted -> colors[0], accumulated -> colors[1].
        fc = 'black'
        if key in categories['depleted']:
            fc = colors[0]
        if key in categories['accumulated']:
            fc = colors[1]
        label = ('$%s$' if tex else '%s') % getattr(
            rsys.substances[key], 'latex_name' if tex else 'name')
        lines.append(ind + '"{key}" [fontcolor={fc} label="{lbl}"];\n'.format(
            key=key, fc=fc, lbl=label))

    for sk in rsys.substances:
        add_substance(sk)

    def add_vertex(key, num, reac, penwidth):
        # Emit one substance<->reaction edge; direction depends on
        # whether the substance is a reactant (reac=True) or product.
        # Uses `rid` from the enclosing reaction loop at call time.
        snum = str(num) if num > 1 else ''
        fmt = ','.join(
            ['label="{}"'.format(snum)] +
            (['penwidth={}'.format(penwidth)] if penwidth != 1 else [])
        )
        lines.append(ind + '"{}" -> "{}" [color={},fontcolor={},{}];\n'.format(
            *((key, rid, colors[0], colors[0], fmt) if reac
              else (rid, key, colors[1], colors[1], fmt))
        ))

    if include_inactive:
        reac_stoichs = rsys.all_reac_stoichs()
        prod_stoichs = rsys.all_prod_stoichs()
    else:
        reac_stoichs = rsys.active_reac_stoichs()
        prod_stoichs = rsys.active_prod_stoichs()

    for ri, rxn in enumerate(rsys.rxns):
        rid = rprefix + str(ri+rref0)
        # Reaction node.
        lines.append(ind + '{')
        lines.append(ind*2 + 'node ' + nodeparams.format(rxn.name or rid))
        lines.append(ind*2 + rid)
        lines.append(ind + '}\n')
        # Reactant edges, then product edges.
        for idx, key in enumerate(rsys.substances):
            num = reac_stoichs[ri, idx]
            if num == 0:
                continue
            add_vertex(key, num, True, penwidths[ri])
        for idx, key in enumerate(rsys.substances):
            num = prod_stoichs[ri, idx]
            if num == 0:
                continue
            add_vertex(key, num, False, penwidths[ri])
    lines.append('}\n')
    return lines
2.816134
2.722091
1.034548
def rsys2graph(rsys, fname, output_dir=None, prog=None, save=False, **kwargs):
    """Convenience function to call `rsys2dot`, write the output to
    file and render the graph.

    Parameters
    ----------
    rsys : ReactionSystem
    fname : str
        Output filename; the extension selects the renderer output
        format ('.tex' uses dot2tex, anything else graphviz dot).
    output_dir : str (optional)
        Path to directory (default: temporary directory).
    prog : str (optional)
        Renderer program, default: 'dot' (or 'dot2tex' for .tex).
    save : bool or str
        False removes a created temporary directory; a path string
        copies the output there.

    Returns
    -------
    str
        Outpath

    Examples
    --------
    >>> rsys2graph(rsys, sbstncs, '/tmp/out.png')  # doctest: +SKIP
    """
    lines = rsys2dot(rsys, **kwargs)
    created_tempdir = False
    try:
        if output_dir is None:
            output_dir = tempfile.mkdtemp()
            created_tempdir = True
        basename, ext = os.path.splitext(os.path.basename(fname))
        outpath = os.path.join(output_dir, fname)
        dotpath = os.path.join(output_dir, basename + '.dot')
        with open(dotpath, 'wt') as ofh:
            ofh.writelines(lines)
        if ext == '.tex':
            cmds = [prog or 'dot2tex']
        else:
            # Output format is derived from the outpath extension.
            cmds = [prog or 'dot', '-T'+outpath.split('.')[-1]]
        p = subprocess.Popen(cmds + [dotpath, '-o', outpath])
        retcode = p.wait()
        if retcode:
            fmtstr = "{}\n returned with exit status {}"
            raise RuntimeError(fmtstr.format(' '.join(cmds), retcode))
        return outpath
    finally:
        if save is True or save == 'True':
            pass
        else:
            if save is False or save == 'False':
                if created_tempdir:
                    shutil.rmtree(output_dir)
            else:
                # interpret save as a path to copy the output to.
                # NOTE(review): when save is a path AND the tempdir was
                # created here, the tempdir is never removed (leak?);
                # also, if rsys2dot or mkdtemp raised, `outpath` is
                # unbound and this branch would raise NameError —
                # confirm intended behavior.
                shutil.copy(outpath, save)
2.834054
2.981656
0.950497
def check_permission_safety(path):
    """Check if the file at the given path is safe to use as a state
    file.

    Safe means the current user owns the file and neither group nor
    others have any permission bits set on it.
    """
    info = os.stat(path)
    group_other_bits = info.st_mode & (stat.S_IRWXG | stat.S_IRWXO)
    return group_other_bits == 0 and info.st_uid == os.getuid()
2.452823
2.373545
1.033401
def get_private_key(key_path, password_path=None):
    """Open a JSON-encoded private key and return it.

    If a password file is provided, uses it to decrypt the key. If
    not, the password is asked interactively. Raw hex-encoded private
    keys are supported, but deprecated.

    Returns None (after logging fatally) on missing files, unsafe file
    permissions, or an invalid key/password.
    """
    assert key_path, key_path
    if not os.path.exists(key_path):
        log.fatal('%s: no such file', key_path)
        return None
    # Refuse key/password files readable by group or others.
    if not check_permission_safety(key_path):
        log.fatal('Private key file %s must be readable only by its owner.', key_path)
        return None
    if password_path and not check_permission_safety(password_path):
        log.fatal('Password file %s must be readable only by its owner.', password_path)
        return None
    with open(key_path) as keyfile:
        private_key = keyfile.readline().strip()
        if is_hex(private_key) and len(decode_hex(private_key)) == 32:
            # Deprecated raw hex format: use the line as-is.
            log.warning('Private key in raw format. Consider switching to JSON-encoded')
        else:
            # Otherwise parse the whole file as a JSON keystore and
            # decrypt it with the supplied or prompted password.
            keyfile.seek(0)
            try:
                json_data = json.load(keyfile)
                if password_path:
                    with open(password_path) as password_file:
                        password = password_file.readline().strip()
                else:
                    password = getpass.getpass('Enter the private key password: ')
                if json_data['crypto']['kdf'] == 'pbkdf2':
                    # pbkdf2 expects the password as bytes.
                    password = password.encode()  # type: ignore
                private_key = encode_hex(decode_keyfile_json(json_data, password))
            except ValueError:
                log.fatal('Invalid private key format or password!')
                return None
    return private_key
2.716167
2.590695
1.048432
def transact(
        self,
        contract_method: ContractFunction,
):
    """A wrapper around to_be_called.transact() that waits until the transaction succeeds.

    Returns the transaction receipt.
    """
    txhash = contract_method.transact(self.transaction)
    LOG.debug(f'Sending txHash={encode_hex(txhash)}')
    receipt, _ = check_successful_tx(
        web3=self.web3,
        txid=txhash,
        timeout=self.wait,
    )
    return receipt
5.259544
4.818191
1.091601
def deploy_token_contract(
        self,
        token_supply: int,
        token_decimals: int,
        token_name: str,
        token_symbol: str,
        token_type: str = 'CustomToken',
):
    """Deploy a token contract.

    Returns a one-element dict mapping token_type to the checksummed
    contract address.
    """
    receipt = self.deploy(
        contract_name=token_type,
        args=[token_supply, token_decimals, token_name, token_symbol],
    )
    address = receipt['contractAddress']
    assert address and is_address(address)
    return {token_type: to_checksum_address(address)}
2.571096
2.465166
1.042971
def deploy_raiden_contracts(
        self,
        max_num_of_token_networks: Optional[int],
) -> DeployedContracts:
    """Deploy all required raiden contracts and return a dict of contract_name:address

    Args:
        max_num_of_token_networks (Optional[int]): The max number of tokens that can be
        registered to the TokenNetworkRegistry. If None, the argument is omitted from
        the call to the constructor of TokenNetworkRegistry.
    """
    deployed: DeployedContracts = {
        'contracts_version': self.contract_version_string(),
        'chain_id': int(self.web3.version.network),
        'contracts': {},
    }
    self._deploy_and_remember(CONTRACT_ENDPOINT_REGISTRY, [], deployed)
    secret_registry = self._deploy_and_remember(
        contract_name=CONTRACT_SECRET_REGISTRY,
        arguments=[],
        deployed_contracts=deployed,
    )
    registry_args = [
        secret_registry.address,
        deployed['chain_id'],
        DEPLOY_SETTLE_TIMEOUT_MIN,
        DEPLOY_SETTLE_TIMEOUT_MAX,
    ]
    # Older constructors do not take the max-networks argument; only pass
    # it when a value was supplied.
    if max_num_of_token_networks:
        registry_args.append(max_num_of_token_networks)
    self._deploy_and_remember(
        contract_name=CONTRACT_TOKEN_NETWORK_REGISTRY,
        arguments=registry_args,
        deployed_contracts=deployed,
    )
    return deployed
2.265126
2.286891
0.990482
def _deploy_and_remember(
        self,
        contract_name: str,
        arguments: List,
        deployed_contracts: 'DeployedContracts',
) -> Contract:
    """Deploys contract_name with arguments and store the result in deployed_contracts."""
    receipt = self.deploy(contract_name, arguments)
    record = _deployed_data_from_receipt(
        receipt=receipt,
        constructor_arguments=arguments,
    )
    deployed_contracts['contracts'][contract_name] = record
    # Hand back a contract proxy bound to the freshly deployed address.
    return self.web3.eth.contract(
        abi=self.contract_manager.get_contract_abi(contract_name),
        address=record['address'],
    )
2.813868
2.885603
0.97514
def register_token_network(
        self,
        token_registry_abi: Dict,
        token_registry_address: str,
        token_address: str,
        channel_participant_deposit_limit: Optional[int],
        token_network_deposit_limit: Optional[int],
):
    """Register token with a TokenNetworkRegistry contract.

    Dispatches to the with-limits or without-limits variant depending on
    the contracts version.
    """
    handler = (
        self._register_token_network_with_limits
        if contracts_version_expects_deposit_limits(self.contracts_version)
        else self._register_token_network_without_limits
    )
    return handler(
        token_registry_abi,
        token_registry_address,
        token_address,
        channel_participant_deposit_limit,
        token_network_deposit_limit,
    )
2.065253
1.99395
1.03576
def _register_token_network_without_limits(
        self,
        token_registry_abi: Dict,
        token_registry_address: str,
        token_address: str,
        channel_participant_deposit_limit: Optional[int],
        token_network_deposit_limit: Optional[int],
):
    """Register token with a TokenNetworkRegistry contract
    with a contracts-version that doesn't require deposit limits in the
    TokenNetwork constructor.
    """
    # Old contract versions take no limits — reject them explicitly.
    if channel_participant_deposit_limit:
        raise ValueError(
            'contracts_version below 0.9.0 does not expect '
            'channel_participant_deposit_limit',
        )
    if token_network_deposit_limit:
        raise ValueError(
            'contracts_version below 0.9.0 does not expect token_network_deposit_limit',
        )
    registry = self.web3.eth.contract(
        abi=token_registry_abi,
        address=token_registry_address,
    )

    # Refuse to talk to a registry built from different sources.
    version_from_onchain = registry.functions.contract_version().call()
    if version_from_onchain != self.contract_manager.version_string:
        raise RuntimeError(
            f'got {version_from_onchain} from the chain, expected '
            f'{self.contract_manager.version_string} in the deployment data',
        )

    self.transact(registry.functions.createERC20TokenNetwork(
        token_address,
    ))

    token_network_address = registry.functions.token_to_token_networks(
        token_address,
    ).call()
    token_network_address = to_checksum_address(token_network_address)
    LOG.debug(f'TokenNetwork address: {token_network_address}')
    return token_network_address
2.424614
2.351372
1.031149
def deploy_service_contracts(
        self,
        token_address: str,
        user_deposit_whole_balance_limit: int,
):
    """Deploy 3rd party service contracts

    Deploys ServiceRegistry, UserDeposit, MonitoringService and OneToN,
    wires UserDeposit to the last two, and returns the deployment dict.
    """
    chain_id = int(self.web3.version.network)
    deployed: DeployedContracts = {
        'contracts_version': self.contract_version_string(),
        'chain_id': chain_id,
        'contracts': {},
    }
    self._deploy_and_remember(CONTRACT_SERVICE_REGISTRY, [token_address], deployed)
    user_deposit = self._deploy_and_remember(
        contract_name=CONTRACT_USER_DEPOSIT,
        arguments=[token_address, user_deposit_whole_balance_limit],
        deployed_contracts=deployed,
    )
    msc = self._deploy_and_remember(
        contract_name=CONTRACT_MONITORING_SERVICE,
        arguments=[
            token_address,
            deployed['contracts'][CONTRACT_SERVICE_REGISTRY]['address'],
            deployed['contracts'][CONTRACT_USER_DEPOSIT]['address'],
        ],
        deployed_contracts=deployed,
    )
    one_to_n = self._deploy_and_remember(
        contract_name=CONTRACT_ONE_TO_N,
        arguments=[user_deposit.address, chain_id],
        deployed_contracts=deployed,
    )

    # Tell the UserDeposit instance about other contracts.
    LOG.debug(
        'Calling UserDeposit.init() with '
        f'msc_address={msc.address} '
        f'one_to_n_address={one_to_n.address}',
    )
    self.transact(user_deposit.functions.init(
        _msc_address=msc.address,
        _one_to_n_address=one_to_n.address,
    ))
    return deployed
2.471599
2.471789
0.999923
def private_key_to_address(private_key: Union[str, bytes]) -> ChecksumAddress:
    """Converts a private key to an Ethereum address."""
    if isinstance(private_key, str):
        raw_key = to_bytes(hexstr=private_key)
    else:
        raw_key = private_key
    return public_key_to_address(PrivateKey(raw_key).public_key)
2.079418
1.866516
1.114064
def public_key_to_address(public_key: Union[PublicKey, bytes]) -> ChecksumAddress:
    """Converts a public key to an Ethereum address."""
    if isinstance(public_key, PublicKey):
        # Normalize to the uncompressed 65-byte serialization.
        public_key = public_key.format(compressed=False)
    assert isinstance(public_key, bytes)
    # Skip the 0x04 marker byte, hash, and keep the last 20 bytes.
    return to_checksum_address(sha3(public_key[1:])[-20:])
2.660528
2.324414
1.144602
txn_hash = event['transactionHash'] event_name = event['event'] assert event_name in self.event_waiting assert txn_hash in self.event_waiting[event_name] self.event_count[event_name][txn_hash] += 1 event_entry = self.event_waiting[event_name][txn_hash] if event_entry.count == self.event_count[event_name][txn_hash]: self.event_waiting[event_name].pop(txn_hash) # Call callback function with event if event_entry.callback: event_entry.callback(event)
def _handle_waited_log(self, event: dict)
A subroutine of handle_log Increment self.event_count, forget about waiting, and call the callback if any.
2.639708
2.323905
1.135893
def assert_event(self, txn_hash, event_name, args, timeout=5):
    """Assert that `event_name` is emitted with the `args`

    For use in tests only.
    """
    def _check_args(event):
        assert event['args'] == args, f'{event["args"]} == {args}'

    self.add(txn_hash=txn_hash, event_name=event_name, callback=_check_args)
    self.check(timeout=timeout)
3.87081
4.25061
0.910648
def join_sources(source_module: DeploymentModule, contract_name: str):
    """Use join-contracts.py to concatenate all imported Solidity files.

    Args:
        source_module: a module name to look up contracts_source_path()
        contract_name: 'TokenNetworkRegistry', 'SecretRegistry' etc.
    """
    out_file = Path(__file__).parent.joinpath('joined.sol')
    remapping = {module: str(path) for module, path in contracts_source_path().items()}
    command = [
        './utils/join-contracts.py',
        '--import-map',
        json.dumps(remapping),
        str(contracts_source_path_of_deployment_module(
            source_module,
        ).joinpath(contract_name + '.sol')),
        str(out_file),
    ]
    working_dir = Path(__file__).parent.parent
    try:
        subprocess.check_call(command, cwd=working_dir)
    except subprocess.CalledProcessError as ex:
        # Print a copy-pastable reproduction of the failing invocation.
        print(f'cd {str(working_dir)}; {subprocess.list2cmdline(command)} failed.')
        raise ex
    return out_file.read_text()
3.775344
3.230788
1.168552
def etherscan_verify_contract(
        chain_id: int,
        apikey: str,
        source_module: DeploymentModule,
        contract_name: str,
):
    """Calls Etherscan API for verifying the Solidity source of a contract.

    Args:
        chain_id: EIP-155 chain id of the Ethereum chain
        apikey: key for calling Etherscan API
        source_module: a module name to look up contracts_source_path()
        contract_name: 'TokenNetworkRegistry', 'SecretRegistry' etc.

    Raises:
        FileNotFoundError: no deployment data for the chain/module
        ValueError: Etherscan rejected the submission
        TimeoutError: verification did not finish within the retry budget
    """
    etherscan_api = api_of_chain_id[chain_id]
    deployment_info = get_contracts_deployment_info(
        chain_id=chain_id,
        module=source_module,
    )
    if deployment_info is None:
        raise FileNotFoundError(
            f'Deployment file not found for chain_id={chain_id} and module={source_module}',
        )
    contract_manager = ContractManager(contracts_precompiled_path())
    data = post_data_for_etherscan_verification(
        apikey=apikey,
        deployment_info=deployment_info['contracts'][contract_name],
        source=join_sources(source_module=source_module, contract_name=contract_name),
        contract_name=contract_name,
        metadata=json.loads(contract_manager.contracts[contract_name]['metadata']),
        constructor_args=get_constructor_args(
            deployment_info=deployment_info,
            contract_name=contract_name,
            contract_manager=contract_manager,
        ),
    )
    response = requests.post(etherscan_api, data=data)
    content = json.loads(response.content.decode())
    print(content)
    print(f'Status: {content["status"]}; {content["message"]} ; GUID = {content["result"]}')

    etherscan_url = etherscan_api.replace('api-', '').replace('api', '')
    etherscan_url += '/verifyContract2?a=' + data['contractaddress']
    # BUG FIX: `manual_submission_guide = f` was a dangling token (the
    # f-string literal was lost), which raised a NameError on the `f`.
    # Reconstructed a meaningful manual-submission message.
    manual_submission_guide = (
        f'Automatic verification failed or timed out. '
        f'Please verify the contract manually at:\n{etherscan_url}\n'
    )

    if content['status'] != '1':
        if content['result'] == 'Contract source code already verified':
            return
        else:
            raise ValueError(
                'Etherscan submission failed for an unknown reason\n' +
                manual_submission_guide,
            )

    # submission succeeded, obtained GUID
    guid = content['result']
    status = '0'
    retries = 10
    while status == '0' and retries > 0:
        retries -= 1
        r = guid_status(etherscan_api=etherscan_api, guid=guid)
        status = r['status']
        if r['result'] == 'Fail - Unable to verify':
            raise ValueError(manual_submission_guide)
        if r['result'] == 'Pass - Verified':
            return
        print('Retrying...')
        sleep(5)
    raise TimeoutError(manual_submission_guide)
3.607433
3.616489
0.997496
def error_removed_option(message: str):
    """Takes a message and returns a callback that raises NoSuchOption
    if the value is not None. The message is used as an argument to NoSuchOption."""
    def callback(_, param, value):
        if value is None:
            return None
        raise click.NoSuchOption(
            f'--{param.name.replace("_", "-")} is no longer a valid option. ' + message,
        )
    return callback
8.864969
7.285529
1.216791
def common_options(func):
    """A decorator that combines commonly appearing @click.option decorators."""
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        return func(*args, **kwargs)

    # Applied in reverse so the result matches the original stacked-decorator
    # order (--private-key outermost).
    option_decorators = [
        click.option(
            '--private-key',
            required=True,
            help='Path to a private key store.',
        ),
        click.option(
            '--rpc-provider',
            default='http://127.0.0.1:8545',
            help='Address of the Ethereum RPC provider',
        ),
        click.option(
            '--wait',
            default=300,
            help='Max tx wait time in s.',
        ),
        click.option(
            '--gas-price',
            default=5,
            type=int,
            help='Gas price to use in gwei',
        ),
        click.option(
            '--gas-limit',
            default=5_500_000,
        ),
        click.option(
            '--contracts-version',
            default=None,
            help='Contracts version to verify. Current version will be used by default.',
        ),
    ]
    for decorate in reversed(option_decorators):
        wrapper = decorate(wrapper)
    return wrapper
2.375193
2.322818
1.022548
def setup_ctx(
        ctx: click.Context,
        private_key: str,
        rpc_provider: str,
        wait: int,
        gas_price: int,
        gas_limit: int,
        contracts_version: None = None,
):
    """Set up deployment context according to common options (shared among all subcommands)."""
    if private_key is None:
        return
    logging.basicConfig(level=logging.DEBUG)
    logging.getLogger('web3').setLevel(logging.INFO)
    logging.getLogger('urllib3').setLevel(logging.INFO)

    web3 = Web3(HTTPProvider(rpc_provider, request_kwargs={'timeout': 60}))
    web3.middleware_stack.inject(geth_poa_middleware, layer=0)
    print('Web3 provider is', web3.providers[0])

    private_key = get_private_key(private_key)
    assert private_key is not None
    owner = private_key_to_address(private_key)
    # pylint: disable=E1101
    # BUG FIX: corrected misspelled assertion message ('insuficient').
    assert web3.eth.getBalance(owner) > 0, 'Account with insufficient funds.'

    deployer = ContractDeployer(
        web3=web3,
        private_key=private_key,
        gas_limit=gas_limit,
        gas_price=gas_price,
        wait=wait,
        contracts_version=contracts_version,
    )
    ctx.obj = {
        'deployer': deployer,
        'deployed_contracts': {},
        'token_type': 'CustomToken',
        'wait': wait,
    }
2.268152
2.208834
1.026855
def contracts_source_path_with_stem(stem):
    """The directory remapping given to the Solidity compiler."""
    # Every module maps to a same-named directory beneath `stem`.
    return {
        name: _BASE.joinpath(stem, name)
        for name in ('lib', 'raiden', 'test', 'services')
    }
3.086553
3.219101
0.958825
def _compile_all_contracts(self) -> Dict:
    """Compile solidity contracts into ABI and BIN. This requires solc
    somewhere in the $PATH and also the :ref:`ethereum.tools` python library.
    The return value is a dict that should be written into contracts.json.
    """
    compiled: Dict = {}
    original_working_dir = Path.cwd()
    # solc import remappings are relative, so compile from the project base.
    chdir(_BASE)

    def relativise(path):
        return path.relative_to(_BASE)

    import_dir_map = [
        '%s=%s' % (k, relativise(v))
        for k, v in self.contracts_source_dirs.items()
    ]
    import_dir_map.insert(0, '.=.')  # allow solc to compile contracts in all subdirs
    try:
        for contracts_dir in self.contracts_source_dirs.values():
            result = compile_files(
                [str(relativise(source)) for source in contracts_dir.glob('*.sol')],
                output_values=PRECOMPILED_DATA_FIELDS + ['ast'],
                import_remappings=import_dir_map,
                optimize=False,
            )

            # Strip `ast` part from result
            # TODO: Remove after https://github.com/ethereum/py-solc/issues/56 is fixed
            result = {
                contract_name: {
                    content_key: content_value
                    for content_key, content_value in contract_content.items()
                    if content_key != 'ast'
                }
                for contract_name, contract_content in result.items()
            }
            compiled.update(_fix_contract_key_names(result))
    except FileNotFoundError as ex:
        raise ContractSourceManagerCompilationError(
            'Could not compile the contract. Check that solc is available.',
        ) from ex
    finally:
        # Always restore the working directory, even on failure.
        chdir(original_working_dir)
    return compiled
4.101338
3.897483
1.052304
def compile_contracts(self, target_path: Path) -> ContractManager:
    """Store compiled contracts JSON at `target_path`."""
    self.checksum_contracts()
    if self.overall_checksum is None:
        raise ContractSourceManagerCompilationError('Checksumming failed.')
    compiled = self._compile_all_contracts()

    target_path.parent.mkdir(parents=True, exist_ok=True)
    payload = dict(
        contracts=compiled,
        contracts_checksums=self.contracts_checksums,
        overall_checksum=self.overall_checksum,
        contracts_version=None,
    )
    with target_path.open(mode='w') as target_file:
        target_file.write(json.dumps(payload, sort_keys=True, indent=4))
    return ContractManager(target_path)
3.190284
3.12088
1.022239
def verify_precompiled_checksums(self, precompiled_path: Path) -> None:
    """Compare source code checksums with those from a precompiled file."""
    # Load the metadata stored next to the precompiled bytecode.
    precompiled_manager = ContractManager(precompiled_path)

    # Silence mypy
    assert self.contracts_checksums is not None

    # Every source checksum must match its precompiled counterpart.
    for contract, checksum in self.contracts_checksums.items():
        try:
            # Silence mypy
            assert precompiled_manager.contracts_checksums is not None
            precompiled_checksum = precompiled_manager.contracts_checksums[contract]
        except KeyError:
            raise ContractSourceManagerVerificationError(
                f'No checksum for {contract}',
            )
        if precompiled_checksum != checksum:
            raise ContractSourceManagerVerificationError(
                f'checksum of {contract} does not match {precompiled_checksum} != {checksum}',
            )

    # Finally compare the aggregate checksum.
    if self.overall_checksum != precompiled_manager.overall_checksum:
        raise ContractSourceManagerVerificationError(
            f'overall checksum does not match '
            f'{self.overall_checksum} != {precompiled_manager.overall_checksum}',
        )
2.936516
2.671745
1.0991
def checksum_contracts(self) -> None:
    """Remember the checksum of each source, and the overall checksum."""
    per_file: Dict[str, str] = {}
    for source_dir in self.contracts_source_dirs.values():
        for source_file in source_dir.glob('*.sol'):
            per_file[source_file.name] = hashlib.sha256(
                source_file.read_bytes(),
            ).hexdigest()

    # The overall checksum hashes the per-file checksums in name order, so
    # it is independent of directory iteration order.
    combined = ':'.join(per_file[name] for name in sorted(per_file))
    self.overall_checksum = hashlib.sha256(combined.encode()).hexdigest()
    self.contracts_checksums = per_file
3.013031
2.585946
1.165156
if first is None: return second if second is None: return first if first > second: return keccak(second + first) else: return keccak(first + second)
def _hash_pair(first: bytes, second: bytes) -> bytes
Computes the hash of the items in lexicographic order
3.22775
2.382428
1.354815
if not all(isinstance(l, bytes) and len(l) == 32 for l in items): raise ValueError('Not all items are hashes') leaves = sorted(items) if len(leaves) == 0: return MerkleTree(layers=[[EMPTY_MERKLE_ROOT]]) if not len(leaves) == len(set(leaves)): raise ValueError('The leaves items must not contain duplicate items') tree = [leaves] layer = leaves while len(layer) > 1: # [a, b, c, d, e] -> [(a, b), (c, d), (e, None)] iterator = iter(layer) paired_items = zip_longest(iterator, iterator) layer = [_hash_pair(a, b) for a, b in paired_items] tree.append(layer) return MerkleTree(layers=tree)
def compute_merkle_tree(items: Iterable[bytes]) -> MerkleTree
Calculates the merkle root for a given list of items
2.91548
2.874771
1.014161
def get_merkle_root(merkle_tree: MerkleTree) -> bytes:
    """Returns the root element of the merkle tree."""
    assert merkle_tree.layers, 'the merkle tree layers are empty'
    top_layer = merkle_tree.layers[-1]
    assert top_layer, 'the root layer is empty'
    return top_layer[0]
4.475055
3.463694
1.291989
def contracts_version_expects_deposit_limits(contracts_version: Optional[str]) -> bool:
    """Answers whether TokenNetworkRegistry of the contracts_vesion needs deposit limits"""
    # None means the current development sources, which do expect limits.
    if contracts_version is None:
        return True
    if contracts_version == '0.3._':
        return False
    # Limits were introduced in 0.9.0.
    return compare(contracts_version, '0.9.0') >= 0
5.807091
4.634827
1.252925
def check_successful_tx(web3: Web3, txid: str, timeout=180) -> Tuple[dict, dict]:
    """See if transaction went through (Solidity code did not throw).

    :return: Transaction receipt and transaction info
    :raises KeyError: receipt has no 'status' field (pre-Byzantium chain)
    :raises ValueError: the transaction failed or used all supplied gas
    """
    receipt = wait_for_transaction_receipt(web3=web3, txid=txid, timeout=timeout)
    txinfo = web3.eth.getTransaction(txid)
    if 'status' not in receipt:
        raise KeyError(
            'A transaction receipt does not contain the "status" field. '
            'Does your chain have Byzantium rules enabled?',
        )
    if receipt['status'] == 0:
        # BUG FIX: was an f-string with no placeholders.
        raise ValueError('Status 0 indicates failure')
    # All gas consumed usually means the EVM reverted the transaction.
    if txinfo['gas'] == receipt['gasUsed']:
        raise ValueError(f'Gas is completely used ({txinfo["gas"]}). Failure?')
    return (receipt, txinfo)
3.991414
4.095582
0.974566
def _verify_deployed_contract(
        self,
        deployment_data: DeployedContracts,
        contract_name: str,
) -> Contract:
    """Verify deployment info against the chain

    Verifies:
    - the runtime bytecode - precompiled data against the chain
    - information stored in deployment_*.json against the chain,
    except for the constructor arguments, which have to be checked
    separately.

    Returns: (onchain_instance, constructor_arguments)
    """
    contracts = deployment_data['contracts']
    contract_address = contracts[contract_name]['address']
    instance = self.web3.eth.contract(
        abi=self.contract_manager.get_contract_abi(contract_name),
        address=contract_address,
    )

    # Check that the deployed bytecode matches the precompiled data
    onchain_bytecode = self.web3.eth.getCode(contract_address).hex()
    expected_bytecode = self.contract_manager.get_runtime_hexcode(contract_name)
    assert onchain_bytecode == expected_bytecode
    print(
        f'{contract_name} at {contract_address} '
        f'matches the compiled data from contracts.json',
    )

    # Check blockchain transaction hash & block information
    receipt = self.web3.eth.getTransactionReceipt(
        contracts[contract_name]['transaction_hash'],
    )
    assert receipt['blockNumber'] == contracts[contract_name]['block_number'], (
        f'We have block_number {contracts[contract_name]["block_number"]} in the deployment '
        f'info, but {receipt["blockNumber"]} in the transaction receipt from web3.'
    )
    assert receipt['gasUsed'] == contracts[contract_name]['gas_cost'], (
        f'We have gasUsed {contracts[contract_name]["gas_cost"]} in the deployment info, '
        f'but {receipt["gasUsed"]} in the transaction receipt from web3.'
    )
    assert receipt['contractAddress'] == contracts[contract_name]['address'], (
        f'We have contractAddress {contracts[contract_name]["address"]} in the deployment info'
        f' but {receipt["contractAddress"]} in the transaction receipt from web3.'
    )

    # Check the contract version
    version = instance.functions.contract_version().call()
    assert version == deployment_data['contracts_version'], \
        f'got {version} expected {deployment_data["contracts_version"]}.' \
        f'contract_manager has contracts_version {self.contract_manager.contracts_version}'

    return instance, contracts[contract_name]['constructor_arguments']
2.447922
2.361465
1.036612
def contracts_data_path(version: Optional[str] = None):
    """Returns the deployment data directory for a version."""
    # No version means the data of the current development sources.
    directory = 'data' if version is None else f'data_{version}'
    return _BASE.joinpath(directory)
5.269322
4.082812
1.290611
def contracts_precompiled_path(version: Optional[str] = None) -> Path:
    """Returns the path of JSON file where the bytecode can be found."""
    return contracts_data_path(version).joinpath('contracts.json')
5.844429
4.136945
1.412741
def contracts_deployed_path(
        chain_id: int,
        version: Optional[str] = None,
        services: bool = False,
):
    """Returns the path of the deplolyment data JSON file."""
    # Unknown chain ids map to the generic private-net file name.
    chain_name = ID_TO_NETWORKNAME.get(chain_id, 'private_net')
    prefix = 'services_' if services else ''
    return contracts_data_path(version).joinpath(f'deployment_{prefix}{chain_name}.json')
5.190479
4.039053
1.285073
def merge_deployment_data(dict1: DeployedContracts, dict2: DeployedContracts) -> DeployedContracts:
    """Take contents of two deployment JSON files and merge them

    The dictionary under 'contracts' key will be merged. The 'contracts'
    contents from different JSON files must not overlap. The contents under
    other keys must be identical.
    """
    # An empty side yields the other side unchanged.
    if not dict1:
        return dict2
    if not dict2:
        return dict1

    merged_contracts: Dict[str, DeployedContract] = deepcopy(dict1['contracts'])
    assert not merged_contracts.keys() & dict2['contracts'].keys()
    merged_contracts.update(dict2['contracts'])
    assert dict2['chain_id'] == dict1['chain_id']
    assert dict2['contracts_version'] == dict1['contracts_version']
    return {
        'contracts': merged_contracts,
        'chain_id': dict1['chain_id'],
        'contracts_version': dict1['contracts_version'],
    }
2.218401
2.104199
1.054274
def get_contracts_deployment_info(
        chain_id: int,
        version: Optional[str] = None,
        module: DeploymentModule = DeploymentModule.ALL,
) -> Optional[DeployedContracts]:
    """Reads the deployment data.  Returns None if the file is not found.

    Parameter:
        module The name of the module. ALL means deployed contracts from all
        modules that are available for the version.
    """
    if module not in DeploymentModule:
        raise ValueError(f'Unknown module {module} given to get_contracts_deployment_info()')

    def module_chosen(to_be_added: DeploymentModule):
        return module == to_be_added or module == DeploymentModule.ALL

    paths: List[Path] = []
    if module_chosen(DeploymentModule.RAIDEN):
        paths.append(contracts_deployed_path(
            chain_id=chain_id,
            version=version,
            services=False,
        ))

    if module == DeploymentModule.SERVICES and not version_provides_services(version):
        raise ValueError(
            f'SERVICES module queried for version {version}, but {version} '
            'does not provide service contracts.',
        )
    if module_chosen(DeploymentModule.SERVICES) and version_provides_services(version):
        paths.append(contracts_deployed_path(
            chain_id=chain_id,
            version=version,
            services=True,
        ))

    deployment_data: DeployedContracts = {}  # type: ignore
    for deployment_file in paths:
        deployment_data = merge_deployment_data(
            deployment_data,
            _load_json_from_path(deployment_file),
        )
    # An empty result means no deployment files were found.
    return deployment_data or None
3.016042
3.019152
0.99897
def get_contract(self, contract_name: str) -> Dict:
    """Return ABI, BIN of the given contract."""
    compiled = self.contracts
    assert compiled, 'ContractManager should have contracts compiled'
    return compiled[contract_name]
14.19904
10.092537
1.406885
def get_contract_abi(self, contract_name: str) -> Dict:
    """Returns the ABI for a given contract."""
    compiled = self.contracts
    assert compiled, 'ContractManager should have contracts compiled'
    return compiled[contract_name]['abi']
12.887892
9.477735
1.359807
def get_event_abi(self, contract_name: str, event_name: str) -> Dict:
    """Returns the ABI for a given event."""
    # Import locally to avoid web3 dependency during installation via `compile_contracts`
    from web3.utils.contracts import find_matching_event_abi

    assert self.contracts, 'ContractManager should have contracts compiled'
    return find_matching_event_abi(
        abi=self.get_contract_abi(contract_name),
        event_name=event_name,
    )
5.591491
4.987427
1.121117
def string(s, salt=None):
    """Return the MD5 digest of a string.

    :param:
        * s: (string) the string to hash
        * salt: (string) optional random salt, default None
    :return:
        * result: (string) 32-char lowercase MD5 hex digest
    """
    digest = hashlib.md5()
    data = s.encode('utf-8')
    if salt is not None:
        data += salt.encode('utf-8')
    digest.update(data)
    return digest.hexdigest()
2.10156
2.108613
0.996655
def file(filename):
    """Return the MD5 digest of a file's contents.

    :param:
        * filename: (string) path of the file to hash
    :return:
        * result: (string) 32-char lowercase MD5 hex digest
    """
    with open(filename, 'rb') as fp:
        return hashlib.md5(fp.read()).hexdigest()
2.541613
2.357653
1.078027
def big_file(filename):
    """Return the MD5 digest of a large file, read in chunks.

    :param:
        * filename: (string) path of the large file to hash
    :return:
        * result: (string) 32-char lowercase MD5 hex digest
    """
    digest = hashlib.md5()
    with open(filename, 'rb') as fp:
        # Stream in 8 KiB chunks so memory stays bounded.
        for chunk in iter(lambda: fp.read(8192), b''):
            digest.update(chunk)
    return digest.hexdigest()
2.172087
1.933444
1.123429
def hmac_md5(s, salt):
    """Return the salted HMAC-MD5 digest of a string.

    :param:
        * s: (string) the string to hash
        * salt: (string) the HMAC key
    :return:
        * result: (string) 32-char lowercase MD5 hex digest
    """
    mac = hmac.new(salt.encode('utf-8'), s.encode('utf-8'), digestmod=hashlib.md5)
    return mac.hexdigest()
2.25082
3.149797
0.714592
def hmac_sha256(secret, message):
    """Return the HMAC-SHA256 digest of a string under a secret key.

    :param:
        * secret: (string) the HMAC key
        * message: (string) the string to hash
    :return:
        * hashed_str: (string) SHA-256 hex digest
    """
    mac = hmac.new(secret.encode('utf-8'), message.encode('utf-8'), digestmod=hashlib.sha256)
    return mac.hexdigest()
2.219505
2.748258
0.807605
def hashlib_sha256(message):
    """Return the SHA-256 digest of a string.

    :param:
        * message: (string) the string to hash
    :return:
        * hashed_str: (string) SHA-256 hex digest
    """
    return hashlib.sha256(message.encode('utf-8')).hexdigest()
1.958704
2.188879
0.894843
def gen_random_str(min_length, max_length, prefix=None, suffix=None,
                   has_letter=True, has_digit=False, has_punctuation=False):
    """Generate a random string of bounded length with optional prefix/suffix.

    :param:
        * min_length: (int) minimum length of the random part
        * max_length: (int) maximum length of the random part
        * prefix: (string) optional prefix, default None
        * suffix: (string) optional suffix, default None
        * has_letter: (bool) include letters, default True
        * has_digit: (bool) include digits, default False
        * has_punctuation: (bool) include punctuation, default False
    :return:
        * random_str: (string) the generated string
    """
    if not all([isinstance(min_length, int), isinstance(max_length, int)]):
        raise ValueError('min_length and max_length should be int, but we got {} and {}'.
                         format(type(min_length), type(max_length)))
    if min_length > max_length:
        raise ValueError('min_length should less than or equal to max_length')
    # The character pool must not be empty.
    if not any([has_letter, has_digit, has_punctuation]):
        raise ValueError('At least one value is True in has_letter, has_digit and has_punctuation')

    length = random.randint(min_length, max_length)
    pool = ''
    if has_letter:
        pool += string.ascii_letters
    if has_digit:
        pool += string.digits
    if has_punctuation:
        pool += string.punctuation

    # random.sample draws without replacement, so the pool must be at least
    # as large as the requested length.
    if length > len(pool):
        pool *= (length // len(pool) + 1)
    body = ''.join(random.sample(pool, length))
    return ''.join([prefix or '', body, suffix or ''])
2.203274
2.266248
0.972212
def gen_random_mobile():
    """Generate a random Chinese mobile phone number.

    :return:
        * str: (string) an 11-digit mobile number
    """
    prefix_list = ["13", "1400", "1410", "1440", "145", "146", "147", "148", "15", "162",
                   "165", "166", "167", "170", "171", "172", "173", "175", "176", "177",
                   "178", "1740", "18", "191", "198", "199"]
    prefix = random.choice(prefix_list)
    # Fill the remaining digits up to the 11-digit total length.
    tail = ''.join(random.choice("0123456789") for _ in range(11 - len(prefix)))
    return prefix + tail
2.255926
2.409703
0.936184
def gen_random_float(minimum, maximum, decimals=2):
    """Generate a random float in the closed interval [minimum, maximum].

    Limited to at most 15 decimal places by random.random precision.

    :param:
        * minimum: (float) lower bound
        * maximum: (float) upper bound
        * decimals: (int) number of decimal places, default 2
    :return:
        * random_float: (float) a random float within the interval
    :raises ValueError: bounds are not floats, or decimals is not an int
    """
    if not (isinstance(minimum, float) and isinstance(maximum, float)):
        raise ValueError('param minimum, maximum should be float, but got minimum: {} maximum: {}'.
                         format(type(minimum), type(maximum)))
    if not isinstance(decimals, int):
        raise ValueError('param decimals should be a int, but we got {}'.format(type(decimals)))

    # Precision currently supports at most 15 places.
    decimals = 15 if decimals > 15 else decimals

    # BUG FIX: with a single-point interval (minimum == maximum) the original
    # loop could spin forever, because round() strips trailing zeros from the
    # repr and the exact-decimals check below can then never succeed.
    if minimum == maximum:
        return round(minimum, decimals)

    # Re-draw until the rounded value's repr shows exactly `decimals` digits
    # (round() may drop trailing zeros, yielding fewer visible places).
    while True:
        random_float = round(random.uniform(minimum, maximum), decimals)
        if len(str(random_float).split('.')[-1]) == decimals:
            return random_float
3.623883
3.526966
1.027479
def get_random_areanote(zone):
    """Return a random subordinate area name for a province code.

    :param:
        * zone: (string) province administrative division code, e.g. '310000'
    :returns:
        * random_areanote: (string) a random area name under the province
    """
    # Fetch all area entries for the province (first two digits of the code).
    province = str(zone)[:2]
    areanote_list = IdCard.get_areanote_info(province)

    # Locate the entry describing the province itself.
    province_entries = [item for item in areanote_list if item[0] == str(zone)]
    if not (areanote_list and province_entries):
        raise ValueError('zone error, please check and try again')

    # Keep only the subordinate areas, then pick one at random.
    areanote_list.remove(province_entries[0])
    province_name = province_entries[0][-1]
    full_areanote = random.choice(areanote_list)[-1]
    # Strip the leading province name from the full area name.
    return full_areanote.split(province_name)[-1]
3.601374
3.39799
1.059854
def gen_random_bank_card(bankname, card_type):
    """Generate a random, check-digit-valid card number for the given bank.

    :param:
        * bankname: (string) bank name, e.g. '中国银行'
        * card_type: (string) card kind, 'CC' (credit card) or 'DC' (debit card)
    :returns:
        * (string) randomly generated bank card number
    :raises ValueError: for an unknown bank name or card type
    """
    bank_records = CardBin.get_bank_info(bankname)
    if not bank_records:
        raise ValueError('bankname {} error, check and try again'.format(bankname))

    # bank code of the first matching record
    bank_code = bank_records[0][0]

    bins = CardBin.get_cardbin_info(bank_code, card_type)
    if not bins:
        raise ValueError('card_type {} error, check and try again'.format(card_type))

    chosen_bin = random.choice(bins)
    bin_prefix = chosen_bin[0]
    total_len = chosen_bin[-1]

    # start from the BIN prefix and fill random digits up to length - 1
    number = bin_prefix
    while len(number) < total_len - 1:
        number += str(random.randint(0, 9))

    # the final digit is the check code computed by CardBin
    return number + CardBin.get_checkcode(number)
2.609131
2.78311
0.937487
def set_log_file(local_file=None):
    """Configure the module logger to write INFO-and-above records to a
    date-suffixed rolling file (e.g. fish_test.log.2018-05-27, one file
    per day).

    :param:
        * local_file: (string) log file name; defaults to 'default.log'
    :return: None
    """
    filename = local_file if local_file is not None else 'default.log'

    fmt = logging.Formatter(
        '%(asctime)s %(levelname)s %(filename)s[ln:%(lineno)d] %(message)s')

    # date-based rollover handler; TimedRotatingFileHandler kept for reference:
    # _tfh = TimedRotatingFileHandler(filename, when="midnight")
    handler = SafeFileHandler(filename=filename)
    handler.setLevel(logging.INFO)
    handler.setFormatter(fmt)

    logger.setLevel(logging.INFO)
    logger.addHandler(handler)
2.596611
2.707378
0.959087
def set_log_stdout():
    """Send INFO-and-above records of the module logger to standard output.

    :param: none
    :return: None
    """
    fmt = logging.Formatter(
        '%(asctime)s %(levelname)s %(filename)s[ln:%(lineno)d] %(message)s')
    logger.setLevel(logging.INFO)
    handler = logging.StreamHandler(sys.stdout)
    handler.setFormatter(fmt)
    logger.addHandler(handler)
2.125784
2.281722
0.931658
def emit(self, record):
    """Emit a record, rolling the underlying file over first when the
    date suffix is stale (always checks the time before writing).

    KeyboardInterrupt/SystemExit propagate; any other error during
    emission is routed to handleError, mirroring logging.Handler.emit.
    """
    try:
        if self.check_base_filename(record):
            self.build_base_filename()
        FileHandler.emit(self, record)
    except (KeyboardInterrupt, SystemExit):
        raise
    except Exception:
        # BUGFIX: was a bare `except:`, which also swallowed
        # BaseExceptions such as GeneratorExit.
        self.handleError(record)
3.437655
3.674381
0.935574
def check_base_filename(self, record):
    """Return 1 when the log file should be rolled over, else 0.

    A rollover is due when the stored date suffix no longer matches the
    current time, or when the suffixed file has disappeared.  `record`
    is unused; it is accepted only so the method signature matches the
    other handler hooks.
    """
    current_suffix = time.strftime(self.suffix, time.localtime())
    suffixed_path = self.baseFilename + '.' + self.suffix_time
    # no rollover only when the suffix is current AND the file still exists
    if self.suffix_time == current_suffix and os.path.exists(suffixed_path):
        return 0
    return 1
4.924116
4.084382
1.205596
def build_base_filename(self):
    """Rebuild baseFilename for the current date: close the open stream,
    strip the previous time suffix (if any), append a fresh one, and
    reopen the stream unless opening is delayed.
    """
    if self.stream:
        self.stream.close()
        self.stream = None

    # drop the previous suffix from the file name
    if self.suffix_time != "":
        pos = self.baseFilename.find("." + self.suffix_time)
        # fall back to the last dot when the stored suffix is not found
        pos = pos if pos != -1 else self.baseFilename.rfind(".")
        self.baseFilename = self.baseFilename[:pos]

    # append the suffix for the current time
    self.suffix_time = time.strftime(self.suffix, time.localtime())
    self.baseFilename = "%s.%s" % (self.baseFilename, self.suffix_time)

    self.mode = 'a'
    if not self.delay:
        self.stream = self._open()
2.647057
2.462615
1.074897
def get_date_range(dates, separator='-'):
    """Return the first and last day of a month as strings.

    :param:
        * dates: (string like '201806', or any object with `year` and
          `month` attributes such as datetime) the month to expand
        * separator: (string) separator between date parts, default '-'
    :return:
        * first_day: (string) first day of the month (day part is '1')
        * last_day: (string) last day of the month
    :raises ValueError: for a malformed digit string
    :raises TypeError: for any other input type

    Example::

        get_date_range('201802', separator='/')
        # ('2018/02/1', '2018/02/28')
    """
    if isinstance(dates, str) and dates.isdigit():
        year_str, month_str = dates[:4], dates[4:]
        if len(year_str) != 4 or not 1 <= int(month_str) <= 12:
            raise (ValueError("date must be a date string like '201806', but get {}".format(dates)))
    elif hasattr(dates, 'year') and hasattr(dates, 'month'):
        year_str, month_str = str(dates.year), str(dates.month)
    else:
        raise (TypeError("date except a years string like '201806' or a object has 'year' "
                         "and 'month' attribute, but get a {}".format(type(dates))))

    # zero-pad the month to two digits
    if len(month_str) != 2:
        month_str = '0' + month_str

    # monthrange yields (weekday of day 1, number of days in the month)
    _, last = calendar.monthrange(int(year_str), int(month_str))
    first_day = separator.join([year_str, month_str, '1'])
    last_day = separator.join([year_str, month_str, str(last)])
    return first_day, last_day
3.161908
3.064669
1.031729
def get_years(months=0, refer=None):
    """Return the year-month string offset by `months` from a reference time.

    :param:
        * months: (int) month offset; positive moves forward, negative back
        * refer: (datetime obj, or anything with `year` and `month`
          attributes) reference point, default is the current time
    :return:
        * (string) 'YYYYMM'
    :raises TypeError: when refer has no usable year/month attributes

    Example::

        get_years(7, datetime(2018, 6, 1))
        # '201901'
    """
    if refer is None:
        refer = datetime.now()

    # total month count of the reference point
    try:
        total = refer.year * 12 + refer.month
    except Exception:
        raise TypeError('refer except {}, got an {}'.format(type(datetime.now()), type(refer)))

    total += months
    year, month = divmod(total, 12)
    # divmod maps December onto month 0 of the following year; undo that
    if month == 0:
        year, month = year - 1, 12

    return '%04d%02d' % (year, month)
4.048025
4.066048
0.995567
def get_time_interval(start_time, end_time):
    """Split the gap between two unix timestamps into days/hours/minutes/seconds.

    The order of the two timestamps does not matter; the absolute
    difference is used.

    :param:
        * start_time: (int) unix timestamp
        * end_time: (int) unix timestamp
    :return:
        * interval_dict: (dict) keys 'days', 'hours', 'minutes', 'seconds'
    :raises TypeError: when either argument is not an int
    """
    if not isinstance(start_time, int) or not isinstance(end_time, int):
        raise TypeError('start_time and end_time should be int, bu we got {0} and {1}'.
                        format(type(start_time), type(end_time)))

    diff = abs(end_time - start_time)
    # peel off each unit from largest to smallest
    days, remain = divmod(diff, 60 * 60 * 24)
    hours, remain = divmod(remain, 60 * 60)
    minutes, seconds = divmod(remain, 60)

    return {"days": days, "hours": hours, "minutes": minutes, "seconds": seconds}
1.925198
1.904182
1.011037
def transform_unix_to_datetime(timestamp):
    """Convert a unix timestamp to a local datetime instance.

    :param:
        * timestamp: (int or float) unix timestamp
    :return:
        * (datetime) corresponding datetime instance
    :raises TypeError: when timestamp is neither float nor int
    """
    if not isinstance(timestamp, (float, int)):
        raise TypeError('timestamp should be a float or int, but we got {}'.format(type(timestamp)))
    return datetime.fromtimestamp(timestamp)
3.349747
3.848601
0.87038