code
string
signature
string
docstring
string
loss_without_docstring
float64
loss_with_docstring
float64
factor
float64
# renew the dashboard config self.submission_data["dashboard_config"] = self.dashboard.get_persistent_config() # write the submission data to the output file self._outputs["submission"].dump(self.submission_data, formatter="json", indent=4)
def dump_submission_data(self)
Dumps the current submission data to the submission file.
9.469769
9.254601
1.02325
task = self.task self._outputs = self.output() # create the job dashboard interface self.dashboard = task.create_job_dashboard() or NoJobDashboard() # read submission data and reset some values submitted = not task.ignore_submission and self._outputs["submission"].exists() if submitted: self.submission_data.update(self._outputs["submission"].load(formatter="json")) task.tasks_per_job = self.submission_data.tasks_per_job self.dashboard.apply_config(self.submission_data.dashboard_config) # when the branch outputs, i.e. the "collection" exists, just create dummy control outputs if "collection" in self._outputs and self._outputs["collection"].exists(): self.touch_control_outputs() # cancel jobs? elif self._cancel_jobs: if submitted: self.cancel() # cleanup jobs? elif self._cleanup_jobs: if submitted: self.cleanup() # submit and/or wait while polling else: # maybe set a tracking url tracking_url = self.dashboard.create_tracking_url() if tracking_url: task.set_tracking_url(tracking_url) # ensure the output directory exists if not submitted: self._outputs["submission"].parent.touch() # at this point, when the status file exists, it is considered outdated if "status" in self._outputs: self._outputs["status"].remove() try: # instantiate the configured job file factory, not kwargs yet self.job_file_factory = self.create_job_file_factory() # submit if not submitted: # set the initial list of unsubmitted jobs branches = sorted(task.branch_map.keys()) branch_chunks = list(iter_chunks(branches, task.tasks_per_job)) self.submission_data.unsubmitted_jobs = OrderedDict( (i + 1, branches) for i, branches in enumerate(branch_chunks) ) self.submit() # sleep once to give the job interface time to register the jobs post_submit_delay = self._get_task_attribute("post_submit_delay")() if post_submit_delay: time.sleep(post_submit_delay) # start status polling when a) no_poll is not set, or b) the jobs were already # submitted so that failed jobs are resubmitted after a single polling iteration if not task.no_poll or submitted: self.poll() finally: # in any event, cleanup the job file if self.job_file_factory: self.job_file_factory.cleanup_dir(force=False)
def run(self)
Actual run method that starts the processing of jobs and initiates the status polling, or performs job cancelling or cleaning, depending on the task parameters.
5.578483
5.39614
1.033791
task = self.task # get job ids from submission data job_ids = [ d["job_id"] for d in self.submission_data.jobs.values() if d["job_id"] not in (self.submission_data.dummy_job_id, None) ] if not job_ids: return # cancel jobs task.publish_message("going to cancel {} jobs".format(len(job_ids))) errors = self.job_manager.cancel_batch(job_ids) # print errors if errors: print("{} error(s) occured while cancelling {} job(s) of task {}:".format( len(errors), len(job_ids), task.task_id)) tmpl = " {}" for i, err in enumerate(errors): print(tmpl.format(err)) if i + 1 >= self.show_errors: remaining = len(errors) - self.show_errors if remaining > 0: print(" ... and {} more".format(remaining)) break # inform the dashboard for job_num, job_data in six.iteritems(self.submission_data.jobs): task.forward_dashboard_event(self.dashboard, job_data, "action.cancel", job_num)
def cancel(self)
Cancels running jobs. The job ids are read from the submission file which has to exist for obvious reasons.
3.64399
3.469815
1.050197
task = self.task # get job ids from submission data job_ids = [ d["job_id"] for d in self.submission_data.jobs.values() if d["job_id"] not in (self.submission_data.dummy_job_id, None) ] if not job_ids: return # cleanup jobs task.publish_message("going to cleanup {} jobs".format(len(job_ids))) errors = self.job_manager.cleanup_batch(job_ids) # print errors if errors: print("{} error(s) occured while cleaning up {} job(s) of task {}:".format( len(errors), len(job_ids), task.task_id)) tmpl = " {}" for i, err in enumerate(errors): print(tmpl.format(err)) if i + 1 >= self.show_errors: remaining = len(errors) - self.show_errors if remaining > 0: print(" ... and {} more".format(remaining)) break
def cleanup(self)
Cleans up jobs on the remote run location. The job ids are read from the submission file which has to exist for obvious reasons.
3.351878
3.160005
1.060719
task = self.task # create the parent directory self._outputs["submission"].parent.touch() # get all branch indexes and chunk them by tasks_per_job branch_chunks = list(iter_chunks(task.branch_map.keys(), task.tasks_per_job)) # submission output if not self._outputs["submission"].exists(): submission_data = self.submission_data.copy() # set dummy submission data submission_data.jobs.clear() for i, branches in enumerate(branch_chunks): job_num = i + 1 submission_data.jobs[job_num] = self.submission_data_cls.job_data(branches=branches) self._outputs["submission"].dump(submission_data, formatter="json", indent=4) # status output if "status" in self._outputs and not self._outputs["status"].exists(): status_data = self.status_data_cls() # set dummy status data for i, branches in enumerate(branch_chunks): job_num = i + 1 status_data.jobs[job_num] = self.status_data_cls.job_data( status=self.job_manager.FINISHED, code=0) self._outputs["status"].dump(status_data, formatter="json", indent=4)
def touch_control_outputs(self)
Creates and saves dummy submission and status files. This method is called in case the collection of branch task outputs exists.
3.806023
3.433909
1.108365
# possible events: # - action.submit # - action.cancel # - status.pending # - status.running # - status.finished # - status.retry # - status.failed # forward to dashboard in any event by default return dashboard.publish(job_data, event, job_num)
def forward_dashboard_event(self, dashboard, job_data, event, job_num)
Hook to preprocess and publish dashboard events. By default, every event is passed to the dashboard's :py:meth:`law.job.dashboard.BaseJobDashboard.publish` method unchanged.
4.866822
4.366227
1.114652
if mode not in ("r", "w"): raise Exception("unknown mode '{}', use r or w".format(mode)) # get additional arguments skip_copy = kwargs.pop("skip_copy", False) is_tmp = kwargs.pop("is_tmp", mode == "w") if mode == "r": if is_tmp: # create a temporary target tmp = self.__class__(is_tmp=self.ext(n=1) or True) # always copy self.copy_to_local(tmp) # yield the copy try: yield tmp finally: tmp.remove() else: # simply yield yield self else: # write mode if is_tmp: # create a temporary target tmp = self.__class__(is_tmp=self.ext(n=1) or True) # copy when existing if not skip_copy and self.exists(): self.copy_to_local(tmp) # yield the copy try: yield tmp # move back again if tmp.exists(): tmp.move_to_local(self, dir_perm=parent_perm) self.chmod(perm) else: logger.warning("cannot move non-existing localized file target {!r}".format( self)) finally: tmp.remove() else: # create the parent dir self.parent.touch(perm=parent_perm) # simply yield yield self if self.exists(): self.chmod(perm)
def localize(self, mode="r", perm=None, parent_perm=None, **kwargs)
localize(mode="r", perm=None, parent_perm=None, skip_copy=False, is_tmp=None, **kwargs)
3.507147
3.229708
1.085902
parser = sub_parsers.add_parser("index", prog="law index", description="Create or update the" " (human-readable) law task index file ({}). This is only required for the shell" " auto-completion.".format(Config.instance().get("core", "index_file"))) parser.add_argument("--modules", "-m", nargs="+", help="additional modules to traverse") parser.add_argument("--no-externals", "-e", action="store_true", help="skip external tasks") parser.add_argument("--remove", "-r", action="store_true", help="remove the index file and" " exit") parser.add_argument("--location", "-l", action="store_true", help="print the location of the" " index file and exit") parser.add_argument("--verbose", "-v", action="store_true", help="verbose output")
def setup_parser(sub_parsers)
Sets up the command line parser for the *index* subprogram and adds it to *sub_parsers*.
4.221886
4.123981
1.02374
index_file = Config.instance().get_expanded("core", "index_file") # just print the file location? if args.location: print(index_file) return # just remove the index file? if args.remove: if os.path.exists(index_file): os.remove(index_file) print("removed index file {}".format(index_file)) return # get modules to lookup lookup = [m.strip() for m in Config.instance().keys("modules")] if args.modules: lookup += args.modules print("loading tasks from {} module(s)".format(len(lookup))) # loop through modules, import everything to load tasks for modid in lookup: if not modid: continue if args.verbose: sys.stdout.write("loading module '{}'".format(modid)) try: import_module(modid) except Exception as e: if not args.verbose: print("Error in module '{}': {}".format(colored(modid, "red"), str(e))) else: print("\n\nError in module '{}':".format(colored(modid, "red"))) traceback.print_exc() continue if args.verbose: print(", {}".format(colored("done", style="bright"))) # determine tasks to write into the index file seen_families = [] task_classes = [] lookup = [Task] while lookup: cls = lookup.pop(0) lookup.extend(cls.__subclasses__()) # skip already seen task families if cls.task_family in seen_families: continue seen_families.append(cls.task_family) # skip when explicitly excluded if cls.exclude_index: continue # skip external tasks is_external_task = issubclass(cls, ExternalTask) if args.no_externals and is_external_task: continue # skip non-external tasks without run implementation run_is_callable = callable(getattr(cls, "run", None)) run_is_abstract = getattr(cls.run, "__isabstractmethod__", False) if not is_external_task and (not run_is_callable or run_is_abstract): continue task_classes.append(cls) def get_task_params(cls): params = [] for attr in dir(cls): member = getattr(cls, attr) if isinstance(member, luigi.Parameter): exclude = getattr(cls, "exclude_params_index", set()) if not multi_match(attr, exclude, any): params.append(attr.replace("_", "-")) return params def index_line(cls, params): # format: "module_id:task_family:param param ..." return "{}:{}:{}".format(cls.__module__, cls.task_family, " ".join(params)) stats = OrderedDict() # write the index file if not os.path.exists(os.path.dirname(index_file)): os.makedirs(os.path.dirname(index_file)) with open(index_file, "w") as f: for cls in task_classes: # get prams params = get_task_params(cls) # fill stats if cls.__module__ not in stats: stats[cls.__module__] = [] stats[cls.__module__].append((cls.task_family, params)) f.write(index_line(cls, params) + "\n") # print stats if args.verbose: for mod, data in six.iteritems(stats): print("\nmodule '{}', {} task(s):".format(colored(mod, style="bright"), len(data))) for task_family, _ in data: print(" - {}".format(colored(task_family, "green"))) print("") print("written {} task(s) to index file '{}'".format(len(task_classes), index_file))
def execute(args)
Executes the *index* subprogram with the parsed command line *args*.
2.890278
2.880226
1.00349
params = [] for cls in luigi.task.Config.__subclasses__(): if config_names and cls.__name__ not in config_names: continue for attr in dir(cls): param = getattr(cls, attr) if not isinstance(param, luigi.Parameter): continue full_name = attr.replace("_", "-") if getattr(cls, "use_cmdline_section", True): full_name = "{}-{}".format(cls.__name__.replace("_", "-"), full_name) params.append((cls, param, attr, full_name)) return params
def get_global_parameters(config_names=("core", "scheduler", "worker", "retcode"))
Returns a list of global, luigi-internal configuration parameters. Each list item is a 4-tuple containing the configuration class, the parameter instance, the parameter name, and the full parameter name as used on the command line. When *config_names* is set, it should be a list of configuration class names that are exclusively taken into account.
3.273565
2.434988
1.344386
anchor = os.path.abspath(os.path.expandvars(os.path.expanduser(anchor))) if os.path.exists(anchor) and os.path.isfile(anchor): anchor = os.path.dirname(anchor) return os.path.normpath(os.path.join(anchor, *paths))
def rel_path(anchor, *paths)
Returns a path made of fragment *paths* relative to an *anchor* path. When *anchor* is a file, its absolute directory is used instead.
1.930197
1.84798
1.04449
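A minimal usage sketch of *rel_path* (the joined path fragments are hypothetical):

.. code-block:: python

    # resolve a path relative to the directory containing the current file
    config_path = rel_path(__file__, "config", "settings.json")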
home = os.getenv("LAW_HOME", "$HOME/.law") home = os.path.expandvars(os.path.expanduser(home)) return os.path.normpath(os.path.join(home, *paths))
def law_home_path(*paths)
Returns the law home directory (``$LAW_HOME``) that defaults to ``"$HOME/.law"``, optionally joined with *paths*.
2.502676
2.189403
1.143086
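A minimal usage sketch of *law_home_path* (the shown results assume an unchanged ``$LAW_HOME``):

.. code-block:: python

    law_home_path()         # -> e.g. "/home/user/.law"
    law_home_path("index")  # -> e.g. "/home/user/.law/index"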
sys.stderr.write(" ".join(str(arg) for arg in args) + "\n") if kwargs.get("flush", False): sys.stderr.flush()
def print_err(*args, **kwargs)
print_err(*args, flush=False) Same as *print*, but outputs to stderr. If *flush* is *True*, stderr is flushed after printing.
2.544551
2.17489
1.169967
if msg is not None: if exitcode in (None, 0): print(msg) else: print_err(msg) sys.exit(exitcode)
def abort(msg=None, exitcode=1)
Aborts the process (*sys.exit*) with an *exitcode*. If *msg* is not *None*, it is printed first to stdout if *exitcode* is 0 or *None*, and to stderr otherwise.
2.952052
2.904878
1.016239
try: if not force and not os.isatty(sys.stdout.fileno()): return msg except: return msg color = colors.get(color, colors["default"]) background = backgrounds.get(background, backgrounds["default"]) if not isinstance(style, (tuple, list, set)): style = (style,) style = ";".join(str(styles.get(s, styles["default"])) for s in style) return "\033[{};{};{}m{}\033[0m".format(style, background, color, msg)
def colored(msg, color=None, background=None, style=None, force=False)
Returns the colored version of a string *msg*. For *color*, *background* and *style* options, see https://misc.flogisoft.com/bash/tip_colors_and_formatting. Unless *force* is *True*, the *msg* string is returned unchanged in case the output is not a tty.
2.33936
2.27722
1.027288
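A minimal usage sketch of *colored*:

.. code-block:: python

    print(colored("done", "green"))
    print(colored("some title", style="bright"))

    # outside a tty the string is returned unchanged, unless force=True
    s = colored("done", "green", force=True)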
choices = _choices = [str(c) for c in choices] if lower: _choices = [c.lower() for c in choices] if default is not None: if default not in choices: raise Exception("default must be one of the choices") hints = [(choice if choice != default else choice + "*") for choice in choices] if descriptions is not None: if len(descriptions) != len(choices): raise ValueError("length of descriptions must match length of choices") hints = ["{}({})".format(*tpl) for tpl in zip(hints, descriptions)] msg += " [{}] ".format(", ".join(hints)) choice = None while choice not in _choices: if choice is not None: print("invalid choice: '{}'\n".format(choice)) choice = six.moves.input(msg) if default is not None and choice == "": choice = default if lower: choice = choice.lower() return choice
def query_choice(msg, choices, default=None, descriptions=None, lower=True)
Interactively query a choice from the prompt until the input matches one of the *choices*. The prompt can be configured using *msg* and *descriptions*, which, if set, must have the same length as *choices*. When *default* is not *None* it must be one of the choices and is used when the input is empty. When *lower* is *True*, the input is compared to the choices in lower case.
2.59811
2.683998
0.968
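A minimal usage sketch of *query_choice*:

.. code-block:: python

    # prompts "continue? [y*, n] " and falls back to "y" on empty input
    choice = query_choice("continue?", ["y", "n"], default="y")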
if not regex: return mode(fnmatch.fnmatch(name, pattern) for pattern in patterns) else: return mode(re.match(pattern, name) for pattern in patterns)
def multi_match(name, patterns, mode=any, regex=False)
Compares *name* to multiple *patterns* and returns *True* in case of at least one match (*mode* = *any*, the default), or in case all patterns matched (*mode* = *all*). Otherwise, *False* is returned. When *regex* is *True*, *re.match* is used instead of *fnmatch.fnmatch*.
2.072473
2.104476
0.984793
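A minimal usage sketch of *multi_match*:

.. code-block:: python

    multi_match("foo.txt", ["*.txt", "*.json"])          # -> True (at least one pattern matches)
    multi_match("foo.txt", ["*.txt", "bar*"], mode=all)  # -> False (not all patterns match)
    multi_match("foo.txt", [r"foo\.\w+"], regex=True)    # -> True (uses re.match)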
return isinstance(obj, (types.GeneratorType, collections.MappingView, six.moves.range, enumerate))
def is_lazy_iterable(obj)
Returns whether *obj* is iterable lazily, such as generators, range objects, etc.
9.744043
8.428419
1.156094
if isinstance(obj, list): return list(obj) elif is_lazy_iterable(obj): return list(obj) elif isinstance(obj, (tuple, set)) and cast: return list(obj) else: return [obj]
def make_list(obj, cast=True)
Converts an object *obj* to a list and returns it. Objects of types *tuple* and *set* are converted if *cast* is *True*. Otherwise, and for all other types, *obj* is put in a new list.
2.391965
2.670531
0.895689
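A minimal usage sketch of *make_list*:

.. code-block:: python

    make_list([1, 2])              # -> [1, 2]
    make_list((1, 2))              # -> [1, 2]
    make_list((1, 2), cast=False)  # -> [(1, 2)]
    make_list("foo")               # -> ["foo"]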
if isinstance(obj, tuple): return tuple(obj) elif is_lazy_iterable(obj): return tuple(obj) elif isinstance(obj, (list, set)) and cast: return tuple(obj) else: return (obj,)
def make_tuple(obj, cast=True)
Converts an object *obj* to a tuple and returns it. Objects of types *list* and *set* are converted if *cast* is *True*. Otherwise, and for all other types, *obj* is put in a new tuple.
2.465888
2.675809
0.921549
if isinstance(struct, dict): return flatten(struct.values()) elif isinstance(struct, (list, tuple, set)) or is_lazy_iterable(struct): objs = [] for obj in struct: objs.extend(flatten(obj)) return objs else: return [struct]
def flatten(struct)
Flattens and returns a complex structured object *struct*.
2.524174
2.659056
0.949274
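A minimal usage sketch of *flatten*:

.. code-block:: python

    flatten([1, [2, [3, 4]], {"a": 5}])  # -> [1, 2, 3, 4, 5]
    flatten("foo")                       # -> ["foo"]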
# get or infer the class cls = kwargs.get("cls", None) if cls is None: for d in dicts: if isinstance(d, dict): cls = d.__class__ break else: raise TypeError("cannot infer cls as none of the passed objects is of type dict") # start merging merged_dict = cls() for d in dicts: if isinstance(d, dict): merged_dict.update(d) return merged_dict
def merge_dicts(*dicts, **kwargs)
merge_dicts(*dicts, cls=None) Takes multiple *dicts* and returns a single merged dict. The merging takes place in the order of the passed dicts, so values of later dicts take precedence in case of field collisions. The class of the returned merged dict is configurable via *cls*. If it is *None*, the class is inferred from the first dict object in *dicts*.
3.114847
2.734601
1.13905
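A minimal usage sketch of *merge_dicts*:

.. code-block:: python

    merge_dicts({"a": 1, "b": 2}, {"b": 3})  # -> {"a": 1, "b": 3}

    # enforce the class of the returned dict
    from collections import OrderedDict
    merge_dicts({"a": 1}, {"b": 2}, cls=OrderedDict)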
executable = lambda path: os.path.isfile(path) and os.access(path, os.X_OK) # prog can also be a path dirname, _ = os.path.split(prog) if dirname: if executable(prog): return prog elif "PATH" in os.environ: for search_path in os.environ["PATH"].split(os.pathsep): path = os.path.join(search_path.strip('"'), prog) if executable(path): return path return None
def which(prog)
Pythonic ``which`` implementation. Returns the path to an executable *prog* by searching in *PATH*, or *None* when it could not be found.
2.187331
2.209584
0.989929
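A minimal usage sketch of *which* (the shown result depends on the environment):

.. code-block:: python

    which("python")         # -> e.g. "/usr/bin/python"
    which("not_a_program")  # -> None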
# default callable if not callable(callback): def callback(i): print(msg.format(i + offset)) results = [] for i, obj in enumerate(seq): results.append(func(obj)) do_call = (start and i == 0) or (i + 1) % every == 0 if do_call: callback(i) else: if end and results and not do_call: callback(i) return results
def map_verbose(func, seq, msg="{}", every=25, start=True, end=True, offset=0, callback=None)
Same as the built-in map function but prints a *msg* after chunks of size *every* iterations. When *start* (*end*) is *True*, the *msg* is also printed after the first (last) iteration. Note that *msg* is supposed to be a template string that will be formatted with the current iteration number (starting at 0) plus *offset* using ``str.format``. When *callback* is callable, it is invoked instead of the default print method with the current iteration number (without *offset*) as the only argument. Example: .. code-block:: python func = lambda x: x ** 2 msg = "computing square of {}" squares = map_verbose(func, range(7), msg, every=3) # -> # computing square of 0 # computing square of 2 # computing square of 5 # computing square of 6
3.101234
4.333978
0.715563
# interpret generators and views as lists if is_lazy_iterable(struct): struct = list(struct) valid_types = tuple() if map_dict: valid_types += (dict,) if isinstance(map_dict, int) and not isinstance(map_dict, bool): map_dict -= 1 if map_list: valid_types += (list,) if isinstance(map_list, int) and not isinstance(map_list, bool): map_list -= 1 if map_tuple: valid_types += (tuple,) if isinstance(map_tuple, int) and not isinstance(map_tuple, bool): map_tuple -= 1 if map_set: valid_types += (set,) if isinstance(map_set, int) and not isinstance(map_set, bool): map_set -= 1 # is an instance of cls? if cls is not None and isinstance(struct, cls): return func(struct) # traverse? elif isinstance(struct, valid_types): # create a new struct, treat tuples as lists for itertative item appending new_struct = struct.__class__() if not isinstance(struct, tuple) else [] # create type-dependent generator and addition callback if isinstance(struct, (list, tuple)): gen = enumerate(struct) add = lambda _, value: new_struct.append(value) elif isinstance(struct, set): gen = enumerate(struct) add = lambda _, value: new_struct.add(value) else: # dict gen = six.iteritems(struct) add = lambda key, value: new_struct.__setitem__(key, value) # recursively fill the new struct for key, value in gen: value = map_struct(func, value, cls=cls, map_dict=map_dict, map_list=map_list, map_tuple=map_tuple, map_set=map_set) add(key, value) # convert tuples if isinstance(struct, tuple): new_struct = struct.__class__(new_struct) return new_struct # when cls is set, just return elif cls is not None: return struct # apply func else: return func(struct)
def map_struct(func, struct, cls=None, map_dict=True, map_list=True, map_tuple=False, map_set=False)
Applies a function *func* to each value of a complex structured object *struct* and returns the output in the same structure. Example: .. code-block:: python struct = {"foo": [123, 456], "bar": [{"1": 1}, {"2": 2}]} def times_two(i): return i * 2 map_struct(times_two, struct) # -> {"foo": [246, 912], "bar": [{"1": 2}, {"2": 4}]} When *cls* is not *None*, it exclusively defines the class of objects that *func* is applied on. All other objects are unchanged. *map_dict*, *map_list*, *map_tuple* and *map_set* configure if objects of the respective types are traversed or mapped. They can be booleans or integer values that define the depth of that setting in the struct.
2.293435
2.32635
0.985851
# interpret lazy iterables lists if is_lazy_iterable(struct): struct = list(struct) # when mask is a bool, or struct is not a dict or sequence, apply the mask immediately if isinstance(mask, bool) or not isinstance(struct, (list, tuple, dict)): return struct if mask else replace # check list and tuple types elif isinstance(struct, (list, tuple)) and isinstance(mask, (list, tuple)): new_struct = [] for i, val in enumerate(struct): if i >= len(mask): new_struct.append(val) else: repl = replace if isinstance(replace, (list, tuple)) and len(replace) > i: repl = replace[i] val = mask_struct(mask[i], val, replace=repl) if val != no_value: new_struct.append(val) return struct.__class__(new_struct) if new_struct else replace # check dict types elif isinstance(struct, dict) and isinstance(mask, dict): new_struct = struct.__class__() for key, val in six.iteritems(struct): if key not in mask: new_struct[key] = val else: repl = replace if isinstance(replace, dict) and key in replace: repl = replace[key] val = mask_struct(mask[key], val, replace=repl) if val != no_value: new_struct[key] = val return new_struct or replace # when this point is reached, mask and struct have incompatible types raise TypeError("mask and struct must have the same type, got '{}' and '{}'".format(type(mask), type(struct)))
def mask_struct(mask, struct, replace=no_value)
Masks a complex structured object *struct* with a *mask* and returns the remaining values. When *replace* is set, masked values are replaced with that value instead of being removed. The *mask* can have a complex structure as well. Examples: .. code-block:: python struct = {"a": [1, 2], "b": [3, ["foo", "bar"]]} # simple example mask_struct({"a": [False, True], "b": False}, struct) # => {"a": [2]} # omitting mask information results in keeping values mask_struct({"a": [False, True]}, struct) # => {"a": [2], "b": [3, ["foo", "bar"]]}
2.367987
2.402762
0.985527
fileno, path = tempfile.mkstemp(*args, **kwargs) # create the file with open(path, "w") as f: f.write("") # yield it try: yield fileno, path finally: if os.path.exists(path): os.remove(path)
def tmp_file(*args, **kwargs)
Context manager that generates a temporary file, yields the file descriptor number and temporary path, and finally removes the file. All *args* and *kwargs* are passed to :py:func:`tempfile.mkstemp`.
2.734378
2.698531
1.013284
kwargs["preexec_fn"] = os.setsid p = subprocess.Popen(*args, **kwargs) try: out, err = p.communicate() except KeyboardInterrupt: os.killpg(os.getpgid(p.pid), signal.SIGTERM) raise if six.PY3: if out is not None: out = out.decode("utf-8") if err is not None: err = err.decode("utf-8") return p.returncode, out, err
def interruptable_popen(*args, **kwargs)
Shorthand to :py:class:`Popen` followed by :py:meth:`Popen.communicate`. All *args* and *kwargs* are forwarded to the :py:class:`Popen` constructor. The return code, standard output and standard error are returned in a tuple. The call to :py:meth:`Popen.communicate` can be interrupted by the user.
1.886663
2.035955
0.926672
# force pipes kwargs["stdout"] = subprocess.PIPE kwargs["stderr"] = subprocess.STDOUT p = subprocess.Popen(*args, **kwargs) for line in iter(lambda: p.stdout.readline(), ""): if six.PY3: line = line.decode("utf-8") yield line.rstrip() # yield the process itself in the end p.communicate() yield p
def readable_popen(*args, **kwargs)
Shorthand to :py:class:`Popen` which yields the output live line-by-line. All *args* and *kwargs* are forwarded to the :py:class:`Popen` constructor. When EOF is reached, ``communicate()`` is called on the subprocess and it is yielded. Example: .. code-block:: python for line in readable_popen(["some_executable", "--args"]): if isinstance(line, str): print(line) else: process = line if process.returncode != 0: raise Exception("complain ...")
3.602046
4.618535
0.779911
return getattr(hashlib, algo)(six.b(str(inp))).hexdigest()[:l]
def create_hash(inp, l=10, algo="sha256")
Takes an input *inp* and creates a hash based on an algorithm *algo*. For valid algorithms, see python's hashlib. *l* corresponds to the maximum length of the returned hash. Internally, the string representation of *inp* is used.
5.349353
7.024752
0.761501
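A minimal usage sketch of *create_hash*:

.. code-block:: python

    create_hash("foo")                   # -> "2c26b46b68", the first 10 hex characters of the sha256 digest
    create_hash("foo", l=6, algo="md5")  # -> first 6 hex characters of the md5 digest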
shutil.copy(src, dst) perm = os.stat(dst).st_mode shutil.copystat(src, dst) os.chmod(dst, perm)
def copy_no_perm(src, dst)
Copies a file from *src* to *dst* including meta data except for permission bits.
2.168556
2.377381
0.912162
if not os.path.exists(path): if perm is None: os.makedirs(path) else: umask = os.umask(0) try: os.makedirs(path, perm) finally: os.umask(umask)
def makedirs_perm(path, perm=None)
Recursively creates directories up to *path*. If *perm* is set, the permissions of all newly created directories are set to its value.
1.707811
1.888043
0.90454
if uid is None: uid = os.getuid() path = os.path.expandvars(os.path.expanduser(path)) return os.stat(path).st_uid == uid
def user_owns_file(path, uid=None)
Returns whether a file located at *path* is owned by the user with *uid*. When *uid* is *None*, the user id of the current process is used.
2.332361
2.252729
1.035349
if isinstance(l, six.integer_types): l = six.moves.range(l) if is_lazy_iterable(l): if size < 1: yield list(l) else: chunk = [] for elem in l: if len(chunk) < size: chunk.append(elem) else: yield chunk chunk = [elem] else: if chunk: yield chunk else: if size < 1: yield l else: for i in six.moves.range(0, len(l), size): yield l[i:i + size]
def iter_chunks(l, size)
Returns a generator that yields chunks of size *size* of a list, integer or generator *l*. A *size* smaller than 1 results in no chunking at all.
2.079466
1.936986
1.073558
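A minimal usage sketch of *iter_chunks*:

.. code-block:: python

    list(iter_chunks([1, 2, 3, 4, 5], 2))  # -> [[1, 2], [3, 4], [5]]
    list(iter_chunks(5, 2))                # -> [[0, 1], [2, 3], [4]]
    list(iter_chunks([1, 2, 3], 0))        # -> [[1, 2, 3]]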
if n == 0: idx = 0 elif unit: idx = byte_units.index(unit) else: idx = int(math.floor(math.log(abs(n), 1024))) idx = min(idx, len(byte_units)) return n / 1024. ** idx, byte_units[idx]
def human_bytes(n, unit=None)
Takes a number of bytes *n*, assigns the best matching unit and returns the respective number and unit string in a tuple. When *unit* is set, that unit is used. Example: .. code-block:: python human_bytes(3407872) # -> (3.25, "MB") human_bytes(3407872, "kB") # -> (3328.0, "kB")
2.700724
2.89642
0.932435
secs = float(datetime.timedelta(*args, **kwargs).total_seconds()) parts = [] for unit, mul in time_units: if secs / mul >= 1 or mul == 1: if mul > 1: n = int(math.floor(secs / mul)) secs -= n * mul else: n = round(secs, 1) parts.append("{} {}{}".format(n, unit, "" if n == 1 else "s")) return ", ".join(parts)
def human_time_diff(*args, **kwargs)
Returns a human readable time difference. The largest unit is days. All *args* and *kwargs* are passed to ``datetime.timedelta``. Example: .. code-block:: python human_time_diff(seconds=1233) # -> "20 minutes, 33 seconds" human_time_diff(seconds=90001) # -> "1 day, 1 hour, 1 second"
2.443338
2.650469
0.921851
if six.PY3: return isinstance(e, FileExistsError) # noqa: F821 else: return isinstance(e, OSError) and e.errno == 17
def is_file_exists_error(e)
Returns whether the exception *e* was raised due to an already existing file or directory.
2.647515
2.313026
1.144611
try: server = smtplib.SMTP(smtp_host, smtp_port) except Exception as e: logger = logging.getLogger(__name__) logger.warning("cannot create SMTP server: {}".format(e)) return False header = "From: {}\r\nTo: {}\r\nSubject: {}\r\n\r\n".format(sender, recipient, subject) server.sendmail(sender, recipient, header + content) return True
def send_mail(recipient, sender, subject="", content="", smtp_host="127.0.0.1", smtp_port=25)
Lightweight mail functionality. Sends an email from *sender* to *recipient* with *subject* and *content*. *smtp_host* and *smtp_port* are forwarded to the ``smtplib.SMTP`` constructor. *True* is returned on success, *False* otherwise.
2.05224
2.093166
0.980448
if six.PY3: return open(*args, **kwargs) else: f = io.open(*args, **kwargs) if f.encoding and f.encoding.lower().replace("-", "") == "utf8": write_orig = f.write def write(data, *args, **kwargs): u = unicode # noqa: F821 if not isinstance(data, u): data = u(data) return write_orig(data, *args, **kwargs) f.write = write return f
def open_compat(*args, **kwargs)
Polyfill for python's ``open`` factory, returning the plain ``open`` in python 3, and ``io.open`` in python 2 with a patched ``write`` method that internally handles unicode conversion of its first argument. All *args* and *kwargs* are forwarded.
2.865545
2.597176
1.103331
orig = getattr(obj, attr, no_value) try: setattr(obj, attr, value) yield obj finally: try: if orig is no_value: delattr(obj, attr) else: setattr(obj, attr, orig) except: pass
def patch_object(obj, attr, value)
Context manager that temporarily patches an object *obj* by replacing its attribute *attr* with *value*. The original value is set again when the context is closed.
2.60791
2.397929
1.087567
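A minimal usage sketch of *patch_object* (the patched class is hypothetical):

.. code-block:: python

    class Settings(object):
        verbose = False

    with patch_object(Settings, "verbose", True):
        print(Settings.verbose)  # -> True

    print(Settings.verbose)      # -> False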
for consumer in self.consumers: if not getattr(consumer, "closed", False): consumer.flush()
def _flush(self)
Flushes all registered consumer streams.
5.689994
3.850061
1.477897
for consumer in self.consumers: consumer.write(*args, **kwargs)
def _write(self, *args, **kwargs)
Writes to all registered consumer streams, passing *args* and *kwargs*.
5.784438
2.844203
2.033764
if self.filter_fn(*args, **kwargs): self.stream.write(*args, **kwargs)
def _write(self, *args, **kwargs)
Writes to the consumer stream when *filter_fn* evaluates to *True*, passing *args* and *kwargs*.
5.95285
2.776548
2.143975
if self.has_section(section) and self.has_option(section, option): value = self.get(section, option) if isinstance(value, six.string_types): if expandvars: value = os.path.expandvars(value) if expanduser: value = os.path.expanduser(value) return value if not type else self._get_type_converter(type)(value) else: return default
def get_default(self, section, option, default=None, type=None, expandvars=False, expanduser=False)
Returns the config value defined by *section* and *option*. When either the section or the option does not exist, the *default* value is returned instead. When *type* is set, it must be either `"str"`, `"int"`, `"float"`, or `"boolean"`. When *expandvars* is *True*, environment variables are expanded. When *expanduser* is *True*, user variables are expanded as well.
1.792666
1.975633
0.907388
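A minimal usage sketch of *get_default* (the section and option names are placeholders taken from other entries in this file):

.. code-block:: python

    cfg = Config.instance()
    cfg.get_default("core", "index_file")
    cfg.get_default("core", "missing_option", default="fallback")   # -> "fallback"
    cfg.get_default("notifications", "mail_smtp_port", type="int")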
kwargs.setdefault("expandvars", True) kwargs.setdefault("expanduser", True) return self.get_default(*args, **kwargs)
def get_expanded(self, *args, **kwargs)
Same as :py:meth:`get_default`, but *expandvars* and *expanduser* arguments are set to *True* by default.
4.616214
2.082675
2.216483
if overwrite is not None: overwrite_sections = overwrite overwrite_options = overwrite for section, _data in six.iteritems(data): if not self.has_section(section): self.add_section(section) elif not overwrite_sections: continue for option, value in six.iteritems(_data): if overwrite_options or not self.has_option(section, option): self.set(section, option, str(value))
def update(self, data, overwrite=None, overwrite_sections=True, overwrite_options=True)
Updates the currently stored configuration with new *data*, given as a dictionary. When *overwrite_sections* is *False*, sections in *data* that are already present in the current config are skipped. When *overwrite_options* is *False*, existing options are not overwritten. When *overwrite* is not *None*, both *overwrite_sections* and *overwrite_options* are set to its value.
1.91058
1.882665
1.014828
p = self.__class__(filename, skip_defaults=True, skip_fallbacks=True) self.update(p._sections, *args, **kwargs)
def include(self, filename, *args, **kwargs)
Updates the current config with the config found in *filename*. All *args* and *kwargs* are forwarded to :py:meth:`update`.
8.518606
6.80381
1.252035
return [key for key, _ in self.items(section) if (not prefix or key.startswith(prefix))]
def keys(self, section, prefix=None)
Returns all keys of a *section* in a list. When *prefix* is set, only keys starting with that prefix are returned.
4.112992
3.895564
1.055814
prefix = "luigi_" lparser = luigi.configuration.LuigiConfigParser.instance() if push: for section in self.sections(): if not section.startswith(prefix): continue lsection = section[len(prefix):] if not lparser.has_section(lsection): lparser.add_section(lsection) for option in self.options(section): if not lparser.has_option(lsection, option): if expand: value = self.get_expanded(section, option) else: value = self.get(section, option) lparser.set(lsection, option, value) if pull: for lsection in lparser.sections(): section = prefix + lsection if not self.has_section(section): self.add_section(section) for option, value in lparser.items(lsection): self.set(section, option, value)
def sync_luigi_config(self, push=True, pull=True, expand=True)
Synchronizes sections starting with ``"luigi_"`` with the luigi configuration parser. First, when *push* is *True*, options that exist in law but **not** in luigi are stored as defaults in the luigi config. Then, when *pull* is *True*, all luigi-related options in the law config are overwritten with those from luigi. This way, options set via luigi defaults (environment variables, global configuration files, `LUIGI_CONFIG_PATH`) always have precedence. When *expand* is *True*, environment variables are expanded before pushing them to the luigi config.
1.82145
1.788811
1.018246
# test import import telegram # noqa: F401 cfg = Config.instance() # get default token and chat if not token: token = cfg.get_expanded("notifications", "telegram_token") if not chat: chat = cfg.get_expanded("notifications", "telegram_chat") if not token or not chat: logger.warning("cannot send Telegram notification, token ({}) or chat ({}) empty".format( token, chat)) return False # append the user to mention to the title # unless explicitly set to empty string mention_text = "" if mention_user is None: mention_user = cfg.get_expanded("notifications", "telegram_mention_user") if mention_user: mention_text = " (@{})".format(mention_user) # request data for the API call request = { "parse_mode": "Markdown", } # standard or attachment content? if isinstance(content, six.string_types): request["text"] = "{}{}\n\n{}".format(title, mention_text, content) else: # content is a dict, add some formatting request["text"] = "{}{}\n\n".format(title, mention_text) for key, value in content.items(): request["text"] += "_{}_: {}\n".format(key, value) # extend by arbitrary kwargs request.update(kwargs) # threaded, non-blocking API communication thread = threading.Thread(target=_notify_telegram, args=(token, chat, request)) thread.start() return True
def notify_telegram(title, content, token=None, chat=None, mention_user=None, **kwargs)
Sends a Telegram notification and returns *True* on success. The communication with the Telegram API might have some delays and is therefore handled by a thread.
3.311171
3.233746
1.023943
if self.task.local_workflow_require_branches: return self._has_run else: return super(LocalWorkflowProxy, self).complete()
def complete(self)
When *local_workflow_require_branches* of the task was set to *True*, returns whether the :py:meth:`run` method has been called before. Otherwise, the call is forwarded to the super class.
15.57869
3.668351
4.246783
if not self._has_yielded and not self.task.local_workflow_require_branches: self._has_yielded = True yield list(self.task.get_branch_tasks().values()) self._has_run = True
def run(self)
When *local_workflow_require_branches* of the task was set to *False*, starts all branch tasks via dynamic dependencies by yielding them in a list, or simply does nothing otherwise.
11.358304
4.178712
2.718135
def wrapper(decorator): @functools.wraps(decorator) def wrapper(fn=None, **opts): _opts = default_opts.copy() _opts.update(opts) def wrapper(fn): @functools.wraps(fn) def wrapper(*args, **kwargs): return decorator(fn, _opts, *args, **kwargs) return wrapper return wrapper if fn is None else wrapper(fn) return wrapper return wrapper
def factory(**default_opts)
Factory function to create decorators for tasks' run methods. Default options for the decorator function can be given in *default_opts*. The returned decorator can be used with or without function invocation. Example: .. code-block:: python @factory(digits=2) def runtime(fn, opts, task, *args, **kwargs): t0 = time.time() try: return fn(task, *args, **kwargs) finally: t1 = time.time() diff = round(t1 - t0, opts["digits"]) print("runtime:") print(diff) ... class MyTask(law.Task): @runtime def run(self): ... # or @runtime(digits=3): def run(self): ... .. note:: Decorators might not have the expected behavior when used to decorate generator functions such as ``Task.run()`` methods that yield dynamic dependencies.
2.25539
2.367523
0.952637
_task = get_task(task) log = get_param(_task.log_file, _task.default_log_file) if log == "-" or not log: return fn(task, *args, **kwargs) else: # use the local target functionality to create the parent directory LocalFileTarget(log).parent.touch() with open_compat(log, "a", 1) as f: tee = TeeStream(f, sys.__stdout__) sys.stdout = tee sys.stderr = tee try: ret = fn(task, *args, **kwargs) except: traceback.print_exc(file=tee) raise finally: sys.stdout = sys.__stdout__ sys.stderr = sys.__stderr__ tee.flush() return ret
def log(fn, opts, task, *args, **kwargs)
log() Wraps a bound method of a task and redirects output of both stdout and stderr to the file defined by the task's *log_file* parameter or *default_log_file* attribute. If its value is ``"-"`` or *None*, the output is not redirected.
3.742958
3.393152
1.103092
try: return fn(task, *args, **kwargs) except Exception as e: if opts["skip"] is None or not isinstance(e, opts["skip"]): for outp in luigi.task.flatten(task.output()): outp.remove() raise
def safe_output(fn, opts, task, *args, **kwargs)
safe_output(skip=None) Wraps a bound method of a task and guards its execution. If an exception occurs, and it is not an instance of *skip*, the task's output is removed prior to the actual raising.
4.036779
2.990169
1.350017
if opts["stddev"] <= 0: t = opts["t"] elif opts["pdf"] == "gauss": t = random.gauss(opts["t"], opts["stddev"]) elif opts["pdf"] == "uniform": t = random.uniform(opts["t"], opts["stddev"]) else: raise ValueError("unknown delay decorator pdf '{}'".format(opts["pdf"])) time.sleep(t) return fn(task, *args, **kwargs)
def delay(fn, opts, task, *args, **kwargs)
delay(t=5, stddev=0., pdf="gauss") Wraps a bound method of a task and delays its execution by *t* seconds.
3.201557
2.272633
1.408744
_task = get_task(task) # get notification transports transports = [] for param_name, param in _task.get_params(): if isinstance(param, NotifyParameter) and getattr(_task, param_name): try: transport = param.get_transport() if transport: transports.append(transport) except Exception as e: logger.warning("get_transport() failed for '{}' parameter: {}".format( param_name, e)) # nothing to do when there is no transport if not transports: return fn(task, *args, **kwargs) # guard the fn call and gather infos error = None t0 = time.time() try: return fn(task, *args, **kwargs) except (Exception, KeyboardInterrupt) as e: error = e raise finally: success = error is None # do nothing on KeyboardInterrupt, or when on_success / on_failure do not match the status if isinstance(error, KeyboardInterrupt): return elif success and not opts["on_success"]: return elif not success and not opts["on_failure"]: return duration = human_time_diff(seconds=round(time.time() - t0, 1)) status_string = "succeeded" if success else "failed" title = "Task {} {}!".format(_task.get_task_family(), status_string) parts = collections.OrderedDict([ ("Host", socket.gethostname()), ("Duration", duration), ("Last message", "-" if not len(_task._message_cache) else _task._message_cache[-1]), ("Task", str(_task)), ]) if not success: parts["Traceback"] = traceback.format_exc() message = "\n".join("{}: {}".format(*tpl) for tpl in parts.items()) # dispatch via all transports for transport in transports: fn = transport["func"] raw = transport.get("raw", False) try: fn(success, title, parts.copy() if raw else message, **opts) except Exception as e: t = traceback.format_exc() logger.warning("notification failed via transport '{}': {}\n{}".format(fn, e, t))
def notify(fn, opts, task, *args, **kwargs)
notify(on_success=True, on_failure=True, **kwargs) Wraps a bound method of a task and guards its execution. Information about the execution (task name, duration, etc.) is collected and dispatched to all notification transports registered on the wrapped task by adding :py:class:`law.NotifyParameter` parameters. Example: .. code-block:: python class MyTask(law.Task): notify_mail = law.NotifyMailParameter() @notify # or @notify(sender="[email protected]", recipient="[email protected]") def run(self): ... When the *notify_mail* parameter is *True*, a notification is sent to the configured email address. Also see :ref:`config-notifications`.
3.568303
3.458509
1.031746
start_time = time.time() try: return fn(task, *args, **kwargs) finally: duration = human_time_diff(seconds=round(time.time() - start_time, 1)) # log timeit_logger = logger.getChild("timeit") timeit_logger.info("runtime of {}: {}".format(task.task_id, duration)) # optionally publish a task message to the scheduler if opts["publish_message"] and callable(getattr(task, "publish_message", None)): task.publish_message("runtime: {}".format(duration))
def timeit(fn, opts, task, *args, **kwargs)
Wraps a bound method of a task and logs its execution time in a human readable format. Logging is done at the info level. When *publish_message* is *True*, the duration is also published as a task message to the scheduler.
3.776324
3.093481
1.220736
out = _voms_proxy_info(["--identity"])[1].strip() try: return re.match(r".*\/CN\=([^\/]+).*", out.strip()).group(1) except: raise Exception("no valid identity found in voms proxy: {}".format(out))
def get_voms_proxy_user()
Returns the owner of the voms proxy.
6.390141
5.964752
1.071317
valid = _voms_proxy_info(["--exists"], silent=True)[0] == 0 if log and not valid: logger.warning("no valid voms proxy found") return valid
def check_voms_proxy_validity(log=False)
Returns *True* when a valid voms proxy exists, *False* otherwise. When *log* is *True*, a warning will be logged.
6.371104
5.813046
1.096001
with tmp_file() as (_, tmp): with open(tmp, "w") as f: f.write(passwd) cmd = "cat '{}' | voms-proxy-init --valid '{}'".format(tmp, lifetime) if vo: cmd += " -voms '{}'".format(vo) code, out, _ = interruptable_popen(cmd, shell=True, executable="/bin/bash", stdout=subprocess.PIPE, stderr=subprocess.STDOUT) if code != 0: raise Exception("proxy renewal failed: {}".format(out))
def renew_voms_proxy(passwd="", vo=None, lifetime="196:00")
Renews the voms proxy using a password *passwd*, an optional virtual organization name *vo*, and a default *lifetime* of 8 days. The password is written to a temporary file first and piped into the renewal command to ensure it is not visible in the process list.
3.192312
3.255139
0.980699
# get the proxy file proxy_file = get_voms_proxy_file() if not os.path.exists(proxy_file): raise Exception("proxy file '{}' does not exist".format(proxy_file)) if cache: if isinstance(cache, six.string_types): cache_file = cache else: cache_file = proxy_file + "_delegation_cache.json" def remove_cache(): try: if os.path.exists(cache_file): os.remove(cache_file) except OSError: pass # create the hash of the proxy file content with open(proxy_file, "r") as f: proxy_hash = create_hash(f.read()) # already delegated? cache_data = {} if os.path.exists(cache_file): with open(cache_file, "r") as f: try: cache_data = json.load(f) except: remove_cache() # is the hash up-to-date? if cache_data.get("hash") != proxy_hash: remove_cache() cache_data = {} # proxy already delegated to that endpoint? elif endpoint in cache_data.get("ids", []): return str(cache_data["ids"][endpoint]) # do the actual delegation delegation_id = uuid.uuid4().hex cmd = ["glite-ce-delegate-proxy", "-e", endpoint, delegation_id] code = interruptable_popen(cmd, stdout=stdout, stderr=stderr)[0] if code != 0: raise Exception("glite proxy delegation to endpoint {} failed".format(endpoint)) if cache: # write the id back to the delegation file cache_data["hash"] = proxy_hash cache_data.setdefault("ids", {})[endpoint] = delegation_id with open(cache_file, "w") as f: json.dump(cache_data, f, indent=4) os.chmod(cache_file, 0o0600) return delegation_id
def delegate_voms_proxy_glite(endpoint, stdout=None, stderr=None, cache=True)
Delegates the voms proxy via gLite to an *endpoint*, e.g. ``grid-ce.physik.rwth-aachen.de:8443``. *stdout* and *stderr* are passed to the *Popen* constructor for executing the ``glite-ce-delegate-proxy`` command. When *cache* is *True*, a json file is created alongside the proxy file, which stores the delegation ids per endpoint. The next time the exact same proxy should be delegated to the same endpoint, the cached delegation id is returned.
2.346751
2.187577
1.072763
formatter = FormatterRegister.formatters.get(name) if formatter or silent: return formatter else: raise Exception("cannot find formatter '{}'".format(name))
def get_formatter(name, silent=False)
Returns the formatter class whose name attribute is *name*. When no class could be found and *silent* is *True*, *None* is returned. Otherwise, an exception is raised.
4.518277
4.659156
0.969763
formatters = [f for f in six.itervalues(FormatterRegister.formatters) if f.accepts(path)] if formatters or silent: return formatters else: raise Exception("cannot find formatter for path '{}'".format(path))
def find_formatters(path, silent=True)
Returns a list of formatter classes which would accept the file given by *path*. When no classes could be found and *silent* is *True*, an empty list is returned. Otherwise, an exception is raised.
3.979155
3.800093
1.047121
if name == AUTO_FORMATTER: return find_formatters(path, silent=False)[0] else: return get_formatter(name, silent=False)
def find_formatter(name, path)
Returns the formatter class whose name attribute is *name* when *name* is not *AUTO_FORMATTER*. Otherwise, the first formatter that accepts *path* is returned. Internally, this method simply uses :py:func:`get_formatter` or :py:func:`find_formatters` depending on the value of *name*.
4.873771
3.326536
1.465119
encoded = base64.b64encode(six.b(" ".join(str(v) for v in value) or "-")) return encoded.decode("utf-8") if six.PY3 else encoded
def encode_list(cls, value)
Encodes a list *value* into a string via base64 encoding.
4.005278
3.575444
1.120218
return [ self.task_cls.__module__, self.task_cls.__name__, self.encode_list(self.task_params), self.encode_list(self.branches), self.encode_bool(self.auto_retry), self.encode_list(self.dashboard_data), ]
def get_args(self)
Returns the list of encoded job arguments. The order of this list corresponds to the arguments expected by the job wrapper script.
5.275692
4.656079
1.133076
for pkg in flatten(packages): if pkg in loaded_packages: logger.debug("skip contrib package '{}', already loaded".format(pkg)) continue loaded_packages.append(pkg) mod = __import__("law.contrib.{}".format(pkg), globals(), locals(), [pkg]) logger.debug("loaded contrib package '{}'".format(pkg)) for attr in mod.__all__: if hasattr(law, attr): logger.info("cannot register 'law.contrib.{0}.{1}' to 'law.{1}', " "already exists".format(pkg, attr)) else: setattr(law, attr, getattr(mod, attr)) law.__all__.append(attr) logger.debug("registered 'law.contrib.{0}.{1}' to 'law.{1}'".format(pkg, attr))
def load(*packages)
Loads contrib *packages* and adds members exposed in ``__all__`` to the law main module. Example: .. code-block:: python import law law.contrib.load("numpy") print(law.NumpyFormatter) # -> <class 'law.contrib.numpy.formatter.NumpyFormatter'> It is ensured that packages are loaded only once.
2.840257
2.670952
1.063388
cfg = Config.instance() if not recipient: recipient = cfg.get_expanded("notifications", "mail_recipient") if not sender: sender = cfg.get_expanded("notifications", "mail_sender") if not smtp_host: smtp_host = cfg.get_expanded("notifications", "mail_smtp_host") if not smtp_port: smtp_port = cfg.get_expanded("notifications", "mail_smtp_port") if not recipient or not sender: logger.warning("cannot send mail notification, recipient ({}) or sender ({}) empty".format( recipient, sender)) return False return send_mail(recipient, sender, title, message, smtp_host=smtp_host, smtp_port=smtp_port)
def notify_mail(title, message, recipient=None, sender=None, smtp_host=None, smtp_port=None, **kwargs)
Mail notification method taking a *title* and a string *message*. *recipient*, *sender*, *smtp_host* and *smtp_port* default to the configuration values in the [notifications] section.
1.94847
1.914434
1.017779
global _patched if _patched: return _patched = True patch_default_retcodes() patch_worker_run_task() patch_worker_factory() patch_keepalive_run() patch_cmdline_parser() logger.debug("applied law-specific luigi patches")
def patch_all()
Runs all patches. This function ensures that a second invocation has no effect.
11.502361
10.961604
1.049332
import luigi.retcodes retcode = luigi.retcodes.retcode retcode.already_running._default = 10 retcode.missing_data._default = 20 retcode.not_run._default = 30 retcode.task_failed._default = 40 retcode.scheduling_error._default = 50 retcode.unhandled_exception._default = 60
def patch_default_retcodes()
Sets the default luigi return codes in ``luigi.retcodes.retcode`` to: - already_running: 10 - missing_data: 20 - not_run: 30 - task_failed: 40 - scheduling_error: 50 - unhandled_exception: 60
3.558859
1.731402
2.055478
_run_task = luigi.worker.Worker._run_task def run_task(self, task_id): task = self._scheduled_tasks[task_id] task._worker_id = self._id task._worker_task = self._first_task try: _run_task(self, task_id) finally: task._worker_id = None task._worker_task = None # make worker disposable when sandboxed if os.getenv("LAW_SANDBOX_SWITCHED") == "1": self._start_phasing_out() luigi.worker.Worker._run_task = run_task
def patch_worker_run_task()
Patches the ``luigi.worker.Worker._run_task`` method to store the worker id and the id of its first task in the task. This information is required by the sandboxing mechanism.
4.500735
4.022065
1.119011
def create_worker(self, scheduler, worker_processes, assistant=False): worker = luigi.worker.Worker(scheduler=scheduler, worker_processes=worker_processes, assistant=assistant, worker_id=os.getenv("LAW_SANDBOX_WORKER_ID")) worker._first_task = os.getenv("LAW_SANDBOX_WORKER_TASK") return worker luigi.interface._WorkerSchedulerFactory.create_worker = create_worker
def patch_worker_factory()
Patches the ``luigi.interface._WorkerSchedulerFactory`` to include sandboxing information when creating a worker instance.
4.750007
3.634975
1.306751
_run = luigi.worker.KeepAliveThread.run def run(self): # do not run the keep-alive loop when sandboxed if os.getenv("LAW_SANDBOX_SWITCHED") == "1": self.stop() else: _run(self) luigi.worker.KeepAliveThread.run = run
def patch_keepalive_run()
Patches the ``luigi.worker.KeepAliveThread.run`` to immediately stop the keep-alive thread when running within a sandbox.
6.376058
4.328652
1.472989
# store original functions _init = luigi.cmdline_parser.CmdlineParser.__init__ # patch init def __init__(self, cmdline_args): _init(self, cmdline_args) self.cmdline_args = cmdline_args luigi.cmdline_parser.CmdlineParser.__init__ = __init__
def patch_cmdline_parser()
Patches the ``luigi.cmdline_parser.CmdlineParser`` to store the original command line arguments for later processing in the :py:class:`law.config.Config`.
3.314648
3.181491
1.041854
# test import import slackclient # noqa: F401 cfg = Config.instance() # get default token and channel if not token: token = cfg.get_expanded("notifications", "slack_token") if not channel: channel = cfg.get_expanded("notifications", "slack_channel") if not token or not channel: logger.warning("cannot send Slack notification, token ({}) or channel ({}) empty".format( token, channel)) return False # append the user to mention to the title # unless explicitly set to empty string mention_text = "" if mention_user is None: mention_user = cfg.get_expanded("notifications", "slack_mention_user") if mention_user: mention_text = " (@{})".format(mention_user) # request data for the API call request = { "channel": channel, "as_user": True, "parse": "full", } # standard or attachment content? if isinstance(content, six.string_types): request["text"] = "{}{}\n\n{}".format(title, mention_text, content) else: # content is a dict, send its data as an attachment request["text"] = "{} {}".format(title, mention_text) request["attachments"] = at = { "color": attachment_color, "fields": [], "fallback": "{}{}\n\n".format(title, mention_text), } # fill the attachment fields and extend the fallback for key, value in content.items(): at["fields"].append({ "title": key, "value": value, "short": len(value) <= short_threshold, }) at["fallback"] += "_{}_: {}\n".format(key, value) # extend by arbitrary kwargs request.update(kwargs) # threaded, non-blocking API communication thread = threading.Thread(target=_notify_slack, args=(token, request)) thread.start() return True
def notify_slack(title, content, attachment_color="#4bb543", short_threshold=40, token=None, channel=None, mention_user=None, **kwargs)
Sends a Slack notification and returns *True* on success. The communication with the Slack API might have some delays and is therefore handled by a thread. The format of the notification depends on *content*. If it is a string, a simple text notification is sent. Otherwise, it should be a dictionary whose fields are used to build a message attachment with two-column formatting.
3.192994
3.165342
1.008736
def wrapper(self, *args, **kwargs): if self.slow: time.sleep(random.randint(5, 15)) return func(self, *args, **kwargs) return wrapper
def maybe_wait(func)
Wrapper around run() methods that reads the *slow* flag to decide whether to wait some seconds for illustrative purposes. This is very straightforward, so there is no need for functools.wraps here.
2.730939
2.010565
1.358294
query_data = {} for line in out.strip().split("\n"): parts = line.split() if len(parts) < 6: continue job_id = parts[0] status_flag = parts[2] # map the status status = cls.map_status(status_flag) # save the result query_data[job_id] = cls.job_status_dict(job_id=job_id, status=status) return query_data
def parse_query_output(cls, out)
Example output to parse:

.. code-block:: none

    141914132 user_name DONE queue_name exec_host b63cee711a job_name Feb 8 14:54
3.252936
3.273132
0.99383
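A short sketch of how the method above would parse the example line from its docstring; ``JobManager`` stands in for whatever job manager class actually defines the classmethod, and the exact status string depends on that class's ``map_status`` implementation:

out = "141914132 user_name DONE queue_name exec_host b63cee711a job_name Feb 8 14:54"
query_data = JobManager.parse_query_output(out)
# query_data["141914132"] is a job status dict with the mapped status, e.g. "finished"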
global console_handler

# make sure logging is setup only once
if console_handler:
    return

# set the handler of the law root logger
console_handler = logging.StreamHandler()
console_handler.setFormatter(LogFormatter())
logging.getLogger("law").addHandler(console_handler)

# set levels for all loggers
for name, level in Config.instance().items("logging"):
    level = level.upper()
    if hasattr(logging, level):
        logger = logging.getLogger(name)
        logger.setLevel(getattr(logging, level))
        logger.debug("registered logger with level '{}'".format(level))
def setup_logging()
Sets up the internal logging mechanism, i.e., it creates the :py:attr:`console_handler`, sets its formatting, and mounts it on the main ``"law"`` logger. It also sets the levels of all loggers that are given in the law config.
3.741879
2.948139
1.269234
# just print the file location?
if args.location:
    print(Config.instance().config_file)
    return

# every option below requires the name to be set
if not args.name:
    abort("please give the name of the config in the format <section>[.<option>]")

# removal
if args.remove:
    abort("config removal not yet implemented")

# setting
if args.value:
    abort("config setting not yet implemented")

# getting
print(get_config(args.name, expand=args.expand))
def execute(args)
Executes the *config* subprogram with parsed commandline *args*.
6.494103
6.742909
0.963101
cfg = Config.instance()
only_section = "." not in name

# when only the section is given, print all keys
if only_section:
    return "\n".join(cfg.keys(name))
else:
    section, option = name.split(".", 1)
    func = cfg.get_expanded if expand else cfg.get
    return func(section, option)
def get_config(name, expand=False)
Returns the config value that corresponds to *name*, which must have the format ``<section>[.<option>]``. When an option is given and *expand* is *True*, variables are expanded in the returned value.
5.239452
5.121593
1.023012
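Two hedged usage examples for the helper above; the section and option names are only illustrative and must exist in the law config for the calls to succeed:

# list all option keys of a section
print(get_config("notifications"))

# get a single option with variables expanded
print(get_config("notifications.slack_channel", expand=True))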
parser = sub_parsers.add_parser("run", prog="law run", description="Run a task with" " configurable parameters. See http://luigi.rtfd.io/en/stable/running_luigi.html for more" " info.") parser.add_argument("task_family", help="a task family registered in the task database file or" " a module and task class in the format <module>.<class>") parser.add_argument("parameter", nargs="*", help="task parameters")
def setup_parser(sub_parsers)
Sets up the command line parser for the *run* subprogram and adds it to *sub_parsers*.
6.617741
6.417169
1.031256
task_family = None
error = None

# try to infer the task module from the passed task family and import it
parts = args.task_family.rsplit(".", 1)
if len(parts) == 2:
    modid, cls_name = parts
    try:
        mod = __import__(modid, globals(), locals(), [cls_name])
        if hasattr(mod, cls_name):
            task_cls = getattr(mod, cls_name)
            if not issubclass(task_cls, Task):
                abort("object '{}' is not a Task".format(args.task_family))
            task_family = task_cls.task_family
    except ImportError as e:
        logger.warning("import error in module {}: {}".format(modid, e))
        error = e

# read task info from the index file and import it
if task_family is None:
    index_file = Config.instance().get_expanded("core", "index_file")
    if os.path.exists(index_file):
        info = read_task_from_index(args.task_family, index_file)
        if not info:
            abort("task family '{}' not found in index".format(args.task_family))
        modid, task_family, _ = info
        __import__(modid, globals(), locals())

# complain when no task could be found
if task_family is None:
    if error:
        raise error
    else:
        abort("task '{}' not found".format(args.task_family))

# import the module and run luigi
luigi_run([task_family] + sys.argv[3:])
def execute(args)
Executes the *run* subprogram with parsed commandline *args*.
3.249768
3.24475
1.001547
# read task information from the index file given a task family
if index_file is None:
    index_file = Config.instance().get_expanded("core", "index_file")

# open and go through lines
with open(index_file, "r") as f:
    for line in f.readlines():
        line = line.strip()
        if line.count(":") >= 2:
            modid, family, params = line.split(":", 2)
            if family == task_family:
                return modid, family, params

return None
def read_task_from_index(task_family, index_file=None)
Returns module id, task family and space-separated parameters in a tuple for a task given by *task_family* from the *index_file*. When *None*, the *index_file* refers to the default as defined in :py:mod:`law.config`. Returns *None* when the task could not be found.
3.910975
3.581975
1.091849
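A small usage sketch for the lookup above; the index file line and the task family are hypothetical, but they follow the colon-separated format the function expects:

# an index file line has the shape "<module>:<task_family>:<parameters>"
# e.g.  my_analysis.tasks:MyTask:--n-events --dataset
info = read_task_from_index("MyTask")
if info:
    modid, family, params = info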
# setup the main parser and sub parsers
parser = ArgumentParser(prog="law", description="The law command line tool.")
sub_parsers = parser.add_subparsers(help="subcommands", dest="command")

# add main arguments
parser.add_argument("--version", "-V", action="version", version=law.__version__)

# setup all progs
mods = {}
for prog in progs:
    mods[prog] = import_module("law.cli." + prog)
    mods[prog].setup_parser(sub_parsers)

# parse args and dispatch execution
if len(sys.argv) >= 2 and sys.argv[1] in forward_progs:
    args = parser.parse_args(sys.argv[1:3])
else:
    args = parser.parse_args()

if args.command:
    mods[args.command].execute(args)
else:
    parser.print_help()
def run()
Entry point to the law cli. Sets up all parsers, parses all arguments, and executes the requested subprogram.
3.1058
2.848398
1.090367
@functools.wraps(func)
def wrapper(self, job_data, event, job_num, *args, **kwargs):
    job_id = job_data["job_id"]
    dashboard_status = self.map_status(job_data.get("status"), event)

    # nothing to do when the status is invalid or did not change
    if not dashboard_status or self._last_states.get(job_id) == dashboard_status:
        return None

    # set the new status
    self._last_states[job_id] = dashboard_status

    return func(self, job_data, event, job_num, *args, **kwargs)

return wrapper
def cache_by_status(func)
Decorator for :py:meth:`BaseJobDashboard.publish` (and inheriting classes) that caches the last published status to decide whether a new publication is necessary. When the status did not change since the last call, the actual publish method is not invoked and *None* is returned.
2.998933
2.919059
1.027363
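A sketch of how an inheriting dashboard might apply the decorator above; the class body and the ``send_to_backend`` helper are purely illustrative, not part of the original code:

class MyDashboard(BaseJobDashboard):

    @cache_by_status
    def publish(self, job_data, event, job_num, *args, **kwargs):
        # only reached when the mapped status changed since the last call
        send_to_backend(job_data["job_id"], self.map_status(job_data.get("status"), event))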
parser = sub_parsers.add_parser("software", prog="law software", description="Create or update" " the law software cache ({}). This is only required for some sandboxes that need to" " forward software into containers.".format(get_sw_dir())) parser.add_argument("--remove", "-r", action="store_true", help="remove the software cache" " directory and exit") parser.add_argument("--location", "-l", action="store_true", help="print the location of the" " software cache directory and exit")
def setup_parser(sub_parsers)
Sets up the command line parser for the *software* subprogram and adds it to *sub_parsers*.
6.043494
5.604253
1.078376
sw_dir = get_sw_dir()

# just print the cache location?
if args.location:
    print(sw_dir)
    return

# just remove the current software cache?
if args.remove:
    remove_software_cache(sw_dir)
    return

# rebuild the software cache
build_software_cache(sw_dir)
def execute(args)
Executes the *software* subprogram with parsed commandline *args*.
4.521192
4.344383
1.040698
# ensure the cache is empty
sw_dir = get_sw_dir(sw_dir)
remove_software_cache(sw_dir)
os.makedirs(sw_dir)

# reload dependencies to find the proper module paths
reload_dependencies(force=True)

for mod in deps:
    path = os.path.dirname(mod.__file__)
    name, ext = os.path.splitext(os.path.basename(mod.__file__))
    # single file or module?
    if name == "__init__":
        # copy the entire module
        name = os.path.basename(path)
        shutil.copytree(path, os.path.join(sw_dir, name))
    else:
        shutil.copy2(os.path.join(path, name + ".py"), sw_dir)
def build_software_cache(sw_dir=None)
Builds up the software cache directory at *sw_dir* by simply copying all required python modules. *sw_dir* is evaluated with :py:func:`get_sw_dir`.
3.213517
3.121431
1.029501
sw_dir = get_sw_dir(sw_dir)
if os.path.exists(sw_dir):
    shutil.rmtree(sw_dir)
def remove_software_cache(sw_dir=None)
Removes the software cache directory at *sw_dir* which is evaluated with :py:func:`get_sw_dir`.
2.222472
1.984263
1.120049
global _reloaded_deps

if _reloaded_deps and not force:
    return
_reloaded_deps = True

for mod in deps:
    six.moves.reload_module(mod)
    logger.debug("reloaded module '{}'".format(mod))
def reload_dependencies(force=False)
Reloads all python modules that law depends on. Currently, these are just *luigi* and *six*. Unless *force* is *True*, multiple calls to this function will not have any effect.
3.820989
3.51325
1.087594
sw_dir = get_sw_dir(sw_dir)
if os.path.exists(sw_dir):
    sys.path.insert(1, sw_dir)
    if reload_deps:
        reload_dependencies()
def use_software_cache(sw_dir=None, reload_deps=False)
Adjusts ``sys.path`` so that the cached software at *sw_dir* is used. *sw_dir* is evaluated with :py:func:`get_sw_dir`. When *reload_deps* is *True*, :py:func:`reload_dependencies` is invoked.
2.906712
2.971077
0.978336
if sw_dir is None:
    sw_dir = Config.instance().get("core", "software_dir")

sw_dir = os.path.expandvars(os.path.expanduser(sw_dir))

return sw_dir
def get_sw_dir(sw_dir=None)
Returns the software directory defined in the ``core.software_dir`` config option. When *sw_dir* is not *None*, it is expanded and returned instead.
2.981194
2.683127
1.111089
global guarded_tfile_cls

if not guarded_tfile_cls:
    import ROOT

    class GuardedTFile(ROOT.TFile):

        def __enter__(self):
            return self

        def __exit__(self, exc_type, exc_value, traceback):
            if self.IsOpen():
                self.Close()

    guarded_tfile_cls = GuardedTFile

return guarded_tfile_cls(*args, **kwargs)
def GuardedTFile(*args, **kwargs)
Factory function that lazily creates the guarded TFile class, and creates and returns an instance with all passed *args* and *kwargs*. This is required as we do not want to import ROOT in the global scope.
2.0948
1.918047
1.092153
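A hedged usage example for the factory above; it requires a working ROOT installation, and the file and tree names are placeholders:

with GuardedTFile("data.root", "READ") as f:
    tree = f.Get("events")
# the file is closed automatically when leaving the context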
@functools.wraps(func)
def wrapper(self):
    return func(self.as_workflow())

return property(wrapper)
def workflow_property(func)
Decorator to declare a property that is stored only on a workflow but makes it also accessible from branch tasks. Internally, branch tasks are re-instantiated with ``branch=-1``, and the decorated property is invoked on the resulting workflow. You might want to use this decorator for a property that is common (and mutable) to a workflow and all its branch tasks, e.g. for static data. Example:

.. code-block:: python

    class MyTask(Workflow):

        def __init__(self, *args, **kwargs):
            super(MyTask, self).__init__(*args, **kwargs)

            if self.is_workflow():
                self._common_data = some_demanding_computation()

        @workflow_property
        def common_data(self):
            # this method is always called with *self* being the *workflow*
            return self._common_data
3.319103
6.313466
0.525718
def wrapper(func):
    _attr = attr or "_workflow_cached_" + func.__name__

    @functools.wraps(func)
    def getter(self):
        wf = self.as_workflow()
        if not hasattr(wf, _attr):
            setattr(wf, _attr, func(wf))
        return getattr(wf, _attr)

    _setter = None
    if setter:
        def _setter(self, value):
            wf = self.as_workflow()
            setattr(wf, _attr, value)
        _setter.__name__ = func.__name__

    return property(fget=getter, fset=_setter)

return wrapper if not func else wrapper(func)
def cached_workflow_property(func=None, attr=None, setter=True)
Decorator to declare an attribute that is stored only on a workflow and also cached for subsequent calls. Therefore, the decorated method is expected to (lazily) provide the value to cache. The resulting value is stored as ``_workflow_cached_<func.__name__>`` on the workflow, which can be overwritten by setting the *attr* argument. By default, a setter is provided to overwrite the cached value. Set *setter* to *False* to disable this feature. Example:

.. code-block:: python

    class MyTask(Workflow):

        @cached_workflow_property
        def common_data(self):
            # this method is always called with *self* being the *workflow*
            return some_demanding_computation()

        @cached_workflow_property(attr="my_own_property", setter=False)
        def common_data2(self):
            return some_other_computation()
2.120768
2.432997
0.871669
if callable(self.task.workflow_complete):
    return self.task.workflow_complete()
else:
    return super(BaseWorkflowProxy, self).complete()
def complete(self)
Custom completion check that invokes the task's *workflow_complete* if it is callable, or just does the default completion check otherwise.
5.955139
2.985456
1.994717
reqs = OrderedDict()
reqs.update(self.task.workflow_requires())
return reqs
def requires(self)
Returns the default workflow requirements in an ordered dictionary, which is updated with the return value of the task's *workflow_requires* method.
11.437043
3.805487
3.005409
if self.task.target_collection_cls is not None:
    cls = self.task.target_collection_cls
elif self.task.outputs_siblings:
    cls = SiblingFileCollection
else:
    cls = TargetCollection

targets = luigi.task.getpaths(self.task.get_branch_tasks())
collection = cls(targets, threshold=self.threshold(len(targets)))

return OrderedDict([("collection", collection)])
def output(self)
Returns the default workflow outputs in an ordered dictionary. At the moment this is just the collection of outputs of the branch tasks, stored with the key ``"collection"``.
7.837573
5.627209
1.392799
if n is None:
    n = len(self.task.branch_map())

acceptance = self.task.acceptance
return (acceptance * n) if acceptance <= 1 else acceptance
def threshold(self, n=None)
Returns the threshold number of tasks that need to be complete in order to consider the workflow as being complete itself. This takes into account the :py:attr:`law.BaseWorkflow.acceptance` parameter of the workflow. The threshold is passed to the :py:class:`law.TargetCollection` (or :py:class:`law.SiblingFileCollection`) within :py:meth:`output`. By default, the maximum number of tasks is taken from the length of the branch map. For performance purposes, you can set this value, *n*, directly.
9.969885
4.898302
2.035376
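For illustration, with a hypothetical workflow of 100 branch tasks the two modes of the *acceptance* parameter behave as follows:

# relative acceptance: 0.9 <= 1, so threshold(100) -> 0.9 * 100 = 90.0
# absolute acceptance: a value > 1, e.g. 95, is returned as-is, so threshold(100) -> 95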