code                      string
signature                 string
docstring                 string
loss_without_docstring    float64
loss_with_docstring       float64
factor                    float64
def _pulse():
    start_time = time.time()

    while True:
        data = json.dumps({"header": "pyblish-qml:server.pulse"})

        if six.PY3:
            data = data.encode("ascii")

        try:
            self.popen.stdin.write(data + b"\n")
            self.popen.stdin.flush()
        except IOError:
            break

        # Send pulse every 5 seconds
        time.sleep(5.0 - ((time.time() - start_time) % 5.0))

thread = threading.Thread(target=_pulse)
thread.daemon = True
thread.start()
def _start_pulse(self)
Send a pulse to the child process

The child process would run forever if the parent process failed in a
way that prevents it from killing the child. The pulse tells the child
that the server is still running; once the server stops sending pulse
messages, the child kills itself automatically.
3.380916
3.085513
1.095739
if aschild: print("Starting pyblish-qml") compat.main() app = Application(APP_PATH, targets) app.listen() print("Done, don't forget to call `show()`") return app.exec_() else: print("Starting pyblish-qml server..") service = ipc.service.MockService() if demo else ipc.service.Service() server = ipc.server.Server(service, targets=targets) proxy = ipc.server.Proxy(server) proxy.show(settings.to_dict()) server.listen() server.wait()
def main(demo=False, aschild=False, targets=[])
Start the Qt-runtime and show the window

Arguments:
    demo (bool, optional): Use the mock IPC service
    aschild (bool, optional): Run as child of parent process
    targets (list, optional): Publishing targets, passed on to the
        application or server
7.128738
7.441773
0.957935
if event.type() == QtCore.QEvent.Close:
    modifiers = self.app.queryKeyboardModifiers()
    shift_pressed = QtCore.Qt.ShiftModifier & modifiers
    states = self.app.controller.states

    if shift_pressed:
        print("Force quitted..")
        self.app.controller.host.emit("pyblishQmlCloseForced")
        event.accept()

    elif any(state in states for state in ("ready", "finished")):
        self.app.controller.host.emit("pyblishQmlClose")
        event.accept()

    else:
        print("Not ready, hold SHIFT to force an exit")
        event.ignore()

return super(Window, self).event(event)
def event(self, event)
Allow GUI to be closed upon holding Shift
5.454304
5.037464
1.082748
window = self.window if client_settings: # Apply client-side settings settings.from_dict(client_settings) window.setWidth(client_settings["WindowSize"][0]) window.setHeight(client_settings["WindowSize"][1]) window.setTitle(client_settings["WindowTitle"]) window.setFramePosition( QtCore.QPoint( client_settings["WindowPosition"][0], client_settings["WindowPosition"][1] ) ) message = list() message.append("Settings: ") for key, value in settings.to_dict().items(): message.append(" %s = %s" % (key, value)) print("\n".join(message)) window.requestActivate() window.showNormal() # Work-around for window appearing behind # other windows upon being shown once hidden. previous_flags = window.flags() window.setFlags(previous_flags | QtCore.Qt.WindowStaysOnTopHint) window.setFlags(previous_flags) # Give statemachine enough time to boot up if not any(state in self.controller.states for state in ["ready", "finished"]): util.timer("ready") ready = QtTest.QSignalSpy(self.controller.ready) count = len(ready) ready.wait(1000) if len(ready) != count + 1: print("Warning: Could not enter ready state") util.timer_end("ready", "Awaited statemachine for %.2f ms") if client_settings: self.controller.data['autoValidate'] = client_settings.get('autoValidate', False) self.controller.data['autoPublish'] = client_settings.get('autoPublish', False) self.controller.show.emit() # Allow time for QML to initialise util.schedule(self.controller.reset, 500, channel="main")
def show(self, client_settings=None)
Display GUI

Once the QML interface has been loaded, use this to display it.

Arguments:
    client_settings (dict, optional): Visual settings, see settings.py
4.662523
4.716457
0.988565
previous_flags = self.window.flags()
self.window.setFlags(previous_flags | QtCore.Qt.WindowStaysOnTopHint)
def inFocus(self)
Set GUI on-top flag
5.215304
4.726765
1.103356
def _listen(): while True: line = self.host.channels["parent"].get() payload = json.loads(line)["payload"] # We can't call methods directly, as we are running # in a thread. Instead, we emit signals that do the # job for us. signal = { "show": "shown", "hide": "hidden", "quit": "quitted", "publish": "published", "validate": "validated", "rise": "risen", "inFocus": "inFocused", "outFocus": "outFocused", }.get(payload["name"]) if not signal: print("'{name}' was unavailable.".format( **payload)) else: try: getattr(self, signal).emit( *payload.get("args", [])) except Exception: traceback.print_exc() thread = threading.Thread(target=_listen) thread.daemon = True thread.start()
def listen(self)
Listen for incoming messages from host

TODO(marcus): We can't use this, as we are already listening on stdin
through client.py. To use this, we will have to find a way to receive
multiple signals from the same stdin and channel them to their
corresponding source.
5.126724
4.597264
1.115169
obj = _defer(target, args, kwargs, callback)
obj.finished.connect(lambda: _defer_cleanup(obj))
obj.start()
_defer_threads.append(obj)
return obj
def defer(target, args=None, kwargs=None, callback=None)
Perform operation in thread with callback

Instances are cached until finished, at which point they are garbage
collected. If we didn't do this, Python would step in and garbage
collect the thread before it had time to finish, resulting in an
exception.

Arguments:
    target (callable): Method or function to call
    callback (callable, optional): Method or function to call
        once `target` has finished.

Returns:
    The started deferred-thread object
4.08492
5.971524
0.684067
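A minimal usage sketch of the `defer` helper documented above, assuming it is the `pyblish_qml.util.defer` described by the docstring; the `slow_query` and `on_done` names are hypothetical.

import time

def slow_query():
    time.sleep(2)                      # stand-in for a slow host request
    return ["plugin-a", "plugin-b"]

def on_done(result):
    # Receives whatever `slow_query` returned, once the thread finishes
    print("received", result)

defer(slow_query, callback=on_done)    # returns immediately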
try:
    _jobs[channel].stop()
except (AttributeError, KeyError):
    pass

timer = QtCore.QTimer()
timer.setSingleShot(True)
timer.timeout.connect(func)
timer.start(time)

_jobs[channel] = timer
def schedule(func, time, channel="default")
Run `func` at a later `time` in a dedicated `channel` Given an arbitrary function, call this function after a given timeout. It will ensure that only one "job" is running within the given channel at any one time and cancel any currently running job if a new job is submitted before the timeout.
2.874473
2.918515
0.98491
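A short sketch of the debouncing behaviour the `schedule` docstring describes: submitting a second job on the same channel before the timeout cancels the first. The `flush_comment` name is illustrative.

def flush_comment():
    print("comment flushed")

schedule(flush_comment, 500, channel="commenting")
schedule(flush_comment, 500, channel="commenting")  # cancels the first job;
                                                    # only one flush happens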
result = "" for paragraph in text.split("\n\n"): result += " ".join(paragraph.split()) + "\n\n" result = result.rstrip("\n") # Remove last newlines # converting links to HTML pattern = r"(https?:\/\/(?:w{1,3}.)?[^\s]*?(?:\.[a-z]+)+)" pattern += r"(?![^<]*?(?:<\/\w+>|\/?>))" if re.search(pattern, result): html = r"<a href='\1'><font color='FF00CC'>\1</font></a>" result = re.sub(pattern, html, result) return result
def format_text(text)
Remove newlines, but preserve paragraphs
4.519508
4.127446
1.094989
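An illustrative call to `format_text`, assuming the regex above matches plain URLs as written; the input string is made up.

text = "First line\nof a paragraph.\n\nSee https://pyblish.com"
print(format_text(text))
# Expected: the in-paragraph newline collapses to a space, the paragraph
# break is kept, and the URL is wrapped in an <a href=...> tag:
# "First line of a paragraph.\n\nSee <a href='https://pyblish.com'>...</a>"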
# (NOTE) davidlatwe
# Thanks to this answer
# https://stackoverflow.com/questions/18740884
if len(args) == 0 or isinstance(args[0], types.FunctionType):
    args = []

@QtCore.pyqtSlot(*args)
def slotdecorator(func):
    @wraps(func)
    def wrapper(*args, **kwargs):
        try:
            func(*args)
        except Exception:
            traceback.print_exc()

    return wrapper

return slotdecorator
def SlotSentinel(*args)
Provides exception handling for all slots
5.683672
5.246748
1.083275
# Unicode
if isinstance(data, six.text_type):
    return data.encode("utf-8")

# Members of lists
if isinstance(data, list):
    return [_byteify(item) for item in data]

# Members of dicts
if isinstance(data, dict):
    return {
        _byteify(key): _byteify(value)
        for key, value in data.items()
    }

# Anything else, return the original form
return data
def _byteify(data)
Convert unicode to bytes
2.5087
2.466756
1.017004
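A small illustration of `_byteify` on a nested structure; the data values are made up, and the shown result assumes Python 3 byte-string reprs.

data = {u"name": u"Default", u"families": [u"demo.model"]}
_byteify(data)
# -> {b"name": b"Default", b"families": [b"demo.model"]}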
plugin = plugin.to_json()
instance = instance.to_json() if instance is not None else None
return self._dispatch("process", args=[plugin, instance, action])
def process(self, plugin, context, instance=None, action=None)
Transmit a `process` request to host Arguments: plugin (PluginProxy): Plug-in to process context (ContextProxy): Filtered context instance (InstanceProxy, optional): Instance to process action (str, optional): Action to process
4.682341
5.564596
0.841452
# This will give parent process 15 seconds to reset.
self._kill = threading.Timer(15, lambda: os._exit(0))
self._kill.start()
def _self_destruct(self)
Automatically exit if the parent process stops responding
8.15798
6.284283
1.298156
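A standalone sketch of the watchdog pattern used by `_self_destruct` together with the pulse sender shown earlier: the parent pings every 5 seconds, the child re-arms a 15-second kill timer on every ping and exits once pings stop. The `Watchdog` class is illustrative, not part of pyblish-qml.

import os
import threading

class Watchdog(object):
    def __init__(self, timeout=15):
        self._timeout = timeout
        self._timer = None
        self.reset()

    def reset(self):
        # Call this on every pulse received from the parent
        if self._timer is not None:
            self._timer.cancel()
        self._timer = threading.Timer(self._timeout, lambda: os._exit(0))
        self._timer.daemon = True
        self._timer.start()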
def _listen(): for line in iter(sys.stdin.readline, b""): try: response = json.loads(line) except Exception as e: # The parent has passed on a message that # isn't formatted in any particular way. # This is likely a bug. raise e else: if response.get("header") == "pyblish-qml:popen.response": self.channels["response"].put(line) elif response.get("header") == "pyblish-qml:popen.parent": self.channels["parent"].put(line) elif response.get("header") == "pyblish-qml:server.pulse": self._kill.cancel() # reset timer self._self_destruct() else: # The parent has passed on a message that # is JSON, but not in any format we recognise. # This is likely a bug. raise Exception("Unhandled message " "passed to Popen, '%s'" % line) thread = threading.Thread(target=_listen) thread.daemon = True thread.start()
def _listen(self)
Listen for messages passed from parent This method distributes messages received via stdin to their corresponding channel. Based on the format of the incoming message, the message is forwarded to its corresponding channel to be processed by its corresponding handler.
5.072214
4.554266
1.113728
data = json.dumps( { "header": "pyblish-qml:popen.request", "payload": { "name": func, "args": args or list(), } } ) # This should never happen. Each request is immediately # responded to, always. If it isn't the next line will block. # If multiple responses were made, then this will fail. # Both scenarios are bugs. assert self.channels["response"].empty(), ( "There were pending messages in the response channel") sys.stdout.write(data + "\n") sys.stdout.flush() try: message = self.channels["response"].get() if six.PY3: response = json.loads(message) else: response = _byteify(json.loads(message, object_hook=_byteify)) except TypeError as e: raise e else: assert response["header"] == "pyblish-qml:popen.response", response return response["payload"]
def _dispatch(self, func, args=None)
Send message to parent process Arguments: func (str): Name of function for parent to call args (list, optional): Arguments passed to function when called
5.386047
5.321522
1.012125
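The request/response envelope used by `_dispatch`, reconstructed from the code above; the function name "show" and empty argument list are illustrative values.

import json

# Request written by the child to stdout (one JSON document per line):
request = json.dumps({
    "header": "pyblish-qml:popen.request",
    "payload": {"name": "show", "args": []},
})

# The parent replies on the "response" channel with a matching envelope:
# {"header": "pyblish-qml:popen.response", "payload": <return value>}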
process = None
repair = None

name = plugin["name"] + "Proxy"
cls = type(name, (cls,), plugin)

# Emulate function
for name in ("process", "repair"):
    args = ", ".join(plugin["process"]["args"])
    func = "def {name}({args}): pass".format(name=name, args=args)
    exec(func)

cls.process = process
cls.repair = repair
cls.__orig__ = plugin

return cls
def from_json(cls, plugin)
Build PluginProxy object from incoming dictionary Emulate a plug-in by providing access to attributes in the same way they are accessed using the remote object. This allows for it to be used by members of :mod:`pyblish.logic`.
5.489808
4.93743
1.111876
parent = kwargs.pop("parent", None)
cls = type("Item", (AbstractItem,), kwargs.copy())

self = cls(parent)
self.json = kwargs  # Store as json

for key, value in kwargs.items():
    if hasattr(self, key):
        key = PropertyType.prefix + key
    setattr(self, key, value)

return self
def Item(**kwargs)
Factory function for QAbstractListModel items Any class attributes are converted into pyqtProperties and must be declared with its type as value. Special keyword "parent" is not passed as object properties but instead passed to the QObject constructor. Usage: >>> item = Item(name="default name", ... age=5, ... alive=True) >>> assert item.name == "default name" >>> assert item.age == 5 >>> assert item.alive == True >>> >>> # Jsonifyable content >>> assert item.json == { ... "name": "default name", ... "age": 5, ... "alive": True ... }, item.json
5.597877
5.438179
1.029366
self.beginInsertRows(QtCore.QModelIndex(), self.rowCount(), self.rowCount())

item["parent"] = self
item = Item(**item)
self.items.append(item)

self.endInsertRows()

item.__datachanged__.connect(self._dataChanged)

return item
def add_item(self, item)
Add new item to model Each keyword argument is passed to the :func:Item factory function.
4.507364
4.079167
1.104972
index = self.items.index(item)

self.beginRemoveRows(QtCore.QModelIndex(), index, index)
self.items.remove(item)
self.endRemoveRows()
def remove_item(self, item)
Remove item from model
2.242289
1.980187
1.132362
index = self.items.index(item)
qindex = self.createIndex(index, 0)
self.dataChanged.emit(qindex, qindex)
def _dataChanged(self, item)
Explicitly emit dataChanged upon item changing
3.246956
2.886308
1.124951
item = {} item.update(defaults["common"]) item.update(defaults["plugin"]) for member in ["pre11", "name", "label", "optional", "category", "actions", "id", "order", "doc", "type", "module", "match", "hasRepair", "families", "contextEnabled", "instanceEnabled", "__instanceEnabled__", "path"]: item[member] = plugin[member] # Visualised in Perspective item["familiesConcatenated"] = ", ".join(plugin["families"]) # converting links to HTML pattern = r"(https?:\/\/(?:w{1,3}.)?[^\s]*?(?:\.[a-z]+)+)" pattern += r"(?![^<]*?(?:<\/\w+>|\/?>))" if item["doc"] and re.search(pattern, item["doc"]): html = r"<a href='\1'><font color='FF00CC'>\1</font></a>" item["doc"] = re.sub(pattern, html, item["doc"]) # Append GUI-only data item["itemType"] = "plugin" item["hasCompatible"] = True item["isToggled"] = plugin.get("active", True) item["verb"] = { "Selector": "Collect", "Collector": "Collect", "Validator": "Validate", "Extractor": "Extract", "Integrator": "Integrate", "Conformer": "Integrate", }.get(item["type"], "Other") for action in item["actions"]: if action["on"] == "all": item["actionsIconVisible"] = True self.add_section(item["verb"]) item = self.add_item(item) self.plugins.append(item)
def add_plugin(self, plugin)
Append `plugin` to model Arguments: plugin (dict): Serialised plug-in from pyblish-rpc Schema: plugin.json
6.386456
6.102021
1.046613
assert isinstance(instance, dict)

item = defaults["common"].copy()
item.update(defaults["instance"])
item.update(instance["data"])
item.update(instance)

item["itemType"] = "instance"
item["isToggled"] = instance["data"].get("publish", True)
item["hasCompatible"] = True
item["category"] = item["category"] or item["family"]

self.add_section(item["category"])

# Visualised in Perspective
families = [instance["data"]["family"]]
families.extend(instance["data"].get("families", []))
item["familiesConcatenated"] += ", ".join(families)

item = self.add_item(item)
self.instances.append(item)
def add_instance(self, instance)
Append `instance` to model Arguments: instance (dict): Serialised instance Schema: instance.json
5.706875
5.850436
0.975462
self.instances.remove(item)
self.remove_item(item)
def remove_instance(self, item)
Remove `instance` from model
4.626696
4.359278
1.061344
assert isinstance(name, str)

# Skip existing sections
for section in self.sections:
    if section.name == name:
        return section

item = defaults["common"].copy()
item["name"] = name
item["itemType"] = "section"

item = self.add_item(item)
self.sections.append(item)

return item
def add_section(self, name)
Append `section` to model Arguments: name (str): Name of section
4.559703
4.566751
0.998457
assert isinstance(context, dict)

item = defaults["common"].copy()
item.update(defaults["instance"])
item.update(context)

item["family"] = None
item["label"] = context["data"].get("label") or settings.ContextLabel
item["itemType"] = "instance"
item["isToggled"] = True
item["optional"] = False
item["hasCompatible"] = True

item = self.add_item(item)
self.instances.append(item)
def add_context(self, context, label=None)
Append `context` to model

Arguments:
    context (dict): Serialised context to add

Schema:
    context.json
6.728445
6.797383
0.989858
assert isinstance(result, dict), "%s is not a dictionary" % result for type in ("instance", "plugin"): id = (result[type] or {}).get("id") is_context = not id if is_context: item = self.instances[0] else: item = self.items.get(id) if item is None: # If an item isn't there yet # no worries. It's probably because # reset is still running and the # item in question is a new instance # not yet added to the model. continue item.isProcessing = False item.currentProgress = 1 item.processed = True item.hasWarning = item.hasWarning or any([ record["levelno"] == logging.WARNING for record in result["records"] ]) if result.get("error"): item.hasError = True item.amountFailed += 1 else: item.succeeded = True item.amountPassed += 1 item.duration += result["duration"] item.finishedAt = time.time() if item.itemType == "plugin" and not item.actionsIconVisible: actions = list(item.actions) # Context specific actions for action in list(actions): if action["on"] == "failed" and not item.hasError: actions.remove(action) if action["on"] == "succeeded" and not item.succeeded: actions.remove(action) if action["on"] == "processed" and not item.processed: actions.remove(action) if actions: item.actionsIconVisible = True # Update section item class DummySection(object): hasWarning = False hasError = False succeeded = False section_item = DummySection() for section in self.sections: if item.itemType == "plugin" and section.name == item.verb: section_item = section if (item.itemType == "instance" and section.name == item.category): section_item = section section_item.hasWarning = ( section_item.hasWarning or item.hasWarning ) section_item.hasError = section_item.hasError or item.hasError section_item.succeeded = section_item.succeeded or item.succeeded section_item.isProcessing = False
def update_with_result(self, result)
Update item-model with result from host State is sent from host after processing had taken place and represents the events that took place; including log messages and completion status. Arguments: result (dict): Dictionary following the Result schema
3.757829
3.700293
1.015549
for item in self.items:
    item.isProcessing = False
    item.currentProgress = 0
def reset_status(self)
Reset progress bars
8.388819
7.616677
1.101375
self._add_rule(self.excludes, role, value)
def add_exclusion(self, role, value)
Exclude item if `role` equals `value` Attributes: role (int, string): Qt role or name to compare `value` to value (object): Value to exclude
10.87217
13.19055
0.824239
self._remove_rule(self.excludes, role, value)
def remove_exclusion(self, role, value=None)
Remove exclusion rule Arguments: role (int, string): Qt role or name to remove value (object, optional): Value to remove. If none is supplied, the entire role will be removed.
9.688647
8.100985
1.195984
self._add_rule(self.includes, role, value)
def add_inclusion(self, role, value)
Include item if `role` equals `value`

Attributes:
    role (int): Qt role to compare `value` to
    value (object): Value to include
12.331148
15.128143
0.815113
self._remove_rule(self.includes, role, value)
def remove_inclusion(self, role, value=None)
Remove inclusion rule
10.443452
6.591344
1.584419
if role not in group:
    group[role] = list()

group[role].append(value)

self.invalidate()
def _add_rule(self, group, role, value)
Implementation detail
4.806565
4.359205
1.102624
if role not in group:
    return

if value is None:
    group.pop(role, None)
else:
    group[role].remove(value)

self.invalidate()
def _remove_rule(self, group, role, value=None)
Implementation detail
3.130214
2.979831
1.050467
group.clear()

for rule in rules:
    self._add_rule(group, *rule)

self.invalidate()
def _set_rules(self, group, rules)
Implementation detail
5.740563
6.095092
0.941834
model = self.sourceModel() item = model.items[source_row] key = getattr(item, "filter", None) if key is not None: regex = self.filterRegExp() if regex.pattern(): match = regex.indexIn(key) return False if match == -1 else True for role, values in self.includes.items(): data = getattr(item, role, None) if data not in values: return False for role, values in self.excludes.items(): data = getattr(item, role, None) if data in values: return False return super(ProxyModel, self).filterAcceptsRow( source_row, source_parent)
def filterAcceptsRow(self, source_row, source_parent)
Exclude items in `self.excludes`
2.835112
2.682888
1.056739
instance = None error = None if result["instance"] is not None: instance = format_instance(result["instance"]) if result["error"] is not None: error = format_error(result["error"]) result = { "success": result["success"], "plugin": format_plugin(result["plugin"]), "instance": instance, "error": error, "records": format_records(result["records"]), "duration": result["duration"] } if os.getenv("PYBLISH_SAFE"): schema.validate(result, "result") return result
def format_result(result)
Serialise Result
3.255446
3.193316
1.019456
formatted = list()

for record_ in records:
    formatted.append(format_record(record_))

return formatted
def format_records(records)
Serialise multiple records
4.44256
4.087235
1.086935
record = dict( (key, getattr(record, key, None)) for key in ( "threadName", "name", "thread", "created", "process", "processName", "args", "module", "filename", "levelno", "exc_text", "pathname", "lineno", "msg", "exc_info", "funcName", "relativeCreated", "levelname", "msecs") ) # Humanise output and conform to Exceptions record["message"] = str(record.pop("msg")) if os.getenv("PYBLISH_SAFE"): schema.validate(record, "record") return record
def format_record(record)
Serialise LogRecord instance
4.476944
4.376306
1.022996
formatted = {"message": str(error)} if hasattr(error, "traceback"): fname, line_no, func, exc = error.traceback formatted.update({ "fname": fname, "line_number": line_no, "func": func, "exc": exc }) return formatted
def format_error(error)
Serialise exception
4.127395
3.925222
1.051506
instance = {
    "name": instance.name,
    "id": instance.id,
    "data": format_data(instance.data),
    "children": list(),
}

if os.getenv("PYBLISH_SAFE"):
    schema.validate(instance, "instance")

return instance
def format_instance(instance)
Serialise `instance` For children to be visualised and modified, they must provide an appropriate implementation of __str__. Data that isn't JSON compatible cannot be visualised nor modified. Attributes: name (str): Name of instance niceName (str, optional): Nice name of instance family (str): Name of compatible family data (dict, optional): Associated data publish (bool): Whether or not instance should be published Returns: Dictionary of JSON-compatible instance
5.967261
7.429458
0.803189
formatted = []

for plugin_ in plugins:
    formatted_plugin = format_plugin(plugin_)
    formatted.append(formatted_plugin)

return formatted
def format_plugins(plugins)
Serialise multiple plug-ins

Returns:
    List of JSON-compatible plug-ins
3.557207
3.854661
0.922832
type = "Other" for order, _type in {pyblish.plugin.CollectorOrder: "Collector", pyblish.plugin.ValidatorOrder: "Validator", pyblish.plugin.ExtractorOrder: "Extractor", pyblish.plugin.IntegratorOrder: "Integrator"}.items(): if pyblish.lib.inrange(plugin.order, base=order): type = _type module = plugin.__module__ if module == "__main__": # Support for in-memory plug-ins. path = "mem:%s" % plugin.__name__ else: try: path = os.path.abspath(sys.modules[module].__file__) except Exception: path = "unknown" has_repair = False args = inspect.getargspec(plugin.repair).args if "context" in args or "instance" in args: has_repair = True # Legacy abilities if hasattr(plugin, "repair_context") or hasattr(plugin, "repair_instance"): has_repair = True output = { "label": plugin.label, "id": plugin.id, "version": plugin.version, "category": getattr(plugin, "category", None), "requires": plugin.requires, "order": plugin.order, "optional": plugin.optional, "hosts": plugin.hosts, "families": plugin.families, # New in pyblish-base 1.5.2 "targets": getattr(plugin, "targets", list()), "doc": inspect.getdoc(plugin), "active": plugin.active, "match": plugin.match, # Metadata "__pre11__": plugin.__pre11__, "__contextEnabled__": plugin.__contextEnabled__, "__instanceEnabled__": plugin.__instanceEnabled__, "path": path, "pre11": plugin.__pre11__, "contextEnabled": plugin.__contextEnabled__, "instanceEnabled": plugin.__instanceEnabled__, "name": plugin.__name__, "type": type, "module": module, "hasRepair": has_repair, "process": { "args": inspect.getargspec(plugin.process).args, }, "repair": { "args": inspect.getargspec(plugin.repair).args, }, "actions": [format_action(a) for a in plugin.actions], } if os.getenv("PYBLISH_SAFE"): schema.validate(output, "plugin") return output
def format_plugin(plugin)
Serialise `plugin` Attributes: name: Name of Python class id: Unique identifier version: Plug-in version category: Optional category requires: Plug-in requirements order: Plug-in order optional: Is the plug-in optional? doc: The plug-in documentation hasRepair: Can the plug-in perform a repair? hasCompatible: Does the plug-in have any compatible instances? type: Which baseclass does the plug-in stem from? E.g. Validator module: File in which plug-in was defined contextEnabled: Does it process the Context? instanceEnabled: Does it process Instance(s)?
3.15232
2.897426
1.087973
test = pyblish.logic.registered_test()
state = {
    "nextOrder": None,
    "ordersWithError": set()
}

for plugin in plugins:
    state["nextOrder"] = plugin.order

    message = test(**state)
    if message:
        raise StopIteration("Stopped due to %s" % message)

    instances = pyblish.api.instances_by_plugin(context, plugin)
    if plugin.__instanceEnabled__:
        for instance in instances:
            yield plugin, instance
    else:
        yield plugin, None
def iterator(plugins, context)
An iterator for plug-in and instance pairs
6.128952
5.943991
1.031117
test = pyblish.logic.registered_test() state = { "nextOrder": None, "ordersWithError": set() } signals = { pyblish.api.ValidatorOrder: self.validating, pyblish.api.ExtractorOrder: self.extracting, pyblish.api.IntegratorOrder: self.integrating, } for plug, instance in iterator(plugins, context): if instance is not None and not instance.data.get("publish", True): continue state["nextOrder"] = plug.order for order in list(signals.keys()): if pyblish.lib.inrange(plug.order, order): signals.pop(order).emit() if not self.data["state"]["is_running"]: raise StopIteration("Stopped") if test(**state): raise StopIteration("Stopped due to %s" % test(**state)) try: # Notify GUI before commencing remote processing self.about_to_process.emit(plug, instance) result = self.host.process(plug, context, instance) except Exception as e: raise StopIteration("Unknown error: %s" % e) else: # Make note of the order at which the # potential error error occured. has_error = result["error"] is not None if has_error: state["ordersWithError"].add(plug.order) yield result
def iterator(self, plugins, context)
Primary iterator CAUTION: THIS RUNS IN A SEPARATE THREAD This is the brains of publishing. It handles logic related to which plug-in to process with which Instance or Context, in addition to stopping when necessary.
5.539783
5.430859
1.020057
index = self.data["proxies"]["plugin"].mapToSource( self.data["proxies"]["plugin"].index( index, 0, QtCore.QModelIndex())).row() item = self.data["models"]["item"].items[index] # Inject reference to the original index actions = [ dict(action, **{"index": index}) for action in item.actions ] # Context specific actions for action in list(actions): if action["on"] == "failed" and not item.hasError: actions.remove(action) if action["on"] == "succeeded" and not item.succeeded: actions.remove(action) if action["on"] == "processed" and not item.processed: actions.remove(action) if action["on"] == "notProcessed" and item.processed: actions.remove(action) # Discard empty categories, separators remaining_actions = list() index = 0 try: action = actions[index] except IndexError: pass else: while action: try: action = actions[index] except IndexError: break isempty = False if action["__type__"] in ("category", "separator"): try: next_ = actions[index + 1] if next_["__type__"] != "action": isempty = True except IndexError: isempty = True if not isempty: remaining_actions.append(action) index += 1 return remaining_actions
def getPluginActions(self, index)
Return actions from plug-in at `index` Arguments: index (int): Index at which item is located in model
3.547254
3.488947
1.016712
target = {"result": self.data["proxies"]["result"], "instance": self.data["proxies"]["instance"], "plugin": self.data["proxies"]["plugin"]}[target] if operation == "add": target.add_exclusion(role, value) elif operation == "remove": target.remove_exclusion(role, value) else: raise TypeError("operation must be either `add` or `remove`")
def exclude(self, target, operation, role, value)
Exclude a `role` of `value` at `target` Arguments: target (str): Destination proxy model operation (str): "add" or "remove" exclusion role (str): Role to exclude value (str): Value of `role` to exclude
3.706247
3.311606
1.119169
item = model.items[index]

data = {
    "name": item.name,
    "data": item.data,
    "doc": getattr(item, "doc", None),
    "path": getattr(item, "path", None),
}

return data
def __item_data(self, model, index)
Return item data as dict
3.24232
3.054556
1.06147
self.host.update(key="comment", value=comment)
self.host.emit("commented", comment=comment)
def comment_sync(self, comment)
Update the comment on the host and notify subscribers
9.536229
6.811892
1.399938
def update():
    context = self.host.cached_context
    context.data["comment"] = comment
    self.data["comment"] = comment

    # Notify subscribers of the comment
    self.comment_sync(comment)

    self.commented.emit()

# Update local cache a little later
util.schedule(update, 100, channel="commenting")
def on_commenting(self, comment)
The user is entering a comment
10.824616
11.067598
0.978046
if instance is None:
    instance_item = self.data["models"]["item"].instances[0]
else:
    instance_item = self.data["models"]["item"].instances[instance.id]

plugin_item = self.data["models"]["item"].plugins[plugin.id]

for section in self.data["models"]["item"].sections:
    if section.name == plugin_item.verb:
        section.isProcessing = True
    if section.name == instance_item.category:
        section.isProcessing = True

instance_item.isProcessing = True
plugin_item.isProcessing = True
def on_about_to_process(self, plugin, instance)
Reflect currently running pair in GUI
3.456757
3.368813
1.026105
def get_data(): model = self.data["models"]["item"] # Communicate with host to retrieve current plugins and instances # This can potentially take a very long time; it is run # asynchronously and initiates processing once complete. host_plugins = dict((p.id, p) for p in self.host.cached_discover) host_context = dict((i.id, i) for i in self.host.cached_context) plugins = list() instances = list() for plugin in models.ItemIterator(model.plugins): # Exclude Collectors if pyblish.lib.inrange( number=plugin.order, base=pyblish.api.Collector.order): continue plugins.append(host_plugins[plugin.id]) for instance in models.ItemIterator(model.instances): instances.append(host_context[instance.id]) return plugins, instances def on_data_received(args): self.run(*args, callback=on_finished) def on_finished(): self.host.emit("published", context=None) util.defer(get_data, callback=on_data_received)
def publish(self)
Start asynchronous publishing

Publishing takes into account all available and currently
toggled plug-ins and instances.
6.741343
6.235678
1.081092
# if "ready" not in self.states: # return self.error.emit("Not ready") # Initial set-up self.data["state"]["is_running"] = True # Setup statistics for better debugging. # (To be finalised in `on_finished`) util.timer("publishing") stats = {"requestCount": self.host.stats()["totalRequestCount"]} # For each completed task, update # the GUI and commence next task. def on_next(result): if isinstance(result, StopIteration): return on_finished(str(result)) self.data["models"]["item"].update_with_result(result) self.data["models"]["result"].update_with_result(result) # Once the main thread has finished updating # the GUI, we can proceed handling of next task. util.defer(self.host.context, callback=update_context) def update_context(ctx): item_model = self.data["models"]["item"] instance_items = {item.id: item for item in item_model.instances} for instance in ctx: id = instance.id item = instance_items.get(id) if item is not None: proxy = next((i for i in context if i.id == id), None) update_instance(item, proxy, instance.data) continue context.append(instance) item_model.add_instance(instance.to_json()) if len(ctx) < item_model.instance_count(): remove_instance(ctx, instance_items) util.defer(lambda: next(iterator), callback=on_next) def update_instance(item, proxy, data): # Update instance item model data for GUI item.isToggled = data.get("publish", True) item.optional = data.get("optional", True) item.category = data.get("category", data["family"]) families = [data["family"]] families.extend(data.get("families", [])) item.familiesConcatenated = ", ".join(families) if proxy is None: return # Update proxy instance data which currently being iterated in # the primary iterator proxy.data["publish"] = data.get("publish", True) proxy.data["family"] = data["family"] proxy.data["families"] = data.get("families", []) def remove_instance(ctx, items): instances = {i.id: i for i in context} instance_ids = set(i.id for i in ctx) instance_ids.add(ctx.id) for id, item in items.items(): if id not in instance_ids: # Remove from model self.data["models"]["item"].remove_instance(item) # Remove instance from list context.remove(instances[id]) def on_finished(message=None): self.data["state"]["is_running"] = False self.finished.emit() if message: self.info.emit(message) # Report statistics stats["requestCount"] -= self.host.stats()["totalRequestCount"] util.timer_end("publishing", "Spent %.2f ms resetting") util.echo("Made %i requests during publish." % abs(stats["requestCount"])) if callback: callback(*callback_args) # The iterator initiates processing and is # executed one item at a time in a separate thread. # Once the thread finishes execution, it signals # the `callback`. iterator = self.iterator(plugins, context) util.defer(lambda: next(iterator), callback=on_next)
def run(self, plugins, context, callback=None, callback_args=[])
Commence asynchronous tasks This method runs through the provided `plugins` in an asynchronous manner, interrupted by either completion or failure of a plug-in. Inbetween processes, the GUI is fed information from the task and redraws itself. Arguments: plugins (list): Plug-ins to process context (list): Instances to process callback (func, optional): Called on finish callback_args (list, optional): Arguments passed to callback
4.775676
4.654646
1.026002
assert isinstance(settings, dict), "`settings` must be of type dict"

for key, value in settings.items():
    setattr(self, key, value)
def from_dict(settings)
Apply settings from dictionary Arguments: settings (dict): Settings in the form of a dictionary
3.75686
4.056662
0.926097
if not isinstance(obj, ClassTypes):
    # already an instance
    return getattr(obj, '__call__', None) is not None

klass = obj
# uses __bases__ instead of __mro__ so that we work with old style classes
if klass.__dict__.get('__call__') is not None:
    return True

for base in klass.__bases__:
    if _instance_callable(base):
        return True
return False
def _instance_callable(obj)
Given an object, return True if the object is callable. For classes, return True if instances would be callable.
4.420306
4.145127
1.066386
if type(target) in (unicode, str): getter = lambda: _importer(target) else: getter = lambda: target if not kwargs: raise ValueError( 'Must supply at least one keyword argument with patch.multiple' ) # need to wrap in a list for python 3, where items is a view items = list(kwargs.items()) attribute, new = items[0] patcher = _patch( getter, attribute, new, spec, create, spec_set, autospec, new_callable, {} ) patcher.attribute_name = attribute for attribute, new in items[1:]: this_patcher = _patch( getter, attribute, new, spec, create, spec_set, autospec, new_callable, {} ) this_patcher.attribute_name = attribute patcher.additional_patchers.append(this_patcher) return patcher
def _patch_multiple(target, spec=None, create=False, spec_set=None, autospec=None, new_callable=None, **kwargs)
Perform multiple patches in a single call. It takes the object to be patched (either as an object or a string to fetch the object by importing) and keyword arguments for the patches:: with patch.multiple(settings, FIRST_PATCH='one', SECOND_PATCH='two'): ... Use `DEFAULT` as the value if you want `patch.multiple` to create mocks for you. In this case the created mocks are passed into a decorated function by keyword, and a dictionary is returned when `patch.multiple` is used as a context manager. `patch.multiple` can be used as a decorator, class decorator or a context manager. The arguments `spec`, `spec_set`, `create`, `autospec` and `new_callable` have the same meaning as for `patch`. These arguments will be applied to *all* patches done by `patch.multiple`. When used as a class decorator `patch.multiple` honours `patch.TEST_PREFIX` for choosing which methods to wrap.
3.294321
3.444745
0.956332
getter, attribute = _get_target(target)
return _patch(
    getter, attribute, new, spec, create,
    spec_set, autospec, new_callable, kwargs
)
def patch( target, new=DEFAULT, spec=None, create=False, spec_set=None, autospec=None, new_callable=None, **kwargs )
`patch` acts as a function decorator, class decorator or a context manager. Inside the body of the function or with statement, the `target` is patched with a `new` object. When the function/with statement exits the patch is undone. If `new` is omitted, then the target is replaced with a `MagicMock`. If `patch` is used as a decorator and `new` is omitted, the created mock is passed in as an extra argument to the decorated function. If `patch` is used as a context manager the created mock is returned by the context manager. `target` should be a string in the form `'package.module.ClassName'`. The `target` is imported and the specified object replaced with the `new` object, so the `target` must be importable from the environment you are calling `patch` from. The target is imported when the decorated function is executed, not at decoration time. The `spec` and `spec_set` keyword arguments are passed to the `MagicMock` if patch is creating one for you. In addition you can pass `spec=True` or `spec_set=True`, which causes patch to pass in the object being mocked as the spec/spec_set object. `new_callable` allows you to specify a different class, or callable object, that will be called to create the `new` object. By default `MagicMock` is used. A more powerful form of `spec` is `autospec`. If you set `autospec=True` then the mock with be created with a spec from the object being replaced. All attributes of the mock will also have the spec of the corresponding attribute of the object being replaced. Methods and functions being mocked will have their arguments checked and will raise a `TypeError` if they are called with the wrong signature. For mocks replacing a class, their return value (the 'instance') will have the same spec as the class. Instead of `autospec=True` you can pass `autospec=some_object` to use an arbitrary object as the spec instead of the one being replaced. By default `patch` will fail to replace attributes that don't exist. If you pass in `create=True`, and the attribute doesn't exist, patch will create the attribute for you when the patched function is called, and delete it again afterwards. This is useful for writing tests against attributes that your production code creates at runtime. It is off by by default because it can be dangerous. With it switched on you can write passing tests against APIs that don't actually exist! Patch can be used as a `TestCase` class decorator. It works by decorating each test method in the class. This reduces the boilerplate code when your test methods share a common patchings set. `patch` finds tests by looking for method names that start with `patch.TEST_PREFIX`. By default this is `test`, which matches the way `unittest` finds tests. You can specify an alternative prefix by setting `patch.TEST_PREFIX`. Patch can be used as a context manager, with the with statement. Here the patching applies to the indented block after the with statement. If you use "as" then the patched object will be bound to the name after the "as"; very useful if `patch` is creating a mock object for you. `patch` takes arbitrary keyword arguments. These will be passed to the `Mock` (or `new_callable`) on construction. `patch.dict(...)`, `patch.multiple(...)` and `patch.object(...)` are available for alternate use-cases.
3.85517
6.840896
0.563548
"Turns a callable object (like a mock) into a real function" def method(self, *args, **kw): return func(self, *args, **kw) method.__name__ = name return method
def _get_method(name, func)
Turns a callable object (like a mock) into a real function
4.375156
2.507251
1.745001
if _is_list(spec): # can't pass a list instance to the mock constructor as it will be # interpreted as a list of strings spec = type(spec) is_type = isinstance(spec, ClassTypes) _kwargs = {'spec': spec} if spec_set: _kwargs = {'spec_set': spec} elif spec is None: # None we mock with a normal mock without a spec _kwargs = {} _kwargs.update(kwargs) Klass = MagicMock if type(spec) in DescriptorTypes: # descriptors don't have a spec # because we don't know what type they return _kwargs = {} elif not _callable(spec): Klass = NonCallableMagicMock elif is_type and instance and not _instance_callable(spec): Klass = NonCallableMagicMock _new_name = _name if _parent is None: # for a top level object no _new_name should be set _new_name = '' mock = Klass(parent=_parent, _new_parent=_parent, _new_name=_new_name, name=_name, **_kwargs) if isinstance(spec, FunctionTypes): # should only happen at the top level because we don't # recurse for functions mock = _set_signature(mock, spec) else: _check_signature(spec, mock, is_type, instance) if _parent is not None and not instance: _parent._mock_children[_name] = mock if is_type and not instance and 'return_value' not in kwargs: mock.return_value = create_autospec(spec, spec_set, instance=True, _name='()', _parent=mock) for entry in dir(spec): if _is_magic(entry): # MagicMock already does the useful magic methods for us continue if isinstance(spec, FunctionTypes) and entry in FunctionAttributes: # allow a mock to actually be a function continue # XXXX do we need a better way of getting attributes without # triggering code execution (?) Probably not - we need the actual # object to mock it so we would rather trigger a property than mock # the property descriptor. Likewise we want to mock out dynamically # provided attributes. # XXXX what about attributes that raise exceptions other than # AttributeError on being fetched? # we could be resilient against it, or catch and propagate the # exception when the attribute is fetched from the mock try: original = getattr(spec, entry) except AttributeError: continue kwargs = {'spec': original} if spec_set: kwargs = {'spec_set': original} if not isinstance(original, FunctionTypes): new = _SpecState(original, spec_set, mock, entry, instance) mock._mock_children[entry] = new else: parent = mock if isinstance(spec, FunctionTypes): parent = mock.mock new = MagicMock(parent=parent, name=entry, _new_name=entry, _new_parent=parent, **kwargs) mock._mock_children[entry] = new skipfirst = _must_skip(spec, entry, is_type) _check_signature(original, new, skipfirst=skipfirst) # so functions created with _set_signature become instance attributes, # *plus* their underlying mock exists in _mock_children of the parent # mock. Adding to _mock_children may be unnecessary where we are also # setting as an instance attribute? if isinstance(new, FunctionTypes): setattr(mock, entry, new) return mock
def create_autospec(spec, spec_set=False, instance=False, _parent=None, _name=None, **kwargs)
Create a mock object using another object as a spec. Attributes on the mock will use the corresponding attribute on the `spec` object as their spec. Functions or methods being mocked will have their arguments checked to check that they are called with the correct signature. If `spec_set` is True then attempting to set attributes that don't exist on the spec object will raise an `AttributeError`. If a class is used as a spec then the return value of the mock (the instance of the class) will have the same spec. You can use a class as the spec for an instance object by passing `instance=True`. The returned mock will only be callable if instances of the mock are callable. `create_autospec` also takes arbitrary keyword arguments that are passed to the constructor of the created mock.
5.342958
5.499482
0.971538
global file_spec
if file_spec is None:
    # set on first use
    if inPy3k:
        import _io
        file_spec = list(set(dir(_io.TextIOWrapper)).union(set(dir(_io.BytesIO))))
    else:
        file_spec = file

if mock is None:
    mock = MagicMock(name='open', spec=open)

handle = MagicMock(spec=file_spec)
handle.write.return_value = None
handle.__enter__.return_value = handle
handle.read.return_value = read_data

mock.return_value = handle
return mock
def mock_open(mock=None, read_data='')
A helper function to create a mock to replace the use of `open`. It works for `open` called directly or used as a context manager. The `mock` argument is the mock object to configure. If `None` (the default) then a `MagicMock` will be created for you, with the API limited to methods or attributes available on standard file handles. `read_data` is a string for the `read` method of the file handle to return. This is an empty string by default.
3.410118
3.476716
0.980845
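A usage sketch of `mock_open` under `patch`, along the lines of the docstring above; the patch target is "builtins.open" on Python 3 (it would be "__builtin__.open" on Python 2), and "config.txt" is a made-up filename.

from mock import patch, mock_open

with patch("builtins.open", mock_open(read_data="hello")) as m:
    with open("config.txt") as f:
        assert f.read() == "hello"

m.assert_called_once_with("config.txt")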
mock._mock_parent = None
mock._mock_new_parent = None
mock._mock_name = ''
mock._mock_new_name = None

setattr(self, attribute, mock)
def attach_mock(self, mock, attribute)
Attach a mock as an attribute of this one, replacing its name and parent. Calls to the attached mock will be recorded in the `method_calls` and `mock_calls` attributes of this one.
4.385824
4.726692
0.927885
"Restore the mock object to its initial state." self.called = False self.call_args = None self.call_count = 0 self.mock_calls = _CallList() self.call_args_list = _CallList() self.method_calls = _CallList() for child in self._mock_children.values(): if isinstance(child, _SpecState): continue child.reset_mock() ret = self._mock_return_value if _is_instance_mock(ret) and ret is not self: ret.reset_mock()
def reset_mock(self)
Restore the mock object to its initial state.
3.771199
3.534907
1.066845
for arg, val in sorted(kwargs.items(),
                       # we sort on the number of dots so that
                       # attributes are set before we set attributes on
                       # attributes
                       key=lambda entry: entry[0].count('.')):
    args = arg.split('.')
    final = args.pop()
    obj = self
    for entry in args:
        obj = getattr(obj, entry)
    setattr(obj, final, val)
def configure_mock(self, **kwargs)
Set attributes on the mock through keyword arguments. Attributes plus return values and side effects can be set on child mocks using standard dot notation and unpacking a dictionary in the method call: >>> attrs = {'method.return_value': 3, 'other.side_effect': KeyError} >>> mock.configure_mock(**attrs)
5.061237
6.123253
0.82656
self = _mock_self
if self.call_args is None:
    expected = self._format_mock_call_signature(args, kwargs)
    raise AssertionError('Expected call: %s\nNot called' % (expected,))

if self.call_args != (args, kwargs):
    msg = self._format_mock_failure_message(args, kwargs)
    raise AssertionError(msg)
def assert_called_with(_mock_self, *args, **kwargs)
assert that the mock was called with the specified arguments. Raises an AssertionError if the args and keyword args passed in are different to the last call to the mock.
3.782413
3.682244
1.027203
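A short sketch of `assert_called_with` as described above: it only checks the most recent call, so an earlier matching call is not enough. The argument values are illustrative.

from mock import Mock

m = Mock()
m(1, 2, key="value")

m.assert_called_with(1, 2, key="value")   # passes
# m.assert_called_with(1, 2)              # would raise AssertionError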
if not any_order:
    if calls not in self.mock_calls:
        raise AssertionError(
            'Calls not found.\nExpected: %r\n'
            'Actual: %r' % (calls, self.mock_calls)
        )
    return

all_calls = list(self.mock_calls)

not_found = []
for kall in calls:
    try:
        all_calls.remove(kall)
    except ValueError:
        not_found.append(kall)
if not_found:
    raise AssertionError(
        '%r not all found in call list' % (tuple(not_found),)
    )
def assert_has_calls(self, calls, any_order=False)
assert the mock has been called with the specified calls. The `mock_calls` list is checked for the calls. If `any_order` is False (the default) then the calls must be sequential. There can be extra calls before or after the specified calls. If `any_order` is True then the calls can be in any order, but they must all appear in `mock_calls`.
2.699898
2.636551
1.024027
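A small illustration of the two `assert_has_calls` modes documented above; the call arguments are made up.

from mock import Mock, call

m = Mock()
m(1)
m(2)
m(3)

m.assert_has_calls([call(2), call(3)])                   # contiguous subsequence: passes
m.assert_has_calls([call(3), call(1)], any_order=True)   # order ignored: passes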
kall = call(*args, **kwargs)
if kall not in self.call_args_list:
    expected_string = self._format_mock_call_signature(args, kwargs)
    raise AssertionError(
        '%s call not found' % expected_string
    )
def assert_any_call(self, *args, **kwargs)
assert the mock has been called with the specified arguments. The assert passes if the mock has *ever* been called, unlike `assert_called_with` and `assert_called_once_with` that only pass if the call is the most recent one.
6.127391
5.164378
1.186472
_type = type(self)
if not issubclass(_type, CallableMixin):
    if issubclass(_type, NonCallableMagicMock):
        klass = MagicMock
    elif issubclass(_type, NonCallableMock):
        klass = Mock
else:
    klass = _type.__mro__[1]
return klass(**kw)
def _get_child_mock(self, **kw)
Create the child mocks for attributes and return value. By default child mocks will be the same type as the parent. Subclasses of Mock may want to override this to customize the way child mocks are made. For non-callable mocks the callable variant will be used (rather than any custom subclass).
4.252579
3.810255
1.116088
result = self.__enter__()
self._active_patches.add(self)
return result
def start(self)
Activate a patch, returning any created mock.
13.482831
7.428076
1.815117
self._mock_add_spec(spec, spec_set)
self._mock_set_magics()
def mock_add_spec(self, spec, spec_set=False)
Add a spec to a mock. `spec` can either be an object or a list of strings. Only attributes on the `spec` can be fetched as attributes from the mock. If `spec_set` is True then only attributes on the spec can be set.
6.223577
8.168489
0.761901
vals = []
thing = self
while thing is not None:
    if thing.from_kall:
        vals.append(thing)
    thing = thing.parent
return _CallList(reversed(vals))
def call_list(self)
For a call object that represents multiple calls, `call_list` returns a list of all the intermediate calls as well as the final call.
7.419879
6.500862
1.141369
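A usage sketch of `call_list`, following the pattern from the mock documentation: a chained `call` object expands into every intermediate call, which makes it easy to compare against `mock_calls`.

from mock import MagicMock, call

m = MagicMock()
m(1).method(arg="foo")

kall = call(1).method(arg="foo")
# call_list() expands to [call(1), call(1).method(arg='foo')]
assert m.mock_calls == kall.call_list()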
assert isinstance(ptr, long), "Argument 'ptr' must be of type <long>" assert (base is None) or issubclass(base, Qt.QtCore.QObject), ( "Argument 'base' must be of type <QObject>") if base is None: q_object = func(long(ptr), Qt.QtCore.QObject) meta_object = q_object.metaObject() class_name = meta_object.className() super_class_name = meta_object.superClass().className() if hasattr(Qt.QtWidgets, class_name): base = getattr(Qt.QtWidgets, class_name) elif hasattr(Qt.QtWidgets, super_class_name): base = getattr(Qt.QtWidgets, super_class_name) else: base = Qt.QtCore.QObject return func(long(ptr), base)
def _wrapinstance(func, ptr, base=None)
Enable implicit cast of pointer to most suitable class This behaviour is available in sip per default. Based on http://nathanhorne.com/pyqtpyside-wrap-instance Usage: This mechanism kicks in under these circumstances. 1. Qt.py is using PySide 1 or 2. 2. A `base` argument is not provided. See :func:`QtCompat.wrapInstance()` Arguments: func (function): Original function ptr (long): Pointer to QObject in memory base (QObject, optional): Base class to wrap with. Defaults to QObject, which should handle anything.
2.52046
2.459012
1.024989
for src, dst in _misplaced_members[binding].items(): src_module, src_member = src.split(".") dst_module, dst_member = dst.split(".") try: src_object = getattr(Qt, dst_module) except AttributeError: # Skip reassignment of non-existing members. # This can happen if a request was made to # rename a member that didn't exist, for example # if QtWidgets isn't available on the target platform. continue dst_value = getattr(getattr(Qt, "_" + src_module), src_member) setattr( src_object, dst_member, dst_value )
def _reassign_misplaced_members(binding)
Apply misplaced members from `binding` to Qt.py Arguments: binding (dict): Misplaced members
4.808294
4.700799
1.022867
import PySide2 as module _setup(module, ["QtUiTools"]) Qt.__binding_version__ = module.__version__ try: try: # Before merge of PySide and shiboken import shiboken2 except ImportError: # After merge of PySide and shiboken, May 2017 from PySide2 import shiboken2 Qt.QtCompat.wrapInstance = ( lambda ptr, base=None: _wrapinstance( shiboken2.wrapInstance, ptr, base) ) Qt.QtCompat.getCppPointer = lambda object: \ shiboken2.getCppPointer(object)[0] except ImportError: pass # Optional if hasattr(Qt, "_QtUiTools"): Qt.QtCompat.loadUi = _loadUi if hasattr(Qt, "_QtCore"): Qt.__qt_version__ = Qt._QtCore.qVersion() Qt.QtCompat.qInstallMessageHandler = _qInstallMessageHandler Qt.QtCompat.translate = Qt._QtCore.QCoreApplication.translate if hasattr(Qt, "_QtWidgets"): Qt.QtCompat.setSectionResizeMode = \ Qt._QtWidgets.QHeaderView.setSectionResizeMode _reassign_misplaced_members("PySide2") _build_compatibility_members("PySide2")
def _pyside2()
Initialise PySide2 These functions serve to test the existence of a binding along with set it up in such a way that it aligns with the final step; adding members from the original binding to Qt.py
4.611264
4.407295
1.04628
import PySide as module _setup(module, ["QtUiTools"]) Qt.__binding_version__ = module.__version__ try: try: # Before merge of PySide and shiboken import shiboken except ImportError: # After merge of PySide and shiboken, May 2017 from PySide import shiboken Qt.QtCompat.wrapInstance = ( lambda ptr, base=None: _wrapinstance( shiboken.wrapInstance, ptr, base) ) Qt.QtCompat.getCppPointer = lambda object: \ shiboken.getCppPointer(object)[0] except ImportError: pass # Optional if hasattr(Qt, "_QtUiTools"): Qt.QtCompat.loadUi = _loadUi if hasattr(Qt, "_QtGui"): setattr(Qt, "QtWidgets", _new_module("QtWidgets")) setattr(Qt, "_QtWidgets", Qt._QtGui) if hasattr(Qt._QtGui, "QX11Info"): setattr(Qt, "QtX11Extras", _new_module("QtX11Extras")) Qt.QtX11Extras.QX11Info = Qt._QtGui.QX11Info Qt.QtCompat.setSectionResizeMode = Qt._QtGui.QHeaderView.setResizeMode if hasattr(Qt, "_QtCore"): Qt.__qt_version__ = Qt._QtCore.qVersion() QCoreApplication = Qt._QtCore.QCoreApplication Qt.QtCompat.qInstallMessageHandler = _qInstallMessageHandler Qt.QtCompat.translate = ( lambda context, sourceText, disambiguation, n: QCoreApplication.translate( context, sourceText, disambiguation, QCoreApplication.CodecForTr, n ) ) _reassign_misplaced_members("PySide") _build_compatibility_members("PySide")
def _pyside()
Initialise PySide
4.132303
4.066586
1.01616
import PyQt5 as module _setup(module, ["uic"]) try: import sip Qt.QtCompat.wrapInstance = ( lambda ptr, base=None: _wrapinstance( sip.wrapinstance, ptr, base) ) Qt.QtCompat.getCppPointer = lambda object: \ sip.unwrapinstance(object) except ImportError: pass # Optional if hasattr(Qt, "_uic"): Qt.QtCompat.loadUi = _loadUi if hasattr(Qt, "_QtCore"): Qt.__binding_version__ = Qt._QtCore.PYQT_VERSION_STR Qt.__qt_version__ = Qt._QtCore.QT_VERSION_STR Qt.QtCompat.qInstallMessageHandler = _qInstallMessageHandler Qt.QtCompat.translate = Qt._QtCore.QCoreApplication.translate if hasattr(Qt, "_QtWidgets"): Qt.QtCompat.setSectionResizeMode = \ Qt._QtWidgets.QHeaderView.setSectionResizeMode _reassign_misplaced_members("PyQt5") _build_compatibility_members('PyQt5')
def _pyqt5()
Initialise PyQt5
4.861306
4.86172
0.999915
import sip # Validation of envivornment variable. Prevents an error if # the variable is invalid since it's just a hint. try: hint = int(QT_SIP_API_HINT) except TypeError: hint = None # Variable was None, i.e. not set. except ValueError: raise ImportError("QT_SIP_API_HINT=%s must be a 1 or 2") for api in ("QString", "QVariant", "QDate", "QDateTime", "QTextStream", "QTime", "QUrl"): try: sip.setapi(api, hint or 2) except AttributeError: raise ImportError("PyQt4 < 4.6 isn't supported by Qt.py") except ValueError: actual = sip.getapi(api) if not hint: raise ImportError("API version already set to %d" % actual) else: # Having provided a hint indicates a soft constraint, one # that doesn't throw an exception. sys.stderr.write( "Warning: API '%s' has already been set to %d.\n" % (api, actual) ) import PyQt4 as module _setup(module, ["uic"]) try: import sip Qt.QtCompat.wrapInstance = ( lambda ptr, base=None: _wrapinstance( sip.wrapinstance, ptr, base) ) Qt.QtCompat.getCppPointer = lambda object: \ sip.unwrapinstance(object) except ImportError: pass # Optional if hasattr(Qt, "_uic"): Qt.QtCompat.loadUi = _loadUi if hasattr(Qt, "_QtGui"): setattr(Qt, "QtWidgets", _new_module("QtWidgets")) setattr(Qt, "_QtWidgets", Qt._QtGui) if hasattr(Qt._QtGui, "QX11Info"): setattr(Qt, "QtX11Extras", _new_module("QtX11Extras")) Qt.QtX11Extras.QX11Info = Qt._QtGui.QX11Info Qt.QtCompat.setSectionResizeMode = \ Qt._QtGui.QHeaderView.setResizeMode if hasattr(Qt, "_QtCore"): Qt.__binding_version__ = Qt._QtCore.PYQT_VERSION_STR Qt.__qt_version__ = Qt._QtCore.QT_VERSION_STR QCoreApplication = Qt._QtCore.QCoreApplication Qt.QtCompat.qInstallMessageHandler = _qInstallMessageHandler Qt.QtCompat.translate = ( lambda context, sourceText, disambiguation, n: QCoreApplication.translate( context, sourceText, disambiguation, QCoreApplication.CodecForTr, n) ) _reassign_misplaced_members("PyQt4") # QFileDialog QtCompat decorator def _standardizeQFileDialog(some_function): def wrapper(*args, **kwargs): ret = (some_function(*args, **kwargs)) # PyQt4 only returns the selected filename, force it to a # standard return of the selected filename, and a empty string # for the selected filter return ret, '' wrapper.__doc__ = some_function.__doc__ wrapper.__name__ = some_function.__name__ return wrapper decorators = { "QFileDialog": { "getOpenFileName": _standardizeQFileDialog, "getOpenFileNames": _standardizeQFileDialog, "getSaveFileName": _standardizeQFileDialog, } } _build_compatibility_members('PyQt4', decorators)
def _pyqt4()
Initialise PyQt4
4.418611
4.394482
1.005491
if hasattr(Qt, "_uic"): return Qt._uic.loadUi(uifile, baseinstance) elif hasattr(Qt, "_QtUiTools"): # Implement `PyQt5.uic.loadUi` for PySide(2) class _UiLoader(Qt._QtUiTools.QUiLoader): def __init__(self, baseinstance): super(_UiLoader, self).__init__(baseinstance) self.baseinstance = baseinstance def load(self, uifile, *args, **kwargs): from xml.etree.ElementTree import ElementTree # For whatever reason, if this doesn't happen then # reading an invalid or non-existing .ui file throws # a RuntimeError. etree = ElementTree() etree.parse(uifile) widget = Qt._QtUiTools.QUiLoader.load( self, uifile, *args, **kwargs) # Workaround for PySide 1.0.9, see issue #208 widget.parentWidget() return widget def createWidget(self, class_name, parent=None, name=""): if parent is None and self.baseinstance: # Supposed to create the top-level widget, # return the base instance instead return self.baseinstance # For some reason, Line is not in the list of available # widgets, but works fine, so we have to special case it here. if class_name in self.availableWidgets() + ["Line"]: # Create a new widget for child widgets widget = Qt._QtUiTools.QUiLoader.createWidget(self, class_name, parent, name) else: raise Exception("Custom widget '%s' not supported" % class_name) if self.baseinstance: # Set an attribute for the new child widget on the base # instance, just like PyQt5.uic.loadUi does. setattr(self.baseinstance, name, widget) return widget widget = _UiLoader(baseinstance).load(uifile) Qt.QtCore.QMetaObject.connectSlotsByName(widget) return widget else: raise NotImplementedError("No implementation available for loadUi")
def _loadUi(uifile, baseinstance=None)
Dynamically load a user interface from the given `uifile` This function calls `uic.loadUi` if using PyQt bindings, else it implements a comparable binding for PySide. Documentation: http://pyqt.sourceforge.net/Docs/PyQt5/designer.html#PyQt5.uic.loadUi Arguments: uifile (str): Absolute path to Qt Designer file. baseinstance (QWidget): Instantiated QWidget or subclass thereof Return: baseinstance if `baseinstance` is not `None`. Otherwise return the newly created instance of the user interface.
3.567816
3.441404
1.036733
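A hedged usage sketch of the unified loader above; the .ui file name and dialog class are hypothetical. Child widgets named in the Designer file become attributes on the base instance, mirroring PyQt5.uic.loadUi as described in the comments above.

from Qt import QtWidgets, QtCompat

class MyDialog(QtWidgets.QDialog):
    def __init__(self, parent=None):
        super(MyDialog, self).__init__(parent)
        # Populates `self` with the widgets declared in dialog.ui
        QtCompat.loadUi("dialog.ui", self)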
signature = inspect.getargspec(wrapper) if any([len(signature.args) != 1, signature.varargs is None, signature.keywords is None]): raise TypeError("Wrapper signature mismatch") def _wrapper(func, *args, **kwargs): try: return wrapper(func, *args, **kwargs) except Exception as e: # Kill subprocess _state["currentServer"].stop() traceback.print_exc() raise e _state["dispatchWrapper"] = _wrapper
def register_dispatch_wrapper(wrapper)
Register a dispatch wrapper for servers The wrapper must have this exact signature: (func, *args, **kwargs)
4.435513
4.175719
1.062216
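A sketch of a wrapper that satisfies the required (func, *args, **kwargs) signature; the logging is illustrative only, and the import assumes register_dispatch_wrapper is re-exported via pyblish_qml.api (otherwise it lives alongside the function above in the host module).

import logging
from pyblish_qml import api

log = logging.getLogger("dispatch")

def wrapper(func, *args, **kwargs):
    # Must take exactly one positional argument plus *args/**kwargs,
    # or register_dispatch_wrapper() raises TypeError.
    log.debug("Dispatching %s", func.__name__)
    return func(*args, **kwargs)

api.register_dispatch_wrapper(wrapper)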
if _state.get("installed"): sys.stdout.write("Already installed, uninstalling..\n") uninstall() use_threaded_wrapper = not modal install_callbacks() install_host(use_threaded_wrapper) _state["installed"] = True
def install(modal)
Perform first time install
10.071974
9.721732
1.036027
# Get modal mode from environment if modal is None: modal = bool(os.environ.get("PYBLISH_QML_MODAL", False)) # Automatically install if not already installed. install(modal) show_settings = settings.to_dict() show_settings['autoPublish'] = auto_publish show_settings['autoValidate'] = auto_validate # Show existing GUI if _state.get("currentServer"): server = _state["currentServer"] proxy = ipc.server.Proxy(server) try: proxy.show(show_settings) return server except IOError: # The running instance has already been closed. _state.pop("currentServer") if not host.is_headless(): host.splash() try: service = ipc.service.Service() server = ipc.server.Server(service, targets=targets, modal=modal) except Exception: # If for some reason, the GUI fails to show. traceback.print_exc() return host.desplash() proxy = ipc.server.Proxy(server) proxy.show(show_settings) # Store reference to server for future calls _state["currentServer"] = server log.info("Success. QML server available as " "pyblish_qml.api.current_server()") server.listen() return server
def show(parent=None, targets=[], modal=None, auto_publish=False, auto_validate=False)
Attempt to show GUI Requires install() to have been run first, and a live instance of Pyblish QML in the background. Arguments: parent (None, optional): Deprecated targets (list, optional): Publishing targets modal (bool, optional): Block interactions to parent
5.347835
4.993282
1.071006
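How a host integration might call this, assuming pyblish_qml is importable inside the host process; the target name is a placeholder. A running instance is reused when one exists, otherwise a new server is started behind the splash screen.

import pyblish_qml
from pyblish_qml import api

server = pyblish_qml.show(targets=["default"], auto_validate=True)

# As the log message above notes, the same server remains reachable via:
assert server is api.current_server()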
for install in (_install_maya, _install_houdini, _install_nuke, _install_nukeassist, _install_hiero, _install_nukestudio, _install_blender): try: install(use_threaded_wrapper) except ImportError: pass else: break
def install_host(use_threaded_wrapper)
Install required components into supported hosts An unsupported host will still run, but may encounter issues, especially with threading.
3.538384
3.546794
0.997629
keyword = "googleapiclient" # reconstruct python paths python_paths = os.environ["PYTHONPATH"].split(os.pathsep) paths = [path for path in python_paths if keyword not in path] os.environ["PYTHONPATH"] = os.pathsep.join(paths)
def _remove_googleapiclient()
Check whether the compatibility workaround must be applied

    Maya 2018 tries to import the `http` module from
    Maya2018\plug-ins\MASH\scripts\googleapiclient\http.py instead of the
    module from six.py. This import conflict causes a crash in Avalon's
    publisher. It happens because Autodesk adds paths to the PYTHONPATH
    environment variable which contain modules instead of only packages.
3.581282
3.431641
1.043606
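The same path-scrubbing idea in isolation, as a sketch of the technique rather than pyblish-qml's exact helper; `bad_keyword` and the function name are placeholders.

import os

def scrub_pythonpath(bad_keyword):
    # Drop any PYTHONPATH entry that points at a conflicting module tree,
    # leaving the remaining entries and their order untouched.
    entries = os.environ.get("PYTHONPATH", "").split(os.pathsep)
    kept = [path for path in entries if bad_keyword not in path]
    os.environ["PYTHONPATH"] = os.pathsep.join(kept)

scrub_pythonpath("googleapiclient")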
from maya import utils, cmds def threaded_wrapper(func, *args, **kwargs): return utils.executeInMainThreadWithResult( func, *args, **kwargs) sys.stdout.write("Setting up Pyblish QML in Maya\n") if cmds.about(version=True) == "2018": _remove_googleapiclient() _common_setup("Maya", threaded_wrapper, use_threaded_wrapper)
def _install_maya(use_threaded_wrapper)
Helper function for Autodesk Maya support
5.308569
5.477155
0.96922
import hdefereval def threaded_wrapper(func, *args, **kwargs): return hdefereval.executeInMainThreadWithResult( func, *args, **kwargs) _common_setup("Houdini", threaded_wrapper, use_threaded_wrapper)
def _install_houdini(use_threaded_wrapper)
Helper function for SideFX Houdini support
5.045031
5.031442
1.002701
import nuke not_nuke_launch = ( "--hiero" in nuke.rawArgs or "--studio" in nuke.rawArgs or "--nukeassist" in nuke.rawArgs ) if not_nuke_launch: raise ImportError def threaded_wrapper(func, *args, **kwargs): return nuke.executeInMainThreadWithResult( func, args, kwargs) _common_setup("Nuke", threaded_wrapper, use_threaded_wrapper)
def _install_nuke(use_threaded_wrapper)
Helper function for The Foundry Nuke support
4.897795
4.829197
1.014205
import nuke if "--nukeassist" not in nuke.rawArgs: raise ImportError def threaded_wrapper(func, *args, **kwargs): return nuke.executeInMainThreadWithResult( func, args, kwargs) _common_setup("NukeAssist", threaded_wrapper, use_threaded_wrapper)
def _install_nukeassist(use_threaded_wrapper)
Helper function for The Foundry NukeAssist support
5.378176
5.159469
1.04239
import hiero import nuke if "--hiero" not in nuke.rawArgs: raise ImportError def threaded_wrapper(func, *args, **kwargs): return hiero.core.executeInMainThreadWithResult( func, args, kwargs) _common_setup("Hiero", threaded_wrapper, use_threaded_wrapper)
def _install_hiero(use_threaded_wrapper)
Helper function for The Foundry Hiero support
5.476368
5.291718
1.034894
import bpy

qml_to_blender = queue.Queue()
blender_to_qml = queue.Queue()

def threaded_wrapper(func, *args, **kwargs):
    qml_to_blender.put((func, args, kwargs))
    return blender_to_qml.get()

class PyblishQMLOperator(bpy.types.Operator):
    bl_idname = "wm.pyblish_qml_timer"
    bl_label = "Pyblish QML Timer Operator"

    _timer = None

    def modal(self, context, event):
        if event.type == 'TIMER':
            try:
                func, args, kwargs = qml_to_blender.get_nowait()
            except queue.Empty:
                pass
            else:
                result = func(*args, **kwargs)
                blender_to_qml.put(result)

        return {'PASS_THROUGH'}

    def execute(self, context):
        wm = context.window_manager

        # Check the queue every 10 ms
        # The cost of checking the queue is negligible, but it
        # does mean having Python execute a command all the time,
        # even as the artist is working normally and is nowhere
        # near publishing anything.
        self._timer = wm.event_timer_add(0.01, context.window)

        wm.modal_handler_add(self)
        return {'RUNNING_MODAL'}

    def cancel(self, context):
        wm = context.window_manager
        wm.event_timer_remove(self._timer)

log.info("Registering Blender + Pyblish operator")
bpy.utils.register_class(PyblishQMLOperator)

# Start the timer
bpy.ops.wm.pyblish_qml_timer()

# Expose externally, for debugging. It enables you to
# pause the timer, and add/remove commands by hand.
_state["QmlToBlenderQueue"] = qml_to_blender
_state["BlenderToQmlQueue"] = blender_to_qml

_common_setup("Blender", threaded_wrapper, use_threaded_wrapper)
def _install_blender(use_threaded_wrapper)
Blender is a special snowflake

    It doesn't have a mechanism for calling commands from any thread
    other than the main thread. So what happens below is that we poll
    every 10 milliseconds to see whether QML has any tasks for us. If
    it does, Blender runs that command (blocking while it does so) and
    passes the result back to QML when ready.

    The consequence is that we're polling even when nothing is expected
    to arrive. The cost of polling is expected to be negligible, but it
    is worth keeping in mind and ideally optimising away, e.g. by only
    polling while the QML window is actually open.
3.276132
3.163966
1.035451
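The marshalling pattern above, reduced to plain Python so it can be read without Blender: a worker thread enqueues callables, the "main" loop executes them and posts results back. All names here are illustrative; Blender drives main_loop_tick() from the modal operator's timer instead of the for-loop below.

import queue
import threading
import time

requests = queue.Queue()   # worker -> main
responses = queue.Queue()  # main -> worker

def call_in_main(func, *args, **kwargs):
    # Runs on the worker (QML) thread; blocks until the main loop answers.
    requests.put((func, args, kwargs))
    return responses.get()

def main_loop_tick():
    # Runs on the main thread, e.g. every 10 ms from a timer; executes at
    # most one queued call per tick and posts the result back.
    try:
        func, args, kwargs = requests.get_nowait()
    except queue.Empty:
        return
    responses.put(func(*args, **kwargs))

# Demo: a worker asks the "main thread" to compute something.
threading.Thread(target=lambda: print(call_in_main(sum, [1, 2, 3])),
                 daemon=True).start()
for _ in range(100):  # stand-in for the host's timer
    main_loop_tick()
    time.sleep(0.01)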
print("Installing..") if self._state["installed"]: return if self.is_headless(): log.info("Headless host") return print("aboutToQuit..") self.app.aboutToQuit.connect(self._on_application_quit) if host == "Maya": print("Maya host..") window = { widget.objectName(): widget for widget in self.app.topLevelWidgets() }["MayaWindow"] else: window = self.find_window() # Install event filter print("event filter..") event_filter = self.EventFilter(window) window.installEventFilter(event_filter) for signal in SIGNALS_TO_REMOVE_EVENT_FILTER: pyblish.api.register_callback(signal, self.uninstall) log.info("Installed event filter") self.window = window self._state["installed"] = True self._state["eventFilter"] = event_filter
def install(self, host)
Setup common to all Qt-based hosts
4.947663
4.777631
1.035589
window = self.app.activeWindow() while True: parent_window = window.parent() if parent_window: window = parent_window else: break return window
def find_window(self)
Get top window in host
3.588146
3.258023
1.101326
b = get_app().current_buffer before_cursor = b.document.current_line_before_cursor return bool(b.text and (not before_cursor or before_cursor.isspace()))
def tab_should_insert_whitespace()
When the 'tab' key is pressed with only whitespace characters before the
    cursor, insert indentation; otherwise, do autocompletion.

    The exception is the very first character of the first line: there we
    always complete, since it doesn't make sense to start the first line
    with indentation.
5.911696
5.894743
1.002876
bindings = KeyBindings() sidebar_visible = Condition(lambda: python_input.show_sidebar) handle = bindings.add @handle('c-l') def _(event): event.app.renderer.clear() @handle('c-z') def _(event): if python_input.enable_system_bindings: event.app.suspend_to_background() @handle('f2') def _(event): python_input.show_sidebar = not python_input.show_sidebar if python_input.show_sidebar: event.app.layout.focus(python_input.ptpython_layout.sidebar) else: event.app.layout.focus_last() @handle('f3') def _(event): python_input.enter_history() @handle('f4') def _(event): python_input.vi_mode = not python_input.vi_mode @handle('f6') def _(event): python_input.paste_mode = not python_input.paste_mode @handle('tab', filter= ~sidebar_visible & ~has_selection & tab_should_insert_whitespace) def _(event): event.app.current_buffer.insert_text(' ') @Condition def is_multiline(): return document_is_multiline_python(python_input.default_buffer.document) @handle('enter', filter= ~sidebar_visible & ~has_selection & (vi_insert_mode | emacs_insert_mode) & has_focus(DEFAULT_BUFFER) & ~is_multiline) @handle(Keys.Escape, Keys.Enter, filter= ~sidebar_visible & emacs_mode) def _(event): b = event.current_buffer if b.validate(): # When the cursor is at the end, and we have an empty line: # drop the empty lines, but return the value. b.document = Document( text=b.text.rstrip(), cursor_position=len(b.text.rstrip())) b.validate_and_handle() @handle('enter', filter= ~sidebar_visible & ~has_selection & (vi_insert_mode | emacs_insert_mode) & has_focus(DEFAULT_BUFFER) & is_multiline) def _(event): b = event.current_buffer empty_lines_required = python_input.accept_input_on_enter or 10000 def at_the_end(b): text = b.document.text_after_cursor return text == '' or (text.isspace() and not '\n' in text) if python_input.paste_mode: # In paste mode, always insert text. b.insert_text('\n') elif at_the_end(b) and b.document.text.replace(' ', '').endswith( '\n' * (empty_lines_required - 1)): # When the cursor is at the end, and we have an empty line: # drop the empty lines, but return the value. if b.validate(): b.document = Document( text=b.text.rstrip(), cursor_position=len(b.text.rstrip())) b.validate_and_handle() else: auto_newline(b) @handle('c-d', filter=~sidebar_visible & has_focus(python_input.default_buffer) & Condition(lambda: # The current buffer is empty. not get_app().current_buffer.text)) def _(event): if python_input.confirm_exit: python_input.show_exit_confirmation = True else: event.app.exit(exception=EOFError) @handle('c-c', filter=has_focus(python_input.default_buffer)) def _(event): " Abort when Control-C has been pressed. " event.app.exit(exception=KeyboardInterrupt, style='class:aborting') return bindings
def load_python_bindings(python_input)
Custom key bindings.
3.03201
3.01216
1.00659
bindings = KeyBindings() handle = bindings.add sidebar_visible = Condition(lambda: python_input.show_sidebar) @handle('up', filter=sidebar_visible) @handle('c-p', filter=sidebar_visible) @handle('k', filter=sidebar_visible) def _(event): " Go to previous option. " python_input.selected_option_index = ( (python_input.selected_option_index - 1) % python_input.option_count) @handle('down', filter=sidebar_visible) @handle('c-n', filter=sidebar_visible) @handle('j', filter=sidebar_visible) def _(event): " Go to next option. " python_input.selected_option_index = ( (python_input.selected_option_index + 1) % python_input.option_count) @handle('right', filter=sidebar_visible) @handle('l', filter=sidebar_visible) @handle(' ', filter=sidebar_visible) def _(event): " Select next value for current option. " option = python_input.selected_option option.activate_next() @handle('left', filter=sidebar_visible) @handle('h', filter=sidebar_visible) def _(event): " Select previous value for current option. " option = python_input.selected_option option.activate_previous() @handle('c-c', filter=sidebar_visible) @handle('c-d', filter=sidebar_visible) @handle('c-d', filter=sidebar_visible) @handle('enter', filter=sidebar_visible) @handle('escape', filter=sidebar_visible) def _(event): " Hide sidebar. " python_input.show_sidebar = False event.app.layout.focus_last() return bindings
def load_sidebar_bindings(python_input)
Load bindings for the navigation in the sidebar.
1.774729
1.781174
0.996382
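A minimal, hedged prompt_toolkit sketch of the same pattern used in the two binding loaders above: a KeyBindings registry plus a Condition filter, so a handler only fires while some UI state is active. The `state` dict and key choice are illustrative; ptpython merges registries like these into the application's key bindings.

from prompt_toolkit.key_binding import KeyBindings
from prompt_toolkit.filters import Condition

state = {"panel_open": True}
panel_visible = Condition(lambda: state["panel_open"])

bindings = KeyBindings()

@bindings.add('escape', filter=panel_visible)
def _(event):
    " Close the panel instead of performing the default escape action. "
    state["panel_open"] = False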
bindings = KeyBindings() handle = bindings.add confirmation_visible = Condition(lambda: python_input.show_exit_confirmation) @handle('y', filter=confirmation_visible) @handle('Y', filter=confirmation_visible) @handle('enter', filter=confirmation_visible) @handle('c-d', filter=confirmation_visible) def _(event): event.app.exit(exception=EOFError, style='class:exiting') @handle(Keys.Any, filter=confirmation_visible) def _(event): python_input.show_exit_confirmation = False return bindings
def load_confirm_exit_bindings(python_input)
Handle yes/no key presses when the exit confirmation is shown.
3.182606
2.960234
1.07512
insert_text = buffer.insert_text

if buffer.document.current_line_after_cursor:
    # When we are in the middle of a line. Always insert a newline.
    insert_text('\n')
else:
    # Go to new line, but also add indentation.
    current_line = buffer.document.current_line_before_cursor.rstrip()
    insert_text('\n')

    # Unindent if the last line ends with 'pass': remove four spaces.
    unindent = current_line.rstrip().endswith(' pass')

    # Copy whitespace from current line
    current_line2 = current_line[4:] if unindent else current_line

    for c in current_line2:
        if c.isspace():
            insert_text(c)
        else:
            break

    # If the last line ends with a colon, add four extra spaces.
    if current_line[-1:] == ':':
        for x in range(4):
            insert_text(' ')
def auto_newline(buffer)
r""" Insert \n at the cursor position. Also add necessary padding.
4.407835
4.188818
1.052286
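A small, hedged illustration of the intended behaviour using a bare prompt_toolkit Buffer (the same Buffer/Document APIs the code above already relies on); `auto_newline` refers to the function above, and the input string is illustrative.

from prompt_toolkit.buffer import Buffer
from prompt_toolkit.document import Document

b = Buffer()
b.document = Document(text="    if x:", cursor_position=len("    if x:"))

auto_newline(b)

# Copies the 4 leading spaces, then adds 4 more because the line ends in ':'
assert b.text == "    if x:\n" + " " * 8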
# We make this lazy, because it delays startup time a little bit.
# This way, the grammar is built during the first completion.
if self._path_completer_grammar_cache is None:
    self._path_completer_grammar_cache = self._create_path_completer_grammar()
return self._path_completer_grammar_cache
def _path_completer_grammar(self)
Return the grammar for matching paths inside strings inside Python code.
4.274474
3.874425
1.103254
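The same lazy-initialisation idiom in a generic form; `Completer` and `_build_grammar` are placeholders for any class with an expensive-to-construct member.

class Completer(object):
    def __init__(self):
        self._grammar_cache = None

    @property
    def grammar(self):
        # Build on first access only, so constructing the class stays cheap
        # and the cost is paid at the first completion instead of at import.
        if self._grammar_cache is None:
            self._grammar_cache = self._build_grammar()
        return self._grammar_cache

    def _build_grammar(self):
        return object()  # placeholder for the expensive construction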
# Do Path completions if complete_event.completion_requested or self._complete_path_while_typing(document): for c in self._path_completer.get_completions(document, complete_event): yield c # If we are inside a string, Don't do Jedi completion. if self._path_completer_grammar.match(document.text_before_cursor): return # Do Jedi Python completions. if complete_event.completion_requested or self._complete_python_while_typing(document): script = get_jedi_script_from_document(document, self.get_locals(), self.get_globals()) if script: try: completions = script.completions() except TypeError: # Issue #9: bad syntax causes completions() to fail in jedi. # https://github.com/jonathanslenders/python-prompt-toolkit/issues/9 pass except UnicodeDecodeError: # Issue #43: UnicodeDecodeError on OpenBSD # https://github.com/jonathanslenders/python-prompt-toolkit/issues/43 pass except AttributeError: # Jedi issue #513: https://github.com/davidhalter/jedi/issues/513 pass except ValueError: # Jedi issue: "ValueError: invalid \x escape" pass except KeyError: # Jedi issue: "KeyError: u'a_lambda'." # https://github.com/jonathanslenders/ptpython/issues/89 pass except IOError: # Jedi issue: "IOError: No such file or directory." # https://github.com/jonathanslenders/ptpython/issues/71 pass except AssertionError: # In jedi.parser.__init__.py: 227, in remove_last_newline, # the assertion "newline.value.endswith('\n')" can fail. pass except SystemError: # In jedi.api.helpers.py: 144, in get_stack_at_position # raise SystemError("This really shouldn't happen. There's a bug in Jedi.") pass except NotImplementedError: # See: https://github.com/jonathanslenders/ptpython/issues/223 pass except Exception: # Supress all other Jedi exceptions. pass else: for c in completions: yield Completion(c.name_with_symbols, len(c.complete) - len(c.name_with_symbols), display=c.name_with_symbols)
def get_completions(self, document, complete_event)
Get Python completions.
3.651045
3.613152
1.010487
# Get the current TK application. import _tkinter # Keep this imports inline! from six.moves import tkinter root = tkinter._default_root def wait_using_filehandler(): # Add a handler that sets the stop flag when `prompt-toolkit` has input # to process. stop = [False] def done(*a): stop[0] = True root.createfilehandler(inputhook_context.fileno(), _tkinter.READABLE, done) # Run the TK event loop as long as we don't receive input. while root.dooneevent(_tkinter.ALL_EVENTS): if stop[0]: break root.deletefilehandler(inputhook_context.fileno()) def wait_using_polling(): while not inputhook_context.input_is_ready(): while root.dooneevent(_tkinter.ALL_EVENTS | _tkinter.DONT_WAIT): pass # Sleep to make the CPU idle, but not too long, so that the UI # stays responsive. time.sleep(.01) if root is not None: if hasattr(root, 'createfilehandler'): wait_using_filehandler() else: wait_using_polling()
def _inputhook_tk(inputhook_context)
Inputhook for Tk. Run the Tk eventloop until prompt-toolkit needs to process the next input.
5.173679
4.860495
1.064435
assert isinstance(repl, PythonInput) assert isinstance(config_file, six.text_type) # Expand tildes. config_file = os.path.expanduser(config_file) def enter_to_continue(): six.moves.input('\nPress ENTER to continue...') # Check whether this file exists. if not os.path.exists(config_file): print('Impossible to read %r' % config_file) enter_to_continue() return # Run the config file in an empty namespace. try: namespace = {} with open(config_file, 'rb') as f: code = compile(f.read(), config_file, 'exec') six.exec_(code, namespace, namespace) # Now we should have a 'configure' method in this namespace. We call this # method with the repl as an argument. if 'configure' in namespace: namespace['configure'](repl) except Exception: traceback.print_exc() enter_to_continue()
def run_config(repl, config_file='~/.ptpython/config.py')
Execute REPL config file. :param repl: `PythonInput` instance. :param config_file: Path of the configuration file.
3.284563
3.149126
1.043008
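What such a config file typically looks like: it only needs to define configure(repl), which run_config() above calls with the PythonInput instance. The attributes shown are common ones, but treat the exact set as an assumption since it varies between ptpython versions.

# ~/.ptpython/config.py -- executed by run_config(), which then calls configure(repl)

def configure(repl):
    # `repl` is the PythonInput instance passed in by run_config().
    repl.vi_mode = True
    repl.confirm_exit = False
    repl.show_signature = True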
assert configure is None or callable(configure) # Default globals/locals if globals is None: globals = { '__name__': '__main__', '__package__': None, '__doc__': None, '__builtins__': six.moves.builtins, } locals = locals or globals def get_globals(): return globals def get_locals(): return locals # Create eventloop. if return_asyncio_coroutine: use_asyncio_event_loop() # Create REPL. repl = PythonRepl(get_globals=get_globals, get_locals=get_locals, vi_mode=vi_mode, history_filename=history_filename, startup_paths=startup_paths) if title: repl.terminal_title = title if configure: configure(repl) app = repl.app # Start repl. patch_context = patch_stdout_context() if patch_stdout else DummyContext() if return_asyncio_coroutine: # XXX def coroutine(): with patch_context: while True: iterator = iter(app.run_async().to_asyncio_future()) try: while True: yield next(iterator) except StopIteration as exc: text = exc.args[0] repl._process_text(text) return coroutine() else: with patch_context: repl.run()
def embed(globals=None, locals=None, configure=None, vi_mode=False, history_filename=None, title=None, startup_paths=None, patch_stdout=False, return_asyncio_coroutine=False)
Call this to embed Python shell at the current point in your program. It's similar to `IPython.embed` and `bpython.embed`. :: from prompt_toolkit.contrib.repl import embed embed(globals(), locals()) :param vi_mode: Boolean. Use Vi instead of Emacs key bindings. :param configure: Callable that will be called with the `PythonRepl` as a first argument, to trigger configuration. :param title: Title to be displayed in the terminal titlebar. (None or string.)
3.204487
3.340601
0.959254
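A typical call site for the entry point above, following the docstring's own example; the import path assumes the current ptpython layout (the docstring shows an older prompt_toolkit.contrib path).

from ptpython.repl import embed

def main():
    value = 42
    # Drop into a ptpython shell with access to main()'s namespace.
    embed(globals(), locals())

if __name__ == "__main__":
    main()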
" Start the Read-Eval-Print Loop. " if self._startup_paths: for path in self._startup_paths: if os.path.exists(path): with open(path, 'rb') as f: code = compile(f.read(), path, 'exec') six.exec_(code, self.get_globals(), self.get_locals()) else: output = self.app.output output.write('WARNING | File not found: {}\n\n'.format(path))
def _load_start_paths(self)
Load and execute the configured startup files, if any, warning about paths that do not exist.
3.938957
3.19419
1.233163