Dataset columns: message (string, length 13–484), diff (string, length 38–4.63k)
Remove useless greenlet requirement This patch cleans up the requirements.txt list to remove the greenlet module, which is not used anymore.
@@ -10,7 +10,6 @@ Routes!=2.0,!=2.3.0,>=1.12.3;python_version!='2.7' # MIT debtcollector>=1.2.0 # Apache-2.0 eventlet!=0.18.3,>=0.18.2 # MIT pecan!=1.0.2,!=1.0.3,!=1.0.4,!=1.2,>=1.0.0 # BSD -greenlet>=0.3.2 # MIT httplib2>=0.7.5 # MIT requests!=2.12.2,>=2.10.0 # Apache-2.0 Jinja2>=2.8 # BSD License (3 clause)
Update mtsac_metaworld_mt50.py Update the max_episode_length of this example.
@@ -84,7 +84,7 @@ def mtsac_metaworld_mt50(ctxt=None, seed=1, use_gpu=False, _gpu=0): qf1=qf1, qf2=qf2, gradient_steps_per_itr=150, - max_episode_length=250, + max_episode_length=150, eval_env=mt50_test_envs, env_spec=mt50_train_envs.spec, num_tasks=10,
Fix docstring on expectation Column list can be in any order and does not need to match exactly when the exact_match keyword argument is set.
@@ -23,7 +23,7 @@ from great_expectations.rule_based_profiler.types import ( class ExpectTableColumnsToMatchSet(TableExpectation): - """Expect the columns to exactly match an *unordered* set. + """Expect the columns to match an *unordered* set. expect_table_columns_to_match_set is a :func:`expectation \ <great_expectations.validator.validator.Validator.expectation>`, not a @@ -31,7 +31,7 @@ class ExpectTableColumnsToMatchSet(TableExpectation): Args: column_set (list of str): \ - The column names, in the correct order. + The column names, in any order. exact_match (boolean): \ Whether the list of columns must exactly match the observed columns.
request details: show preview if user owns request The record tab was being hidden for everyone if the request was closed. Now it stays visible if it is your own request; otherwise it is still hidden.
@@ -26,7 +26,10 @@ from sqlalchemy.orm.exc import NoResultFound def _resolve_topic_draft(request): """Resolve the record in the topic when it is a draft.""" - if request["is_closed"]: + user_owns_request = \ + str(request["expanded"]["created_by"]["id"]) == str(current_user.id) + + if request["is_closed"] and not user_owns_request: return dict(permissions={}, record_ui=None) recid = ResolverRegistry.resolve_entity_proxy(
Update INSTALL closes
@@ -124,7 +124,7 @@ like to contribute an installation script, we would welcome it!) Whichever version was installed last will be the default for `mn`. As long as Mininet is installed for the appropriate version of - Python, you can run it using that versinon of Python: + Python, you can run it using that version of Python: python3 `which mn` python2 `which mn`
Corrected typo Changed "acitive" to "active".
@@ -305,7 +305,7 @@ devices under a variety of operating systems. ## Third-Party Hosts Managers -* [Unified Hosts AutoUpdate](https://github.com/ScriptTiger/Unified-Hosts-AutoUpdate "Unified Hosts AutoUpdate") (for Windows): The Unified Hosts AutUpdate package is purpose-built for this unified hosts project as well as in active development by community members. It's sophisticated enough to allow any novice the ability to install and uninstall the blacklist of their choosing to their local hosts file and keep it automatically up to date, while also being minimal enough to be able to be easily placed in a shared network location and deployed across an organization via group policies. And since it is in acitive development by community members, your bug reports, feature requests, and other feedback are most welcome. +* [Unified Hosts AutoUpdate](https://github.com/ScriptTiger/Unified-Hosts-AutoUpdate "Unified Hosts AutoUpdate") (for Windows): The Unified Hosts AutUpdate package is purpose-built for this unified hosts project as well as in active development by community members. It's sophisticated enough to allow any novice the ability to install and uninstall the blacklist of their choosing to their local hosts file and keep it automatically up to date, while also being minimal enough to be able to be easily placed in a shared network location and deployed across an organization via group policies. And since it is in active development by community members, your bug reports, feature requests, and other feedback are most welcome. ## Interesting Applications
Testing: Fix building centos7-py37 The location of source RPMs was moved.
@@ -74,7 +74,7 @@ RUN if [ "$PYTHON" == "2.7" ] ; then \ usermod -G mock -a mockbuild && \ rpmdev-setuptree && \ echo -e '\n%_buildshell /bin/bash\n%python3_pkgversion 37\n' >> ~/.rpmmacros && \ - curl -sSL https://download-ib01.fedoraproject.org/pub/epel/7/SRPMS/Packages/b/boost-python3-1.53.0-30.el7.src.rpm > boost-python3-1.53.0-30.el7.src.rpm && \ + curl -sSL https://download-ib01.fedoraproject.org/pub/epel/7/source/tree/Packages/b/boost-python3-1.53.0-30.el7.src.rpm > boost-python3-1.53.0-30.el7.src.rpm && \ rpm -i boost-python3-1.53.0-30.el7.src.rpm && \ rm -f boost159-1.59.0-3.el7ost.src.rpm && \ curl -sSL https://github.com/boostorg/python/commit/660487c43fde76f3e64f1cb2e644500da92fe582.patch > ~/rpmbuild/SOURCES/boost-python37.patch && \
Fixed custom ICDS UCR expression. If you have a form in the case history that doesn't have a case block, this will fail with StopIteration.
@@ -3,7 +3,6 @@ from datetime import datetime from jsonobject.base_properties import DefaultProperty from six.moves import filter -from six.moves import map from casexml.apps.case.xform import extract_case_blocks from corehq.apps.receiverwrapper.util import get_version_from_appversion_text @@ -95,9 +94,10 @@ class GetCaseHistorySpec(JsonObject): for f in forms: case_blocks = extract_case_blocks(f) if case_blocks: - case_history.append( - next(case_block for case_block in case_blocks - if case_block['@case_id'] == case_id)) + for case_block in case_blocks: + if case_block['@case_id'] == case_id: + case_history.append(case_block) + context.set_cache_value(cache_key, case_history) return case_history @@ -105,7 +105,6 @@ class GetCaseHistorySpec(JsonObject): return "case_history(\n{cases}\n)".format(cases=add_tabbed_text(str(self._case_forms_expression))) - class GetCaseHistoryByDateSpec(JsonObject): type = TypeProperty('icds_get_case_history_by_date') case_id_expression = DefaultProperty(required=False)
Launch 4 experiments 1. Re-run the latest 3 experiments (`2022-12-05-rerun`, `2022-12-05-aflpp-cmplog`, and `2022-12-01-um`) and suffix their name with `-c`. 2. Run core fuzzers on the 5 new benchmarks
# You can run "make presubmit" to do basic validation on this file. # Please add new experiment requests towards the top of this file. # + + +- experiment: 2022-12-15-bug-based + description: "Test running core fuzzers on 5 new bug-based benchmarks." + fuzzers: + - afl + - aflfast + - aflplusplus + - aflsmart + - centipede + - eclipser + - fairfuzz + - honggfuzz + - libafl + - libfuzzer + - mopt + type: bug + benchmarks: + - arrow_parquet-arrow-fuzz + - aspell_aspell_fuzzer + - ffmpeg_ffmpeg_demuxer_fuzzer + - file_magic_fuzzer + - grok_grk_decompress_fuzzer + +- experiment: 2022-12-05-rerun-c + description: "Wingfuzz coverage experiment (compare against core fuzzers)" + fuzzers: + - afl + - aflfast + - aflplusplus + - aflsmart + - entropic + - eclipser + - fairfuzz + - honggfuzz + - lafintel + - libfuzzer + - mopt + - wingfuzz + +- experiment: 2022-12-05-rerun-c + description: "Wingfuzz coverage experiment (compare against core fuzzers)" + fuzzers: + - afl + - aflfast + - aflplusplus + - aflsmart + - entropic + - eclipser + - fairfuzz + - honggfuzz + - lafintel + - libfuzzer + - mopt + - wingfuzz + +- experiment: 2022-12-05-aflpp-cmplog-c + description: "afl++ cmplog enhancements" + fuzzers: + - aflplusplus_cmplog + - aflplusplus_cmplog_r + +- experiment: 2022-12-01-um-c + description: "Try out um prioritize 75 and random again" + fuzzers: + - aflplusplus_um_random + - aflplusplus_um_prioritize_75 + - experiment: 2022-12-05-rerun-b description: "Wingfuzz coverage experiment (compare against core fuzzers)" fuzzers:
Suggestion: Vim command I do not use YAPF so often, so I do not want a mapping. But calling the function directly is inconvenient, too. Therefore I created a Vim command `:YAPF` that can take a range (see comments in code for how to use it).
" map <C-P> :call yapf#YAPF()<cr> " imap <C-P> <c-o>:call yapf#YAPF()<cr> " +" Alternatively, you can call the command YAPF. If you omit the range, +" it will reformat the whole buffer. +" +" example: +" :YAPF " formats whole buffer +" :'<,'>YAPF " formats lines selected in visual mode +" function! yapf#YAPF() range " Determine range to format. let l:line_ranges = a:firstline . '-' . a:lastline @@ -35,3 +42,5 @@ function! yapf#YAPF() range " Reset cursor to first line of the formatted range. call cursor(a:firstline, 1) endfunction + +command! -range=% YAPF <line1>,<line2>call yapf#YAPF()
cabana: fix the column selector always being hidden if settings.chart_column_count is 1 fix column selector
@@ -229,7 +229,7 @@ void ChartsWidget::setColumnCount(int n) { } void ChartsWidget::updateLayout() { - int n = column_count; + int n = columns_cb->count(); for (; n > 1; --n) { if ((n * CHART_MIN_WIDTH + (n - 1) * charts_layout->spacing()) < charts_layout->geometry().width()) break; } @@ -238,6 +238,7 @@ void ChartsWidget::updateLayout() { columns_lb_action->setVisible(show_column_cb); columns_cb_action->setVisible(show_column_cb); + n = std::min(column_count, n); for (int i = 0; i < charts.size(); ++i) { charts_layout->addWidget(charts[charts.size() - i - 1], i / n, i % n); }
Use python logging instead of using print See:
"""Utils related to inline skipping of rules.""" from itertools import product +import logging import ruamel.yaml INLINE_SKIP_FLAG = '# noqa ' +_logger = logging.getLogger(__name__) + def get_rule_skips_from_line(line): """Return list of rule ids skipped via comment on the line of yaml.""" @@ -53,7 +56,7 @@ def append_skipped_rules(pyyaml_data, file_text, file_type): yaml_skip = _append_skipped_rules(pyyaml_data, file_text, file_type) except RuntimeError as exc: # Notify user of skip error, do not stop, do not change exit code - print('Error trying to append skipped rules: {!r}'.format(exc)) + _logger.error('Error trying to append skipped rules: %s', exc) return pyyaml_data return yaml_skip
pe: better handle invalid export filename If the name cannot be parsed, return None as documented. Decode as ASCII, as documented by Microsoft. closes
@@ -454,7 +454,13 @@ class PE(object): ''' if self.IMAGE_EXPORT_DIRECTORY is not None: rawname = self.readAtRva(self.IMAGE_EXPORT_DIRECTORY.Name, 32) - return rawname.split(b'\x00')[0].decode('utf-8') + if not rawname: + return None + + try: + return rawname.partition(b'\x00')[0].decode('ascii') + except UnicodeDecodeError: + return None return None def getImports(self):
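A standalone sketch of the new behaviour, using hypothetical byte strings rather than data read from a real PE file:

```python
def export_name(rawname):
    """Return the export name, or None if it is missing or not ASCII."""
    if not rawname:
        return None
    try:
        # Keep only the bytes before the first NUL, then decode strictly as ASCII.
        return rawname.partition(b"\x00")[0].decode("ascii")
    except UnicodeDecodeError:
        return None

assert export_name(b"KERNEL32.dll\x00garbage") == "KERNEL32.dll"
assert export_name(b"\xff\xfe\x00") is None   # not ASCII -> None
assert export_name(b"") is None               # nothing read -> None
```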
Fix typo: list_size, not list_length Tested-by: Mark Nunberg
@@ -2090,7 +2090,7 @@ class Bucket(_Base): :return: The length of the queue :raise: :cb_exc:`NotFoundError` if the queue does not exist. """ - return self.list_length(key) + return self.list_size(key) def get_attribute(self, key, attrname):
Update generic.txt Moved to ```trickbot```:
@@ -11262,12 +11262,6 @@ dnsfordomains.ru piratesmoker.com -# Reference: https://twitter.com/ffforward/status/1328761489067536384 -# Reference: https://tria.ge/201117-8m75mhtc9x/static1 - -http://194.36.191.186 -info.businesssec.me - # Reference: https://twitter.com/wwp96/status/1328857237452972032 http://185.239.242.117
fix: multiple recorder UI fixes Fix broken list view: a syntax error in the HTML was showing template strings in the output. Improve UX of toggling rows: click anywhere on the same row to toggle instead of a button.
<div class="grid-body"> <div class="rows"> <div class="grid-row" :class="showing == call.index ? 'grid-row-open' : ''" v-for="call in paginated(sorted(grouped(request.calls)))" :key="call.index"> - <div class="data-row row" v-if="showing != call.index" style="display: block;" @click="showing = call.index" > + <div class="data-row row" @click="showing = showing == call.index ? null : call.index" > <div class="row-index col col-xs-1"><span>{{ call.index }}</span></div> <div class="col grid-static-col col-xs-6" data-fieldtype="Code"> <div class="static-area"><span>{{ call.query }}</span></div> <div class="static-area ellipsis text-right">{{ call.exact_copies }}</div> </div> <div class="col col-xs-1"><a class="close btn-open-row"> - <span class="octicon octicon-triangle-down"></span></a> + <span class="octicon" :class="showing == call.index? 'octicon-triangle-up' : 'octicon-triangle-down'"></span></a> </div> </div> <div class="recorder-form-in-grid" v-if="showing == call.index"> <div class="grid-form-heading" @click="showing = null"> <div class="toolbar grid-header-toolbar"> <span class="panel-title">{{ __("SQL Query") }} #<span class="grid-form-row-index">{{ call.index }}</span></span> - <div class="btn btn-default btn-xs pull-right" style="margin-left: 7px;"> - <span class="hidden-xs octicon octicon-triangle-up"></span> - </div> </div> </div> <div class="grid-form-body"> </div> <div class="frappe-control"> <div class="form-group"> - <div class="clearfix"><label class="control-label"{{ __("Stack Trace") }}</label></div> + <div class="clearfix"><label class="control-label">{{ __("Stack Trace") }}</label></div> <div class="control-value like-disabled-input for-description" style="overflow:auto"> <table class="table table-striped"> <thead>
June 2018 Disco Pane content update. Fixes
@@ -16,15 +16,15 @@ from olympia.discovery.utils import replace_extensions # Represents a dummy version of `olympia.discovery.data` def get_dummy_addons(): return OrderedDict([ - (16349, addon_factory(id=16349, type=amo.ADDON_PERSONA, - description=u'16349')), - (9609, addon_factory(id=9609, type=amo.ADDON_EXTENSION)), - (5890, addon_factory(id=5890, type=amo.ADDON_EXTENSION)), - (46852, addon_factory(id=46852, type=amo.ADDON_PERSONA)), - (954390, addon_factory(id=954390, type=amo.ADDON_EXTENSION)), + (42019, addon_factory(id=42019, type=amo.ADDON_PERSONA, + description=u'42019')), + (506646, addon_factory(id=506646, type=amo.ADDON_EXTENSION)), + (850407, addon_factory(id=850407, type=amo.ADDON_EXTENSION)), + (553386, addon_factory(id=553386, type=amo.ADDON_PERSONA)), + (445852, addon_factory(id=445852, type=amo.ADDON_EXTENSION)), (93451, addon_factory(id=93451, type=amo.ADDON_EXTENSION)), - (963836, addon_factory(id=963836, type=amo.ADDON_PERSONA, - description=u'963836')), + (482976, addon_factory(id=482976, type=amo.ADDON_PERSONA, + description=u'482976')), # And now the china edition addons (492244, addon_factory(id=492244, type=amo.ADDON_PERSONA, description=u'492244')),
help_docs: Add 'About Zulip' to relative help links. Adds the 'About Zulip' gear menu option to the available relative link patterns used in help center documentation.
@@ -26,6 +26,7 @@ gear_info = { "plans": ["Plans and pricing", "/plans"], "billing": ["Billing", "/billing"], "invite": ["Invite users", "/#invite"], + "about-zulip": ["About Zulip", "/#about-zulip"], } gear_instructions = """
[meta] update backport config for 7.12 branch This commit updates the sqren/backport config to handle the `7.12` branch. Also remove the `7.10` and `7.11` branches.
{ "upstream": "elastic/helm-charts", - "targetBranchChoices": ["6.8", "7.10", "7.11", "7.x"], + "targetBranchChoices": [ + "6.8", + "7.12", + "7.x" + ], "all": true, "prFilter": "label:need-backport", "targetPRLabels": ["backport"],
packer: Only pull in kernel image, not headers Pulling in linux-{{ variant }} will pull in both an image and headers, with the headers pulling in a compiler.
@@ -76,8 +76,8 @@ function upgrade_system { function configure_linux_kernel { if [ -n "${kernel_version}" ]; then - apt-get install -y \ - "linux-${kernel_version}" "linux-tools-${kernel_version}" + apt-get install -y "linux-image-${kernel_version}" \ + "linux-tools-${kernel_version}" fi # Disable IPv6
C API: remove a memory leak in the context constructor Now that the C wrapper to create a context uses the internal API, there is no automatic dec-ref anymore (from the public Analysis_Context controlled type) and thus no need to manually inc-ref it. TN:
@@ -100,17 +100,12 @@ package body ${ada_lib_name}.Implementation.C is (if Charset = Null_Ptr then ${string_repr(ctx.default_charset)} else Value (Charset)); - - Internal_Ctx : constant Internal_Context := Create_Context + begin + return Create_Context (Charset => C, Unit_Provider => Unwrap_Private_Provider (Unit_Provider), With_Trivia => With_Trivia /= 0, Tab_Stop => Natural (Tab_Stop)); - begin - -- Create a new ownership share for the result since the one Context - -- owns will disappear once we return. - Inc_Ref (Internal_Ctx); - return Internal_Ctx; end; exception when Exc : others =>
Simplify getter code for attributes and enumerations A simple getattr is faster than a getattr in a try/except block. Fixes
@@ -198,11 +198,7 @@ class attribute(umlproperty, Generic[T]): return f"<attribute {self.name}: {self.type} = {self.default}>" def _get(self, obj): - try: - v: str | int | None = getattr(obj, self._name) - return v - except AttributeError: - return self.default + return getattr(obj, self._name, self.default) def _set(self, obj, value): if ( @@ -257,10 +253,7 @@ class enumeration(umlproperty): return f"<enumeration {self.name}: {self.values} = {self.default}>" def _get(self, obj): - try: - return getattr(obj, self._name) - except AttributeError: - return self.default + return getattr(obj, self._name, self.default) def load(self, obj, value): if value not in self.values:
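A small illustration of the pattern, on a toy class rather than the real attribute descriptor:

```python
class Holder:
    default = 0

    def get(self):
        # One lookup with a fallback instead of raising and catching AttributeError.
        return getattr(self, "_value", self.default)

h = Holder()
assert h.get() == 0     # attribute never set: fall back to the default
h._value = 42
assert h.get() == 42    # attribute set: returned directly
```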
fix [web-forms]: fix add_new_row for child table Add df.data in add_new_row when frm is not available. Fix the get_data function to send df.data if frm is not available. Modify the toggle for the empty state.
@@ -58,7 +58,7 @@ export default class Grid { <div class="grid-heading-row"></div> <div class="grid-body"> <div class="rows"></div> - <div class="grid-empty text-center hide">${__("No Data")}</div> + <div class="grid-empty text-center">${__("No Data")}</div> </div> </div> <div class="mt-2 small form-clickable-section grid-footer"> @@ -243,7 +243,7 @@ export default class Grid { this.grid_rows_by_docname[d.name] = grid_row; } - this.wrapper.find(".grid-empty").toggleClass("hide", !!data.length); + this.wrapper.find(".grid-empty").toggleClass("hidden", Boolean(data.length)); // toolbar this.setup_toolbar(); @@ -363,7 +363,7 @@ export default class Grid { get_data() { var data = this.frm ? this.frm.doc[this.df.fieldname] || [] - : this.get_modal_data(); + : this.df.data || this.get_modal_data(); data.sort(function(a, b) { return a.idx - b.idx}); return data; } @@ -479,6 +479,9 @@ export default class Grid { this.frm.script_manager.trigger(this.df.fieldname + "_add", d.doctype, d.name); this.refresh(); } else { + if (!this.df.data) { + this.df.data = []; + } this.df.data.push({name: "batch " + (this.df.data.length+1), idx: this.df.data.length+1}); this.refresh(); }
upd: The Scheduler logger is now set similarly to the Task's logger.
@@ -62,11 +62,13 @@ class Scheduler: """ - _logger_basename = __name__ - logger = logging.getLogger(_logger_basename) + _logger_basename = "atlas.scheduler" parameters = GLOBAL_PARAMETERS # interfacing the global parameters. TODO: Support for multiple schedulers - def __init__(self, tasks, maintainer_tasks=None, shut_condition=None, min_sleep=0.1, max_sleep=600, parameters=None, name=None): + def __init__(self, tasks, maintainer_tasks=None, + shut_condition=None, + min_sleep=0.1, max_sleep=600, + parameters=None, logger=None, name=None): """[summary] Arguments: @@ -85,12 +87,13 @@ class Scheduler: self.min_sleep = min_sleep self.max_sleep = max_sleep - self.task_returns = Parameters() + self.task_returns = Parameters() # TODO if parameters is not None: self.parameters.update(parameters) self.name = name if name is not None else id(self) self._register_instance() + self.logger = logger def _register_instance(self): if self.name in _SCHEDULERS: @@ -626,3 +629,20 @@ class MultiScheduler(Scheduler): self.handle_return() else: self.terminate_all(reason="shutdown") + +# Logging + @property + def logger(self): + return self._logger + + @logger.setter + def logger(self, logger): + if logger is None: + # Get class logger (default logger) + logger = logging.getLogger(self._logger_basename) + + if not logger.name.startswith(self._logger_basename): + raise ValueError(f"Logger name must start with '{self._logger_basename}' as session finds loggers with names") + + # TODO: Use TaskAdapter to relay the scheduler name? + self._logger = logger \ No newline at end of file
simplified doc dict creation .update() is purely for partial explicit updates
@@ -365,13 +365,9 @@ class DocType(ObjectBase): # update given fields locally merge(self._d_, fields) - # prepare data for ES - doc = self.to_dict() - # if fields were given: partial update - if fields: doc = dict( - (k, doc.get(k)) + (k, self.to_dict()) for k in fields.keys() )
Test that error using compound models is working I added a test function to check that the error for using compound models (Added in ) works.
@@ -185,6 +185,22 @@ class TestJointFitter: class TestLinearLSQFitter: + def test_compound_model_raises_error(self): + """Test that if an user tries to use a compound model, raises an error""" + + with pytest.raises(ValueError) as excinfo: + init_model1 = models.Polynomial1D(degree=2, c0=[1, 1], n_models=2) + init_model2 = models.Polynomial1D(degree=2, c0=[1, 1], n_models=2) + init_model_comp = init_model1 + init_model2 + + x = np.arange(10) + y_expected = init_model_comp(x, model_set_axis=False) + with NumpyRNGContext(_RANDOM_SEED): + y = y_expected + np.random.normal(0, 0.01, size=y_expected.shape) + fitter = LinearLSQFitter() + fitted_model = fitter(init_model_comp, x, y) + assert "Model must be simple, not compound" in str(excinfo.value) + def test_chebyshev1D(self): """Tests fitting a 1D Chebyshev polynomial to some real world data."""
Update README.md I corrected your grammatical mistakes
-## `trax`: Train Neural Nets with JAX +## `Trax`: Train Neural Nets with JAX ![train tracks](https://images.pexels.com/photos/461772/pexels-photo-461772.jpeg?dl&fit=crop&crop=entropy&w=640&h=426) -### `trax`: T2T Radically Simpler with JAX +### `Trax`: T2T Radically Simpler with JAX *Why?* Because T2T has gotten too complex. We are simplifying the main code too, but we wanted to try a more radical step. So you can write code as in pure @@ -53,7 +53,7 @@ python -m trax.trainer \ --config_file=$PWD/trax/configs/transformer_lm1b_8gb.gin ``` -### How `trax` differs from T2T +### How `Trax` differs from T2T * Configuration is done with [`gin`](https://github.com/google/gin-config). `trainer.py` takes `--config_file` as well as `--config` for file overrides.
Fix use of custom fields in web HG-- branch : feature/microservices
@@ -15,6 +15,7 @@ import tornado.wsgi import django.core.handlers.wsgi ## NOC modules from noc.core.service.base import Service +from noc.main.models.customfield import CustomField class WebService(Service): @@ -42,6 +43,8 @@ class WebService(Service): from noc.lib.app.site import site site.service = self site.autodiscover() + # Install Custom fields + CustomField.install_fields() class NOCWSGIContainer(tornado.wsgi.WSGIContainer):
fix: disabled dashboard chart form fixes Don't allow filters to be edited. Don't show empty sections.
@@ -21,8 +21,10 @@ frappe.ui.form.on('Dashboard Chart', { refresh: function(frm) { frm.chart_filters = null; + frm.is_disabled = !frappe.boot.developer_mode && frm.doc.is_standard; - if (!frappe.boot.developer_mode && frm.doc.is_standard) { + if (!frm.is_disabled) { + !frm.doc.custom_options && frm.set_df_property('chart_options_section', 'hidden', 1); frm.disable_form(); } @@ -333,6 +335,7 @@ frappe.ui.form.on('Dashboard Chart', { } table.on('click', () => { + frm.is_disabled && frappe.throw(__('Cannot edit filters for standard charts')); let dialog = new frappe.ui.Dialog({ title: __('Set Filters'),
Generate the needed configuration files for devstack Generate the needed configuration files for devstack. Closes-Bug:
@@ -95,8 +95,9 @@ function configure_cloudkitty { sudo mkdir -m 755 -p $CLOUDKITTY_API_LOG_DIR sudo chown $STACK_USER $CLOUDKITTY_API_LOG_DIR + touch $CLOUDKITTY_CONF + cp $CLOUDKITTY_DIR$CLOUDKITTY_CONF_DIR/policy.json $CLOUDKITTY_CONF_DIR - cp $CLOUDKITTY_DIR$CLOUDKITTY_CONF.sample $CLOUDKITTY_CONF cp $CLOUDKITTY_DIR$CLOUDKITTY_CONF_DIR/api_paste.ini $CLOUDKITTY_CONF_DIR iniset_rpc_backend cloudkitty $CLOUDKITTY_CONF DEFAULT
fix(crypto_js_spot): fix crypto_js_spot interface fix crypto_js_spot interface
@@ -10,8 +10,6 @@ from datetime import datetime import pandas as pd import requests -from akshare.economic.cons import bitcoin_url, bitcoin_payload, bitcoin_headers - def crypto_js_spot() -> pd.DataFrame: """ @@ -19,10 +17,28 @@ def crypto_js_spot() -> pd.DataFrame: https://datacenter.jin10.com/reportType/dc_bitcoin_current :return: pandas.DataFrame """ - bit_payload = bitcoin_payload.copy() - bit_payload.update({"_": int(time.time() * 1000)}) - bit_payload.update({"_": int(time.time() * 1000)}) - r = requests.get(bitcoin_url, params=bit_payload, headers=bitcoin_headers) + url = "https://datacenter-api.jin10.com/crypto_currency/list" + params = { + "_": int(time.time() * 1000), + } + headers = { + "accept": "application/json, text/javascript, */*; q=0.01", + 'accept-encoding': 'gzip, deflate, br', + 'accept-language': 'zh-CN,zh;q=0.9,en;q=0.8', + 'origin': 'https://datacenter.jin10.com', + 'referer': 'https://datacenter.jin10.com/', + 'sec-ch-ua': '"Google Chrome";v="105", "Not)A;Brand";v="8", "Chromium";v="105"', + 'sec-ch-ua-mobile': '?0', + 'sec-ch-ua-platform': '"macOS"', + 'sec-fetch-dest': 'empty', + 'sec-fetch-mode': 'cors', + 'sec-fetch-site': 'same-site', + 'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/105.0.0.0 Safari/537.36', + 'x-app-id': 'rU6QIu7JHe2gOUeR', + 'x-version': '1.0.0' + } + r = requests.get(url, params=params, headers=headers) + r.encoding = 'utf-8' data_json = r.json() data_df = pd.DataFrame(data_json["data"]) data_df["reported_at"] = pd.to_datetime(data_df["reported_at"])
script/find_max_server: fix the import of `ClientSession` This is nothing special, it just allows the module to run without throwing an error on the import. From ``` from lbry.wallet.client.basenetwork import ClientSession ``` To ``` from lbry.wallet.network import ClientSession ```
@@ -2,7 +2,7 @@ import time import asyncio import random from argparse import ArgumentParser -from lbry.wallet.client.basenetwork import ClientSession +from lbry.wallet.network import ClientSession class AgentSmith(ClientSession):
Tests: Fixups for standalone tests * Passlib outputs import warnings, ignore them. * No need to check against Python 3.2 anymore, not supported by Nuitka anyway. * Fix expression execution, use actual expression value, use the Python version under test, not the one executing the runner.
@@ -277,12 +277,16 @@ def checkRequirements(filename): expression = line[33:] with open(os.devnull, "w") as devnull: result = subprocess.call( - (sys.executable, "-c" "import sys, os; %s" % expression), + ( + os.environ["PYTHON"], + "-c", + "import sys, os; sys.exit(not bool(%s))" % expression, + ), stdout=devnull, stderr=subprocess.STDOUT, ) if not result == 0: - return (False, expression + "evaluated to false") + return (False, "Expression '%s' evaluated to false" % expression) elif line[21:30] == "imports: ": imports_needed = line[30:].rstrip().split(",") @@ -368,12 +372,12 @@ for filename in sorted(os.listdir(".")): elif filename == "GtkUsing.py": # Don't test on platforms not supported by current Debian testing, and # which should be considered irrelevant by now. - if python_version.startswith("2.6") or python_version.startswith("3.2"): + if python_version.startswith("2.6"): reportSkip("irrelevant Python version", ".", filename) continue # For the warnings. - extra_flags.append("ignore_stderr") + extra_flags.append("ignore_warnings") elif filename.startswith("Win"): if os.name != "nt": @@ -389,7 +393,7 @@ for filename in sorted(os.listdir(".")): elif filename == "FlaskUsing.py": # For the warnings. - extra_flags.append("ignore_stderr") + extra_flags.append("ignore_warnings") elif filename == "NumpyUsing.py": # TODO: Disabled for now. @@ -403,7 +407,11 @@ for filename in sorted(os.listdir(".")): elif filename == "OpenGLUsing.py": # For the warnings. - extra_flags.append("ignore_stderr") + extra_flags.append("ignore_warnings") + + elif filename == "PasslibUsing.py": + # For the warnings. + extra_flags.append("ignore_warnings") my_print("Consider output of recursively compiled program:", filename)
mypy: annotate stack variable Mypy required a definition for this variable. Also update the function annotation to the correct return type, to be compatible with the annotation of the stack variable.
@@ -17,7 +17,7 @@ import requests from requests.exceptions import SSLError, HTTPError, RetryError import shutil import tempfile -from typing import Iterator, Sequence, Dict, Union, List, BinaryIO, Tuple, Optional +from typing import Any, Iterator, Sequence, Dict, Union, List, BinaryIO, Tuple, Optional import logging import uuid import yaml @@ -1936,13 +1936,13 @@ def read_content_sets(workflow): return read_user_config_file(workflow, REPO_CONTENT_SETS_CONFIG) -def terminal_key_paths(obj: dict) -> Iterator[Sequence]: +def terminal_key_paths(obj: dict) -> Iterator[Tuple[str, ...]]: """Generates path to all terminal keys of nested dicts by yielding tuples of nested dict keys represented as path From `{'a': {'b': {'c': 1, 'd': 2}}}` yields `('a', 'b', 'c')`, `('a', 'b', 'd')` """ - stack = [ + stack: List[Tuple[Any, Tuple[str, ...]]] = [ (obj, tuple()) ] while stack:
help_docs: Update `invite-new-users` for dropdown option. Corrects an out of date dropdown option in organizational settings for enabling email sign-up.
@@ -53,7 +53,8 @@ invitation, but require them to authenticate via LDAP. 1. Toggle **Invitations are required for joining this organization**. 1. Set **Restrict email domains of new users?** to either - **Don't allow disposable email addresses** (recommended) or **No**. + **Don't allow disposable email addresses** (recommended) + or **No restrictions**. 1. Click **Save changes**.
Gt/make wse field * add papers * Revert "add papers" This reverts commit * make water surface elevation a field * node, not link (d'oh)
@@ -138,6 +138,14 @@ class LinearDiffusionOverlandFlowRouter(Component): "mapping": "link", "doc": "Downstream gradient of the water surface.", }, + "water_surface__elevation": { + "dtype": float, + "intent": "out", + "optional": False, + "units": "m", + "mapping": "node", + "doc": "Elevation of the water surface.", + }, } def __init__( @@ -203,8 +211,7 @@ class LinearDiffusionOverlandFlowRouter(Component): self._vel = grid.at_link["water__velocity"] self._disch = grid.at_link["water__specific_discharge"] self._wsgrad = grid.at_link["water_surface__gradient"] - - self._water_surf_elev = np.zeros(grid.number_of_nodes) + self._water_surf_elev = grid.at_node["water_surface__elevation"] self._inactive_links = grid.status_at_link == grid.BC_LINK_IS_INACTIVE
Fix arange in docs. In [the tutorial](https://pytorch.org/tutorials/beginner/blitz/neural_networks_tutorial.html), `torch.arange` is being used as a FloatTensor. This behavior was changed in to return a LongTensor. I removed the `torch.arange` usage and instead construct a tensor with `torch.randn`
@@ -156,7 +156,7 @@ out.backward(torch.randn(1, 10)) # For example: output = net(input) -target = torch.arange(1, 11) # a dummy target, for example +target = torch.randn(10) # a dummy target, for example target = target.view(1, -1) # make it the same shape as output criterion = nn.MSELoss()
Procedures: switch to ".backup" function for SQLite DB backup Instead of copying the database, we're now using a special ".backup" SQLite client function designed for creating database copies.
@@ -127,9 +127,9 @@ Execute the following commands on your local machine, not production. 7. Create local database backup: - $ cp db.sqlite3 backup-before-upgrade-to-v2.X.Y.sqlite3 + $ sqlite3 db.sqlite3 ".backup DB_backups/backup-before-upgrade-to-v2.X.Y.sqlite3" - Do not use $AMY_VERSION environment variable because it's not defined here. + Do not use `$AMY_VERSION` environment variable because it's not defined here. 8. Fetch newer AMY source code: @@ -142,7 +142,7 @@ Execute the following commands on your local machine, not production. 10. Test migrations: - $ cp db.sqlite3 migration-test.sqlite3 + $ sqlite3 db.sqlite3 ".backup migration-test.sqlite3" $ AMY_DB_FILENAME=migration-test.sqlite3 ./manage.py migrate $ rm migration-test.sqlite3
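The procedure above uses the sqlite3 CLI; as an aside, the same online-backup mechanism is available from Python (3.7+) via `sqlite3.Connection.backup`. A sketch with hypothetical file names:

```python
import sqlite3

src = sqlite3.connect("db.sqlite3")
dst = sqlite3.connect("backup-before-upgrade.sqlite3")  # hypothetical backup file name
with dst:
    # Same online-backup mechanism the ".backup" dot-command uses, so it is
    # safe even while other connections are reading the database.
    src.backup(dst)
dst.close()
src.close()
```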
update release note Added `Get Session Id`
@@ -5,6 +5,7 @@ Release Notes ----- - Added message param to keywords `Location Should Be` and `Location Should Contain` to display custom error message [taniabhullar] - Added `Element Attribute Value Should Be` verifies element identified by locator contains expected attribute value.. [brian-mathews] +- Added `Get Session Id` keyword to get remote webdriver session id [ilfirinpl] 3.1.1 -----
Change major marker to black This makes it easier to visualize and also keeps compatible with older versions.
@@ -573,7 +573,7 @@ class Shape(Results): y=[orbit.major_x], z=[orbit.major_y], mode="markers", - marker=dict(color=orbit.color, symbol="x", size=2), + marker=dict(color="black", symbol="x", size=2), name="node {}".format(orbit.node), showlegend=False, customdata=np.array(
Typo fix Crismon -> Crimson
@@ -373,7 +373,7 @@ Expert Mode (and Crimson Force Field) When documentation or on-screen text is written for someone with considerable knowledge or expertise, instead of being designed for a new learner. In general, try to state things simply rather than speaking to just the "experts" reading the the text. -If something is extremely difficult to understand, and yet still justified in the mind of the writer, we call it "Crimson Force Field". This term is intended to evoke the emotional response of coming across something that is difficult to understand, so writers of Crimson Force Field material can empathize with the readers. Crimson Force Field is drawn from an esoteric episode of Star Trek and it is unlikely anyone but the originator of the term understands its complete meaning. Crismon Force Field is itself Crismon Force Field. +If something is extremely difficult to understand, and yet still justified in the mind of the writer, we call it "Crimson Force Field". This term is intended to evoke the emotional response of coming across something that is difficult to understand, so writers of Crimson Force Field material can empathize with the readers. Crimson Force Field is drawn from an esoteric episode of Star Trek and it is unlikely anyone but the originator of the term understands its complete meaning. Crimson Force Field is itself Crimson Force Field. Help Wanted ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Dep-up: set new deployment attributes * Dep-up: set new deployment attributes Set outputs, workflows, etc, here. * make excuses * drop unused line * commit a newline!
@@ -161,6 +161,21 @@ def create_new_instances(*, update_id): ) +def set_deployment_attributes(*, update_id): + client = get_rest_client() + dep_up = client.deployment_updates.get(update_id) + client.deployments.set_attributes( + dep_up.deployment_id, + blueprint_id=dep_up.new_blueprint_id, + workflows=dep_up.deployment_plan['workflows'], + outputs=dep_up.deployment_plan['outputs'] + ) + # in the currently-running execution, update the current context as well, + # so that later graphs downlod scripts from the new blueprint. Unfortunate, + # but no public method for this just yet + workflow_ctx._context['blueprint_id'] = dep_up.new_blueprint_id + + def _perform_update_graph(ctx, update_id, **kwargs): """Make a tasks-graph that performs the deployment-update. @@ -170,6 +185,9 @@ def _perform_update_graph(ctx, update_id, **kwargs): graph = ctx.graph_mode() seq = graph.sequence() seq.add( + ctx.local_task(set_deployment_attributes, kwargs={ + 'update_id': update_id, + }, total_retries=0), ctx.local_task(create_new_nodes, kwargs={ 'update_id': update_id, }, total_retries=0),
Increase service timeout to 5 minutes This punts on the need for engines to write retry loops around all service calls. Ideally, this should be configurable, but this will require modifying the Driver trait to allow customization.
@@ -38,6 +38,7 @@ use std::collections::HashMap; use std::sync::{Arc, mpsc::{channel, RecvTimeoutError, Sender}}; const REGISTER_TIMEOUT: u64 = 300; +const SERVICE_TIMEOUT: u64 = 300; /// Generates a random correlation id for use in Message fn generate_correlation_id() -> String { @@ -77,7 +78,7 @@ impl Driver for ZmqDriver { update_receiver, Box::new(ZmqService::new( validator_sender_clone, - ::std::time::Duration::from_secs(10), + ::std::time::Duration::from_secs(SERVICE_TIMEOUT), engine.name(), engine.version(), )),
Removed redefinition of method 'filename' fixes
@@ -49,15 +49,6 @@ class Icon: self._impl = factory.Icon(interface=self) return self._impl - @property - def filename(self): - if self.system: - toga_dir = os.path.dirname(os.path.dirname(__file__)) - return os.path.join(toga_dir, 'resources', self.path) - else: - from toga.app import App - return os.path.join(App.app_dir, self.path) - @classmethod def load(cls, path_or_icon, default=None): if path_or_icon:
Remove div_t usage This was an incredibly minor optimisation that had a ~2% speedup in some cases on Linux/Mac, but was preventing the build in some configurations on Windows due to failed type conversions. That's not a worthwhile trade-off for some functions that aren't even used that heavily in QuTiP code.
#cython: language_level=3 #cython: boundscheck=False, wraparound=False, initializedcheck=False, cdivision=True -from libc.stdlib cimport div, div_t from libc.string cimport memcpy, memset cimport cython @@ -33,7 +32,6 @@ cpdef CSR reshape_csr(CSR matrix, idxint n_rows_out, idxint n_cols_out): cdef size_t ptr, row_in, row_out=0, loc, cur=0 cdef size_t n_rows_in=matrix.shape[0], n_cols_in=matrix.shape[1] cdef idxint nnz = csr.nnz(matrix) - cdef div_t res cdef CSR out _reshape_check_input(matrix, n_rows_out, n_cols_out) out = csr.empty(n_rows_out, n_cols_out, nnz) @@ -45,23 +43,17 @@ cpdef CSR reshape_csr(CSR matrix, idxint n_rows_out, idxint n_cols_out): for row_in in range(n_rows_in): for ptr in range(matrix.row_index[row_in], matrix.row_index[row_in+1]): loc = cur + matrix.col_index[ptr] - # This stdlib.div method is a little bit faster when working - # with very dense large matrices, and doesn't make a difference - # for smaller ones. - res = div(loc, n_cols_out) - out.row_index[res.quot + 1] += 1 - out.col_index[ptr] = res.rem + out.row_index[loc // n_cols_out + 1] += 1 + out.col_index[ptr] = loc % n_cols_out cur += n_cols_in for row_out in range(n_rows_out): out.row_index[row_out + 1] += out.row_index[row_out] return out -# We have to use a signed integer type because the standard library doesn't -# provide overloads for unsigned types. cdef inline idxint _reshape_dense_reindex(idxint idx, idxint size): - cdef div_t res = div(idx, size) - return res.quot + res.rem + return (idx // size) + (idx % size) + cpdef Dense reshape_dense(Dense matrix, idxint n_rows_out, idxint n_cols_out): _reshape_check_input(matrix, n_rows_out, n_cols_out)
Fix OverflowError when displaying Stats objects This error occurred for Stats with an infinite error_of_mean. Now, this case is treated in the same way as NaN (by using a default display precision).
@@ -8,7 +8,7 @@ from . import total_size as _total_size def _format_decimal(value, std): - if not math.isnan(std): + if math.isfinite(std): decimals = max(int(_np.ceil(-_np.log10(std))), 0) else: decimals = 8
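A quick check of why `isfinite` is the right predicate here: `isnan` lets infinity through to the log10/ceil path, while `isfinite` routes both NaN and infinity to the default precision. The values below are illustrative, not taken from the library:

```python
import math

def decimals_for(std, default=8):
    if math.isfinite(std):
        return max(int(math.ceil(-math.log10(std))), 0)
    return default  # NaN and +/-inf both get the default display precision

assert decimals_for(0.01) == 2
assert decimals_for(float("nan")) == 8
assert decimals_for(float("inf")) == 8
```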
Reenable multiprocessing preserve sharing tests on ASAN. Summary: This issue was fixed in Fixes Pull Request resolved:
@@ -249,8 +249,6 @@ class TestMultiprocessing(TestCase): self._test_sharing(repeat=TEST_REPEATS) @unittest.skipIf(platform == 'darwin', "file descriptor strategy is not supported on macOS") - @unittest.skipIf(TEST_WITH_ASAN, - "test_fd_preserve_sharing is known buggy, see https://github.com/pytorch/pytorch/issues/5311") def test_fd_preserve_sharing(self): self._test_preserve_sharing(repeat=TEST_REPEATS) @@ -264,8 +262,6 @@ class TestMultiprocessing(TestCase): with fs_sharing(): self._test_sharing(repeat=TEST_REPEATS) - @unittest.skipIf(TEST_WITH_ASAN, - "test_fs_preserve_sharing is known buggy, see https://github.com/pytorch/pytorch/issues/5311") def test_fs_preserve_sharing(self): with fs_sharing(): self._test_preserve_sharing(repeat=TEST_REPEATS)
Fix a bug in generation of explanation for D-Bus NoReply error Previously, evaluating the "%" operation caused a TypeError to be raised because the format string did not contain any formatting arguments.
@@ -194,11 +194,10 @@ def _interpret_errors(errors): isinstance(error, dbus.exceptions.DBusException) and error.get_dbus_name() == "org.freedesktop.DBus.Error.NoReply" ): # pragma: no cover - fmt_str = ( + return ( "stratis attempted communication with the daemon, stratisd, " "over the D-Bus, but stratisd did not respond in the allowed time." ) - return fmt_str % error # The goal is to have an explanation for every error chain. If there is # none, then this will rapidly be fixed, so it will be difficult to
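A minimal reproduction of the original bug (message text abbreviated, error object hypothetical):

```python
fmt_str = "stratisd did not respond in the allowed time."  # no % placeholders
error = Exception("timeout")

try:
    fmt_str % error   # formatting with zero placeholders but one argument
except TypeError as exc:
    print(exc)        # "not all arguments converted during string formatting"

# Returning the plain string, as the fix does, avoids the % operation entirely.
```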
add necessary cast for Py3.7 see
@@ -228,14 +228,18 @@ static CYTHON_INLINE char* __Pyx_PyObject_AsStringAndSize(PyObject* o, Py_ssize_ if (PyUnicode_IS_ASCII(o)) { // cached for the lifetime of the object *length = PyUnicode_GET_LENGTH(o); - return PyUnicode_AsUTF8(o); + // Py3.7 returns a "const char*", need to cast to "char*" for backwards compatibility + // see https://bugs.python.org/issue28769 + return (char*) PyUnicode_AsUTF8(o); } else { // raise the error PyUnicode_AsASCIIString(o); return NULL; } #else /* __PYX_DEFAULT_STRING_ENCODING_IS_ASCII */ - return PyUnicode_AsUTF8AndSize(o, length); + // Py3.7 returns a "const char*", need to cast to "char*" for backwards compatibility + // see https://bugs.python.org/issue28769 + return (char*) PyUnicode_AsUTF8AndSize(o, length); #endif /* __PYX_DEFAULT_STRING_ENCODING_IS_ASCII */ #endif /* PY_VERSION_HEX < 0x03030000 */ } else
Disable some pylint rules * Disable rules about numbers of variables and args, because neural network definitions and algorithm hyperparameters naturally create lots of attributes, locals, and function arguments. * Disable rules around naming because flake8 already does this (and is better at it).
@@ -5,5 +5,13 @@ load-plugins = pylint.extensions.docparams, pylint.extensions.docstyle # Go as fast as you can jobs = 0 +[MESSAGES CONTROL] +disable = + # Algorithms and neural networks generally have a lot of variables + too-many-instance-attributes, + too-many-arguments, + too-many-locals, + invalid-name, # let flake8 handle this + [REPORTS] msg-template = {path}:{symbol}:{line:3d},{column}: {msg}
[bugfix] re-enable tests for TestInterwikidataBot.test_without_repo Phase 1 of Wiktionary support has been live on Wikidata since 20th June. Now Wiktionary pages can be linked with items for all the Wiktionary namespaces (except main, citations, user and talk). Use wikitech for the tests instead.
@@ -47,7 +47,7 @@ class TestInterwikidataBot(SiteAttributeTestCase): 'code': 'fa', }, 'wt': { - 'family': 'wiktionary', + 'family': 'wikitech', 'code': 'en', }, } @@ -89,16 +89,16 @@ class TestInterwikidataBot(SiteAttributeTestCase): def test_without_repo(self): """Test throwing error when site does not have a data repo.""" wt_page = pywikibot.Page(self.wt, 'User:Ladsgroup') - self.assertRaises(ValueError, DummyBot, generator=[wt_page], site=self.wt) + self.assertRaises(ValueError, DummyBot, generator=[wt_page], + site=self.wt) - fa_wiktionary = pywikibot.Site('fa', 'wiktionary') self.assertRaisesRegex( ValueError, - r'wiktionary:fa does not have a data repository, ' + r'wikitech:en does not have a data repository, ' r'use interwiki\.py instead.', interwikidata.IWBot, - generator=[pywikibot.Page(fa_wiktionary, 'User:Dalba')], - site=fa_wiktionary, + generator=[pywikibot.Page(self.wt, 'User:Dalba')], + site=self.wt, )
[fix] irc: Print skipped line when skipping This simplifies debugging when trying to add expected lines to ignore.
@@ -689,7 +689,7 @@ class IRCConnection(IRCBot): # Generate the entry and process it through the linematched rules if not match: - log.error('Failed to parse message. Skipping.') + log.error('Failed to parse message. Skipping: %s', line) continue entry.update(match)
Transformer: fix the mock value being an object. Other changes: improve the log to dump variables from the transformer for debugging.
@@ -39,7 +39,7 @@ class Transformer(subclasses.BaseClassWithRunbookMixin): # create mock up variables and validate output_names = self._output_names # add prefix - variables = {x: VariableEntry(x, "mock value") for x in output_names} + variables = {x: "mock value" for x in output_names} else: self._log.info("transformer is running.") variables = self._internal_run() @@ -52,6 +52,10 @@ class Transformer(subclasses.BaseClassWithRunbookMixin): del unmatched_rename[name] name = self.rename[name] results[name] = VariableEntry(name, value) + dry_run_string = "" + if is_dry_run: + dry_run_string = "(dry run)" + self._log.debug(f"{dry_run_string}returned variables: {[x for x in results]}") if unmatched_rename: raise LisaException(f"unmatched rename variable: {unmatched_rename}") return results
CompiledType.nullexpr: add missing rtype annotation TN:
@@ -372,6 +372,8 @@ class CompiledType(object): Return a string to be used in code generation for "null" expressions. Must be overriden in subclasses. + + :rtype: str """ raise NotImplementedError()
solvers: work around apparently broken code in _frobenius() I don't think it's fully correct now (the series order n=n+1 seems to be a heuristic), but the original code is meaningless. Perhaps it was assumed that tseries is an Add instance and the author (wrongly) attempted to extract the non-Order polynomial part from the tseries.
@@ -3792,16 +3792,10 @@ def _frobenius(n, m, p0, q0, p, q, x0, x, c, check=None): numsyms = [next(numsyms) for i in range(n + 1)] serlist = [] for ser in [p, q]: - # Order term not present - if ser.is_polynomial(x) and Poly(ser, x).degree() <= n: - if x0: - ser = ser.subs({x: x + x0}) - dict_ = Poly(ser, x).as_dict() - # Order term present - else: + if x0 != 0 or not ser.is_polynomial(x) or Poly(ser, x).degree() > n: tseries = series(ser, x=x, x0=x0, n=n + 1) - # Removing order - dict_ = Poly(list(ordered(tseries.args))[: -1], x).as_dict() + ser = tseries.removeO() + dict_ = Poly(ser, x).as_dict() # Fill in with zeros, if coefficients are zero. for i in range(n + 1): if (i,) not in dict_:
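A small SymPy illustration of the `removeO()` approach; the example series is chosen arbitrarily and is not from the ODE solver:

```python
from sympy import Poly, series, sin, symbols

x = symbols("x")
tseries = series(sin(x) / x, x, 0, 4)   # 1 - x**2/6 + O(x**4)
ser = tseries.removeO()                 # drop the Order term cleanly
dict_ = Poly(ser, x).as_dict()
print(dict_)                            # e.g. {(2,): -1/6, (0,): 1}
```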
CollectionSingleton: document the `coerce_null` constructor kwarg TN:
@@ -701,6 +701,8 @@ class CollectionSingleton(AbstractExpression): """ :param AbstractExpression expr: The expression representing the single element to create the collection from. + :param bool coerce_null: If False, always return a 1-sized array. + Otherwise, return an empty array when `expr` is null. """ super(CollectionSingleton, self).__init__() self.expr = expr
fix: Consistency in `get_role_permissions` return value Return value contains an `if_owner` property in the object, same as py. Elaborate code documentation.
@@ -117,20 +117,38 @@ $.extend(frappe.perm, { }, get_role_permissions: (meta) => { + /** Returns a `dict` of evaluated Role Permissions like: + { + "read": 1, + "write": 0, + "if_owner": [if "if_owner" is enabled] + { + "read": 1, + "write": 0 + } + } */ let perm = [{ read: 0, permlevel: 0 }]; - // Returns a `dict` of evaluated Role Permissions + (meta.permissions || []).forEach((p) => { - // if user has this role let permlevel = cint(p.permlevel); if (!perm[permlevel]) { perm[permlevel] = {}; perm[permlevel]["permlevel"] = permlevel; } + // if user has this role if (frappe.user_roles.includes(p.role)) { frappe.perm.rights.forEach((right) => { let value = perm[permlevel][right] || p[right] || 0; - if (value) { + + if (p.if_owner && value) { + // if_owner is enabled for perm, + // construct perm object inside "if_owner" property + if (!perm[permlevel]["if_owner"]) { + perm[permlevel]["if_owner"] = {} + } + perm[permlevel]["if_owner"][right] = value; + } else if (value) { perm[permlevel][right] = value; } });
Catch KeyboardInterrupt when reading from stdin. Do not print traceback and directly exit with error code. This is more consistent with other UNIX CLI tools.
@@ -174,6 +174,8 @@ def main(argv): original_source.append(py3compat.raw_input()) except EOFError: break + except KeyboardInterrupt: + return 1 if style_config is None and not args.no_local_style: style_config = file_resources.GetDefaultStyleForDir(os.getcwd())
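The shape of the resulting read loop, as a standalone sketch rather than the actual yapf code:

```python
import sys

def read_source_from_stdin():
    lines = []
    while True:
        try:
            lines.append(input())
        except EOFError:
            break            # normal end of piped input
        except KeyboardInterrupt:
            sys.exit(1)      # exit with an error code, no traceback printed
    return "\n".join(lines)
```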
added Blynk-cloud API added Blynk-cloud API to Development
@@ -366,6 +366,7 @@ API | Description | Auth | HTTPS | CORS | | [Base](https://www.base-api.io/) | Building quick backends | `apiKey` | Yes | Yes | | [Bitbucket](https://developer.atlassian.com/bitbucket/api/2/reference/) | Bitbucket API | `OAuth` | Yes | Unknown | | [Blitapp](https://blitapp.com/api/) | Schedule screenshots of web pages and sync them to your cloud | `apiKey` | Yes | Unknown | +| [Blynk-Cloud](https://blynkapi.docs.apiary.io/#) | Control IoT Devices from Blynk IoT Cloud | `apiKey` | No | Unknown | | [Bored](https://www.boredapi.com/) | Find random activities to fight boredom | No | Yes | Unknown | | [Browshot](https://browshot.com/api/documentation) | Easily make screenshots of web pages in any screen size, as any device | `apiKey` | Yes | Yes | | [CDNJS](https://api.cdnjs.com/libraries/jquery) | Library info on CDNJS | No | Yes | Unknown |
Display no-DM error message originating from security cog's global check The check will behave like Discord.py's guild_only check by raising the NoPrivateMessage exception.
import logging -from discord.ext.commands import Bot, Context +from discord.ext.commands import Bot, Context, NoPrivateMessage log = logging.getLogger(__name__) @@ -19,7 +19,9 @@ class Security: return not ctx.author.bot def check_on_guild(self, ctx: Context): - return ctx.guild is not None + if ctx.guild is None: + raise NoPrivateMessage("This command cannot be used in private messages.") + return True def setup(bot):
Fixed interface.py I committed before testing, forgot base class.
class Interface: - def __init__(self, n, z): + def __init__(self, z, n): self.n = n self.z = z -class SphericalInterface: - def __init__(self, R, n, z): +class SphericalInterface(Interface): + def __init__(self, z, n, R): self.R = R - super(SphericalInterface, self).__init__(n=n, z=z) + super(SphericalInterface, self).__init__(z=z, n=n) -class FlatInterface: - def __init__(self, n, z): - super(FlatInterface, self).__init__(R=float("+inf", n=n, z=z)) +class FlatInterface(SphericalInterface): + def __init__(self, z, n): + super(FlatInterface, self).__init__(R=float("+inf",z=z, n=n)) -class ConicalInterface: - def __init__(self, alpha, n, z): +class ConicalInterface(Interface): + def __init__(self, z, n, alpha): self.alpha = alpha - super(ConicalInterface, self).__init__(R=float(n=n, z=z)) + super(ConicalInterface, self).__init__(R=float(z=z, n=n))
Defaulting to full width, except in cases with a large display (Desktop). Adjusting height of the pdfWrapper, now closer to the fold
this.getPage(1).then( firstPage => { const pdfPageWidth = firstPage.view[2]; - const pdfPageHeight = firstPage.view[3]; + const isDesktop = this.windowSize.breakpoint >= 5; - const isMobile = this.windowSize.breakpoint === 0; - - if (isMobile) { - this.scale = this.elSize.width / pdfPageWidth; + if (isDesktop) { + this.scale = 1; } else { - this.scale = this.elSize.height / pdfPageHeight; + this.scale = this.elSize.width / pdfPageWidth; } }, error => { .doc-viewer position: relative height: 100vh - max-height: calc(100vh - 24em) + max-height: calc(100vh - 20em) min-height: 400px &:fullscreen
Update README.md Added git clone and made sure to point out that the server start should be backgrounded
@@ -7,6 +7,7 @@ Python 3.7 is used for this project. Make sure your python version is >=3.7 by t # for Debian-based distros sudo apt-get install build-essential cmake python3-dev python3-venv --no-install-recommends +git clone https://github.com/Chia-Network/chia-blockchain.git git submodule update --init --recursive python3 -m venv .venv . .venv/bin/activate @@ -30,7 +31,7 @@ To run a full node on port 8002, and connect to the testnet, run the following c This wil also start an ssh server in port 8222 for the UI, which you can connect to to see the state of the node. ```bash -python -m src.server.start_full_node "127.0.0.1" 8002 -u 8222 +python -m src.server.start_full_node "127.0.0.1" 8002 -u 8222 & ssh -p 8222 localhost ```
Removes '-e' (editable) flag in TravisCI builds. I don't think we need to install using this option in TravisCI, and doing so can hide bugs in the usual install process, e.g. pip not copying all the needed files or other bugs in setup.py.
@@ -33,9 +33,9 @@ before_install: install: # Install base dependencies - - pip install -e . + - pip install . # Install test dependencies - - pip install -e .[testing] + - pip install .[testing] # Show pip environment, for diagnostics - pip freeze
Create a result var for FieldAccessExpr expressions TN:
@@ -2677,6 +2677,7 @@ class FieldAccessExpr(BasicExpr): super(FieldAccessExpr, self).__init__( '{}.{}', result_type, [NullCheckExpr(prefix_expr, result_var_name='Pfx'), field_name], + result_var_name='Fld', abstract_expr=abstract_expr, ) self.prefix_expr = prefix_expr
Update log.py one more attempt to get realpath working
@@ -6,18 +6,17 @@ from logging.config import fileConfig def getLogger(name=None, custompath=None): if custompath: + custompath = os.path.realpath(custompath) if not os.path.isdir(custompath): custompath = os.path.dirname(custompath) - custompath = os.path.realpath(custompath) logpath = '/var/log/sickbeard_mp4_automator' - rootpath = os.path.dirname(sys.argv[0]) - rootpath = os.path.realpath(rootpath) + rootpath = os.path.realpath(sys.argv[0]) + rootpath = os.path.dirname(rootpath) if os.name == 'nt': logpath = custompath if custompath else rootpath - logpath = os.path.realpath(logpath) elif not os.path.isdir(logpath): try: os.mkdir(logpath)
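The essential ordering the patch establishes, resolve first and then inspect, as a standalone sketch:

```python
import os
import sys

def resolve_log_dir(custompath=None):
    if custompath:
        # Resolve symlinks and relative components *before* the isdir check,
        # so a relative file path is handled the same as an absolute one.
        custompath = os.path.realpath(custompath)
        if not os.path.isdir(custompath):
            custompath = os.path.dirname(custompath)
        return custompath
    # Fall back to the directory containing the running script.
    return os.path.dirname(os.path.realpath(sys.argv[0]))
```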
Add comments and check for clean marathon state Before the test_vip cases are run, marathon will be verified as clean, and after ALL the cases have run, the suite will block on marathon cleaning up.
@@ -78,9 +78,21 @@ def generate_vip_app_permutations(): return permutations [email protected](scope='module') +def clean_state_for_test_vip(dcos_api_session): + """ This fixture is intended only for use with only test_vip so that the + test suite only blocks on ensuring marathon has a clean state before and + after the all test_vip cases are invoked rather than per-case + """ + dcos_api_session.marathon.ensure_deployments_complete() + yield + dcos_api_session.marathon.ensure_deployments_complete() + + @pytest.mark.slow @pytest.mark.skipif(not lb_enabled(), reason='Load Balancer disabled') @pytest.mark.parametrize('container,vip_net,proxy_net', generate_vip_app_permutations()) [email protected]('clean_state_for_test_vip') def test_vip(dcos_api_session, container: Container, vip_net: Network, proxy_net: Network): '''Test VIPs between the following source and destination configurations: * containers: DOCKER, UCR and NONE @@ -161,7 +173,10 @@ def setup_vip_workload_tests(dcos_api_session, container, vip_net, proxy_net): @retrying.retry( wait_fixed=5000, - stop_max_delay=180 * 1000, + stop_max_delay=240 * 1000, + # the app monitored by this function typically takes 2 minutes when starting from + # a fresh state, but in this case the previous app load may still be winding down, + # so allow a larger buffer time retry_on_result=lambda res: res is None) def wait_for_tasks_healthy(dcos_api_session, app_definition): proxy_info = dcos_api_session.marathon.get('v2/apps/{}'.format(app_definition['id'])).json()
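The general fixture pattern, reduced to a generic sketch; the `ensure_clean` helper is hypothetical and stands in for the marathon deployment check:

```python
import pytest

def ensure_clean():
    """Hypothetical helper that blocks until the system under test is idle."""

@pytest.fixture(scope="module")
def clean_state():
    ensure_clean()   # runs once before the first test in the module
    yield
    ensure_clean()   # runs once after the last test in the module

@pytest.mark.usefixtures("clean_state")
def test_example():
    assert True
```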
Silence warning when console is started on its own Add a bit of border
@@ -11,6 +11,10 @@ import sys import pydoc from rlcompleter import Completer +if __name__ == '__main__': + import gi + gi.require_version('Gdk', '3.0') + from gi.repository import Gdk from gi.repository import Gtk from gi.repository import Pango @@ -82,6 +86,7 @@ class GTKInterpreterConsole(Gtk.ScrolledWindow): self.text = Gtk.TextView() self.text.set_wrap_mode(True) self.text.set_monospace(True) + self.text.set_border_width(4) self.interpreter = code.InteractiveInterpreter(locals)
[personalroles] fix exception on unassign with id FeelsDumbMan: show user's id instead of discordtag in mr_list()
@@ -57,6 +57,7 @@ class PersonalRoles(commands.Cog): await self.config.member(user).role.clear() elif isinstance(user, int): await self.config.member_from_ids(ctx.guild.id, user).role.clear() + user = await self.bot.fetch_user(user) else: await ctx.send_help() return @@ -81,8 +82,7 @@ class PersonalRoles(commands.Cog): if not data["role"]: continue dic = { - _("User"): ctx.guild.get_member(member) - or f"[X] {await self.bot.fetch_user(member)}", + _("User"): ctx.guild.get_member(member) or f"[X] {member}", _("Role"): await self.smart_truncate( ctx.guild.get_role(data["role"]) or "[X] {}".format(data["role"]) ),
Mitigate issue with parsing of tuples in sudoswap trace definition We currently do not parse nested fields correctly for traces, turning them into nulls in BigQuery. To mitigate, we cast them to STRING instead.
"schema": [ { "description": "", - "fields": [ - { - "description": "", - "name": "token", - "type": "STRING" - }, - { - "description": "", - "name": "nft", - "type": "STRING" - }, - { - "description": "", - "name": "bondingCurve", - "type": "STRING" - }, - { - "description": "", - "name": "assetRecipient", - "type": "STRING" - }, - { - "description": "", - "name": "poolType", - "type": "STRING" - }, - { - "description": "", - "name": "delta", - "type": "STRING" - }, - { - "description": "", - "name": "fee", - "type": "STRING" - }, - { - "description": "", - "name": "spotPrice", - "type": "STRING" - }, - { - "description": "", - "name": "initialNFTIDs", - "type": "STRING" - }, - { - "description": "", - "name": "initialTokenBalance", - "type": "STRING" - } - ], "name": "params", - "type": "RECORD" + "type": "STRING" } ], "table_description": "",
fix thermometer skill. the problem was that 'has_sensor: true' was not replaced correctly with 'has_sensor: false'.
@@ -47,7 +47,7 @@ def _read_error(pid: subprocess.Popen): print("stderr: " + line.replace("\n", "")) -class TestWeatherSkillsFetchaiLedger: +class TestThermometerSkill: """Test that thermometer skills work.""" @pytest.fixture(autouse=True) @@ -118,6 +118,25 @@ class TestWeatherSkillsFetchaiLedger: assert result.exit_code == 0 # Load the agent yaml file and manually insert the things we need + file = open("aea-config.yaml", mode="r") + + # read all lines at once + whole_file = file.read() + + # add in the ledger address + find_text = "ledger_apis: {}" + replace_text = """ledger_apis: + fetchai: + network: testnet""" + + whole_file = whole_file.replace(find_text, replace_text) + + file.close() + + with open("aea-config.yaml", "w") as f: + f.write(whole_file) + + # Load the skill yaml file and manually insert the things we need yaml_path = os.path.join( "vendor", "fetchai", "skills", "thermometer", "skill.yaml" ) @@ -126,7 +145,7 @@ class TestWeatherSkillsFetchaiLedger: # read all lines at once whole_file = file.read() - whole_file = whole_file.replace("has_sensor: True", "has_sensor: False") + whole_file = whole_file.replace("has_sensor: true", "has_sensor: false") # close the file file.close() @@ -256,7 +275,7 @@ class TestWeatherSkillsFetchaiLedger: ) error_read_thread.start() - time.sleep(10) + time.sleep(20) process_one.send_signal(signal.SIGINT) process_two.send_signal(signal.SIGINT)
Add Auth to Clash Royale API Clash Royale is protected by an API key
@@ -364,7 +364,7 @@ API | Description | Auth | HTTPS | CORS | | [Battlefield 4](https://bf4stats.com/api) | Battlefield 4 Information | No | Yes | Unknown | | [Chuck Norris Database](http://www.icndb.com/api/) | Jokes | No | No | Unknown | | [Clash of Clans](https://developer.clashofclans.com) | Clash of Clans Game Information | `apiKey` | Yes | Unknown | -| [Clash Royale](https://developer.clashroyale.com) | Clash Royale Game Information | No | Yes | Unknown | +| [Clash Royale](https://developer.clashroyale.com) | Clash Royale Game Information | `apiKey` | Yes | Unknown | | [Comic Vine](https://comicvine.gamespot.com/api/documentation) | Comics | No | Yes | Unknown | | [Deck of Cards](http://deckofcardsapi.com/) | Deck of Cards | No | No | Unknown | | [Destiny The Game](https://github.com/Bungie-net/api) | Bungie Platform API | `apiKey` | Yes | Unknown |
[DOCS] Fix ansible fact caching information Implements: blueprint create-ops-guide
@@ -124,9 +124,11 @@ cache host facts and information. OpenStack-Ansible enables fact caching by default. The facts are cached in JSON files within ``/etc/openstack_deploy/ansible_facts``. -Fact caching can be disabled by commenting out the ``fact_caching`` -parameter in ``playbooks/ansible.cfg``. Refer to the Ansible -documentation on `fact caching`_ for more details. +Fact caching can be disabled by running +``export ANSIBLE_CACHE_PLUGIN=memory``. +To set this permanently, set this variable in +``/usr/local/bin/openstack-ansible.rc``. +Refer to the Ansible documentation on `fact caching`_ for more details. .. _fact caching: http://docs.ansible.com/ansible/playbooks_variables.html#fact-caching
Make Browser._wait_for sleep time a variable Useful to be able to tweak this value in other apps using `Browser`.
@@ -288,6 +288,7 @@ class Browser: self.websock_thread = None self.is_browsing = False self._command_id = Counter() + self._wait_interval = 0.5 def __enter__(self): self.start() @@ -309,7 +310,7 @@ class Browser: raise BrowsingTimeout( 'timed out after %.1fs waiting for: %s' % ( elapsed, callback)) - brozzler.sleep(0.5) + brozzler.sleep(self._wait_interval) def send_to_chrome(self, suppress_logging=False, **kwargs): msg_id = next(self._command_id)
Variable type mismatch in Pico In python 3, the output from pico2wave -l (list languages) needs to be converted from bytes to string before we can check to make sure the currently selected language is a valid choice.
@@ -40,7 +40,7 @@ class PicoTTSPlugin(plugin.TTSPlugin): with tempfile.SpooledTemporaryFile() as f: subprocess.call(cmd, stderr=f) f.seek(0) - output = f.read() + output = f.read().decode('utf-8') pattern = re.compile(r'Unknown language: NULL\nValid languages:\n' + r'((?:[a-z]{2}-[A-Z]{2}\n)+)') matchobj = pattern.match(output)
fix(UX): allow clicking on row to open in new tab ctrl+click on row is toggling checkbox instead of opening it in a new tab This only happens on non-mac devices Root cause: incorrect grouping of predicate towards fixing [skip ci]
@@ -1179,7 +1179,7 @@ frappe.views.ListView = class ListView extends frappe.views.BaseList { this.$result.on("click", ".list-row, .image-view-header, .file-header", (e) => { const $target = $(e.target); // tick checkbox if Ctrl/Meta key is pressed - if (e.ctrlKey || (e.metaKey && !$target.is("a"))) { + if ((e.ctrlKey || e.metaKey) && !$target.is("a")) { const $list_row = $(e.currentTarget); const $check = $list_row.find(".list-row-checkbox"); $check.prop("checked", !$check.prop("checked"));
Change defaults for random_complex_number() This fixes test_hyperexpand.py after
from diofant.core.compatibility import is_sequence, as_int -def random_complex_number(a=2, b=-1, c=3, d=1, rational=False): +def random_complex_number(a=2, b=-1, c=3, d=1, rational=True): """ Return a random complex number.
Makefile: remove dead code There is no `upload-bcpc` tag defined in the playbooks, so this step of the Makefile is a no-op.
@@ -237,10 +237,6 @@ upload-all : -i ${inventory} ${playbooks}/site.yml \ -t upload-extra-cookbooks --limit bootstraps - ansible-playbook -v \ - -i ${inventory} ${playbooks}/site.yml \ - -t upload-bcpc --limit bootstraps - configure-web-server : ansible-playbook -v \
chore: stop using td as an argument Stop using `td` as an argument for py> operator since it might conflict with arguments for td> operator e.g. `td.database`.
@@ -35,7 +35,7 @@ def _prepare_td_spark() -> TDSparkContext: def process_data( - database_name: str, table_name: str, td: Optional[TDSparkContext] = None + database_name: str, table_name: str, td_spark: Optional[TDSparkContext] = None ) -> None: """ Load a Treasure Data table and upload it to Treasure Data after PySpark processing. @@ -61,7 +61,7 @@ def process_data( def execute_sql( - database_name: str, table_name: str, td: Optional[TDSparkContext] = None + database_name: str, table_name: str, td_spark: Optional[TDSparkContext] = None ) -> None: """ Execute SparkSQL @@ -90,7 +90,7 @@ def execute_sql( def upload_dataframe( - database_name: str, table_name: str, td: Optional[TDSparkContext] = None + database_name: str, table_name: str, td_spark: Optional[TDSparkContext] = None ) -> None: """ Create Pandas DataFrame and upload it to Treasure Data
un-pin opencv We were temporarily pinning opencv to an older version while waiting for wheels to be built for the latest version (so that our CircleCI jobs wouldn't time out trying to build from source). The wheels are built, so this pin isn't necessary!
@@ -8,11 +8,7 @@ graphviz keras matplotlib mysqlclient -# 2020.11.2: opencv-python 4.4.0.46 (the latest) doesn't have pre-built wheels -# for our Docker Linux version. A full compile takes ages - long enough that -# CircleCI times out - so we're pinning to a lower version. Please remove this -# version pin in the future! -opencv-python==4.4.0.44 +opencv-python plotly prometheus-client psycopg2-binary
Add arm64e architecture when building the path generation library Fixes running the FontGoggles development app on Apple silicon.
@@ -7,6 +7,6 @@ cd "${0%/*}" # cd into the folder containing this script mkdir -p build -cc -g -fPIC -c -mmacosx-version-min=10.9 -o build/makePathFromOutline.o makePathFromOutline.m +cc -g -fPIC -c -mmacosx-version-min=10.9 -arch x86_64 -arch arm64e -o build/makePathFromOutline.o makePathFromOutline.m -cc -dynamiclib -mmacosx-version-min=10.9 -o ../Lib/fontgoggles/mac/libmakePathFromOutline.dylib -framework AppKit -arch x86_64 -lsystem.b build/makePathFromOutline.o +cc -dynamiclib -mmacosx-version-min=10.9 -o ../Lib/fontgoggles/mac/libmakePathFromOutline.dylib -framework AppKit -arch x86_64 -arch arm64e -lsystem.b build/makePathFromOutline.o
Commented out open link simulation commented out simulation of "open link in app"
@@ -26,16 +26,20 @@ def like( self.delay("like") if check_media and not self.check_media(media_id): return False - if container_module == "feed_short_url": - if "_" in str(media_id): - media_pk = int(media_id.split("_")[0]) - else: - media_pk = int(media_id) - link = self.get_link_from_media_id(media_pk) - self.logger.debug("Opening link {}".format(link)) - self.api.open_instagram_link(link) - self.logger.debug("Getting media info...") - self.api.media_info(media_id) + # + # TODO: commented out simulation "open link in app" + # + # if container_module == "feed_short_url": + # if "_" in str(media_id): + # media_pk = int(media_id.split("_")[0]) + # else: + # media_pk = int(media_id) + # link = self.get_link_from_media_id(media_pk) + # self.logger.debug("Opening link {}".format(link)) + # self.api.open_instagram_link(link) + # self.logger.debug("Getting media info...") + # self.api.media_info(media_id) + # _r = self.api.like( media_id, container_module=container_module,
Update test to not test for anything like cli arg Use a clearly unparseable package name instead
@@ -178,9 +178,11 @@ class TestPipenv: @pytest.mark.install def test_install_parse_error(self): with PipenvInstance() as p: - c = p.pipenv('install tablib --upgrade') + # Make sure unparseable packages don't wind up in the pipfile + # Escape $ for shell input + c = p.pipenv('install tablib u/\\/p@r\$34b13+pkg') assert c.return_code != 0 - assert '--upgrade' not in p.pipfile['packages'] + assert 'u/\\/p@r$34b13+pkg' not in p.pipfile['packages'] @pytest.mark.install @pytest.mark.setup
probe: j-link: fix is_reset_asserted(). Incorrectly accessing the JLink.hardware_status property as a callable method.
@@ -279,7 +279,7 @@ class JLinkProbe(DebugProbe): def is_reset_asserted(self): try: - status = self._link.hardware_status() + status = self._link.hardware_status return status.tres == 0 except JLinkException as exc: raise self._convert_exception(exc) from exc
dashboard: fix a typo introduced a typo, the role that should be run is ceph-container-common, not ceph-common
- import_role: name: ceph-node-exporter - import_role: - name: ceph-common + name: ceph-container-common - import_role: name: ceph-config tags: ['ceph_update_config']
DUMMY EDIT Dummy edit to trigger re-deploy for testing
@@ -192,3 +192,4 @@ fiaas-deploy-daemon will read a fiaas-config to determine how to deploy your app This configuration is a YAML file. If any field is missing, a default value will be used. The default values, and explanation of their meaning are available at `/defaults` on any running instance. +
CodeSnippets: use a lower log level for 404 responses Just because a URL looks valid doesn't mean it will be valid, so a 404 is a normal and harmless error. Fixes Fixes BOT-Z4 Fixes BOT-Z8 Fixes BOT-Z9
@@ -229,7 +229,8 @@ class CodeSnippets(Cog): snippet = await handler(**match.groupdict()) all_snippets.append((match.start(), snippet)) except ClientResponseError as error: - log.error( + log.log( + logging.DEBUG if error.status == 404 else logging.ERROR, f'Failed to fetch code snippet from {error.request_info.real_url}. ' f'Status: {error.status}. Message: {error}.' )
Fix site.service HG-- branch : feature/microservices
@@ -131,7 +131,7 @@ class Application(object): def __init__(self, site): self.site = site - self.service = site.service # Set by web + self.service = None # Set by web parts = self.__class__.__module__.split(".") if parts[1] == "custom": self.module = parts[5]
Call previous excepthook in tvm_excepthook. * Call previous excepthook in tvm_excepthook. * Rename prev_excepthook. * Create a tvm_wrap_excepthook to wrap a given excepthook with tvm custom excepthook work and call it on system previous excepthook. * Add docstring.
@@ -63,12 +63,17 @@ from . import arith # Contrib initializers from .contrib import rocm as _rocm, nvcc as _nvcc, sdaccel as _sdaccel -# Clean subprocesses when TVM is interrupted -def tvm_excepthook(exctype, value, trbk): - print('\n'.join(traceback.format_exception(exctype, value, trbk))) +def tvm_wrap_excepthook(exception_hook): + """Wrap given excepthook with TVM additional work.""" + + def wrapper(exctype, value, trbk): + """Clean subprocesses when TVM is interrupted.""" + exception_hook(exctype, value, trbk) if hasattr(multiprocessing, 'active_children'): # pylint: disable=not-callable for p in multiprocessing.active_children(): p.terminate() -sys.excepthook = tvm_excepthook + return wrapper + +sys.excepthook = tvm_wrap_excepthook(sys.excepthook)
More PEP fixes E128 in diagnostic.py, and W291 in test_diagnostic.py
@@ -418,8 +418,7 @@ def acorr_ljungbox(x, lags=None, boxpierce=False, model_df=0, period=None, if not boxpierce: q_sacf = ( nobs * (nobs + 2) * np.cumsum(sacf[1:maxlag + 1] ** 2 - / (nobs - np.arange(1, maxlag + 1))) - ) + / (nobs - np.arange(1, maxlag + 1)))) else: q_sacf = nobs * np.cumsum(sacf[1:maxlag + 1] ** 2)
HelpChannels: add/remove a cooldown role rather than using overwrites Overwrites had issues syncing with channels in the category. * Remove update_category_permissions; obsolete * Add constant for the cooldown role wrapped in a discord.Object
@@ -21,6 +21,7 @@ log = logging.getLogger(__name__) ASKING_GUIDE_URL = "https://pythondiscord.com/pages/asking-good-questions/" MAX_CHANNELS_PER_CATEGORY = 50 +COOLDOWN_ROLE = discord.Object(constants.Roles.help_cooldown) AVAILABLE_TOPIC = """ This channel is available. Feel free to ask a question in order to claim this channel! @@ -624,13 +625,6 @@ class HelpChannels(Scheduler, commands.Cog): # be put in the queue. await self.move_to_available() - async def update_category_permissions( - self, category: discord.CategoryChannel, member: discord.Member, **permissions - ) -> None: - """Update the permissions of the given `member` for the given `category` with `permissions` passed.""" - log.trace(f"Updating permissions for `{member}` in `{category}` with {permissions}.") - await category.set_permissions(member, **permissions) - async def reset_send_permissions(self) -> None: """Reset send permissions in the Available category for claimants.""" log.trace("Resetting send permissions in the Available category.") @@ -640,8 +634,7 @@ class HelpChannels(Scheduler, commands.Cog): for member in guild.members: if self.is_claimant(member): log.trace(f"Resetting send permissions for {member} ({member.id}).") - role = discord.Object(constants.Roles.help_cooldown) - await member.remove_roles(role) + await member.remove_roles(COOLDOWN_ROLE) async def reset_claimant_send_permission(self, channel: discord.TextChannel) -> None: """Reset send permissions in the Available category for the help `channel` claimant.""" @@ -649,11 +642,15 @@ class HelpChannels(Scheduler, commands.Cog): try: member = self.help_channel_claimants[channel] except KeyError: - log.trace(f"Channel #{channel.name} ({channel.id}) not in claimant cache, permissions unchanged.") + log.trace( + f"Channel #{channel.name} ({channel.id}) not in claimant cache, " + f"permissions unchanged." + ) return log.trace(f"Resetting send permissions for {member} ({member.id}).") - await self.update_category_permissions(self.available_category, member, overwrite=None) + await member.remove_roles(COOLDOWN_ROLE) + # Ignore missing task when claim cooldown has passed but the channel still isn't dormant. self.cancel_task(member.id, ignore_missing=True) @@ -668,14 +665,14 @@ class HelpChannels(Scheduler, commands.Cog): f"Revoking {member}'s ({member.id}) send message permissions in the Available category." ) - await self.update_category_permissions(self.available_category, member, send_messages=False) + await member.add_roles(COOLDOWN_ROLE) # Cancel the existing task, if any. # Would mean the user somehow bypassed the lack of permissions (e.g. user is guild owner). self.cancel_task(member.id, ignore_missing=True) timeout = constants.HelpChannels.claim_minutes * 60 - callback = self.update_category_permissions(self.available_category, member, overwrite=None) + callback = member.remove_roles(COOLDOWN_ROLE) log.trace(f"Scheduling {member}'s ({member.id}) send message permissions to be reinstated.") self.schedule_task(member.id, TaskData(timeout, callback))
Relabel component group exergy data E_F relabeled to E_in E_P relabeled to E_out Remove epsilon as it cannot be defined for component groups
@@ -505,18 +505,16 @@ class ExergyAnalysis: # create overview of component groups self.group_data = pd.DataFrame( - columns=['E_F', 'E_P', 'E_D'], dtype='float64') + columns=['E_in', 'E_out', 'E_D'], dtype='float64') for fkt_group in self.component_data['group'].unique(): - self.group_data.loc[fkt_group, 'E_F'] = ( + self.group_data.loc[fkt_group, 'E_in'] = ( self.calculate_group_input_value(fkt_group)) self.group_data.loc[fkt_group, 'E_D'] = ( self.sankey_data[fkt_group].loc['E_D', 'value']) # calculate missing values - self.group_data['E_P'] = ( - self.group_data['E_F'] - self.group_data['E_D']) - self.group_data['epsilon'] = ( - self.group_data['E_P'] / self.group_data['E_F']) + self.group_data['E_out'] = ( + self.group_data['E_in'] - self.group_data['E_D']) self.group_data['y_Dk'] = ( self.group_data['E_D'] / self.network_data.loc['E_F']) self.group_data['y*_Dk'] = ( @@ -735,18 +733,18 @@ class ExergyAnalysis: print(tabulate( df, headers='keys', tablefmt='psql', floatfmt='.3e')) + if network: + print('##### RESULTS: Network exergy analysis #####') + print(tabulate( + self.network_data.to_frame().transpose(), + headers='keys', tablefmt='psql', floatfmt='.3e', + showindex=False)) + if groups: df = self.group_data.copy() if sort_desc: df.sort_values(by=['E_D'], ascending=False, inplace=True) - print('##### RESULTS: Component group exergy analysis #####') + print('##### RESULTS: Component group exergy inflows and outflows #####') print(tabulate( df, headers='keys', tablefmt='psql', floatfmt='.3e')) - - if network: - print('##### RESULTS: Network exergy analysis #####') - print(tabulate( - self.network_data.to_frame().transpose(), - headers='keys', tablefmt='psql', floatfmt='.3e', - showindex=False))
auth bearer fix x-api-key now says unauthorized
from functools import wraps from http import HTTPStatus +from base64 import b64decode from fastapi.security import api_key from pydantic.types import UUID4 @@ -12,6 +13,7 @@ from fastapi.exceptions import HTTPException from fastapi.openapi.models import APIKey, APIKeyIn from fastapi.params import Security from fastapi.security.api_key import APIKeyHeader, APIKeyQuery +from fastapi.security import OAuth2PasswordBearer from fastapi.security.base import SecurityBase from starlette.requests import Request @@ -85,14 +87,19 @@ class WalletTypeInfo(): api_key_header = APIKeyHeader(name="X-API-KEY", auto_error=False, description="Admin or Invoice key for wallet API's") api_key_query = APIKeyQuery(name="api-key", auto_error=False, description="Admin or Invoice key for wallet API's") +oauth2_scheme = OAuth2PasswordBearer(tokenUrl="token") async def get_key_type(r: Request, + token: str = Security(oauth2_scheme), api_key_header: str = Security(api_key_header), api_key_query: str = Security(api_key_query)) -> WalletTypeInfo: # 0: admin # 1: invoice # 2: invalid + # print("TOKEN", b64decode(token).decode("utf-8").split(":")) + + key_type, key = b64decode(token).decode("utf-8").split(":") try: - checker = WalletAdminKeyChecker(api_key=api_key_query) + checker = WalletAdminKeyChecker(api_key=key if token else api_key_query) await checker.__call__(r) return WalletTypeInfo(0, checker.wallet) except HTTPException as e: @@ -104,7 +111,7 @@ async def get_key_type(r: Request, raise try: - checker = WalletInvoiceKeyChecker() + checker = WalletInvoiceKeyChecker(api_key=key if token else None) await checker.__call__(r) return WalletTypeInfo(1, checker.wallet) except HTTPException as e:
Update avcodecs.py only used on patched variant for backwards compatibility, can remove if this is merged into FFMPEG
@@ -1107,7 +1107,6 @@ class H265QSVCodec(H265Codec): codec_name = 'h265qsv' ffmpeg_codec_name = 'hevc_qsv' scale_filter = 'scale_qsv' - codec_params = 'qsv_params' class H265QSVCodecAlt(H265QSVCodec): @@ -1117,6 +1116,14 @@ class H265QSVCodecAlt(H265QSVCodec): codec_name = 'hevcqsv' +class H265QSVCodecPatched(H265QSVCodec): + """ + HEVC video codec alternate. + """ + codec_name = 'hevcqsvpatched' + codec_params = 'qsv_params' + + class H265VAAPICodec(H265Codec): """ H.265/AVC VAAPI video codec. @@ -1384,7 +1391,8 @@ audio_codec_list = [ ] video_codec_list = [ - VideoNullCodec, VideoCopyCodec, TheoraCodec, H264Codec, H264CodecAlt, H264QSVCodec, H265QSVCodecAlt, H265QSVCodec, H265Codec, H265CodecAlt, + VideoNullCodec, VideoCopyCodec, TheoraCodec, H264Codec, H264CodecAlt, H264QSVCodec, + H265QSVCodecAlt, H265QSVCodec, H265Codec, H265CodecAlt, H265QSVCodecPatched, DivxCodec, Vp8Codec, H263Codec, FlvCodec, Mpeg1Codec, NVEncH264Codec, NVEncH265Codec, NVEncH265CodecAlt, Mpeg2Codec, H264VAAPICodec, H265VAAPICodec, OMXH264Codec, VideotoolboxEncH264, VideotoolboxEncH265 ]
docs: quickstart: model: Spelling correction of accessible There was a typo in line number 242 (in HTTP section). The spelling of the word "accessable" was corrected to "accessible"
@@ -239,7 +239,7 @@ information on the HTTP service. $ pip install -U dffml-service-http -We start the HTTP service and tell it that we want to make our model accessable +We start the HTTP service and tell it that we want to make our model accessible via the HTTP :ref:`plugin_service_http_api_model` API. .. warning::
Update capella_opendata.yaml fixed typo in TileDB Resources
@@ -32,7 +32,7 @@ Resources: - '[STAC Catalog](https://capella-open-data.s3.us-west-2.amazonaws.com/stac/catalog.json)' - '[STAC Browser](https://capella-open-data.s3.us-west-2.amazonaws.com/index.html)' - Description: Capella Space Open Data in TileDB format - ARN: arn:aws:s3:::capella-space-open-data/data/tiledb/ + ARN: arn:aws:s3:::capella-open-data/data/tiledb/ Region: us-west-2 Type: S3 Bucket RequesterPays: False
dagster-airflow CLI fixes Test Plan: docs and manual CLI test Reviewers: alangenfeld
import os +from datetime import datetime, timedelta import click import six @@ -40,6 +41,8 @@ def construct_environment_yaml(preset_name, env, pipeline_name, module_name): def construct_scaffolded_file_contents(module_name, pipeline_name, environment_dict): + yesterday = datetime.now() - timedelta(1) + printer = IndentingStringIoPrinter(indent_level=4) printer.line('\'\'\'') printer.line( @@ -58,17 +61,31 @@ def construct_scaffolded_file_contents(module_name, pipeline_name, environment_d printer.line('from dagster_airflow.factory import make_airflow_dag') printer.blank_line() printer.blank_line() + printer.line('#' * 80) + printer.comment('#') + printer.comment('# This environment is auto-generated from your configs and/or presets') + printer.comment('#') + printer.line('#' * 80) printer.line('ENVIRONMENT = \'\'\'') printer.line(yaml.dump(environment_dict, default_flow_style=False)) printer.line('\'\'\'') printer.blank_line() printer.blank_line() - printer.comment('NOTE: these arguments should be edited for your environment') + printer.line('#' * 80) + printer.comment('#') + printer.comment('# NOTE: these arguments should be edited for your environment') + printer.comment('#') + printer.line('#' * 80) printer.line('DEFAULT_ARGS = {') with printer.with_indent(): printer.line("'owner': 'airflow',") printer.line("'depends_on_past': False,") - printer.line("'start_date': datetime.datetime(2019, 5, 7),") + + # start date -> yesterday + printer.line( + "'start_date': datetime.datetime(%s, %d, %d)," + % (yesterday.year, yesterday.month, yesterday.day) + ) printer.line("'email': ['[email protected]'],") printer.line("'email_on_failure': False,") printer.line("'email_on_retry': False,") @@ -77,9 +94,9 @@ def construct_scaffolded_file_contents(module_name, pipeline_name, environment_d printer.line('dag, tasks = make_airflow_dag(') with printer.with_indent(): printer.comment( - 'NOTE: you must ensure that {module_name} is installed or available on sys.path, ' - 'otherwise, this import will fail.'.format(module_name=module_name) + 'NOTE: you must ensure that {module_name} is '.format(module_name=module_name) ) + printer.comment('installed or available on sys.path, otherwise, this import will fail.') printer.line('module_name=\'{module_name}\','.format(module_name=module_name)) printer.line('pipeline_name=\'{pipeline_name}\','.format(pipeline_name=pipeline_name)) printer.line("environment_dict=yaml.load(ENVIRONMENT),")
tests: Drop redundant `expected_state` parameter of `run_sync_step` The expected state is already present in `sync_step`.
@@ -122,8 +122,8 @@ def post_function_validate(receiver: Receiver, data: Union[List[Plot], List[str] @pytest.mark.asyncio -async def run_sync_step(receiver: Receiver, sync_step: SyncStepData, expected_state: State) -> None: - assert receiver.current_sync().state == expected_state +async def run_sync_step(receiver: Receiver, sync_step: SyncStepData) -> None: + assert receiver.current_sync().state == sync_step.state last_sync_time_before = receiver._last_sync.time_done # For the the list types invoke the trigger function in batches if sync_step.payload_type == PlotSyncPlotList or sync_step.payload_type == PlotSyncPathList: @@ -134,12 +134,12 @@ async def run_sync_step(receiver: Receiver, sync_step: SyncStepData, expected_st for i in range(0, len(indexes) - 1): plots_processed_before = receiver.current_sync().plots_processed invoke_data = step_data[indexes[i] : indexes[i + 1]] - pre_function_validate(receiver, invoke_data, expected_state) + pre_function_validate(receiver, invoke_data, sync_step.state) await sync_step.function( create_payload(sync_step.payload_type, False, invoke_data, i == (len(indexes) - 2)) ) - post_function_validate(receiver, invoke_data, expected_state) - if expected_state == State.removed: + post_function_validate(receiver, invoke_data, sync_step.state) + if sync_step.state == State.removed: assert receiver.current_sync().plots_processed == plots_processed_before else: assert receiver.current_sync().plots_processed == plots_processed_before + len(invoke_data) @@ -147,7 +147,7 @@ async def run_sync_step(receiver: Receiver, sync_step: SyncStepData, expected_st # For Start/Done just invoke it.. await sync_step.function(create_payload(sync_step.payload_type, sync_step.state == State.idle, *sync_step.args)) # Make sure we moved to the next state - assert receiver.current_sync().state != expected_state + assert receiver.current_sync().state != sync_step.state if sync_step.payload_type == PlotSyncDone: assert receiver._last_sync.time_done != last_sync_time_before assert receiver.last_sync().plots_processed == receiver.last_sync().plots_total @@ -258,7 +258,7 @@ async def test_to_dict(counts_only: bool) -> None: # Walk through all states from idle to done and run them with the test data and validate the sync progress for state in State: - await run_sync_step(receiver, sync_steps[state], state) + await run_sync_step(receiver, sync_steps[state]) if state != State.idle and state != State.removed and state != State.done: expected_plot_files_processed += len(sync_steps[state].args[0]) @@ -323,7 +323,7 @@ async def test_sync_flow() -> None: # Walk through all states from idle to done and run them with the test data for state in State: - await run_sync_step(receiver, sync_steps[state], state) + await run_sync_step(receiver, sync_steps[state]) for plot_info in sync_steps[State.loaded].args[0]: assert plot_info.filename in receiver.plots()
Make the NoAppException clearer This should make the original error clearer by noting it as the direct cause of the NoAppException error.
@@ -54,8 +54,8 @@ class ScriptInfo: module_name, app_name = self.app_import_path.split(":", 1) except ValueError: module_name, app_name = self.app_import_path, "app" - except AttributeError: - raise NoAppException() + except AttributeError as error: + raise NoAppException() from error module_path = Path(module_name).resolve() sys.path.insert(0, str(module_path.parent)) @@ -67,14 +67,14 @@ class ScriptInfo: module = import_module(import_name) except ModuleNotFoundError as error: if error.name == import_name: - raise NoAppException() + raise NoAppException() from error else: raise try: self._app = eval(app_name, vars(module)) - except NameError: - raise NoAppException() + except NameError as error: + raise NoAppException() from error from .app import Quart
Implementation of create_subnet() varies in manila-tempest-plugin There is a need for a stable implementation of create_subnet(). For the stable implementation of create_subnet(), the following parameters have been added: 1. Condition to check empty str_cidr 2. More attributes in case of ipv6 3. Usage of default_subnet_pool Implements: blueprint tempest-scenario-manager-stable
@@ -18,6 +18,7 @@ import os import subprocess import netaddr + from oslo_log import log from oslo_serialization import jsonutils as json from oslo_utils import netutils @@ -1098,6 +1099,8 @@ class NetworkScenarioTest(ScenarioTest): :Keyword Arguments: * *ip_version = ip version of the given network, + use_default_subnetpool = default subnetpool to + manage IPv6 addresses range. """ if not subnets_client: @@ -1120,43 +1123,65 @@ class NetworkScenarioTest(ScenarioTest): network_id=ext_net['id'], cidr=cidr)['subnets']) return len(tenant_subnets + external_subnets) != 0 + def _make_create_subnet_request(namestart, network, + ip_version, subnets_client, **kwargs): + + subnet = dict( + name=data_utils.rand_name(namestart), + network_id=network['id'], + project_id=network['project_id'], + ip_version=ip_version, + **kwargs + ) + + if ip_version == 6: + subnet['ipv6_address_mode'] = 'slaac' + subnet['ipv6_ra_mode'] = 'slaac' + + try: + return subnets_client.create_subnet(**subnet) + except lib_exc.Conflict as e: + if 'overlaps with another subnet' not in str(e): + raise + + result = None + str_cidr = None + + use_default_subnetpool = kwargs.get('use_default_subnetpool', False) ip_version = kwargs.pop('ip_version', 4) + if not use_default_subnetpool: + if ip_version == 6: tenant_cidr = netaddr.IPNetwork( CONF.network.project_network_v6_cidr) num_bits = CONF.network.project_network_v6_mask_bits else: - tenant_cidr = netaddr.IPNetwork(CONF.network.project_network_cidr) + tenant_cidr = netaddr.IPNetwork( + CONF.network.project_network_cidr) num_bits = CONF.network.project_network_mask_bits - result = None - str_cidr = None # Repeatedly attempt subnet creation with sequential cidr # blocks until an unallocated block is found. for subnet_cidr in tenant_cidr.subnet(num_bits): str_cidr = str(subnet_cidr) if cidr_in_use(str_cidr, project_id=network['project_id']): continue + result = _make_create_subnet_request( + namestart, network, ip_version, subnets_client, + cidr=str_cidr, **kwargs) - subnet = dict( - name=data_utils.rand_name(namestart), - network_id=network['id'], - project_id=network['project_id'], - cidr=str_cidr, - ip_version=ip_version, - **kwargs - ) - try: - result = subnets_client.create_subnet(**subnet) + if result is not None: break - except lib_exc.Conflict as e: - is_overlapping_cidr = 'overlaps with another subnet' in str(e) - if not is_overlapping_cidr: - raise + + else: + result = _make_create_subnet_request( + namestart, network, ip_version, subnets_client, + **kwargs) self.assertIsNotNone(result, 'Unable to allocate tenant network') subnet = result['subnet'] + if str_cidr is not None: self.assertEqual(subnet['cidr'], str_cidr) self.addCleanup(test_utils.call_and_ignore_notfound_exc,