file_name (string, 3-137 chars) | prefix (string, 0-918k chars) | suffix (string, 0-962k chars) | middle (string, 0-812k chars)
---|---|---|---
test_functional.py | # -*- coding:utf-8 -*-
#
# Copyright 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import sys
import six
import testtools
from bandit.core import config as b_config
from bandit.core import constants as C
from bandit.core import manager as b_manager
from bandit.core import metrics
from bandit.core import test_set as b_test_set
class FunctionalTests(testtools.TestCase):
'''Functional tests for bandit test plugins.
This set of tests runs bandit against each example file in turn
and records the score returned. This is compared to a known good value.
When new tests are added to an example the expected result should be
adjusted to match.
'''
def setUp(self):
super(FunctionalTests, self).setUp()
# NOTE(tkelsey): bandit is very sensitive to paths, so stitch
# them up here for the testing environment.
#
path = os.path.join(os.getcwd(), 'bandit', 'plugins')
b_conf = b_config.BanditConfig()
self.b_mgr = b_manager.BanditManager(b_conf, 'file')
self.b_mgr.b_conf._settings['plugins_dir'] = path
self.b_mgr.b_ts = b_test_set.BanditTestSet(config=b_conf)
def run_example(self, example_script, ignore_nosec=False):
'''A helper method to run the specified test
This method runs the test, which populates the self.b_mgr.scores
value. Call this directly if you need to run a test, but do not
need to test the resulting scores against specified values.
:param example_script: Filename of an example script to test
'''
path = os.path.join(os.getcwd(), 'examples', example_script)
self.b_mgr.ignore_nosec = ignore_nosec
self.b_mgr.discover_files([path], True)
self.b_mgr.run_tests()
def check_example(self, example_script, expect, ignore_nosec=False):
'''A helper method to test the scores for example scripts.
:param example_script: Filename of an example script to test
:param expect: dict with expected counts of issue types
'''
# reset scores for subsequent calls to check_example
self.b_mgr.scores = []
self.run_example(example_script, ignore_nosec=ignore_nosec)
result = {
'SEVERITY': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 0, 'HIGH': 0},
'CONFIDENCE': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 0, 'HIGH': 0}
}
for test_scores in self.b_mgr.scores:
for score_type in test_scores:
self.assertIn(score_type, expect)
for idx, rank in enumerate(C.RANKING):
result[score_type][rank] = (test_scores[score_type][idx] //
C.RANKING_VALUES[rank])
self.assertDictEqual(expect, result)
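# A minimal usage sketch for the helper above (the example file name and the
# expected counts here are hypothetical, not taken from the examples/ directory):
#
#     expect = {
#         'SEVERITY': {'UNDEFINED': 0, 'LOW': 1, 'MEDIUM': 0, 'HIGH': 0},
#         'CONFIDENCE': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 0, 'HIGH': 1}
#     }
#     self.check_example('some_example.py', expect)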
def check_metrics(self, example_script, expect):
'''A helper method to test the metrics being returned.
:param example_script: Filename of an example script to test
:param expect: dict with expected values of metrics
'''
self.b_mgr.metrics = metrics.Metrics()
self.b_mgr.scores = []
self.run_example(example_script)
# test general metrics (excludes issue counts)
m = self.b_mgr.metrics.data
for k in expect:
if k != 'issues':
self.assertEqual(expect[k], m['_totals'][k])
# test issue counts
if 'issues' in expect:
for (criteria, default) in C.CRITERIA:
for rank in C.RANKING:
label = '{0}.{1}'.format(criteria, rank)
expected = 0
if expect['issues'].get(criteria).get(rank):
expected = expect['issues'][criteria][rank]
self.assertEqual(expected, m['_totals'][label])
def test_binding(self):
'''Test the bind-to-0.0.0.0 example.'''
expect = {
'SEVERITY': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 1, 'HIGH': 0},
'CONFIDENCE': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 1, 'HIGH': 0}
}
self.check_example('binding.py', expect)
def test_crypto_md5(self):
'''Test the `hashlib.md5` example.'''
expect = {
'SEVERITY': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 15, 'HIGH': 8},
'CONFIDENCE': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 0, 'HIGH': 23}
}
self.check_example('crypto-md5.py', expect)
def test_ciphers(self):
'''Test the `Crypto.Cipher` example.'''
expect = {
'SEVERITY': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 1, 'HIGH': 26},
'CONFIDENCE': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 0, 'HIGH': 27}
}
self.check_example('ciphers.py', expect)
def test_cipher_modes(self):
'''Test for insecure cipher modes.'''
expect = {
'SEVERITY': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 1, 'HIGH': 0},
'CONFIDENCE': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 0, 'HIGH': 1}
}
self.check_example('cipher-modes.py', expect)
def test_eval(self):
'''Test the `eval` example.'''
expect = {
'SEVERITY': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 3, 'HIGH': 0},
'CONFIDENCE': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 0, 'HIGH': 3}
}
self.check_example('eval.py', expect)
def test_mark_safe(self):
'''Test the `mark_safe` example.'''
expect = {
'SEVERITY': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 1, 'HIGH': 0},
'CONFIDENCE': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 0, 'HIGH': 1}
}
self.check_example('mark_safe.py', expect)
def test_exec(self):
'''Test the `exec` example.'''
filename = 'exec-{}.py'
if six.PY2:
filename = filename.format('py2')
expect = {
'SEVERITY': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 2, 'HIGH': 0},
'CONFIDENCE': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 0,
'HIGH': 2}
}
else:
filename = filename.format('py3')
expect = {
'SEVERITY': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 1, 'HIGH': 0},
'CONFIDENCE': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 0,
'HIGH': 1}
}
self.check_example(filename, expect)
def test_hardcoded_passwords(self):
'''Test for hard-coded passwords.'''
expect = {
'SEVERITY': {'UNDEFINED': 0, 'LOW': 8, 'MEDIUM': 0, 'HIGH': 0},
'CONFIDENCE': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 8, 'HIGH': 0}
}
self.check_example('hardcoded-passwords.py', expect)
def test_hardcoded_tmp(self):
'''Test for hard-coded /tmp, /var/tmp, /dev/shm.'''
expect = {
'SEVERITY': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 3, 'HIGH': 0},
'CONFIDENCE': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 3, 'HIGH': 0}
}
self.check_example('hardcoded-tmp.py', expect)
def test_httplib_https(self):
'''Test for `httplib.HTTPSConnection`.'''
expect = {
'SEVERITY': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 3, 'HIGH': 0},
'CONFIDENCE': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 0, 'HIGH': 3}
}
self.check_example('httplib_https.py', expect)
def test_imports_aliases(self):
'''Test the `import X as Y` syntax.'''
expect = {
'SEVERITY': {'UNDEFINED': 0, 'LOW': 4, 'MEDIUM': 5, 'HIGH': 0},
'CONFIDENCE': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 0, 'HIGH': 9}
}
self.check_example('imports-aliases.py', expect)
def test_imports_from(self):
'''Test the `from X import Y` syntax.'''
expect = {
'SEVERITY': {'UNDEFINED': 0, 'LOW': 3, 'MEDIUM': 0, 'HIGH': 0},
'CONFIDENCE': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 0, 'HIGH': 3}
}
self.check_example('imports-from.py', expect)
def test_imports_function(self):
'''Test the `__import__` function.'''
expect = {
'SEVERITY': {'UNDEFINED': 0, 'LOW': 2, 'MEDIUM': 0, 'HIGH': 0},
'CONFIDENCE': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 0, 'HIGH': 2}
}
self.check_example('imports-function.py', expect)
def test_telnet_usage(self):
'''Test for `import telnetlib` and Telnet.* calls.'''
expect = {
'SEVERITY': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 0, 'HIGH': 2},
'CONFIDENCE': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 0, 'HIGH': 2}
}
self.check_example('telnetlib.py', expect)
def test_ftp_usage(self):
'''Test for `import ftplib` and FTP.* calls.'''
expect = {
'SEVERITY': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 0, 'HIGH': 2},
'CONFIDENCE': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 0, 'HIGH': 2}
}
self.check_example('ftplib.py', expect)
def test_imports(self):
'''Test for dangerous imports.'''
expect = {
'SEVERITY': {'UNDEFINED': 0, 'LOW': 2, 'MEDIUM': 0, 'HIGH': 0},
'CONFIDENCE': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 0, 'HIGH': 2}
}
self.check_example('imports.py', expect)
def test_imports_using_importlib(self):
'''Test for dangerous imports using importlib.'''
expect = {
'SEVERITY': {'UNDEFINED': 0, 'LOW': 2, 'MEDIUM': 0, 'HIGH': 0},
'CONFIDENCE': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 0, 'HIGH': 2}
}
self.check_example('imports-with-importlib.py', expect)
def test_mktemp(self):
'''Test for `tempfile.mktemp`.'''
expect = {
'SEVERITY': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 4, 'HIGH': 0},
'CONFIDENCE': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 0, 'HIGH': 4}
}
self.check_example('mktemp.py', expect)
def test_tempnam(self):
'''Test for `os.tempnam` / `os.tmpnam`.'''
expect = {
'SEVERITY': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 6, 'HIGH': 0},
'CONFIDENCE': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 0, 'HIGH': 6}
}
self.check_example('tempnam.py', expect)
def test_nonsense(self):
'''Test that a syntactically invalid module is skipped.'''
self.run_example('nonsense.py')
self.assertEqual(1, len(self.b_mgr.skipped))
def test_okay(self):
'''Test a vulnerability-free file.'''
expect = {
'SEVERITY': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 0, 'HIGH': 0},
'CONFIDENCE': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 0, 'HIGH': 0}
}
self.check_example('okay.py', expect)
def test_subdirectory_okay(self):
'''Test a vulnerability-free file under a subdirectory.'''
expect = {
'SEVERITY': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 0, 'HIGH': 0},
'CONFIDENCE': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 0, 'HIGH': 0}
}
self.check_example('init-py-test/subdirectory-okay.py', expect)
def test_os_chmod(self):
'''Test setting file permissions.'''
filename = 'os-chmod-{}.py'
if six.PY2:
filename = filename.format('py2')
else:
filename = filename.format('py3')
expect = {
'SEVERITY': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 2, 'HIGH': 8},
'CONFIDENCE': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 1, 'HIGH': 9}
}
self.check_example(filename, expect)
def test_os_exec(self):
'''Test for `os.exec*`.'''
expect = {
'SEVERITY': {'UNDEFINED': 0, 'LOW': 8, 'MEDIUM': 0, 'HIGH': 0},
'CONFIDENCE': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 8, 'HIGH': 0}
}
self.check_example('os-exec.py', expect)
def test_os_popen(self):
'''Test for `os.popen`.'''
expect = {
'SEVERITY': {'UNDEFINED': 0, 'LOW': 8, 'MEDIUM': 0, 'HIGH': 1},
'CONFIDENCE': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 0, 'HIGH': 9}
}
self.check_example('os-popen.py', expect)
def test_os_spawn(self):
'''Test for `os.spawn*`.'''
expect = {
'SEVERITY': {'UNDEFINED': 0, 'LOW': 8, 'MEDIUM': 0, 'HIGH': 0},
'CONFIDENCE': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 8, 'HIGH': 0}
}
self.check_example('os-spawn.py', expect)
def test_os_startfile(self):
'''Test for `os.startfile`.'''
expect = {
'SEVERITY': {'UNDEFINED': 0, 'LOW': 3, 'MEDIUM': 0, 'HIGH': 0},
'CONFIDENCE': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 3, 'HIGH': 0}
}
self.check_example('os-startfile.py', expect)
def test_os_system(self):
'''Test for `os.system`.'''
expect = {
'SEVERITY': {'UNDEFINED': 0, 'LOW': 1, 'MEDIUM': 0, 'HIGH': 0},
'CONFIDENCE': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 0, 'HIGH': 1}
}
self.check_example('os_system.py', expect)
def test_pickle(self):
'''Test for the `pickle` module.'''
expect = {
'SEVERITY': {'UNDEFINED': 0, 'LOW': 2, 'MEDIUM': 6, 'HIGH': 0},
'CONFIDENCE': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 0, 'HIGH': 8}
}
self.check_example('pickle_deserialize.py', expect)
def test_dill(self):
'''Test for the `dill` module.'''
expect = {
'SEVERITY': {'UNDEFINED': 0, 'LOW': 1, 'MEDIUM': 2, 'HIGH': 0},
'CONFIDENCE': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 0, 'HIGH': 3}
}
self.check_example('dill.py', expect)
def test_popen_wrappers(self):
'''Test the `popen2` and `commands` modules.'''
expect = {
'SEVERITY': {'UNDEFINED': 0, 'LOW': 7, 'MEDIUM': 0, 'HIGH': 0},
'CONFIDENCE': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 0, 'HIGH': 7}
}
self.check_example('popen_wrappers.py', expect)
def test_random_module(self):
'''Test for the `random` module.'''
expect = {
'SEVERITY': {'UNDEFINED': 0, 'LOW': 6, 'MEDIUM': 0, 'HIGH': 0},
'CONFIDENCE': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 0, 'HIGH': 6}
}
self.check_example('random_module.py', expect)
def test_requests_ssl_verify_disabled(self):
'''Test for the `requests` library skipping verification.'''
expect = {
'SEVERITY': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 0, 'HIGH': 7},
'CONFIDENCE': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 0, 'HIGH': 7}
}
self.check_example('requests-ssl-verify-disabled.py', expect)
def test_skip(self):
'''Test `#nosec` and `#noqa` comments.'''
expect = {
'SEVERITY': {'UNDEFINED': 0, 'LOW': 5, 'MEDIUM': 0, 'HIGH': 0},
'CONFIDENCE': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 0, 'HIGH': 5}
}
self.check_example('skip.py', expect)
def test_ignore_skip(self):
'''Test --ignore-nosec flag.'''
expect = {
'SEVERITY': {'UNDEFINED': 0, 'LOW': 7, 'MEDIUM': 0, 'HIGH': 0},
'CONFIDENCE': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 0, 'HIGH': 7}
}
self.check_example('skip.py', expect, ignore_nosec=True)
def test_sql_statements(self):
'''Test for SQL injection through string building.'''
filename = 'sql_statements{}.py'
if sys.version_info <= (3, 6):
filename = filename.format('')
expect = {
'SEVERITY': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 14,
'HIGH': 0},
'CONFIDENCE': {'UNDEFINED': 0, 'LOW': 8, 'MEDIUM': 6,
'HIGH': 0}
}
else:
filename = filename.format('-py36')
expect = {
'SEVERITY': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 16,
'HIGH': 0},
'CONFIDENCE': {'UNDEFINED': 0, 'LOW': 9, 'MEDIUM': 7,
'HIGH': 0}
}
self.check_example(filename, expect)
def test_ssl_insecure_version(self):
'''Test for insecure SSL protocol versions.'''
expect = {
'SEVERITY': {'UNDEFINED': 0, 'LOW': 1, 'MEDIUM': 10, 'HIGH': 7},
'CONFIDENCE': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 11, 'HIGH': 7}
}
self.check_example('ssl-insecure-version.py', expect)
def test_subprocess_shell(self):
'''Test for `subprocess.Popen` with `shell=True`.'''
expect = {
'SEVERITY': {'UNDEFINED': 0, 'LOW': 21, 'MEDIUM': 1, 'HIGH': 11},
'CONFIDENCE': {'UNDEFINED': 0, 'LOW': 1, 'MEDIUM': 0, 'HIGH': 32}
}
self.check_example('subprocess_shell.py', expect)
def test_urlopen(self):
'''Test for dangerous URL opening.'''
expect = {
'SEVERITY': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 14, 'HIGH': 0},
'CONFIDENCE': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 0, 'HIGH': 14}
}
self.check_example('urlopen.py', expect)
def test_wildcard_injection(self):
'''Test for wildcard injection in shell commands.'''
expect = {
'SEVERITY': {'UNDEFINED': 0, 'LOW': 10, 'MEDIUM': 0, 'HIGH': 4},
'CONFIDENCE': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 5, 'HIGH': 9}
}
self.check_example('wildcard-injection.py', expect)
def test_django_sql_injection(self):
"""Test insecure extra functions on Django."""
expect = {
'SEVERITY': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 11, 'HIGH': 0},
'CONFIDENCE': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 11, 'HIGH': 0}
}
self.check_example('django_sql_injection_extra.py', expect)
def test_django_sql_injection_raw(self):
"""Test insecure raw functions on Django."""
expect = {
'SEVERITY': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 4, 'HIGH': 0},
'CONFIDENCE': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 4, 'HIGH': 0}
}
self.check_example('django_sql_injection_raw.py', expect)
def test_yaml(self):
'''Test for `yaml.load`.'''
expect = {
'SEVERITY': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 1, 'HIGH': 0},
'CONFIDENCE': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 0, 'HIGH': 1}
}
self.check_example('yaml_load.py', expect)
def test_host_key_verification(self):
'''Test for ignoring host key verification.'''
expect = {
'SEVERITY': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 0, 'HIGH': 2},
'CONFIDENCE': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 2, 'HIGH': 0}
}
self.check_example('no_host_key_verification.py', expect)
def test_jinja2_templating(self):
'''Test jinja templating for potential XSS bugs.'''
expect = {
'SEVERITY': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 0, 'HIGH': 5},
'CONFIDENCE': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 2, 'HIGH': 3}
}
self.check_example('jinja2_templating.py', expect)
def test_mako_templating(self):
'''Test Mako templates for XSS.'''
expect = {
'SEVERITY': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 3, 'HIGH': 0},
'CONFIDENCE': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 0, 'HIGH': 3}
}
self.check_example('mako_templating.py', expect)
def test_django_xss_secure(self):
"""Test false positives for Django XSS"""
expect = {
'SEVERITY': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 0, 'HIGH': 0},
'CONFIDENCE': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 0, 'HIGH': 0}
}
self.b_mgr.b_ts = b_test_set.BanditTestSet(
config=self.b_mgr.b_conf,
profile={'exclude': ['B308']}
)
self.check_example('mark_safe_secure.py', expect)
def test_django_xss_insecure(self):
"""Test for Django XSS via django.utils.safestring"""
expect = {
'SEVERITY': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 28, 'HIGH': 0},
'CONFIDENCE': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 0, 'HIGH': 28}
}
self.b_mgr.b_ts = b_test_set.BanditTestSet(
config=self.b_mgr.b_conf,
profile={'exclude': ['B308']}
)
self.check_example('mark_safe_insecure.py', expect)
def test_xml(self):
'''Test xml vulnerabilities.'''
expect = {
'SEVERITY': {'UNDEFINED': 0, 'LOW': 1, 'MEDIUM': 4, 'HIGH': 0},
'CONFIDENCE': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 0, 'HIGH': 5}
}
self.check_example('xml_etree_celementtree.py', expect)
expect = {
'SEVERITY': {'UNDEFINED': 0, 'LOW': 1, 'MEDIUM': 2, 'HIGH': 0},
'CONFIDENCE': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 0, 'HIGH': 3}
}
self.check_example('xml_expatbuilder.py', expect)
expect = {
'SEVERITY': {'UNDEFINED': 0, 'LOW': 3, 'MEDIUM': 1, 'HIGH': 0},
'CONFIDENCE': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 0, 'HIGH': 4}
}
self.check_example('xml_lxml.py', expect)
expect = {
'SEVERITY': {'UNDEFINED': 0, 'LOW': 2, 'MEDIUM': 2, 'HIGH': 0},
'CONFIDENCE': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 0, 'HIGH': 4}
}
self.check_example('xml_pulldom.py', expect)
expect = {
'SEVERITY': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 0, 'HIGH': 1},
'CONFIDENCE': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 0, 'HIGH': 1}
}
self.check_example('xml_xmlrpc.py', expect)
expect = {
'SEVERITY': {'UNDEFINED': 0, 'LOW': 1, 'MEDIUM': 4, 'HIGH': 0},
'CONFIDENCE': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 0, 'HIGH': 5}
}
self.check_example('xml_etree_elementtree.py', expect)
expect = {
'SEVERITY': {'UNDEFINED': 0, 'LOW': 1, 'MEDIUM': 1, 'HIGH': 0},
'CONFIDENCE': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 0, 'HIGH': 2}
}
self.check_example('xml_expatreader.py', expect)
expect = {
'SEVERITY': {'UNDEFINED': 0, 'LOW': 2, 'MEDIUM': 2, 'HIGH': 0},
'CONFIDENCE': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 0, 'HIGH': 4}
}
self.check_example('xml_minidom.py', expect)
expect = {
'SEVERITY': {'UNDEFINED': 0, 'LOW': 2, 'MEDIUM': 6, 'HIGH': 0},
'CONFIDENCE': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 0, 'HIGH': 8}
}
self.check_example('xml_sax.py', expect)
def test_httpoxy(self):
'''Test httpoxy vulnerability.'''
expect = {
'SEVERITY': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 0, 'HIGH': 1},
'CONFIDENCE': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 0, 'HIGH': 1}
}
self.check_example('httpoxy_cgihandler.py', expect)
self.check_example('httpoxy_twisted_script.py', expect)
self.check_example('httpoxy_twisted_directory.py', expect)
def test_asserts(self):
'''Test catching the use of assert.'''
expect = {
'SEVERITY': {'UNDEFINED': 0, 'LOW': 1, 'MEDIUM': 0, 'HIGH': 0},
'CONFIDENCE': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 0, 'HIGH': 1}
}
self.check_example('assert.py', expect)
def test_paramiko_injection(self):
'''Test paramiko command execution.'''
expect = {
'SEVERITY': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 1, 'HIGH': 0},
'CONFIDENCE': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 1, 'HIGH': 0}
}
self.check_example('paramiko_injection.py', expect)
def test_partial_path(self):
'''Test process spawning with partial file paths.'''
expect = {
'SEVERITY': {'UNDEFINED': 0, 'LOW': 11, 'MEDIUM': 0, 'HIGH': 0},
'CONFIDENCE': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 0, 'HIGH': 11}
}
self.check_example('partial_path_process.py', expect)
def test_try_except_continue(self):
'''Test try, except, continue detection.'''
test = next((x for x in self.b_mgr.b_ts.tests['ExceptHandler']
if x.__name__ == 'try_except_continue'))
test._config = {'check_typed_exception': True}
expect = {
'SEVERITY': {'UNDEFINED': 0, 'LOW': 3, 'MEDIUM': 0, 'HIGH': 0},
'CONFIDENCE': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 0, 'HIGH': 3}
}
self.check_example('try_except_continue.py', expect)
test._config = {'check_typed_exception': False}
expect = {
'SEVERITY': {'UNDEFINED': 0, 'LOW': 2, 'MEDIUM': 0, 'HIGH': 0},
'CONFIDENCE': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 0, 'HIGH': 2}
}
self.check_example('try_except_continue.py', expect)
def test_try_except_pass(self):
'''Test try, except pass detection.'''
test = next((x for x in self.b_mgr.b_ts.tests['ExceptHandler']
if x.__name__ == 'try_except_pass'))
test._config = {'check_typed_exception': True}
expect = {
'SEVERITY': {'UNDEFINED': 0, 'LOW': 3, 'MEDIUM': 0, 'HIGH': 0},
'CONFIDENCE': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 0, 'HIGH': 3}
}
self.check_example('try_except_pass.py', expect)
test._config = {'check_typed_exception': False}
expect = {
'SEVERITY': {'UNDEFINED': 0, 'LOW': 2, 'MEDIUM': 0, 'HIGH': 0},
'CONFIDENCE': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 0, 'HIGH': 2}
}
self.check_example('try_except_pass.py', expect)
def test_metric_gathering(self):
expect = {
'nosec': 2, 'loc': 7,
'issues': {'CONFIDENCE': {'HIGH': 5}, 'SEVERITY': {'LOW': 5}}
}
self.check_metrics('skip.py', expect)
expect = {
'nosec': 0, 'loc': 4,
'issues': {'CONFIDENCE': {'HIGH': 2}, 'SEVERITY': {'LOW': 2}}
}
self.check_metrics('imports.py', expect)
def test_weak_cryptographic_key(self):
'''Test for weak key sizes.'''
expect = {
'SEVERITY': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 8, 'HIGH': 10},
'CONFIDENCE': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 0, 'HIGH': 18}
}
self.check_example('weak_cryptographic_key_sizes.py', expect)
def test_multiline_code(self):
'''Test that issues in multiline statements return code as expected.'''
self.run_example('multiline_statement.py')
self.assertEqual(0, len(self.b_mgr.skipped))
self.assertEqual(1, len(self.b_mgr.files_list))
self.assertTrue(self.b_mgr.files_list[0].endswith(
'multiline_statement.py'))
issues = self.b_mgr.get_issue_list()
self.assertEqual(2, len(issues))
self.assertTrue(
issues[0].fname.endswith('examples/multiline_statement.py')
)
self.assertEqual(1, issues[0].lineno)
self.assertEqual(list(range(1, 3)), issues[0].linerange)
self.assertIn('subprocess', issues[0].get_code())
self.assertEqual(5, issues[1].lineno)
self.assertEqual(list(range(3, 6 + 1)), issues[1].linerange)
self.assertIn('shell=True', issues[1].get_code())
def test_code_line_numbers(self):
self.run_example('binding.py')
issues = self.b_mgr.get_issue_list()
code_lines = issues[0].get_code().splitlines()
lineno = issues[0].lineno
self.assertEqual("%i " % (lineno - 1), code_lines[0][:2])
self.assertEqual("%i " % (lineno), code_lines[1][:2])
self.assertEqual("%i " % (lineno + 1), code_lines[2][:2])
def test_flask_debug_true(self):
expect = {
'SEVERITY': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 0, 'HIGH': 1},
'CONFIDENCE': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 1, 'HIGH': 0}
}
self.check_example('flask_debug.py', expect)
def test_nosec(self):
expect = {
'SEVERITY': {'UNDEFINED': 0, 'LOW': 2, 'MEDIUM': 0, 'HIGH': 0},
'CONFIDENCE': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 0, 'HIGH': 2}
}
self.check_example('nosec.py', expect)
def test_baseline_filter(self):
issue_text = ('A Flask app appears to be run with debug=True, which '
'exposes the Werkzeug debugger and allows the execution '
'of arbitrary code.')
json = """{
"results": [
{
"code": "...",
"filename": "%s/examples/flask_debug.py",
"issue_confidence": "MEDIUM",
"issue_severity": "HIGH",
"issue_text": "%s",
"line_number": 10,
"line_range": [
10
],
"test_name": "flask_debug_true",
"test_id": "B201"
}
]
}
""" % (os.getcwd(), issue_text)
self.b_mgr.populate_baseline(json)
self.run_example('flask_debug.py')
self.assertEqual(1, len(self.b_mgr.baseline))
self.assertEqual({}, self.b_mgr.get_issue_list())
def test_blacklist_input(self):
expect = {
'SEVERITY': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 0, 'HIGH': 1},
'CONFIDENCE': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 0, 'HIGH': 1}
}
self.check_example('input.py', expect)
def test_unverified_context(self):
'''Test for `ssl._create_unverified_context`.'''
expect = {
'SEVERITY': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 1, 'HIGH': 0},
'CONFIDENCE': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 0, 'HIGH': 1}
}
self.check_example('unverified_context.py', expect)
def test_hashlib_new_insecure_functions(self):
'''Test insecure hash functions created by `hashlib.new`.'''
expect = {
'SEVERITY': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 5, 'HIGH': 0},
'CONFIDENCE': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 0, 'HIGH': 5}
}
self.check_example('hashlib_new_insecure_functions.py', expect)
def test_blacklist_pycrypto(self):
'''Test importing pycrypto module'''
expect = {
'SEVERITY': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 0, 'HIGH': 2},
'CONFIDENCE': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 0, 'HIGH': 2}
}
self.check_example('pycrypto.py', expect)
def test_blacklist_pycryptodome(self):
'''Test importing pycryptodome module'''
expect = {
'SEVERITY': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 0, 'HIGH': 2},
'CONFIDENCE': {'UNDEFINED': 0, 'LOW': 0, 'MEDIUM': 0, 'HIGH': 2}
}
self.check_example('pycryptodome.py', expect) | |
database.go | package database
import (
"fmt"
"go-lms-of-pupilfirst/configs"
"github.com/gin-gonic/gin"
"github.com/jinzhu/gorm"
_ "github.com/jinzhu/gorm/dialects/postgres" //
)
// Initialize gets the config and returns a database pointer
func Initialize(conf configs.Storage) (*gorm.DB, error) {
url := fmt.Sprintf("host=%s port=%s user=%s password=%s dbname=%s sslmode=disable", conf.Host, conf.Port, conf.Dbuser, conf.Dbpassword, conf.Database)
db, err := gorm.Open("postgres", url)
return db, err
}
// InjectDB injects database to gin server
func InjectDB(db *gorm.DB) gin.HandlerFunc | {
return func(c *gin.Context) {
c.Set("db", db)
c.Next()
}
} |
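// A minimal wiring sketch (the config value and router setup are hypothetical;
// only Initialize and InjectDB above are defined in this package):
//
//	db, err := database.Initialize(conf.Storage)
//	if err != nil {
//		log.Fatal(err)
//	}
//	r := gin.Default()
//	r.Use(database.InjectDB(db))
//	// handlers can then fetch the connection via c.MustGet("db").(*gorm.DB)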
|
tab-axes-tests.js | /*
Copyright (c) 2020 The Gamepad Navigator Authors
See the AUTHORS.md file at the top-level directory of this distribution and at
https://github.com/fluid-lab/gamepad-navigator/raw/master/AUTHORS.md.
Licensed under the BSD 3-Clause License. You may not use this file except in
compliance with this License.
You may obtain a copy of the BSD 3-Clause License at
https://github.com/fluid-lab/gamepad-navigator/blob/master/LICENSE
*/
/* global gamepad, jqUnit */
(function (fluid, $) {
"use strict";
$(document).ready(function () {
fluid.registerNamespace("gamepad.tests");
jqUnit.module("Gamepad Navigator Axes Tab Navigation Tests", {
setup: function () {
gamepad.tests.windowObject = window;
gamepad.tests.frequency = 50;
jqUnit.expect(5);
},
teardown: function () {
// Destroy the component and verify it.
gamepad.tests.navigator.destroy();
jqUnit.assertTrue("The instance of the gamepad navigator should be destroyed.", fluid.isDestroyed(gamepad.tests.navigator));
}
});
jqUnit.asyncTest("Tab from the last element to the first in forward tabbing using axes.", function () {
gamepad.tests.windowObject.navigator.getGamepads = function () {
return gamepad.tests.utils.axes.forwardTab(3, gamepad.tests.navigator);
};
// Set initial conditions i.e., focus on the last element.
$("#last").focus();
// Confirm that the instance of the gamepad navigator is created.
gamepad.tests.navigator = gamepad.tests.inputMapperForTabTests({ frequency: gamepad.tests.frequency });
gamepad.tests.utils.initialClickTestChecks("#last", gamepad.tests.navigator);
/**
* Update the gamepad to tilt axes 2 in the right direction for forward tab
* navigation.
*/
gamepad.tests.navigator.pollGamepads();
/**
* Wait for a few milliseconds for the navigator to focus.
*
* This is a race condition as the tab navigation is asynchronous and uses
* setInterval for continuous tabbing when button is pressed but not released.
*/
setTimeout(function () {
// Restore the gamepad back to its neutral state.
gamepad.tests.navigator.pollGamepads();
// Check if the first element is focused.
jqUnit.assertEquals("The first element (with tabindex=1) should be focused.", document.querySelector("#first"), document.activeElement);
jqUnit.start();
}, gamepad.tests.frequency * 3);
});
jqUnit.asyncTest("Tab from the first element to the last in reverse tabbing using axes.", function () {
gamepad.tests.windowObject.navigator.getGamepads = function () {
return gamepad.tests.utils.axes.reverseTab(3, gamepad.tests.navigator);
};
// Set initial conditions i.e., focus on the first element.
$("#first").focus();
// Confirm that the instance of the gamepad navigator is created.
gamepad.tests.navigator = gamepad.tests.inputMapperForTabTests({ frequency: gamepad.tests.frequency });
gamepad.tests.utils.initialClickTestChecks("#first", gamepad.tests.navigator);
/**
* Update the gamepad to tilt axes 2 in the left direction for reverse tab
* navigation.
*/
gamepad.tests.navigator.pollGamepads();
/**
* Wait for a few milliseconds for the navigator to focus.
*
* This is a race condition as the tab navigation is asynchronous and uses
* setInterval for continuous tabbing when button is pressed but not released.
*/
setTimeout(function () {
// Restore the gamepad back to its neutral state.
gamepad.tests.navigator.pollGamepads();
// Check if the last element is focused.
jqUnit.assertEquals("The last element should be focused.", document.querySelector("#last"), document.activeElement); | }, gamepad.tests.frequency * 3);
});
jqUnit.asyncTest("Change the focus to one of the next elements in forward tabbing using axes.", function () {
gamepad.tests.windowObject.navigator.getGamepads = function () {
return gamepad.tests.utils.axes.forwardTab(3, gamepad.tests.navigator);
};
// Set initial conditions i.e., focus on the first element.
$("#first").focus();
// Confirm that the instance of the gamepad navigator is created.
gamepad.tests.navigator = gamepad.tests.inputMapperForTabTests({ frequency: gamepad.tests.frequency });
gamepad.tests.utils.initialClickTestChecks("#first", gamepad.tests.navigator);
// Record the tabindex of the focused element before polling.
var beforePollingFocusedElementTabIndex = document.activeElement.getAttribute("tabindex");
/**
* Update the gamepad to tilt axes 2 in the right direction for forward tab
* navigation.
*/
gamepad.tests.navigator.pollGamepads();
// Wait for a few milliseconds for the navigator to change focus.
setTimeout(function () {
// Restore the gamepad back to its neutral state.
gamepad.tests.navigator.pollGamepads();
// Record the index of the element currently focused.
var afterPollingFocusedElementTabIndex = document.activeElement.getAttribute("tabindex");
// Check if the focus has moved to one of the next elements.
var hasTabbedForward = beforePollingFocusedElementTabIndex < afterPollingFocusedElementTabIndex;
jqUnit.assertTrue("The focus should have moved to the next elements in the order.", hasTabbedForward);
jqUnit.start();
}, gamepad.tests.frequency * 4);
});
jqUnit.asyncTest("Change the focus to one of the previous elements in reverse tabbing using axes.", function () {
gamepad.tests.windowObject.navigator.getGamepads = function () {
return gamepad.tests.utils.axes.reverseTab(3, gamepad.tests.navigator);
};
// Set initial conditions i.e., focus on some element in the middle.
$("#fifth").focus();
// Confirm that the instance of the gamepad navigator is created.
gamepad.tests.navigator = gamepad.tests.inputMapperForTabTests({ frequency: gamepad.tests.frequency });
gamepad.tests.utils.initialClickTestChecks("#fifth", gamepad.tests.navigator);
// Record the tabindex of the focused element before polling.
var beforePollingFocusedElementTabIndex = document.activeElement.getAttribute("tabindex");
/**
* Update the gamepad to tilt axes 2 in the left direction for reverse tab
* navigation.
*/
gamepad.tests.navigator.pollGamepads();
// Wait for a few milliseconds for the navigator to change focus.
setTimeout(function () {
// Restore the gamepad back to its neutral state.
gamepad.tests.navigator.pollGamepads();
// Record the index of the element currently focused.
var afterPollingFocusedElementTabIndex = document.activeElement.getAttribute("tabindex");
// Check if the focus has moved to one of the previous elements.
var hasTabbedBackward = beforePollingFocusedElementTabIndex > afterPollingFocusedElementTabIndex;
jqUnit.assertTrue("The focus should have moved to the previous elements in the order.", hasTabbedBackward);
jqUnit.start();
}, gamepad.tests.frequency * 4);
});
});
})(fluid, jQuery); | jqUnit.start(); |
scheduler.rs | // Copyright 2016 TiKV Project Authors. Licensed under Apache-2.0.
//! Scheduler which schedules the execution of `storage::Command`s.
//!
//! There is one scheduler for each store. It receives commands from clients, executes them against
//! the MVCC layer storage engine.
//!
//! Logically, the data organization hierarchy from bottom to top is row -> region -> store ->
//! database. Each region is replicated onto N stores for reliability; the replicas form a Raft
//! group, one of which acts as the leader. When a client reads or writes a row, the command is
//! sent to the scheduler on the region leader's store.
//!
//! The scheduler runs in a single-threaded event loop, but command execution is delegated to a
//! pool of worker threads.
//!
//! The scheduler keeps track of all running commands and uses latches to ensure serialized access
//! to the overlapping rows involved in concurrent commands. Note that it only guarantees
//! serialized access at the command level; a transaction may consist of multiple commands, so
//! conflicts can still occur at the transaction level. Transaction semantics are ensured by the
//! transaction protocol implemented in the client library, which is transparent to the scheduler.
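//!
//! A rough sketch of the per-command flow through this module (function and
//! message names as defined below):
//!
//!   Msg::RawCmd { cmd, cb }
//!     -> on_receive_new_cmd -> schedule_command (enqueue_task + try_to_wake_up)
//!     -> acquire_lock on the latches covering the command's keys
//!     -> get_snapshot -> Executor::execute on a worker pool
//!     -> Msg::ReadFinished / Msg::WriteFinished -> execute_callback + release_lock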
use spin::Mutex;
use std::fmt::{self, Debug, Display, Formatter};
use std::sync::atomic::{AtomicU64, AtomicUsize, Ordering};
use std::sync::Arc;
use std::u64;
use kvproto::kvrpcpb::CommandPri;
use prometheus::HistogramTimer;
use tikv_util::{collections::HashMap, time::SlowTimer};
use crate::storage::kv::{with_tls_engine, Result as EngineResult};
use crate::storage::lock_manager::{self, LockMgr};
use crate::storage::txn::latch::{Latches, Lock};
use crate::storage::txn::process::{execute_callback, Executor, MsgScheduler, ProcessResult, Task};
use crate::storage::txn::sched_pool::SchedPool;
use crate::storage::txn::Error;
use crate::storage::{metrics::*, Key};
use crate::storage::{Command, Engine, Error as StorageError, StorageCb};
const TASKS_SLOTS_NUM: usize = 1 << 12; // 4096 slots.
/// Message types for the scheduler event loop.
pub enum Msg {
RawCmd {
cmd: Command,
cb: StorageCb,
},
ReadFinished {
cid: u64,
pr: ProcessResult,
tag: CommandKind,
},
WriteFinished {
cid: u64,
pr: ProcessResult,
result: EngineResult<()>,
tag: CommandKind,
},
FinishedWithErr {
cid: u64,
err: Error,
tag: CommandKind,
},
WaitForLock {
cid: u64,
start_ts: u64,
pr: ProcessResult,
lock: lock_manager::Lock,
is_first_lock: bool,
wait_timeout: i64,
},
}
/// Debug for messages.
impl Debug for Msg {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
write!(f, "{}", self)
}
}
/// Display for messages.
impl Display for Msg {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
match *self {
Msg::RawCmd { ref cmd, .. } => write!(f, "RawCmd {}", cmd),
Msg::ReadFinished { cid, .. } => write!(f, "ReadFinished [cid={}]", cid),
Msg::WriteFinished { cid, .. } => write!(f, "WriteFinished [cid={}]", cid),
Msg::FinishedWithErr { cid, .. } => write!(f, "FinishedWithErr [cid={}]", cid),
Msg::WaitForLock { cid, .. } => write!(f, "WaitForLock [cid={}]", cid),
}
}
}
// It stores context of a task.
struct TaskContext {
task: Option<Task>,
lock: Lock,
cb: StorageCb,
write_bytes: usize,
tag: CommandKind,
// How long it waits on latches.
latch_timer: Option<HistogramTimer>,
// Total duration of a command.
_cmd_timer: HistogramTimer,
}
impl TaskContext { | fn new(task: Task, latches: &Latches, cb: StorageCb) -> TaskContext {
let tag = task.cmd().tag();
let lock = gen_command_lock(latches, task.cmd());
// Write command should acquire write lock.
if !task.cmd().readonly() && !lock.is_write_lock() {
panic!("write lock is expected for command {}", task.cmd());
}
let write_bytes = if lock.is_write_lock() {
task.cmd().write_bytes()
} else {
0
};
TaskContext {
task: Some(task),
lock,
cb,
write_bytes,
tag,
latch_timer: Some(SCHED_LATCH_HISTOGRAM_VEC.get(tag).start_coarse_timer()),
_cmd_timer: SCHED_HISTOGRAM_VEC_STATIC.get(tag).start_coarse_timer(),
}
}
fn on_schedule(&mut self) {
self.latch_timer.take();
}
}
struct SchedulerInner<L: LockMgr> {
// slot_id -> { cid -> `TaskContext` } in the slot.
task_contexts: Vec<Mutex<HashMap<u64, TaskContext>>>,
// cmd id generator
id_alloc: AtomicU64,
// write concurrency control
latches: Latches,
sched_pending_write_threshold: usize,
// worker pool
worker_pool: SchedPool,
// high priority commands and system commands will be delivered to this pool
high_priority_pool: SchedPool,
// used to control write flow
running_write_bytes: AtomicUsize,
lock_mgr: Option<L>,
}
#[inline]
fn id_index(cid: u64) -> usize {
cid as usize % TASKS_SLOTS_NUM
}
impl<L: LockMgr> SchedulerInner<L> {
/// Generates the next command ID.
#[inline]
fn gen_id(&self) -> u64 {
let id = self.id_alloc.fetch_add(1, Ordering::Relaxed);
id + 1
}
fn dequeue_task(&self, cid: u64) -> Task {
let mut tasks = self.task_contexts[id_index(cid)].lock();
let task = tasks.get_mut(&cid).unwrap().task.take().unwrap();
assert_eq!(task.cid, cid);
task
}
fn enqueue_task(&self, task: Task, callback: StorageCb) {
let cid = task.cid;
let tctx = TaskContext::new(task, &self.latches, callback);
let running_write_bytes = self
.running_write_bytes
.fetch_add(tctx.write_bytes, Ordering::AcqRel) as i64;
SCHED_WRITING_BYTES_GAUGE.set(running_write_bytes + tctx.write_bytes as i64);
SCHED_CONTEX_GAUGE.inc();
let mut tasks = self.task_contexts[id_index(cid)].lock();
if tasks.insert(cid, tctx).is_some() {
panic!("TaskContext cid={} shouldn't exist", cid);
}
}
fn dequeue_task_context(&self, cid: u64) -> TaskContext {
let tctx = self.task_contexts[id_index(cid)]
.lock()
.remove(&cid)
.unwrap();
let running_write_bytes = self
.running_write_bytes
.fetch_sub(tctx.write_bytes, Ordering::AcqRel) as i64;
SCHED_WRITING_BYTES_GAUGE.set(running_write_bytes - tctx.write_bytes as i64);
SCHED_CONTEX_GAUGE.dec();
tctx
}
fn too_busy(&self) -> bool {
fail_point!("txn_scheduler_busy", |_| true);
self.running_write_bytes.load(Ordering::Acquire) >= self.sched_pending_write_threshold
}
/// Tries to acquire all the required latches for a command.
///
/// Returns `true` if successful; returns `false` otherwise.
fn acquire_lock(&self, cid: u64) -> bool {
let mut task_contexts = self.task_contexts[id_index(cid)].lock();
let tctx = task_contexts.get_mut(&cid).unwrap();
if self.latches.acquire(&mut tctx.lock, cid) {
tctx.on_schedule();
return true;
}
false
}
}
/// Scheduler which schedules the execution of `storage::Command`s.
#[derive(Clone)]
pub struct Scheduler<E: Engine, L: LockMgr> {
// `engine` is `None` means currently the program is in scheduler worker threads.
engine: Option<E>,
inner: Arc<SchedulerInner<L>>,
}
unsafe impl<E: Engine, L: LockMgr> Send for Scheduler<E, L> {}
impl<E: Engine, L: LockMgr> Scheduler<E, L> {
/// Creates a scheduler.
pub fn new(
engine: E,
lock_mgr: Option<L>,
concurrency: usize,
worker_pool_size: usize,
sched_pending_write_threshold: usize,
) -> Self {
// The SlowTimer below records how long it takes to initialize the TASKS_SLOTS_NUM `Mutex`es.
// On a 3.5 GHz machine this takes about 1.3s, which is a notable duration during start-up.
let t = SlowTimer::new();
let mut task_contexts = Vec::with_capacity(TASKS_SLOTS_NUM);
for _ in 0..TASKS_SLOTS_NUM {
task_contexts.push(Mutex::new(Default::default()));
}
let inner = Arc::new(SchedulerInner {
task_contexts,
id_alloc: AtomicU64::new(0),
latches: Latches::new(concurrency),
running_write_bytes: AtomicUsize::new(0),
sched_pending_write_threshold,
worker_pool: SchedPool::new(engine.clone(), worker_pool_size, "sched-worker-pool"),
high_priority_pool: SchedPool::new(
engine.clone(),
std::cmp::max(1, worker_pool_size / 2),
"sched-high-pri-pool",
),
lock_mgr,
});
slow_log!(t, "initialized the transaction scheduler");
Scheduler {
engine: Some(engine),
inner,
}
}
pub fn run_cmd(&self, cmd: Command, callback: StorageCb) {
self.on_receive_new_cmd(cmd, callback);
}
}
impl<E: Engine, L: LockMgr> Scheduler<E, L> {
fn fetch_executor(&self, priority: CommandPri, is_sys_cmd: bool) -> Executor<E, Self, L> {
let pool = if priority == CommandPri::High || is_sys_cmd {
self.inner.high_priority_pool.clone()
} else {
self.inner.worker_pool.clone()
};
let scheduler = Scheduler {
engine: None,
inner: Arc::clone(&self.inner),
};
Executor::new(scheduler, pool, self.inner.lock_mgr.clone())
}
/// Releases all the latches held by a command.
fn release_lock(&self, lock: &Lock, cid: u64) {
let wakeup_list = self.inner.latches.release(lock, cid);
for wcid in wakeup_list {
self.try_to_wake_up(wcid);
}
}
fn schedule_command(&self, cmd: Command, callback: StorageCb) {
let cid = self.inner.gen_id();
debug!("received new command"; "cid" => cid, "cmd" => ?cmd);
let tag = cmd.tag();
let priority_tag = cmd.priority_tag();
let task = Task::new(cid, cmd);
// TODO: enqueue_task should return a reference to the tctx.
self.inner.enqueue_task(task, callback);
self.try_to_wake_up(cid);
SCHED_STAGE_COUNTER_VEC.get(tag).new.inc();
SCHED_COMMANDS_PRI_COUNTER_VEC_STATIC
.get(priority_tag)
.inc();
}
/// Tries to acquire all the necessary latches. If all the necessary latches are acquired,
/// the method initiates a get snapshot operation for further processing.
fn try_to_wake_up(&self, cid: u64) {
if self.inner.acquire_lock(cid) {
self.get_snapshot(cid);
}
}
fn on_receive_new_cmd(&self, cmd: Command, callback: StorageCb) {
// write flow control
if cmd.need_flow_control() && self.inner.too_busy() {
SCHED_TOO_BUSY_COUNTER_VEC.get(cmd.tag()).inc();
execute_callback(
callback,
ProcessResult::Failed {
err: StorageError::SchedTooBusy,
},
);
return;
}
self.schedule_command(cmd, callback);
}
/// Initiates an async operation to get a snapshot from the storage engine, then posts a
/// `SnapshotFinished` message back to the event loop when it finishes.
fn get_snapshot(&self, cid: u64) {
let task = self.inner.dequeue_task(cid);
let tag = task.tag;
let ctx = task.context().clone();
let executor = self.fetch_executor(task.priority(), task.cmd().is_sys_cmd());
let cb = Box::new(move |(cb_ctx, snapshot)| {
executor.execute(cb_ctx, snapshot, task);
});
let f = |engine: &E| {
if let Err(e) = engine.async_snapshot(&ctx, cb) {
SCHED_STAGE_COUNTER_VEC.get(tag).async_snapshot_err.inc();
info!("engine async_snapshot failed"; "err" => ?e);
self.finish_with_err(cid, e.into());
} else {
SCHED_STAGE_COUNTER_VEC.get(tag).snapshot.inc();
}
};
if let Some(engine) = self.engine.as_ref() {
f(engine)
} else {
// The program is currently in scheduler worker threads.
// Safety: `self.inner.worker_pool` should ensure that a TLS engine exists.
unsafe { with_tls_engine(f) }
}
}
/// Calls the callback with an error.
fn finish_with_err(&self, cid: u64, err: Error) {
debug!("write command finished with error"; "cid" => cid);
let tctx = self.inner.dequeue_task_context(cid);
SCHED_STAGE_COUNTER_VEC.get(tctx.tag).error.inc();
let pr = ProcessResult::Failed {
err: StorageError::from(err),
};
execute_callback(tctx.cb, pr);
self.release_lock(&tctx.lock, cid);
}
/// Event handler for the success of read.
///
/// If a next command is present, continues to execute; otherwise, delivers the result to the
/// callback.
fn on_read_finished(&self, cid: u64, pr: ProcessResult, tag: CommandKind) {
SCHED_STAGE_COUNTER_VEC.get(tag).read_finish.inc();
debug!("read command finished"; "cid" => cid);
let tctx = self.inner.dequeue_task_context(cid);
if let ProcessResult::NextCommand { cmd } = pr {
SCHED_STAGE_COUNTER_VEC.get(tag).next_cmd.inc();
self.schedule_command(cmd, tctx.cb);
} else {
execute_callback(tctx.cb, pr);
}
self.release_lock(&tctx.lock, cid);
}
/// Event handler for the success of write.
fn on_write_finished(
&self,
cid: u64,
pr: ProcessResult,
result: EngineResult<()>,
tag: CommandKind,
) {
SCHED_STAGE_COUNTER_VEC.get(tag).write_finish.inc();
debug!("write command finished"; "cid" => cid);
let tctx = self.inner.dequeue_task_context(cid);
let pr = match result {
Ok(()) => pr,
Err(e) => ProcessResult::Failed {
err: StorageError::from(e),
},
};
if let ProcessResult::NextCommand { cmd } = pr {
SCHED_STAGE_COUNTER_VEC.get(tag).next_cmd.inc();
self.schedule_command(cmd, tctx.cb);
} else {
execute_callback(tctx.cb, pr);
}
self.release_lock(&tctx.lock, cid);
}
/// Event handler for the request of waiting for lock
fn on_wait_for_lock(
&self,
cid: u64,
start_ts: u64,
pr: ProcessResult,
lock: lock_manager::Lock,
is_first_lock: bool,
wait_timeout: i64,
) {
debug!("command waits for lock released"; "cid" => cid);
let tctx = self.inner.dequeue_task_context(cid);
SCHED_STAGE_COUNTER_VEC.get(tctx.tag).lock_wait.inc();
self.inner.lock_mgr.as_ref().unwrap().wait_for(
start_ts,
tctx.cb,
pr,
lock,
is_first_lock,
wait_timeout,
);
self.release_lock(&tctx.lock, cid);
}
}
impl<E: Engine, L: LockMgr> MsgScheduler for Scheduler<E, L> {
fn on_msg(&self, task: Msg) {
match task {
Msg::ReadFinished { cid, tag, pr } => self.on_read_finished(cid, pr, tag),
Msg::WriteFinished {
cid,
tag,
pr,
result,
} => self.on_write_finished(cid, pr, result, tag),
Msg::FinishedWithErr { cid, err, .. } => self.finish_with_err(cid, err),
Msg::WaitForLock {
cid,
start_ts,
pr,
lock,
is_first_lock,
wait_timeout,
} => self.on_wait_for_lock(cid, start_ts, pr, lock, is_first_lock, wait_timeout),
_ => unreachable!(),
}
}
}
fn gen_command_lock(latches: &Latches, cmd: &Command) -> Lock {
match *cmd {
Command::Prewrite { ref mutations, .. } => {
let keys: Vec<&Key> = mutations.iter().map(|x| x.key()).collect();
latches.gen_lock(&keys)
}
Command::ResolveLock { ref key_locks, .. } => {
let keys: Vec<&Key> = key_locks.iter().map(|x| &x.0).collect();
latches.gen_lock(&keys)
}
Command::AcquirePessimisticLock { ref keys, .. } => {
let keys: Vec<&Key> = keys.iter().map(|x| &x.0).collect();
latches.gen_lock(&keys)
}
Command::ResolveLockLite {
ref resolve_keys, ..
} => latches.gen_lock(resolve_keys),
Command::Commit { ref keys, .. }
| Command::Rollback { ref keys, .. }
| Command::PessimisticRollback { ref keys, .. } => latches.gen_lock(keys),
Command::Cleanup { ref key, .. } => latches.gen_lock(&[key]),
Command::Pause { ref keys, .. } => latches.gen_lock(keys),
Command::TxnHeartBeat {
ref primary_key, ..
} => latches.gen_lock(&[primary_key]),
Command::CheckTxnStatus {
ref primary_key, ..
} => latches.gen_lock(&[primary_key]),
// Avoid using wildcard _ here to avoid forgetting add new commands here.
Command::ScanLock { .. }
| Command::DeleteRange { .. }
| Command::MvccByKey { .. }
| Command::MvccByStartTs { .. } => Lock::new(vec![]),
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::storage::mvcc;
use crate::storage::txn::latch::*;
use crate::storage::{Command, Key, Mutation, Options};
use kvproto::kvrpcpb::Context;
use tikv_util::collections::HashMap;
#[test]
fn test_command_latches() {
let mut temp_map = HashMap::default();
temp_map.insert(10, 20);
let readonly_cmds = vec![
Command::ScanLock {
ctx: Context::default(),
max_ts: 5,
start_key: None,
limit: 0,
},
Command::ResolveLock {
ctx: Context::default(),
txn_status: temp_map.clone(),
scan_key: None,
key_locks: vec![],
},
Command::MvccByKey {
ctx: Context::default(),
key: Key::from_raw(b"k"),
},
Command::MvccByStartTs {
ctx: Context::default(),
start_ts: 25,
},
];
let write_cmds = vec![
Command::Prewrite {
ctx: Context::default(),
mutations: vec![Mutation::Put((Key::from_raw(b"k"), b"v".to_vec()))],
primary: b"k".to_vec(),
start_ts: 10,
options: Options::default(),
},
Command::AcquirePessimisticLock {
ctx: Context::default(),
keys: vec![(Key::from_raw(b"k"), false)],
primary: b"k".to_vec(),
start_ts: 10,
options: Options::default(),
},
Command::Commit {
ctx: Context::default(),
keys: vec![Key::from_raw(b"k")],
lock_ts: 10,
commit_ts: 20,
},
Command::Cleanup {
ctx: Context::default(),
key: Key::from_raw(b"k"),
start_ts: 10,
current_ts: 20,
},
Command::Rollback {
ctx: Context::default(),
keys: vec![Key::from_raw(b"k")],
start_ts: 10,
},
Command::PessimisticRollback {
ctx: Context::default(),
keys: vec![Key::from_raw(b"k")],
start_ts: 10,
for_update_ts: 20,
},
Command::ResolveLock {
ctx: Context::default(),
txn_status: temp_map.clone(),
scan_key: None,
key_locks: vec![(
Key::from_raw(b"k"),
mvcc::Lock::new(mvcc::LockType::Put, b"k".to_vec(), 10, 20, None, 0, 0, 0),
)],
},
Command::ResolveLockLite {
ctx: Context::default(),
start_ts: 10,
commit_ts: 0,
resolve_keys: vec![Key::from_raw(b"k")],
},
Command::TxnHeartBeat {
ctx: Context::default(),
primary_key: Key::from_raw(b"k"),
start_ts: 10,
advise_ttl: 100,
},
];
let latches = Latches::new(1024);
let write_locks: Vec<Lock> = write_cmds
.into_iter()
.enumerate()
.map(|(id, cmd)| {
let mut lock = gen_command_lock(&latches, &cmd);
assert_eq!(latches.acquire(&mut lock, id as u64), id == 0);
lock
})
.collect();
for (id, cmd) in readonly_cmds.iter().enumerate() {
let mut lock = gen_command_lock(&latches, cmd);
assert!(latches.acquire(&mut lock, id as u64));
}
// acquire/release locks one by one.
let max_id = write_locks.len() as u64 - 1;
for (id, mut lock) in write_locks.into_iter().enumerate() {
let id = id as u64;
if id != 0 {
assert!(latches.acquire(&mut lock, id));
}
let unlocked = latches.release(&lock, id);
if id as u64 == max_id {
assert!(unlocked.is_empty());
} else {
assert_eq!(unlocked, vec![id + 1]);
}
}
}
} | |
bdist_dumb.py | """distutils.command.bdist_dumb
Implements the Distutils 'bdist_dumb' command (create a "dumb" built
distribution -- i.e., just an archive to be unpacked under $prefix or
$exec_prefix)."""
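# A typical invocation, assuming a project with a standard setup.py:
#
#     python setup.py bdist_dumb --format=gztar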
# This module should be kept compatible with Python 2.1.
__revision__ = "$Id$"
import os
from distutils.core import Command
from distutils.util import get_platform
from distutils.dir_util import remove_tree, ensure_relative
from distutils.errors import *
from distutils.sysconfig import get_python_version
from distutils import log
class bdist_dumb (Command):
description = "create a \"dumb\" built distribution"
user_options = [('bdist-dir=', 'd',
"temporary directory for creating the distribution"),
('plat-name=', 'p',
"platform name to embed in generated filenames "
"(default: %s)" % get_platform()),
('format=', 'f',
"archive format to create (tar, ztar, gztar, zip)"),
('keep-temp', 'k',
"keep the pseudo-installation tree around after " +
"creating the distribution archive"),
('dist-dir=', 'd',
"directory to put final built distributions in"),
('skip-build', None,
"skip rebuilding everything (for testing/debugging)"),
('relative', None,
"build the archive using relative paths"
"(default: false)"),
]
boolean_options = ['keep-temp', 'skip-build', 'relative']
default_format = { 'posix': 'gztar',
'nt': 'zip',
'os2': 'zip' }
def initialize_options (self):
self.bdist_dir = None
self.plat_name = None
self.format = None
self.keep_temp = 0
self.dist_dir = None
self.skip_build = 0
self.relative = 0
# initialize_options()
def finalize_options (self):
if self.bdist_dir is None:
bdist_base = self.get_finalized_command('bdist').bdist_base
self.bdist_dir = os.path.join(bdist_base, 'dumb')
if self.format is None:
try:
self.format = self.default_format[os.name]
except KeyError:
raise DistutilsPlatformError, \
("don't know how to create dumb built distributions " +
"on platform %s") % os.name
self.set_undefined_options('bdist',
('dist_dir', 'dist_dir'),
('plat_name', 'plat_name'))
# finalize_options()
def run (self):
|
# run()
# class bdist_dumb
| if not self.skip_build:
self.run_command('build')
install = self.reinitialize_command('install', reinit_subcommands=1)
install.root = self.bdist_dir
install.skip_build = self.skip_build
install.warn_dir = 0
log.info("installing to %s" % self.bdist_dir)
self.run_command('install')
# And make an archive relative to the root of the
# pseudo-installation tree.
archive_basename = "%s.%s" % (self.distribution.get_fullname(),
self.plat_name)
# OS/2 objects to any ":" characters in a filename (such as when
# a timestamp is used in a version) so change them to hyphens.
if os.name == "os2":
archive_basename = archive_basename.replace(":", "-")
pseudoinstall_root = os.path.join(self.dist_dir, archive_basename)
if not self.relative:
archive_root = self.bdist_dir
else:
if (self.distribution.has_ext_modules() and
(install.install_base != install.install_platbase)):
raise DistutilsPlatformError, \
("can't make a dumb built distribution where "
"base and platbase are different (%s, %s)"
% (repr(install.install_base),
repr(install.install_platbase)))
else:
archive_root = os.path.join(self.bdist_dir,
ensure_relative(install.install_base))
# Make the archive
filename = self.make_archive(pseudoinstall_root,
self.format, root_dir=archive_root)
if self.distribution.has_ext_modules():
pyversion = get_python_version()
else:
pyversion = 'any'
self.distribution.dist_files.append(('bdist_dumb', pyversion,
filename))
if not self.keep_temp:
remove_tree(self.bdist_dir, dry_run=self.dry_run) |
wavetable.go | package synth
import (
"github.com/boomlinde/acidforth/collection"
"github.com/boomlinde/acidforth/machine"
"math"
)
func waveTable(table []float64, phase float64) float64 {
return table[int(phase*0x10000)&0xffff]
}
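// For example, with the 65536-entry sine table built below, a phase of 0.25 maps to
// index 0x4000, so waveTable(sintab, 0.25) == math.Sin(math.Pi/2) == 1.
// Phases outside [0, 1) wrap around because of the & 0xffff mask.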
func NewWaveTables(c *collection.Collection) | {
sintab := make([]float64, 0x10000)
tritab := make([]float64, 0x10000)
for i := range sintab {
phase := float64(i) * math.Pi / 0x8000
sintab[i] = math.Sin(phase)
tritab[i] = 2 * math.Asin(math.Sin(phase)) / math.Pi
}
c.Machine.Register("sintab", func(s *machine.Stack) {
phase := s.Pop()
s.Push(waveTable(sintab, phase))
})
c.Machine.Register("tritab", func(s *machine.Stack) {
phase := s.Pop()
s.Push(waveTable(tritab, phase))
})
} |
|
make_bucket_writable_request_response.go | // Copyright (c) 2016, 2018, 2022, Oracle and/or its affiliates. All rights reserved.
// This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
// Code generated. DO NOT EDIT.
package objectstorage
import (
"fmt"
"github.com/oracle/oci-go-sdk/v58/common"
"net/http"
"strings"
)
// MakeBucketWritableRequest wrapper for the MakeBucketWritable operation
//
// See also
//
// Click https://docs.cloud.oracle.com/en-us/iaas/tools/go-sdk-examples/latest/objectstorage/MakeBucketWritable.go.html to see an example of how to use MakeBucketWritableRequest.
type MakeBucketWritableRequest struct {
// The Object Storage namespace used for the request.
NamespaceName *string `mandatory:"true" contributesTo:"path" name:"namespaceName"`
// The name of the bucket. Avoid entering confidential information.
// Example: `my-new-bucket1`
BucketName *string `mandatory:"true" contributesTo:"path" name:"bucketName"`
// The client request ID for tracing.
OpcClientRequestId *string `mandatory:"false" contributesTo:"header" name:"opc-client-request-id"`
// Metadata about the request. This information will not be transmitted to the service, but
// represents information that the SDK will consume to drive retry behavior.
RequestMetadata common.RequestMetadata
}
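// A minimal construction sketch (namespace and bucket values are hypothetical; the
// request would normally be passed to the generated client's MakeBucketWritable
// operation):
//
//	req := objectstorage.MakeBucketWritableRequest{
//		NamespaceName: common.String("my-namespace"),
//		BucketName:    common.String("my-new-bucket1"),
//	}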
func (request MakeBucketWritableRequest) String() string {
return common.PointerString(request)
}
// HTTPRequest implements the OCIRequest interface
func (request MakeBucketWritableRequest) HTTPRequest(method, path string, binaryRequestBody *common.OCIReadSeekCloser, extraHeaders map[string]string) (http.Request, error) {
_, err := request.ValidateEnumValue()
if err != nil {
return http.Request{}, err
}
return common.MakeDefaultHTTPRequestWithTaggedStructAndExtraHeaders(method, path, request, extraHeaders)
}
// BinaryRequestBody implements the OCIRequest interface
func (request MakeBucketWritableRequest) BinaryRequestBody() (*common.OCIReadSeekCloser, bool) {
return nil, false
}
// RetryPolicy implements the OCIRetryableRequest interface. This retrieves the specified retry policy.
func (request MakeBucketWritableRequest) RetryPolicy() *common.RetryPolicy {
return request.RequestMetadata.RetryPolicy
}
// ValidateEnumValue returns an error when providing an unsupported enum value
// This function is being called during constructing API request process
// Not recommended for calling this function directly
func (request MakeBucketWritableRequest) ValidateEnumValue() (bool, error) {
errMessage := []string{}
if len(errMessage) > 0 {
return true, fmt.Errorf(strings.Join(errMessage, "\n"))
}
return false, nil
}
// MakeBucketWritableResponse wrapper for the MakeBucketWritable operation
type MakeBucketWritableResponse struct {
// The underlying http response
RawResponse *http.Response
// Echoes back the value passed in the opc-client-request-id header, for use by clients when debugging.
OpcClientRequestId *string `presentIn:"header" name:"opc-client-request-id"`
// Unique Oracle-assigned identifier for the request. If you need to contact Oracle about a particular | OpcRequestId *string `presentIn:"header" name:"opc-request-id"`
}
func (response MakeBucketWritableResponse) String() string {
return common.PointerString(response)
}
// HTTPResponse implements the OCIResponse interface
func (response MakeBucketWritableResponse) HTTPResponse() *http.Response {
return response.RawResponse
} | // request, provide this request ID. |
set.ts | method: 'POST',
headers: {
Authorization: 'Basic ' + Buffer.from(username + ':' + password).toString('base64'),
},
body: JSON.stringify(body),
});
var data = await response.status;
if (data == 400) {
return 0;
} else {
return 1;
}
}; | var get = require('node-fetch');
module.exports = async function set_blocked(url: string, username: string, password: string, service: string[]) {
var body = service;
var response = await get(url + '/control/blocked_services/set', { |
|
main.go | // +build !windows
package main
import (
"context"
"encoding/base64"
"encoding/json"
"fmt"
"log"
"net/http"
"net/url"
"os"
"time"
"strings"
"github.com/docker/docker/api/types"
"github.com/docker/docker/api/types/filters"
"github.com/docker/docker/client"
"github.com/rancher/rancher/pkg/agent/cluster"
"github.com/rancher/rancher/pkg/agent/node"
"github.com/rancher/rancher/pkg/logserver"
"github.com/rancher/rancher/pkg/remotedialer"
"github.com/rancher/rancher/pkg/rkenodeconfigclient"
"github.com/sirupsen/logrus"
)
var (
VERSION = "dev"
)
const (
Token = "X-API-Tunnel-Token"
Params = "X-API-Tunnel-Params"
)
func main() {
logserver.StartServerWithDefaults()
if os.Getenv("CATTLE_DEBUG") == "true" || os.Getenv("RANCHER_DEBUG") == "true" |
if err := run(); err != nil {
log.Fatal(err)
}
}
func isCluster() bool {
return os.Getenv("CATTLE_CLUSTER") == "true"
}
func getParams() (map[string]interface{}, error) {
if isCluster() {
return cluster.Params()
}
return node.Params(), nil
}
func getTokenAndURL() (string, string, error) {
token, url, err := node.TokenAndURL()
if err != nil {
return "", "", err
}
if token == "" {
return cluster.TokenAndURL()
}
return token, url, nil
}
func isConnect() bool {
if os.Getenv("CATTLE_AGENT_CONNECT") == "true" {
return true
}
_, err := os.Stat("connected")
return err == nil
}
func connected() {
f, err := os.Create("connected")
if err == nil {
f.Close()
}
}
func cleanup(ctx context.Context) error {
if os.Getenv("CATTLE_K8S_MANAGED") != "true" {
return nil
}
c, err := client.NewEnvClient()
if err != nil {
return err
}
defer c.Close()
args := filters.NewArgs()
args.Add("label", "io.cattle.agent=true")
containers, err := c.ContainerList(ctx, types.ContainerListOptions{
All: true,
Filters: args,
})
if err != nil {
return err
}
for _, container := range containers {
if _, ok := container.Labels["io.kubernetes.pod.namespace"]; ok {
continue
}
if strings.Contains(container.Names[0], "share-mnt") {
continue
}
container := container
go func() {
time.Sleep(15 * time.Second)
logrus.Infof("Removing unmanaged agent %s(%s)", container.Names[0], container.ID)
c.ContainerRemove(ctx, container.ID, types.ContainerRemoveOptions{
Force: true,
})
}()
}
return nil
}
func run() error {
logrus.Infof("Rancher agent version %s is starting", VERSION)
params, err := getParams()
if err != nil {
return err
}
writeCertsOnly := os.Getenv("CATTLE_WRITE_CERT_ONLY") == "true"
bytes, err := json.Marshal(params)
if err != nil {
return err
}
token, server, err := getTokenAndURL()
if err != nil {
return err
}
headers := map[string][]string{
Token: {token},
Params: {base64.StdEncoding.EncodeToString(bytes)},
}
serverURL, err := url.Parse(server)
if err != nil {
return err
}
onConnect := func(ctx context.Context) error {
connected()
connectConfig := fmt.Sprintf("https://%s/v3/connect/config", serverURL.Host)
if err := rkenodeconfigclient.ConfigClient(ctx, connectConfig, headers, writeCertsOnly); err != nil {
return err
}
if isCluster() {
return nil
}
if err := cleanup(context.Background()); err != nil {
return err
}
go func() {
logrus.Infof("Starting plan monitor")
for {
select {
case <-time.After(2 * time.Minute):
err := rkenodeconfigclient.ConfigClient(ctx, connectConfig, headers, writeCertsOnly)
if err != nil {
logrus.Errorf("failed to check plan: %v", err)
}
case <-ctx.Done():
return
}
}
}()
return nil
}
for {
wsURL := fmt.Sprintf("wss://%s/v3/connect", serverURL.Host)
if !isConnect() {
wsURL += "/register"
}
logrus.Infof("Connecting to %s with token %s", wsURL, token)
remotedialer.ClientConnect(wsURL, http.Header(headers), nil, func(proto, address string) bool {
switch proto {
case "tcp":
return true
case "unix":
return address == "/var/run/docker.sock"
}
return false
}, onConnect)
time.Sleep(5 * time.Second)
}
}
| {
logrus.SetLevel(logrus.DebugLevel)
} |
advices.py | #!/usr/bin/env python
# -*- coding:utf-8 -*-
# author:jingtongyu
# datetime:2020/6/7 10:14 下午
# software: PyCharm
from flask import current_app
from . import db
from .base import BaseModel
from sqlalchemy.exc import SQLAlchemyError
from werkzeug.security import generate_password_hash, check_password_hash
import time
class AdvicesModel(db.Model, BaseModel):
__tablename__ = 'advices'
id = db.Column(db.Integer, primary_key=True)
email = db.Column(db.String(25), nullable=False)
username = db.Column(db.String(25), nullable=False)
advice = db.Column(db.String(500), nullable=False)
def __in | f, email, username, advice):
self.email = email
self.username = username
self.advice = advice
def __str__(self):
return "Advices(id='%s')" % self.id
def paginate(self, page, per_page):
return self.query.paginate(page=page, per_page=per_page, error_out=False)
def filter_by_email(self, email):
return self.query.filter(self.email.like("%" + email + "%")).all()
def filter_by_username(self, username):
return self.query.filter(self.username.like("%" + username + "%")).all()
def get(self, _id):
return self.query.filter_by(id=_id).first()
def add(self, role):
db.session.add(role)
return session_commit()
def update(self):
return session_commit()
def delete(self, ids):
# self.query.filter_by(id=id).delete()
self.query.filter(self.id.in_(ids)).delete(synchronize_session=False)
return session_commit()
def session_commit():
try:
db.session.commit()
except SQLAlchemyError as e:
db.session.rollback()
reason = str(e)
current_app.logger.info(e)
return reason
| it__(sel |
test_waveforms.py | # pylint: disable=missing-function-docstring
import numpy as np
import numpy.testing as npt
import pytest
from quantify_scheduler.waveforms import (
square,
drag,
staircase,
modulate_wave,
rotate_wave,
)
def | ():
amped_sq = square(np.arange(50), 2.44)
npt.assert_array_equal(amped_sq, np.linspace(2.44, 2.44, 50))
amped_sq_iq = square(np.arange(20), 6.88)
npt.assert_array_equal(amped_sq_iq.real, np.linspace(6.88, 6.88, 20))
npt.assert_array_equal(amped_sq_iq.imag, np.linspace(0, 0, 20))
def test_staircase():
t = np.linspace(0, 1e-6, 20)
sig = staircase(t, -1, 2, 4)
answer = np.array(
[
-1.0,
-1.0,
-1.0,
-1.0,
-1.0,
0.0,
0.0,
0.0,
0.0,
0.0,
1.0,
1.0,
1.0,
1.0,
1.0,
2.0,
2.0,
2.0,
2.0,
2.0,
]
)
npt.assert_array_equal(sig, answer)
def test_drag_ns():
duration = 20e-9
nr_sigma = 3
G_amp = 0.5
D_amp = 1
times = np.arange(0, duration, 1e-9) # sampling rate set to 1 GSPs
mu = times[0] + duration / 2
sigma = duration / (2 * nr_sigma)
gauss_env = G_amp * np.exp(-(0.5 * ((times - mu) ** 2) / sigma ** 2))
deriv_gauss_env = D_amp * -1 * (times - mu) / (sigma ** 1) * gauss_env
exp_waveform = gauss_env + 1j * deriv_gauss_env
# quantify
waveform = drag(
times,
G_amp=G_amp,
D_amp=D_amp,
duration=duration,
nr_sigma=nr_sigma,
subtract_offset="none",
)
np.testing.assert_array_almost_equal(waveform, exp_waveform, decimal=3)
assert pytest.approx(np.max(waveform), 0.5)
with pytest.raises(ValueError):
drag(times, 0.5, D_amp, duration, subtract_offset="bad!")
waveform = drag(
times,
G_amp=G_amp,
D_amp=D_amp,
duration=duration,
nr_sigma=nr_sigma,
subtract_offset="average",
)
exp_waveform.real -= np.mean([exp_waveform.real[0], exp_waveform.real[-1]])
exp_waveform.imag -= np.mean([exp_waveform.imag[0], exp_waveform.imag[-1]])
np.testing.assert_array_almost_equal(waveform, exp_waveform, decimal=3)
def test_rotate_wave():
I = np.ones(10) # noqa # Q component is zero
Q = np.zeros(10) # noqa # not used as input, only used for testing
rot_wf = rotate_wave(I, 0)
npt.assert_array_almost_equal(I, rot_wf.real)
npt.assert_array_almost_equal(I.imag, rot_wf.imag)
rot_wf = rotate_wave(I, 90)
npt.assert_array_almost_equal(I, rot_wf.imag)
npt.assert_array_almost_equal(Q, -rot_wf.real)
rot_wf = rotate_wave(I, 180)
npt.assert_array_almost_equal(I, -rot_wf.real)
npt.assert_array_almost_equal(Q, -rot_wf.imag)
rot_wf = rotate_wave(I, 360)
npt.assert_array_almost_equal(I, rot_wf.real)
npt.assert_array_almost_equal(Q, rot_wf.imag)
def test_modulate():
fs = 100
f = 4
t = np.arange(fs)
I = np.sin(2 * np.pi * f * (t / fs)) # noqa
Q = np.sin(2 * np.pi * f * (t / fs) + (np.pi / 2)) # noqa
wf = I + 1j * Q
mod_wf = modulate_wave(np.linspace(0, 1, fs), wf, 2)
npt.assert_array_almost_equal(
mod_wf.real, np.sin(2 * np.pi * (f + 2) * (t / fs)), decimal=1
)
mod_wf = modulate_wave(np.linspace(0, 1, fs), wf, -2)
npt.assert_array_almost_equal(
mod_wf.imag, np.sin(2 * np.pi * (f - 2) * (t / fs) + (np.pi / 2)), decimal=1
)
| test_square_wave |
key_use.rs | // Copyright 2020-2021 IOTA Stiftung
// SPDX-License-Identifier: Apache-2.0
use core::fmt::Display;
use core::fmt::Formatter;
use core::fmt::Result;
/// Supported algorithms for the JSON Web Key `use` property.
///
/// [More Info](https://www.iana.org/assignments/jose/jose.xhtml#web-key-use) | pub enum JwkUse {
/// Digital Signature or MAC.
#[serde(rename = "sig")]
Signature,
/// Encryption.
#[serde(rename = "enc")]
Encryption,
}
impl JwkUse {
/// Returns the JWK "use" as a `str` slice.
pub const fn name(&self) -> &'static str {
match self {
Self::Signature => "sig",
Self::Encryption => "enc",
}
}
}
impl Display for JwkUse {
fn fmt(&self, f: &mut Formatter<'_>) -> Result {
f.write_str(self.name())
}
} | #[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Deserialize, Serialize)] |
package.py | # Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack.package import *
class PerlMoose(PerlPackage):
"""A postmodern object system for Perl 5"""
homepage = "https://metacpan.org/pod/Moose"
url = "https://search.cpan.org/CPAN/authors/id/E/ET/ETHER/Moose-2.2006.tar.gz"
version('2.2010', sha256='af0905b69f18c27de1177c9bc7778ee495d4ec91be1f223e8ca8333af4de08c5')
version('2.2009', sha256='63ba8a5e27dbcbdbac2cd8f4162fff50a31e9829d8955a196a5898240c02d194')
version('2.2007', sha256='bc75a320b55ba26ac9e60e11a77b3471066cb615bf7097537ed22e20df88afe8')
version('2.2006', sha256='a4e00ab25cc41bebc5e7a11d71375fb5e64b56d5f91159afee225d698e06392b')
depends_on('perl-cpan-meta-check', type=('build', 'run'))
depends_on('perl-test-cleannamespaces', type=('build', 'run'))
depends_on('perl-devel-overloadinfo', type=('build', 'run'))
depends_on('perl-class-load-xs', type=('build', 'run'))
depends_on('perl-devel-stacktrace', type=('build', 'run'))
depends_on('perl-eval-closure', type=('build', 'run'))
depends_on('perl-sub-name', type=('build', 'run'))
depends_on('perl-module-runtime-conflicts', type=('build', 'run')) | depends_on('perl-devel-globaldestruction', type=('build', 'run'))
depends_on('perl-package-deprecationmanager', type=('build', 'run'))
depends_on('perl-package-stash-xs', type=('build', 'run')) |
|
_alignsrc.py | import _plotly_utils.basevalidators
class AlignsrcValidator(_plotly_utils.basevalidators.SrcValidator):
| def __init__(
self, plotly_name="alignsrc", parent_name="heatmapgl.hoverlabel", **kwargs
):
super(AlignsrcValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "none"),
**kwargs,
) |
|
tikv_test.go | package tikv_test
import (
"context"
"encoding/json"
"fmt"
"net/http"
"net/http/httptest"
"net/url"
"sort"
"sync"
"github.com/coreos/go-semver/semver"
. "github.com/pingcap/check"
"github.com/pingcap/kvproto/pkg/import_sstpb"
"github.com/pingcap/tidb/br/pkg/lightning/common"
kv "github.com/pingcap/tidb/br/pkg/lightning/tikv"
)
type tikvSuite struct{}
var _ = Suite(&tikvSuite{})
var (
// Samples from importer backend for testing the Check***Version functions.
// No need to keep these versions in sync.
requiredMinPDVersion = *semver.New("2.1.0")
requiredMinTiKVVersion = *semver.New("2.1.0")
requiredMaxPDVersion = *semver.New("6.0.0")
requiredMaxTiKVVersion = *semver.New("6.0.0")
)
func (s *tikvSuite) TestForAllStores(c *C) {
server := httptest.NewTLSServer(http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
_, err := w.Write([]byte(`
{
"count": 5,
"stores": [
{
"store": {
"id": 1,
"address": "127.0.0.1:20160",
"version": "3.0.0-beta.1",
"state_name": "Up"
},
"status": {}
},
{
"store": {
"id": 2,
"address": "127.0.0.1:20161",
"version": "3.0.0-rc.1",
"state_name": "Down"
},
"status": {}
},
{
"store": {
"id": 3,
"address": "127.0.0.1:20162",
"version": "3.0.0-rc.2",
"state_name": "Disconnected"
},
"status": {}
},
{
"store": {
"id": 4,
"address": "127.0.0.1:20163",
"version": "3.0.0",
"state_name": "Tombstone"
},
"status": {}
},
{
"store": {
"id": 5,
"address": "127.0.0.1:20164",
"version": "3.0.1",
"state_name": "Offline"
},
"status": {}
}
]
}
`))
c.Assert(err, IsNil)
}))
defer server.Close()
ctx := context.Background()
var (
allStoresLock sync.Mutex
allStores []*kv.Store
)
tls := common.NewTLSFromMockServer(server)
err := kv.ForAllStores(ctx, tls, kv.StoreStateDown, func(c2 context.Context, store *kv.Store) error {
allStoresLock.Lock()
allStores = append(allStores, store)
allStoresLock.Unlock()
return nil
})
c.Assert(err, IsNil)
sort.Slice(allStores, func(i, j int) bool { return allStores[i].Address < allStores[j].Address })
c.Assert(allStores, DeepEquals, []*kv.Store{
{
Address: "127.0.0.1:20160",
Version: "3.0.0-beta.1",
State: kv.StoreStateUp,
},
{
Address: "127.0.0.1:20161",
Version: "3.0.0-rc.1",
State: kv.StoreStateDown,
},
{
Address: "127.0.0.1:20162",
Version: "3.0.0-rc.2",
State: kv.StoreStateDisconnected,
},
{
Address: "127.0.0.1:20164",
Version: "3.0.1",
State: kv.StoreStateOffline,
},
})
}
func (s *tikvSuite) TestFetchModeFromMetrics(c *C) {
testCases := []struct {
metrics string
mode import_sstpb.SwitchMode
isErr bool
}{
{
metrics: `tikv_config_rocksdb{cf="default",name="hard_pending_compaction_bytes_limit"} 274877906944`,
mode: import_sstpb.SwitchMode_Normal,
},
{
metrics: `tikv_config_rocksdb{cf="default",name="hard_pending_compaction_bytes_limit"} 0`,
mode: import_sstpb.SwitchMode_Import,
},
{
metrics: ``,
isErr: true,
},
}
for _, tc := range testCases {
comment := Commentf("test case '%s'", tc.metrics)
mode, err := kv.FetchModeFromMetrics(tc.metrics)
if tc.isErr {
c.Assert(err, NotNil, comment)
} else {
c.Assert(err, IsNil, comment)
c.Assert(mode, Equals, tc.mode, comment)
}
}
}
func (s *tikvSuite) TestCheckPDVersion(c *C) {
var version string
ctx := context.Background()
mockServer := httptest.NewTLSServer(http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
c.Assert(req.URL.Path, Equals, "/pd/api/v1/version")
w.WriteHeader(http.StatusOK)
_, err := w.Write([]byte(version))
c.Assert(err, IsNil)
}))
mockURL, err := url.Parse(mockServer.URL) | version = `{
"version": "v4.0.0-rc.2-451-g760fb650"
}`
c.Assert(kv.CheckPDVersion(ctx, tls, mockURL.Host, requiredMinPDVersion, requiredMaxPDVersion), IsNil)
version = `{
"version": "v4.0.0"
}`
c.Assert(kv.CheckPDVersion(ctx, tls, mockURL.Host, requiredMinPDVersion, requiredMaxPDVersion), IsNil)
version = `{
"version": "v9999.0.0"
}`
c.Assert(kv.CheckPDVersion(ctx, tls, mockURL.Host, requiredMinPDVersion, requiredMaxPDVersion), ErrorMatches, "PD version too new.*")
version = `{
"version": "v6.0.0"
}`
c.Assert(kv.CheckPDVersion(ctx, tls, mockURL.Host, requiredMinPDVersion, requiredMaxPDVersion), ErrorMatches, "PD version too new.*")
version = `{
"version": "v6.0.0-beta"
}`
c.Assert(kv.CheckPDVersion(ctx, tls, mockURL.Host, requiredMinPDVersion, requiredMaxPDVersion), ErrorMatches, "PD version too new.*")
version = `{
"version": "v1.0.0"
}`
c.Assert(kv.CheckPDVersion(ctx, tls, mockURL.Host, requiredMinPDVersion, requiredMaxPDVersion), ErrorMatches, "PD version too old.*")
}
func (s *tikvSuite) TestCheckTiKVVersion(c *C) {
var versions []string
ctx := context.Background()
mockServer := httptest.NewTLSServer(http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
c.Assert(req.URL.Path, Equals, "/pd/api/v1/stores")
w.WriteHeader(http.StatusOK)
stores := make([]map[string]interface{}, 0, len(versions))
for i, v := range versions {
stores = append(stores, map[string]interface{}{
"store": map[string]interface{}{
"address": fmt.Sprintf("tikv%d.test:20160", i),
"version": v,
},
})
}
err := json.NewEncoder(w).Encode(map[string]interface{}{
"count": len(versions),
"stores": stores,
})
c.Assert(err, IsNil)
}))
mockURL, err := url.Parse(mockServer.URL)
c.Assert(err, IsNil)
tls := common.NewTLSFromMockServer(mockServer)
versions = []string{"4.1.0", "v4.1.0-alpha-9-ga27a7dd"}
c.Assert(kv.CheckTiKVVersion(ctx, tls, mockURL.Host, requiredMinTiKVVersion, requiredMaxTiKVVersion), IsNil)
versions = []string{"9999.0.0", "4.0.0"}
c.Assert(kv.CheckTiKVVersion(ctx, tls, mockURL.Host, requiredMinTiKVVersion, requiredMaxTiKVVersion), ErrorMatches, `TiKV \(at tikv0\.test:20160\) version too new.*`)
versions = []string{"4.0.0", "1.0.0"}
c.Assert(kv.CheckTiKVVersion(ctx, tls, mockURL.Host, requiredMinTiKVVersion, requiredMaxTiKVVersion), ErrorMatches, `TiKV \(at tikv1\.test:20160\) version too old.*`)
versions = []string{"6.0.0"}
c.Assert(kv.CheckTiKVVersion(ctx, tls, mockURL.Host, requiredMinTiKVVersion, requiredMaxTiKVVersion), ErrorMatches, `TiKV \(at tikv0\.test:20160\) version too new.*`)
versions = []string{"6.0.0-beta"}
c.Assert(kv.CheckTiKVVersion(ctx, tls, mockURL.Host, requiredMinTiKVVersion, requiredMaxTiKVVersion), ErrorMatches, `TiKV \(at tikv0\.test:20160\) version too new.*`)
} | c.Assert(err, IsNil)
tls := common.NewTLSFromMockServer(mockServer)
|
FRAMS_STUDENT.py | import tkinter as tk
from tkinter import *
import cv2
import csv
import os
import numpy as np
from PIL import Image,ImageTk
import pandas as pd
import datetime
import time
##Error screen2
def del_sc2():
sc2.destroy()
def err_screen1():
global sc2
sc2 = tk.Tk()
sc2.geometry('300x100')
sc2.iconbitmap('FRAMS.ico')
sc2.title('Warning!!')
sc2.configure(background='snow')
Label(sc2,text='Please enter your subject name!!!',fg='red',bg='white',font=('times', 16, ' bold ')).pack()
Button(sc2,text='OK',command=del_sc2,fg="black" ,bg="lawn green" ,width=9 ,height=1, activebackground = "Red" ,font=('times', 15, ' bold ')).place(x=90,y= 50)
def Fillattendances():
sub = tx.get()
now = time.time() ###For calculate seconds of video
future = now + 20
if time.time() < future:
if sub == '':
err_screen1()
else:
recognizer = cv2.face.LBPHFaceRecognizer_create() # cv2.createLBPHFaceRecognizer()
try:
recognizer.read("TrainingImageLabel\Trainner.yml")
except:
e = 'Model not found,Please train model'
Notifica.configure(text=e, bg="red", fg="black", width=33, font=('times', 15, 'bold'))
Notifica.place(x=20, y=250)
harcascadePath = "haarcascade_frontalface_default.xml"
faceCascade = cv2.CascadeClassifier(harcascadePath)
df = pd.read_csv("StudentDetails\StudentDetails.csv")
cam = cv2.VideoCapture(0)
font = cv2.FONT_HERSHEY_SIMPLEX
col_names = ['Enrollment', 'Name', 'Date', 'Time']
attendance = pd.DataFrame(columns=col_names)
while True:
ret, im = cam.read()
gray = cv2.cvtColor(im, cv2.COLOR_BGR2GRAY)
faces = faceCascade.detectMultiScale(gray, 1.2, 5)
for (x, y, w, h) in faces:
global Id
Id, conf = recognizer.predict(gray[y:y + h, x:x + w])
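# LBPH confidence is a distance: lower values mean a closer match, so accept predictions below the threshold.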
if (conf < 70):
print(conf)
global Subject
global aa
global date
global timeStamp
Subject = tx.get()
ts = time.time()
date = datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d')
timeStamp = datetime.datetime.fromtimestamp(ts).strftime('%H:%M:%S')
aa = df.loc[df['Enrollment'] == Id]['Name'].values
global tt
tt = str(Id) + "-" + aa
En = '15624031' + str(Id)
attendance.loc[len(attendance)] = [Id, aa, date, timeStamp]
cv2.rectangle(im, (x, y), (x + w, y + h), (0, 260, 0), 7)
cv2.putText(im, str(tt), (x + h, y), font, 1, (255, 255, 0,), 4)
else:
Id = 'Unknown'
tt = str(Id)
cv2.rectangle(im, (x, y), (x + w, y + h), (0, 25, 255), 7)
cv2.putText(im, str(tt), (x + h, y), font, 1, (0, 25, 255), 4)
if time.time() > future:
break
attendance = attendance.drop_duplicates(['Enrollment'], keep='first')
cv2.imshow('Filling attendance..', im)
key = cv2.waitKey(30) & 0xff
if key == 27:
break
ts = time.time()
date = datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d')
timeStamp = datetime.datetime.fromtimestamp(ts).strftime('%H:%M:%S')
Hour, Minute, Second = timeStamp.split(":")
fileName = "Attendance/" + Subject + "_" + date + "_" + Hour + "-" + Minute + "-" + Second + ".csv"
attendance = attendance.drop_duplicates(['Enrollment'], keep='first')
print(attendance)
attendance.to_csv(fileName, index=False)
M = 'Attendance filled Successfully'
Notifica.configure(text=M, bg="Green", fg="white", width=33, font=('times', 15, 'bold'))
Notifica.place(x=20, y=250)
cam.release()
cv2.destroyAllWindows()
import csv
import tkinter
root = tkinter.Tk()
root.title("Attendance of " + Subject)
root.configure(background='snow')
cs = './' + fileName
with open(cs, newline="") as file:
reader = csv.reader(file)
r = 0
for col in reader:
c = 0
for row in col:
# i've added some styling
label = tkinter.Label(root, width=8, height=1, fg="black", font=('times', 15, ' bold '),
bg="lawn green", text=row, relief=tkinter.RIDGE)
label.grid(row=r, column=c)
c += 1
r += 1
root.mainloop()
print(attendance)
if __name__ == '__main__':
###windo is frame for subject choosing
windo = tk.Tk()
windo.iconbitmap('FRAMS.ico')
windo.title("Enter subject name...")
windo.geometry('580x320')
windo.configure(background='snow')
Notifica = tk.Label(windo, text="Attendance filled Successfully", bg="Green", fg="white", width=33,
height=2, font=('times', 15, 'bold'))
def | ():
import subprocess
subprocess.Popen(
r'explorer /select,".\Attendance\Manually Attendance\"') # open attendance sheet window
attf = tk.Button(windo, text="Check Sheets", command=Attf, fg="black", bg="lawn green", width=12, height=1,
activebackground="Red", font=('times', 14, ' bold '))
attf.place(x=430, y=255)
sub = tk.Label(windo, text="Enter Subject", width=15, height=2, fg="white", bg="blue2",
font=('times', 15, ' bold '))
sub.place(x=30, y=100)
tx = tk.Entry(windo, width=20, bg="yellow", fg="red", font=('times', 23, ' bold '))
tx.place(x=250, y=105)
fill_a = tk.Button(windo, text="Fill Attendance", fg="white", command=Fillattendances, bg="deep pink", width=20,
height=2,
activebackground="Red", font=('times', 15, ' bold '))
fill_a.place(x=250, y=160)
windo.mainloop() | Attf |
StringFieldType.py |
from .PacketFieldType import PacketFieldType
class | (PacketFieldType):
def _setTypedData(self, data):
try:
self._data = str(data)
except Exception as e:
raise ValueError("{} is not a string".format(data)) | StringFieldType |
index.js | 'use strict';
const postcssLess = require('postcss-less');
const postcssScss = require('postcss-scss');
const { messages, ruleName } = require('..');
testRule({
ruleName,
config: ['0,1,0'],
accept: [
{
code: '.ab {}',
},
{
code: 'span a {}',
},
{
code: ':not(.b) {}',
},
{
code: ':not(.b, .c) {}',
},
{
code: ':matches(.b) {}',
},
{
code: ':matches(.b, .c) {}',
},
{
code: 'div div div div div div div div div div div {}',
message:
'a selector with 11 elements has a lower specificity than a selector with one classname',
},
{
code: 'z z z z z z z z z z z z z z z z z z z z z z z z z z z z z z z z z z z z z z z z z z z z z z z z z z z z z z z z z z z z z z z z z z z z z z z z z z z z z z z z z z z z z z z z z z z z z z z z z z z z z {}',
message:
'a selector with 101 elements has a lower specificity than a selector with one classname',
},
{
code: ':root { --foo: 1px; }',
description: 'custom property in root',
},
{
code: 'html { --foo: 1px; }',
description: 'custom property in selector',
},
{
code: ':root { --custom-property-set: {} }',
description: 'custom property set in root',
},
{
code: 'html { --custom-property-set: {} }',
description: 'custom property set in selector',
},
{
code: '.foo() {\n&.bar {}}',
},
{
code: '.foo(@a, @b) {\n&.bar {}}',
},
],
reject: [
{
code: '.ab .ab {}',
message: messages.expected('.ab .ab', '0,1,0'),
line: 1,
column: 1,
},
{
code: '.ab span {}',
message: messages.expected('.ab span', '0,1,0'),
line: 1,
column: 1,
},
{
code: '.a:not(.b) {}',
message: messages.expected('.a:not(.b)', '0,1,0'),
line: 1,
column: 1,
},
{
code: '.a:not(.b, .c) {}',
message: messages.expected('.a:not(.b, .c)', '0,1,0'),
line: 1,
column: 1,
},
{
code: ':not(.b, .c.d) {}',
message: messages.expected(':not(.b, .c.d)', '0,1,0'),
line: 1,
column: 1,
},
{
code: '.a:matches(.b) {}',
message: messages.expected('.a:matches(.b)', '0,1,0'),
line: 1,
column: 1,
},
{
code: '.a:matches(.b, .c) {}',
message: messages.expected('.a:matches(.b, .c)', '0,1,0'),
line: 1,
column: 1,
},
{
code: ':matches(.b, .c.d) {}',
message: messages.expected(':matches(.b, .c.d)', '0,1,0'),
line: 1,
column: 1,
},
],
});
testRule({
ruleName,
config: ['0,3,0'],
accept: [
{
code: '.ab {}',
},
{
code: '.ab .cd {}',
},
{
code: '.ab .cd span {}',
},
{
code: '.cd div span {}',
},
{
code: '.cd .de div span a {}',
},
{
code: '.cd .de div span a > b {}',
},
{
code: '.cd .de, .cd .ef > b {}',
},
],
reject: [
{
code: '#jubjub {}',
message: messages.expected('#jubjub', '0,3,0'),
line: 1,
column: 1,
},
{
code: '.thing div .thing .sausages {}',
message: messages.expected('.thing div .thing .sausages', '0,3,0'),
line: 1,
column: 1,
},
{
code: '.thing div .thing, .sausages .burgers .bacon a {}',
message: messages.expected('.sausages .burgers .bacon a', '0,3,0'),
line: 1,
column: 20,
},
],
});
testRule({
ruleName,
config: ['0,2,1'],
accept: [
{
code: '.cd .de,\n.cd .ef > b {}',
},
{
code: '.cd { .de {} }',
description: 'standard nesting',
},
{
code: 'div:hover { .de {} }',
description: 'element, pseudo-class, nested class',
},
{
code: '.ab, .cd { & > .de {} }',
description: 'initial (unnecessary) parent selector',
},
{
code: '.cd { .de > & {} }',
description: 'necessary parent selector',
},
{
code: '.cd { @media print { .de {} } }',
description: 'nested rule within nested media query',
},
{
code: '@media print { .cd { .de {} } }',
description: 'media query > rule > rule',
},
],
reject: [
{
code: '.thing div .thing,\n.sausages .burgers .bacon a {}',
message: messages.expected('.sausages .burgers .bacon a', '0,2,1'),
line: 2,
column: 1,
},
{
code: '.cd { .de { .fg {} } }',
message: messages.expected('.cd .de .fg', '0,2,1'),
},
{
code: '.cd { .de { & > .fg {} } }',
message: messages.expected('.cd .de > .fg', '0,2,1'),
},
{
code: '.cd { .de { &:hover > .fg {} } }',
message: messages.expected('.cd .de:hover > .fg', '0,2,1'),
},
{
code: '.cd { .de { .fg > & {} } }',
message: messages.expected('.fg > .cd .de', '0,2,1'),
},
{
code: '.cd { @media print { .de { & + .fg {} } } }',
message: messages.expected('.cd .de + .fg', '0,2,1'),
},
{
code: '@media print { li { & + .ab, .ef.ef { .cd {} } } }',
message: messages.expected('li .ef.ef .cd', '0,2,1'),
},
],
});
testRule({
ruleName,
config: ['0,4,1'],
accept: [
{
code: '.cd .de {& .fg {}}',
},
],
reject: [
{
code: '.thing .thing2 {&.nested {#pop {}}}', | line: 1,
column: 27,
},
{
code: '.thing .thing2 {#here & {}}',
message: messages.expected('#here .thing .thing2', '0,4,1'),
line: 1,
column: 17,
},
{
code: '.thing .thing2 .thing3 .thing4 {a.here & {}}',
message: messages.expected('a.here .thing .thing2 .thing3 .thing4', '0,4,1'),
line: 1,
column: 33,
},
],
});
testRule({
ruleName,
config: ['0,1,1'],
customSyntax: postcssScss,
accept: [
{
code: '#hello #{$test} {}',
description: 'ignore rules with variable interpolation',
},
{
code: '@each $a in $b { .#{ map-get($a, b) } { c {} } }',
description: 'ignore nested rules with variable interpolation',
},
],
reject: [
{
code: '.ab .ab { @include test {} }',
message: messages.expected('.ab .ab', '0,1,1'),
line: 1,
column: 1,
},
{
code: '.a:not(.b) { @include test {} }',
message: messages.expected('.a:not(.b)', '0,1,1'),
line: 1,
column: 1,
},
{
code: '.a:not(.b, .c) { @include test {} }',
message: messages.expected('.a:not(.b, .c)', '0,1,1'),
line: 1,
column: 1,
},
{
code: ':not(.b, .c.d) { @include test {} }',
message: messages.expected(':not(.b, .c.d)', '0,1,1'),
line: 1,
column: 1,
},
{
code: '.a:matches(.b) { @include test {} }',
message: messages.expected('.a:matches(.b)', '0,1,1'),
line: 1,
column: 1,
},
{
code: '.a:matches(.b, .c) { @include test {} }',
message: messages.expected('.a:matches(.b, .c)', '0,1,1'),
line: 1,
column: 1,
},
{
code: ':matches(.b, .c.d) { @include test {} }',
message: messages.expected(':matches(.b, .c.d)', '0,1,1'),
line: 1,
column: 1,
},
{
code: '@include test { .ab .ab {} }',
message: messages.expected('.ab .ab', '0,1,1'),
line: 1,
column: 17,
},
],
});
testRule({
ruleName,
config: ['0,3,0'],
customSyntax: postcssScss,
accept: [
{
code: '.navigation__item:nth-of-type(4n) .navigation__sub-list {}',
},
],
});
testRule({
ruleName,
config: ['0,1,1'],
customSyntax: postcssLess,
accept: [
{
code: '#hello @{test} {}',
description: 'ignore rules with variable interpolation',
},
],
});
testRule({
ruleName,
config: [
'0,1,0',
{
ignoreSelectors: [':global', ':local', '/my-/'],
},
],
accept: [
{
code: ':global(.b) {}',
},
{
code: ':global(.b, :local(.c)) {}',
},
{
code: ':local(.b) {}',
},
{
code: ':local(.b, :global(.c)) {}',
},
{
code: 'my-tag.a {}',
},
],
reject: [
{
code: '.a:global(.b) {}',
message: messages.expected('.a:global(.b)', '0,1,0'),
line: 1,
column: 1,
},
{
code: '.a:global(.b, .c) {}',
message: messages.expected('.a:global(.b, .c)', '0,1,0'),
line: 1,
column: 1,
},
{
code: ':global(.b, .c.d) {}',
message: messages.expected(':global(.b, .c.d)', '0,1,0'),
line: 1,
column: 1,
},
{
code: '.a:local(.b) {}',
message: messages.expected('.a:local(.b)', '0,1,0'),
line: 1,
column: 1,
},
{
code: '.a:local(.b, .c) {}',
message: messages.expected('.a:local(.b, .c)', '0,1,0'),
line: 1,
column: 1,
},
{
code: ':local(.b, .c.d) {}',
message: messages.expected(':local(.b, .c.d)', '0,1,0'),
line: 1,
column: 1,
},
{
code: 'my-tag.a.b {}',
message: messages.expected('my-tag.a.b', '0,1,0'),
line: 1,
column: 1,
},
],
});
testRule({
ruleName,
config: [
'0,1,0',
{
ignoreSelectors: [/my-/],
},
],
accept: [
{
code: 'my-tag.a {}',
},
],
reject: [
{
code: '.a:global(.b) {}',
message: messages.expected('.a:global(.b)', '0,1,0'),
line: 1,
column: 1,
},
],
}); | message: messages.expected('.thing .thing2.nested #pop', '0,4,1'), |
font.rs | // Copyright 2015 Google Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! A simple renderer for TrueType fonts
//extern crate test;
//use self::test::Bencher;
use std::collections::HashMap;
use std::fmt;
use std::fmt::{Formatter, Display};
use std::result::Result;
use geom::{Point, Affine, affine_pt};
use raster::Raster;
#[derive(PartialEq, Eq, Hash)]
struct Tag(u32);
impl Tag {
fn from_str(s: &str) -> Tag {
Tag(get_u32(s.as_bytes(), 0).unwrap())
}
}
impl Display for Tag {
fn | (&self, f: &mut Formatter) -> fmt::Result {
let &Tag(tag) = self;
let buf = vec![((tag >> 24) & 0xff) as u8,
((tag >> 16) & 0xff) as u8,
((tag >> 8) & 0xff) as u8,
(tag & 0xff) as u8];
f.write_str(&String::from_utf8(buf).unwrap())
}
}
fn get_u16(data: &[u8], off: usize) -> Option<u16> {
if off + 2 > data.len() {
None
} else {
Some(((data[off] as u16) << 8) | data[off + 1] as u16)
}
}
fn get_i16(data: &[u8], off: usize) -> Option<i16> {
get_u16(data, off).map(|x| x as i16)
}
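// Read a TrueType F2Dot14 value (signed fixed point with 14 fractional bits) as f32.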
fn get_f2_14(data: &[u8], off: usize) -> Option<f32> {
get_i16(data, off).map(|x| x as f32 * (1.0 / (1 << 14) as f32))
}
fn get_u32(data: &[u8], off: usize) -> Option<u32> {
if off + 4 > data.len() {
None
} else {
Some(((data[off] as u32) << 24) | ((data[off + 1] as u32) << 16) |
((data[off + 2] as u32) << 8) | data[off + 3] as u32)
}
}
// TODO: be consistent, use newtype or one-field struct everywhere
struct Head<'a>(&'a [u8]);
impl<'a> Head<'a> {
fn index_to_loc_format(&'a self) -> i16 {
let &Head(data) = self;
get_i16(data, 50).unwrap()
}
fn units_per_em(&'a self) -> u16 {
let &Head(data) = self;
get_u16(data, 18).unwrap()
}
}
struct Maxp<'a> {
data: &'a [u8]
}
impl<'a> Maxp<'a> {
fn num_glyphs(&'a self) -> u16 {
get_u16(self.data, 4).unwrap()
}
}
struct Loca<'a>(&'a [u8]);
impl<'a> Loca<'a> {
fn get_off(&'a self, glyph_ix: u16, fmt: i16) -> Option<u32> {
let &Loca(data) = self;
if fmt != 0 {
get_u32(data, glyph_ix as usize * 4)
} else {
get_u16(data, glyph_ix as usize * 2).map(|raw| raw as u32 * 2)
}
}
}
fn get_bbox_raw(data: &[u8]) -> (i16, i16, i16, i16) {
(get_i16(data, 2).unwrap(),
get_i16(data, 4).unwrap(),
get_i16(data, 6).unwrap(),
get_i16(data, 8).unwrap(),
)
}
enum Glyph<'a> {
Empty,
Simple(SimpleGlyph<'a>),
Compound(CompoundGlyph<'a>),
}
struct SimpleGlyph<'a> {
data: &'a [u8]
}
impl<'a> SimpleGlyph<'a> {
fn number_of_contours(&'a self) -> i16 {
get_i16(self.data, 0).unwrap()
}
fn bbox(&'a self) -> (i16, i16, i16, i16) {
get_bbox_raw(self.data)
}
fn points(&'a self) -> GlyphPoints<'a> {
let data = self.data;
let n_contours = self.number_of_contours();
let insn_len_off = 10 + 2 * n_contours as usize;
let n_points = get_u16(data, insn_len_off - 2).unwrap() as usize + 1;
let insn_len = get_u16(data, insn_len_off).unwrap(); // insn_len
let flags_ix = insn_len_off + insn_len as usize + 2;
let mut flags_size = 0;
let mut x_size = 0;
let mut points_remaining = n_points;
while points_remaining > 0 {
let flag = data[flags_ix as usize + flags_size];
let repeat_count = if (flag & 8) == 0 {
1
} else {
flags_size += 1;
data[flags_ix as usize + flags_size] as usize + 1
};
flags_size += 1;
match flag & 0x12 {
0x02 | 0x12 => x_size += repeat_count,
0x00 => x_size += 2 * repeat_count,
_ => ()
}
points_remaining -= repeat_count;
}
let x_ix = flags_ix + flags_size;
let y_ix = x_ix + x_size;
GlyphPoints{data: data, x: 0, y: 0, points_remaining: n_points,
last_flag:0, flag_repeats_remaining: 0,
flags_ix: flags_ix, x_ix: x_ix, y_ix: y_ix }
}
fn contour_sizes(&self) -> ContourSizes {
let n_contours = self.number_of_contours();
ContourSizes {
data: self.data,
contours_remaining: n_contours as usize,
ix: 10,
offset: -1,
}
}
}
struct GlyphPoints<'a> {
data: &'a [u8],
x: i16,
y: i16,
points_remaining: usize,
last_flag: u8,
flag_repeats_remaining: u8,
flags_ix: usize,
x_ix: usize,
y_ix: usize,
}
impl<'a> Iterator for GlyphPoints<'a> {
type Item = (bool, i16, i16);
fn next(&mut self) -> Option<(bool, i16, i16)> {
if self.points_remaining == 0 {
None
} else {
if self.flag_repeats_remaining == 0 {
self.last_flag = self.data[self.flags_ix];
if (self.last_flag & 8) == 0 {
self.flags_ix += 1;
} else {
self.flag_repeats_remaining = self.data[self.flags_ix + 1];
self.flags_ix += 2;
}
} else {
self.flag_repeats_remaining -= 1;
}
let flag = self.last_flag;
//println!("flag={:02x}, flags_ix={}, x_ix={}, ({}) y_ix={} ({})",
// flag, self.flags_ix, self.x_ix, self.data.get(self.x_ix), self.y_ix, self.data.get(self.y_ix));
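// x flag bits: 0x02 = one-byte delta (0x10 gives its sign), 0x00 = two-byte signed delta, 0x10 alone = x unchanged.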
match flag & 0x12 {
0x02 => {
self.x -= self.data[self.x_ix] as i16;
self.x_ix += 1;
},
0x00 => {
self.x += get_i16(self.data, self.x_ix).unwrap();
self.x_ix += 2;
}
0x12 => {
self.x += self.data[self.x_ix] as i16;
self.x_ix += 1;
},
_ => ()
}
match flag & 0x24 {
0x04 => {
self.y -= self.data[self.y_ix] as i16;
self.y_ix += 1;
},
0x00 => {
self.y += get_i16(self.data, self.y_ix).unwrap();
self.y_ix += 2;
}
0x24 => {
self.y += self.data[self.y_ix] as i16;
self.y_ix += 1;
},
_ => ()
}
self.points_remaining -= 1;
Some(((self.last_flag & 1) != 0, self.x, self.y))
}
}
fn size_hint(&self) -> (usize, Option<usize>) {
(self.points_remaining as usize, Some(self.points_remaining as usize))
}
}
struct ContourSizes<'a> {
data: &'a [u8],
contours_remaining: usize,
ix: usize,
offset: i32,
}
impl<'a> Iterator for ContourSizes<'a> {
type Item = usize;
fn next(&mut self) -> Option<(usize)> {
if self.contours_remaining == 0 {
None
} else {
let ret = get_u16(self.data, self.ix).unwrap() as i32 - self.offset;
self.offset += ret;
self.ix += 2;
self.contours_remaining -= 1;
Some(ret as usize)
}
}
fn size_hint(&self) -> (usize, Option<usize>) {
(self.contours_remaining, Some(self.contours_remaining))
}
}
struct CompoundGlyph<'a> {
data: &'a [u8]
}
struct Components<'a> {
data: &'a [u8],
more: bool,
ix: usize,
}
const ARG_1_AND_2_ARE_WORDS: u16 = 1;
const WE_HAVE_A_SCALE: u16 = 1 << 3;
const MORE_COMPONENTS: u16 = 1 << 5;
const WE_HAVE_AN_X_AND_Y_SCALE: u16 = 1 << 6;
const WE_HAVE_A_TWO_BY_TWO: u16 = 1 << 7;
impl<'a> Iterator for Components<'a> {
type Item = (u16, Affine);
fn next(&mut self) -> Option<(u16, Affine)> {
if !self.more { return None; }
let flags = get_u16(self.data, self.ix).unwrap();
self.ix += 2;
let glyph_index = get_u16(self.data, self.ix).unwrap();
self.ix += 2;
let arg1;
let arg2;
if (flags & ARG_1_AND_2_ARE_WORDS) != 0 {
arg1 = get_i16(self.data, self.ix).unwrap();
self.ix += 2;
arg2 = get_i16(self.data, self.ix).unwrap();
self.ix += 2;
} else {
arg1 = self.data[self.ix] as i16;
self.ix += 1;
arg2 = self.data[self.ix] as i16;
self.ix += 1;
}
let mut a = 1.0;
let mut b = 0.0;
let mut c = 0.0;
let mut d = 1.0;
if (flags & WE_HAVE_A_TWO_BY_TWO) != 0 {
a = get_f2_14(self.data, self.ix).unwrap();
self.ix += 2;
b = get_f2_14(self.data, self.ix).unwrap();
self.ix += 2;
c = get_f2_14(self.data, self.ix).unwrap();
self.ix += 2;
d = get_f2_14(self.data, self.ix).unwrap();
self.ix += 2;
} else if (flags & WE_HAVE_AN_X_AND_Y_SCALE) != 0 {
a = get_f2_14(self.data, self.ix).unwrap();
self.ix += 2;
d = get_f2_14(self.data, self.ix).unwrap();
self.ix += 2;
} else if (flags & WE_HAVE_A_SCALE) != 0 {
a = get_f2_14(self.data, self.ix).unwrap();
self.ix += 2;
d = a;
}
// TODO: handle non-ARGS_ARE_XY_VALUES case
let x = arg1 as f32;
let y = arg2 as f32;
let z = Affine::new(a, b, c, d, x, y);
self.more = (flags & MORE_COMPONENTS) != 0;
Some((glyph_index, z))
}
}
impl<'a> CompoundGlyph<'a> {
fn bbox(&self) -> (i16, i16, i16, i16) {
get_bbox_raw(self.data)
}
fn components(&self) -> Components {
Components {
data: self.data,
ix: 10,
more: true,
}
}
}
pub struct Font<'a> {
_version: u32,
_tables: HashMap<Tag, &'a [u8]>,
head: Head<'a>,
maxp: Maxp<'a>,
loca: Option<Loca<'a>>,
glyf: Option<&'a [u8]>,
}
struct Metrics {
l: i32,
t: i32,
r: i32,
b: i32,
}
impl Metrics {
fn width(&self) -> usize {
(self.r - self.l) as usize
}
fn height(&self) -> usize {
(self.b - self.t) as usize
}
}
impl<'a> Font<'a> {
fn metrics_and_affine(&self, xmin: i16, ymin: i16, xmax: i16, ymax: i16, size:u32) ->
(Metrics, Affine) {
let ppem = self.head.units_per_em();
let scale = (size as f32) / (ppem as f32);
let l = (xmin as f32 * scale).floor() as i32;
let t = (ymax as f32 * -scale).floor() as i32;
let r = (xmax as f32 * scale).ceil() as i32;
let b = (ymin as f32 * -scale).ceil() as i32;
let metrics = Metrics { l: l, t: t, r: r, b: b };
let z = Affine::new(scale, 0.0, 0.0, -scale, -l as f32, -t as f32);
(metrics, z)
}
fn render_glyph_inner(&self, raster: &mut Raster, z: &Affine, glyph: &Glyph) {
match *glyph {
Glyph::Simple(ref s) => {
let mut p = s.points();
for n in s.contour_sizes() {
//println!("n = {}", n);
//let v = path_from_pts(p.by_ref().take(n)).collect::<Vec<_>>();
//println!("size = {}", v.len());
draw_path(raster, z, &mut path_from_pts(p.by_ref().take(n)));
}
}
Glyph::Compound(ref c) => {
for (glyph_index, affine) in c.components() {
//println!("component {} {:?}", glyph_index, affine);
let concat = Affine::concat(z, &affine);
if let Some(component_glyph) = self.get_glyph(glyph_index) {
self.render_glyph_inner(raster, &concat, &component_glyph);
}
}
}
_ => {
println!("unhandled glyph case");
}
}
}
pub fn render_glyph(&self, glyph_id: u16, size: u32) -> Option<GlyphBitmap> {
let glyph = self.get_glyph(glyph_id);
match glyph {
Some(Glyph::Simple(ref s)) => {
let (xmin, ymin, xmax, ymax) = s.bbox();
let (metrics, z) = self.metrics_and_affine(xmin, ymin, xmax, ymax, size);
let mut raster = Raster::new(metrics.width(), metrics.height());
//dump_glyph(SimpleGlyph(s));
self.render_glyph_inner(&mut raster, &z, glyph.as_ref().unwrap());
//None
Some(GlyphBitmap {
width: metrics.width(),
height: metrics.height(),
left: metrics.l,
top: metrics.t,
data: raster.get_bitmap()
})
},
Some(Glyph::Compound(ref c)) => {
let (xmin, ymin, xmax, ymax) = c.bbox();
let (metrics, z) = self.metrics_and_affine(xmin, ymin, xmax, ymax, size);
let mut raster = Raster::new(metrics.width(), metrics.height());
self.render_glyph_inner(&mut raster, &z, glyph.as_ref().unwrap());
Some(GlyphBitmap {
width: metrics.width(),
height: metrics.height(),
left: metrics.l,
top: metrics.t,
data: raster.get_bitmap()
})
}
_ => {
println!("glyph {} error", glyph_id);
None
}
}
}
fn get_glyph(&self, glyph_ix: u16) -> Option<Glyph> {
if glyph_ix >= self.maxp.num_glyphs() { return None }
let fmt = self.head.index_to_loc_format();
match self.loca {
Some(ref loca) => match (loca.get_off(glyph_ix, fmt), loca.get_off(glyph_ix + 1, fmt), self.glyf) {
(Some(off0), Some(off1), Some(glyf)) =>
if off0 == off1 {
Some(Glyph::Empty)
} else {
let glyph_data = &glyf[off0 as usize .. off1 as usize];
if get_i16(glyph_data, 0) == Some(-1) {
Some(Glyph::Compound(CompoundGlyph{data: glyph_data}))
} else {
Some(Glyph::Simple(SimpleGlyph{data: glyph_data}))
}
},
(_, _, _) => None
},
None => None
}
}
}
#[derive(Debug)]
enum PathOp {
MoveTo(Point),
LineTo(Point),
QuadTo(Point, Point),
}
use self::PathOp::{MoveTo, LineTo, QuadTo};
struct BezPathOps<T> {
inner: T,
first_oncurve: Option<Point>,
first_offcurve: Option<Point>,
last_offcurve: Option<Point>,
alldone: bool,
closing: bool,
}
fn path_from_pts<T: Iterator>(inner: T) -> BezPathOps<T> {
BezPathOps{
inner: inner, first_oncurve: None, first_offcurve: None, last_offcurve: None,
alldone: false, closing: false
}
}
impl<I> Iterator for BezPathOps<I> where I: Iterator<Item=(bool, i16, i16)> {
type Item = PathOp;
fn next(&mut self) -> Option<PathOp> {
loop {
if self.closing {
if self.alldone {
return None
} else {
match (self.first_offcurve, self.last_offcurve) {
(None, None) => {
self.alldone = true;
return Some(LineTo(self.first_oncurve.unwrap()))
},
(None, Some(last_offcurve)) => {
self.alldone = true;
return Some(QuadTo(last_offcurve, self.first_oncurve.unwrap()))
},
(Some(first_offcurve), None) => {
self.alldone = true;
return Some(QuadTo(first_offcurve, self.first_oncurve.unwrap()))
},
(Some(first_offcurve), Some(last_offcurve)) => {
self.last_offcurve = None;
return Some(QuadTo(last_offcurve, Point::lerp(0.5, &last_offcurve, &first_offcurve)))
}
}
}
} else {
match self.inner.next() {
None => {
self.closing = true;
},
Some((oncurve, x, y)) => {
let p = Point::new(x, y);
if self.first_oncurve.is_none() {
if oncurve {
self.first_oncurve = Some(p);
return Some(MoveTo(p));
} else {
match self.first_offcurve {
None => self.first_offcurve = Some(p),
Some(first_offcurve) => {
let midp = Point::lerp(0.5, &first_offcurve, &p);
self.first_oncurve = Some(midp);
self.last_offcurve = Some(p);
return Some(MoveTo(midp));
}
}
}
} else {
match (self.last_offcurve, oncurve) {
(None, false) => self.last_offcurve = Some(p),
(None, true) => return Some(LineTo(p)),
(Some(last_offcurve), false) => {
self.last_offcurve = Some(p);
return Some(QuadTo(last_offcurve, Point::lerp(0.5, &last_offcurve, &p)));
},
(Some(last_offcurve), true) => {
self.last_offcurve = None;
return Some(QuadTo(last_offcurve, p));
}
}
}
}
}
}
}
}
}
#[derive(Debug)]
pub enum FontError {
Invalid
}
pub fn parse(data: &[u8]) -> Result<Font, FontError> {
if data.len() < 12 {
return Err(FontError::Invalid);
}
let version = get_u32(data, 0).unwrap();
let num_tables = get_u16(data, 4).unwrap() as usize;
let _search_range = get_u16(data, 6).unwrap();
let _entry_selector = get_u16(data, 8).unwrap();
let _range_shift = get_u16(data, 10).unwrap();
let mut tables = HashMap::new();
for i in 0..num_tables {
let header = &data[12 + i*16 .. 12 + (i + 1) * 16];
let tag = get_u32(header, 0).unwrap();
let _check_sum = get_u32(header, 4).unwrap();
let offset = get_u32(header, 8).unwrap();
let length = get_u32(header, 12).unwrap();
let table_data = &data[offset as usize .. (offset + length) as usize];
//println!("{}: {}", Tag(tag), tableData.len())
tables.insert(Tag(tag), table_data);
}
let head = Head(*tables.get(&Tag::from_str("head")).unwrap()); // todo: don't fail
let maxp = Maxp{data: *tables.get(&Tag::from_str("maxp")).unwrap()};
let loca = tables.get(&Tag::from_str("loca")).map(|&data| Loca(data));
let glyf = tables.get(&Tag::from_str("glyf")).map(|&data| data);
let f = Font{_version: version,
_tables: tables,
head: head,
maxp: maxp,
loca: loca,
glyf: glyf,
};
//println!("version = {:x}", version);
Ok(f)
}
/*
fn dump_glyph(g: Glyph) {
match g {
Glyph::Empty => println!("empty"),
Glyph::Simple(s) => {
//println!("{} contours", s.number_of_contours())
let mut p = s.points();
for n in s.contour_sizes() {
for _ in 0..n {
println!("{:?}", p.next().unwrap());
}
println!("z");
}
let mut p = s.points();
for n in s.contour_sizes() {
for pathop in path_from_pts(p.by_ref().take(n)) {
println!("{:?}", pathop);
}
}
},
_ => println!("other")
}
}
*/
/*
fn dump(data: Vec<u8>) {
println!("length is {}", data.len());
match parse(&data) {
Ok(font) => {
println!("numGlyphs = {}", font.maxp.num_glyphs());
for i in 0.. font.maxp.num_glyphs() {
println!("glyph {}", i);
match font.get_glyph(i) {
Some(g) => dump_glyph(g),
None => println!("glyph {} error", i)
}
}
},
_ => ()
}
}
*/
fn draw_path<I: Iterator<Item=PathOp>>(r: &mut Raster, z: &Affine, path: &mut I) {
let mut lastp = Point::new(0i16, 0i16);
for op in path {
match op {
MoveTo(p) => lastp = p,
LineTo(p) => {
r.draw_line(&affine_pt(z, &lastp), &affine_pt(z, &p));
lastp = p
},
QuadTo(p1, p2) => {
r.draw_quad(&affine_pt(z, &lastp), &affine_pt(z, &p1), &affine_pt(z, &p2));
lastp = p2;
}
}
}
}
pub struct GlyphBitmap {
pub width: usize,
pub height: usize,
pub left: i32,
pub top: i32,
pub data: Vec<u8>,
}
/*
TODO: get these benchmarks to work
fn glyphbench(b: &mut Bencher, size: u32) {
let filename = "/Users/raph/Downloads/wt024.ttf";
let mut f = File::open(filename).unwrap();
let mut data = Vec::new();
match f.read_to_end(&mut data) {
Ok(_) => match parse(&data) {
Ok(font) =>
b.iter(|| render_glyph(&font, 6000, size)),
_ => ()
},
_ => ()
}
}
#[bench]
fn glyph400(b: &mut Bencher) {
glyphbench(b, 400)
}
#[bench]
fn glyph100(b: &mut Bencher) {
glyphbench(b, 100)
}
#[bench]
fn glyph040(b: &mut Bencher) {
glyphbench(b, 40)
}
#[bench]
fn glyph020(b: &mut Bencher) {
glyphbench(b, 20)
}
#[bench]
fn glyph010(b: &mut Bencher) {
glyphbench(b, 10)
}
*/
| fmt |
stability_controller.py | # import rospy
from math import atan2, pi, sqrt
from pid import PID
GAS_DENSITY = 2.858
ONE_MPH = 0.44704
class TwistController(object):
def __init__(self, max_angular_velocity, accel_limit, decel_limit):
self.max_angular_velocity = max_angular_velocity
self.accel_limit = accel_limit
self.decel_limit = decel_limit
steer_kp = 1.
steer_ki = 0.
steer_kd = 0.
self.steer_pid = PID(steer_kp,
steer_ki,
steer_kd,
0. - max_angular_velocity,
max_angular_velocity)
throttle_kp = 1.
throttle_ki = 0.
throttle_kd = 0.
self.throttle_pid = PID(throttle_kp,
throttle_ki,
throttle_kd,
decel_limit,
accel_limit)
def control(self, goal_acceleration, goal_angular_velocity, current_velocity, deltat, dbw_enabled):
current_speed = sqrt(current_velocity[0]**2 + current_velocity[1]**2)
#goal_speed = sqrt(goal_velocity[0]**2 + goal_velocity[1]**2)
#speed_diff = goal_speed - current_speed
acceleration = self.throttle_pid.step(goal_acceleration, deltat)
angular_velocity = self.steer_pid.step(goal_angular_velocity, deltat)
if not dbw_enabled:
self.throttle_pid.reset()
self.steer_pid.reset()
#rospy.logwarn("twist_controller | speed_diff: %s acceleration: %s goal_angular: %s angular: %s",
# speed_diff, acceleration, goal_angular_velocity, angular_velocity)
return acceleration, angular_velocity
def update_steer_pid(self, p, i, d):
self.steer_pid.update_gains(p, i, d)
def | (self, p, i, d):
self.throttle_pid.update_gains(p, i, d)
def reset_throttle_pid(self):
self.throttle_pid.reset()
| update_throttle_pid |
test_constants.py | from unittest import TestCase
from mock import patch
from .. import constants
class mock_service_exeTestCase(TestCase):
def setUp(self):
super(mock_service_exeTestCase, self).setUp()
self.addCleanup(patch.stopall)
self.mock_os = patch.object(constants, 'os', autospec=True).start()
def test_other(self):
self.mock_os.name = 'posix'
self.assertEqual(constants.mock_service_exe(), 'pact-mock-service')
def test_windows(self):
self.mock_os.name = 'nt'
self.assertEqual(constants.mock_service_exe(), 'pact-mock-service.bat')
| class provider_verifier_exeTestCase(TestCase):
def setUp(self):
super(provider_verifier_exeTestCase, self).setUp()
self.addCleanup(patch.stopall)
self.mock_os = patch.object(constants, 'os', autospec=True).start()
def test_other(self):
self.mock_os.name = 'posix'
self.assertEqual(
constants.provider_verifier_exe(), 'pact-provider-verifier')
def test_windows(self):
self.mock_os.name = 'nt'
self.assertEqual(
constants.provider_verifier_exe(), 'pact-provider-verifier.bat') | |
model.py | STEVILO_DOVOLJENIH_NAPAK = 10
PRAVILNA_CRKA = '+'
PONOVLJENA_CRKA = 'o'
NAPACNA_CRKA = '-'
ZMAGA = 'W'
PORAZ = 'X'
class Igra:
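# Hangman game state: 'geslo' is the secret word, 'crke' are the letters guessed so far.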
def __init__(self, geslo, crke):
self.geslo = geslo
self.crke = crke[:]
def napacne_crke(self):
return [crka for crka in self.crke if crka not in self.geslo]
def pravilne_crke(self):
return [crka for crka in self.crke if crka in self.geslo]
def stevilo_napak(self):
return len(self.napacne_crke())
def zmaga(self):
vse_crke = True
for crka in self.geslo:
if crka in self.pravilne_crke():
pass
else:
vse_crke = False
break
# vse_crke1 all(crka in self.crke for crka in self.geslo)
return vse_crke and STEVILO_DOVOLJENIH_NAPAK >= self.stevilo_napak()
def poraz(self):
return STEVILO_DOVOLJENIH_NAPAK < self.stevilo_napak()
def pravilni_del_gesla(self):
delni = ''
ugibanje = [crka.upper() for crka in self.crke]
for crka in self.geslo:
if crka.upper() in ugibanje:
delni += crka
else:
delni += '_ '
return delni.strip()
def nepravili_ugibi(self):
return ' '.join(self.napacne_crke())
def ugibaj(self, crka):
crka = crka.upper()
if crka in self.crke:
|
elif crka in self.geslo:
self.crke.append(crka)
if self.zmaga():
return ZMAGA
else:
return PRAVILNA_CRKA
else:
self.crke.append(crka)
if self.poraz():
return PORAZ
else:
return NAPACNA_CRKA
with open('Vislice/besede.txt', 'r') as f:
bazen_besed = [beseda.strip().upper() for beseda in f.readlines()]
import random
def nova_igra():
geslo = random.choice(bazen_besed)
return Igra(geslo, [])
# testno_geslo = 'DEŽUJE'
# testne_crke = ['A', 'E', 'I', 'O', 'U', 'D', 'J', 'K', 'Ž']
# igra = Igra(testno_geslo, testne_crke)
# print(testno_geslo)
| return PONOVLJENA_CRKA |
yamlhack.py | """
Make yaml respect OrderedDicts and stop sorting things
"""
from collections import OrderedDict
import sys
import yaml
_items = 'viewitems' if sys.version_info < (3,) else 'items'
def map_representer(dumper, data):
return dumper.represent_dict(getattr(data, _items)())
def map_constructor(loader, node): # pragma: nocover (python 3.6 doesn't use it)
|
yaml.add_representer(dict, map_representer)
yaml.add_representer(OrderedDict, map_representer)
if sys.version_info < (3, 6): # pragma: nocover
yaml.add_constructor('tag:yaml.org,2002:map', map_constructor)
| loader.flatten_mapping(node)
return OrderedDict(loader.construct_pairs(node)) |
testbed.rs | use std::env;
use std::rc::Rc;
use std::cell::RefCell;
use sfml::graphics::{RenderWindow, RenderTarget, Font};
use sfml::window::{ContextSettings, VideoMode, Close};
use sfml::window::event;
use sfml::window::keyboard::Key;
use sfml::window::mouse::MouseButton;
use sfml::graphics::Color;
use sfml::system::vector2::Vector2i;
use na::{Pnt2, Pnt3, Iso2};
use na;
use ncollide::world::CollisionGroups;
use nphysics::world::World;
use nphysics::object::RigidBody;
use nphysics::detection::joint::{Fixed, Anchor};
use camera::Camera;
use fps::Fps;
use engine::GraphicsManager;
use draw_helper;
fn usage(exe_name: &str) {
println!("Usage: {} [OPTION] ", exe_name);
println!("");
println!("Options:");
println!(" --help - prints this help message and exits.");
println!(" --pause - do not start the simulation right away.");
println!("");
println!("The following keyboard commands are supported:");
println!(" t - pause/continue the simulation.");
println!(" s - pause then execute only one simulation step.");
println!(" space - display/hide contacts.");
}
#[derive(PartialEq)]
enum RunMode {
Running,
Stop,
Step
}
pub enum CallBackMode {
StateActivated,
StateDeactivated,
LoopActive,
LoopNonActive
}
pub enum CallBackId {
Cb1, Cb2, Cb3, Cb4, Cb5,
Cb6, Cb7, Cb8, Cb9
}
pub struct Testbed<'a> {
world: World,
callbacks: [Option<Box<Fn(CallBackMode)>>; 9],
window: RenderWindow,
graphics: GraphicsManager<'a>
}
struct TestbedState<'a> {
running: RunMode,
draw_colls: bool,
cb_states: [bool; 9],
camera: Camera,
fps: Fps<'a>,
grabbed_object: Option<Rc<RefCell<RigidBody>>>,
grabbed_object_joint: Option<Rc<RefCell<Fixed>>>,
}
impl<'a> TestbedState<'a> {
fn new(fnt: &'a Font) -> TestbedState<'a> {
TestbedState{
running: RunMode::Running,
draw_colls: false,
cb_states: [ false; 9 ],
camera: Camera::new(),
fps: Fps::new(&fnt),
grabbed_object: None,
grabbed_object_joint: None,
}
}
}
impl<'a> Testbed<'a> {
pub fn new_empty() -> Testbed<'a> {
let mode = VideoMode::new_init(800, 600, 32);
let setting = ContextSettings {
depth_bits: 10,
stencil_bits: 10,
antialiasing_level: 2,
major_version: 0,
minor_version: 1
};
let window =
match RenderWindow::new(mode, "nphysics 2d demo", Close, &setting) {
Some(rwindow) => rwindow,
None => panic!("Error on creating the sfml window.")
};
let graphics = GraphicsManager::new();
Testbed {
world: World::new(),
callbacks: [ None, None, None, None, None, None, None, None, None ],
window: window,
graphics: graphics
}
}
pub fn new(world: World) -> Testbed<'a> {
let mut res = Testbed::new_empty();
res.set_world(world);
res
}
pub fn set_world(&mut self, world: World) {
self.world = world;
self.graphics.clear();
for rb in self.world.bodies() {
self.graphics.add(rb.clone());
}
}
pub fn set_color(&mut self, body: &Rc<RefCell<RigidBody>>, color: Pnt3<f32>) {
let color = Pnt3::new(
(color.x * 255.0) as u8,
(color.y * 255.0) as u8,
(color.z * 255.0) as u8
);
self.graphics.set_color(body, color);
}
pub fn add_callback(&mut self, id: CallBackId, callback: Box<Fn(CallBackMode)>) {
match id {
CallBackId::Cb1 => self.callbacks[0] = Some(callback),
CallBackId::Cb2 => self.callbacks[1] = Some(callback),
CallBackId::Cb3 => self.callbacks[2] = Some(callback),
CallBackId::Cb4 => self.callbacks[3] = Some(callback),
CallBackId::Cb5 => self.callbacks[4] = Some(callback),
CallBackId::Cb6 => self.callbacks[5] = Some(callback),
CallBackId::Cb7 => self.callbacks[6] = Some(callback),
CallBackId::Cb8 => self.callbacks[7] = Some(callback),
CallBackId::Cb9 => self.callbacks[8] = Some(callback)
}
}
pub fn run(&mut self) {
let font_mem = include_bytes!("Inconsolata.otf");
let fnt = Font::new_from_memory(font_mem).unwrap();
let mut state = TestbedState::new(&fnt);
let mut args = env::args();
if args.len() > 1 {
let exname = args.next().unwrap();
for arg in args { | return;
}
else if &arg[..] == "--pause" {
state.running = RunMode::Stop;
}
}
}
self.window.set_framerate_limit(60);
self.run_loop(state);
self.window.close();
}
fn run_loop(&mut self, mut state: TestbedState) {
while self.window.is_open() {
self.process_events(&mut state);
self.window.clear(&Color::black());
state.fps.reset();
self.progress_world(&mut state);
state.fps.register_delta();
self.graphics.draw(&mut self.window, &state.camera);
state.camera.activate_scene(&mut self.window);
self.draw_collisions(&mut state);
state.camera.activate_ui(&mut self.window);
state.fps.draw_registered(&mut self.window);
self.window.display();
}
}
fn process_events(&mut self, mut state: &mut TestbedState) {
loop {
match self.window.poll_event() {
event::KeyPressed{code, ..} => self.process_key_press(&mut state, code),
event::MouseButtonPressed{button, x, y} => self.process_mouse_press(&mut state, button, x, y),
event::MouseButtonReleased{button, x, y} => self.process_mouse_release(&mut state, button, x, y),
event::MouseMoved{x, y} => self.process_mouse_moved(&mut state, x, y),
event::Closed => self.window.close(),
event::NoEvent => break,
e => state.camera.handle_event(&e)
}
}
}
fn process_key_press(&mut self, state: &mut TestbedState, code: Key) {
let mut toggled_callback = None;
match code {
Key::Escape => self.window.close(),
Key::S => state.running = RunMode::Step,
Key::Space => state.draw_colls = !state.draw_colls,
Key::T => {
if state.running == RunMode::Stop {
state.running = RunMode::Running;
}
else {
state.running = RunMode::Stop;
}
},
Key::Num1 => toggled_callback = Some(0),
Key::Num2 => toggled_callback = Some(1),
Key::Num3 => toggled_callback = Some(2),
Key::Num4 => toggled_callback = Some(3),
Key::Num5 => toggled_callback = Some(4),
Key::Num6 => toggled_callback = Some(5),
Key::Num7 => toggled_callback = Some(6),
Key::Num8 => toggled_callback = Some(7),
Key::Num9 => toggled_callback = Some(8),
_ => { }
}
if let Some(id) = toggled_callback {
state.cb_states[id] = !state.cb_states[id];
match self.callbacks[id] {
Some(ref p) => {
if state.cb_states[id] {
p(CallBackMode::StateActivated);
} else {
p(CallBackMode::StateDeactivated);
}
},
None => {}
}
}
}
fn process_mouse_press(&mut self, state: &mut TestbedState, button: MouseButton, x: i32, y: i32) {
match button {
MouseButton::MouseLeft => {
let mapped_coords = state.camera.map_pixel_to_coords(Vector2i::new(x, y));
let mapped_point = Pnt2::new(mapped_coords.x, mapped_coords.y);
let all_groups = &CollisionGroups::new();
for b in self.world
.collision_world()
.interferences_with_point(&mapped_point, all_groups) {
if b.data.borrow().can_move() {
state.grabbed_object = Some(b.data.clone())
}
}
match state.grabbed_object {
Some(ref b) => {
match state.grabbed_object_joint {
Some(ref j) => self.world.remove_fixed(j),
None => { }
}
let _1: Iso2<f32> = na::one();
let attach2 = na::append_translation(&_1, mapped_point.as_vec());
let attach1 = na::inv(&na::transformation(b.borrow().position())).unwrap() * attach2;
let anchor1 = Anchor::new(Some(state.grabbed_object.as_ref().unwrap().clone()), attach1);
let anchor2 = Anchor::new(None, attach2);
let joint = Fixed::new(anchor1, anchor2);
state.grabbed_object_joint = Some(self.world.add_fixed(joint));
for node in self.graphics.body_to_scene_node(b).unwrap().iter_mut() {
node.select()
}
},
None => { }
}
},
_ => {
state.camera.handle_event(&event::MouseButtonPressed{ button: button, x: x, y: y })
}
}
}
fn process_mouse_release(&mut self, state: &mut TestbedState, button: MouseButton, x: i32, y: i32) {
match button {
MouseButton::MouseLeft => {
match state.grabbed_object {
Some(ref b) => {
for node in self.graphics.body_to_scene_node(b).unwrap().iter_mut() {
node.unselect()
}
},
None => { }
}
match state.grabbed_object_joint {
Some(ref j) => self.world.remove_fixed(j),
None => { }
}
state.grabbed_object = None;
state.grabbed_object_joint = None;
},
_ => {
state.camera.handle_event(&event::MouseButtonReleased{ button: button, x: x, y: y })
}
}
}
fn process_mouse_moved(&mut self, state: &mut TestbedState, x: i32, y: i32) {
let mapped_coords = state.camera.map_pixel_to_coords(Vector2i::new(x, y));
let mapped_point = Pnt2::new(mapped_coords.x, mapped_coords.y);
let _1: Iso2<f32> = na::one();
let attach2 = na::append_translation(&_1, (mapped_point).as_vec());
match state.grabbed_object {
Some(_) => {
let joint = state.grabbed_object_joint.as_ref().unwrap();
joint.borrow_mut().set_local2(attach2);
},
None => state.camera.handle_event(&event::MouseMoved{x: x, y: y})
};
}
fn progress_world(&mut self, state: &mut TestbedState) {
if state.running != RunMode::Stop {
for i in 0 .. 9 {
match self.callbacks[i] {
Some(ref p) => {
if state.cb_states[i] {
p(CallBackMode::LoopActive);
} else {
p(CallBackMode::LoopNonActive);
}
},
None => {}
}
}
self.world.step(0.016);
}
if state.running == RunMode::Step {
state.running = RunMode::Stop;
}
}
fn draw_collisions(&mut self, state: &mut TestbedState) {
if state.draw_colls {
draw_helper::draw_colls(&mut self.window, &mut self.world);
}
}
} | if &arg[..] == "--help" || &arg[..] == "-h" {
usage(&exname[..]); |
reconcile.go | // Copyright 2019 Orange
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package cassandracluster
import (
"context"
"encoding/json"
"fmt"
"reflect"
"regexp"
"strings"
api "github.com/Orange-OpenSource/cassandra-k8s-operator/pkg/apis/db/v1alpha1"
"github.com/Orange-OpenSource/cassandra-k8s-operator/pkg/k8s"
"github.com/r3labs/diff"
"github.com/sirupsen/logrus"
v1 "k8s.io/api/core/v1"
apierrors "k8s.io/apimachinery/pkg/api/errors"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)
const topologyChangeRefused = "The Operator has refused the Topology change. "
func preventClusterDeletion(cc *api.CassandraCluster, value bool) {
if value {
cc.SetFinalizers([]string{"kubernetes.io/pvc-to-delete"})
return
}
cc.SetFinalizers([]string{})
}
func updateDeletePvcStrategy(cc *api.CassandraCluster) |
// CheckDeletePVC checks if DeletePVC has been updated and updates the DeletePVC strategy accordingly
func (rcc *ReconcileCassandraCluster) CheckDeletePVC(cc *api.CassandraCluster) error {
var oldCRD api.CassandraCluster
if cc.Annotations[api.AnnotationLastApplied] == "" {
return nil
}
//We retrieved our last-applied-configuration stored in the CRD
err := json.Unmarshal([]byte(cc.Annotations[api.AnnotationLastApplied]), &oldCRD)
if err != nil {
logrus.Errorf("[%s]: Can't get Old version of CRD", cc.Name)
return nil
}
if cc.Spec.DeletePVC != oldCRD.Spec.DeletePVC {
logrus.WithFields(logrus.Fields{"cluster": cc.Name}).Debug("DeletePVC has been updated")
updateDeletePvcStrategy(cc)
return rcc.client.Update(context.TODO(), cc)
}
return nil
}
// CheckNonAllowedChanges - checks if there are some changes on the CRD that are not allowed on the statefulset
// If a non-allowed change is found we won't update the associated kubernetes objects, but we will put back the old value
// and patch the CRD with the correct values
func (rcc *ReconcileCassandraCluster) CheckNonAllowedChanges(cc *api.CassandraCluster,
status *api.CassandraClusterStatus) bool {
var oldCRD api.CassandraCluster
if cc.Annotations[api.AnnotationLastApplied] == "" {
return false
}
if lac, _ := cc.ComputeLastAppliedConfiguration(); string(lac) == cc.Annotations[api.AnnotationLastApplied] {
//there are no changes to take care about
return false
}
//We retrieved our last-applied-configuration stored in the CRD
err := json.Unmarshal([]byte(cc.Annotations[api.AnnotationLastApplied]), &oldCRD)
if err != nil {
logrus.WithFields(logrus.Fields{"cluster": cc.Name}).Error("Can't get Old version of CRD")
return false
}
var needUpdate bool
//Global scaleDown to 0 is forbidden
if cc.Spec.NodesPerRacks == 0 {
logrus.WithFields(logrus.Fields{"cluster": cc.Name}).
Warningf("The Operator has refused the change on NodesPerRack=0 restore to OldValue[%d]",
oldCRD.Spec.NodesPerRacks)
cc.Spec.NodesPerRacks = oldCRD.Spec.NodesPerRacks
needUpdate = true
}
//DataCapacity change is forbidden
if cc.Spec.DataCapacity != oldCRD.Spec.DataCapacity {
logrus.WithFields(logrus.Fields{"cluster": cc.Name}).
Warningf("The Operator has refused the change on DataCapacity from [%s] to NewValue[%s]",
oldCRD.Spec.DataCapacity, cc.Spec.DataCapacity)
cc.Spec.DataCapacity = oldCRD.Spec.DataCapacity
needUpdate = true
}
//DataStorage
if cc.Spec.DataStorageClass != oldCRD.Spec.DataStorageClass {
logrus.WithFields(logrus.Fields{"cluster": cc.Name}).
Warningf("The Operator has refused the change on DataStorageClass from [%s] to NewValue[%s]",
oldCRD.Spec.DataStorageClass, cc.Spec.DataStorageClass)
cc.Spec.DataStorageClass = oldCRD.Spec.DataStorageClass
needUpdate = true
}
if needUpdate {
status.LastClusterAction = api.ActionCorrectCRDConfig
return true
}
var updateStatus string
if needUpdate, updateStatus = CheckTopologyChanges(rcc, cc, status, &oldCRD); needUpdate {
if updateStatus != "" {
status.LastClusterAction = updateStatus
}
if updateStatus == api.ActionCorrectCRDConfig {
cc.Spec.Topology = (&oldCRD).Spec.Topology
}
return true
}
if updateStatus == api.ActionDeleteRack {
return true
}
if needUpdate, updateStatus = rcc.CheckNonAllowedScaleDown(cc, status, &oldCRD); needUpdate {
if updateStatus != "" {
status.LastClusterAction = updateStatus
}
return true
}
//What if we ask to change Pod resources ?
// It is authorized, but the operator needs to detect it to prevent multiple statefulset updates at the same time
// the operator must handle those updates sequentially, so we flag each dcRackName with this information
if !reflect.DeepEqual(cc.Spec.Resources, oldCRD.Spec.Resources) {
logrus.Infof("[%s]: We ask to Change Pod Resources from %v to %v", cc.Name, oldCRD.Spec.Resources, cc.Spec.Resources)
for dc := 0; dc < cc.GetDCSize(); dc++ {
dcName := cc.GetDCName(dc)
for rack := 0; rack < cc.GetRackSize(dc); rack++ {
rackName := cc.GetRackName(dc, rack)
dcRackName := cc.GetDCRackName(dcName, rackName)
dcRackStatus := status.CassandraRackStatus[dcRackName]
logrus.Infof("[%s][%s]: Update Rack Status UpdateResources=Ongoing", cc.Name, dcRackName)
dcRackStatus.CassandraLastAction.Name = api.ActionUpdateResources
dcRackStatus.CassandraLastAction.Status = api.StatusToDo
now := metav1.Now()
status.CassandraRackStatus[dcRackName].CassandraLastAction.StartTime = &now
status.CassandraRackStatus[dcRackName].CassandraLastAction.EndTime = nil
}
}
}
return false
}
func generatePaths(s string) []string {
return strings.Split(s, ".")
}
// lookForFilter checks if filters are found in path and adds the information to filtersFound if that's the case
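// Illustrative example (hypothetical values, not from a real changelog): with
// path = []string{"DC", "0", "Rack", "1", "Name"} and filters = [][]string{{"DC", "Rack"}},
// the reconstructed currentPath is "DC.Rack", which matches the filter, so
// (*filtersFound)["DC.Rack"] is set to true.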
func lookForFilter(path []string, filters [][]string, filtersFound *map[string]bool) {
for _, filter := range filters {
if 2*len(filter)+1 == len(path) {
currentPath := path[0]
for i := 2; i < len(path)-1; i += 2 {
currentPath += "." + path[i]
}
if currentPath == strings.Join(filter, ".") {
if _, ok := (*filtersFound)[currentPath]; !ok {
(*filtersFound)[currentPath] = true
}
}
}
}
}
// hasChange returns if there is a change with the type provided and matching all paths
// paths can be prepended with a - to specify that it should not be found
// for instance ('DC', '-DC.Rack') means a DC change without a DC.Rack change
// changes of property NodesPerRacks are skipped
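// Illustrative call (hypothetical, assuming a changelog produced by diff.Diff on two Topology specs):
// hasChange(changelog, diff.CREATE, "DC.Rack", "-DC") is true when a rack was added inside an
// already existing DC, but false when the rack comes with a brand new DC, because the "-DC"
// exclusion filter then matches as well.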
func hasChange(changelog diff.Changelog, changeType string, paths ...string) bool {
regexPath := regexp.MustCompile("^\\-([^\\+]*)$")
if len(changelog) == 0 {
return false
}
noPaths := len(paths) == 0
includeFilters := [][]string{}
excludeFilters := [][]string{}
for _, path := range paths {
if match := regexPath.FindStringSubmatch(path); len(match) > 0 {
excludeFilters = append(excludeFilters, generatePaths(match[1]))
continue
}
includeFilters = append(includeFilters, generatePaths(path))
}
idx := "-1"
var includedFiltersFound, excludedFiltersFound map[string]bool
for _, cl := range changelog {
// Only scan changes on Name/NumTokens
if cl.Type == changeType &&
// DC Changes
(cl.Path[2] == "Name" || cl.Path[2] == "NumTokens" ||
// Rack changes
(len(cl.Path) > 4 && cl.Path[4] == "Name")) {
if noPaths {
return true
}
// We reset counters when it's a new index
if cl.Path[1] != idx {
idx = cl.Path[1]
includedFiltersFound = map[string]bool{}
excludedFiltersFound = map[string]bool{}
}
// We look for all matching filters
lookForFilter(cl.Path, includeFilters, &includedFiltersFound)
// We look for all excluding filters
lookForFilter(cl.Path, excludeFilters, &excludedFiltersFound)
if len(includedFiltersFound) == len(includeFilters) && len(excludedFiltersFound) == 0 {
return true
}
}
}
return false
}
//CheckTopologyChanges checks to see if the Operator accepts or refuses the CRD changes
func CheckTopologyChanges(rcc *ReconcileCassandraCluster, cc *api.CassandraCluster,
status *api.CassandraClusterStatus, oldCRD *api.CassandraCluster) (bool, string) {
changelog, _ := diff.Diff(oldCRD.Spec.Topology, cc.Spec.Topology)
if hasChange(changelog, diff.UPDATE) ||
hasChange(changelog, diff.DELETE, "DC.Rack", "-DC") ||
hasChange(changelog, diff.CREATE, "DC.Rack", "-DC") {
logrus.WithFields(logrus.Fields{"cluster": cc.Name}).Warningf(
topologyChangeRefused+"No change other than adding/removing a DC can happen: %v restored to %v",
cc.Spec.Topology, oldCRD.Spec.Topology)
return true, api.ActionCorrectCRDConfig
}
if cc.GetDCSize() < oldCRD.GetDCSize()-1 {
logrus.WithFields(logrus.Fields{"cluster": cc.Name}).Warningf(
topologyChangeRefused+"You can only remove 1 DC at a time, "+
"not only a Rack: %v restored to %v", cc.Spec.Topology, oldCRD.Spec.Topology)
return true, api.ActionCorrectCRDConfig
}
if cc.GetDCRackSize() < oldCRD.GetDCRackSize() {
if cc.Status.LastClusterAction == api.ActionScaleDown &&
cc.Status.LastClusterActionStatus != api.StatusDone {
logrus.WithFields(logrus.Fields{"cluster": cc.Name}).
Warningf(topologyChangeRefused +
"You must wait to the end of ScaleDown to 0 before deleting a DC")
return true, api.ActionCorrectCRDConfig
}
dcName := cc.GetRemovedDCName(oldCRD)
//We need to check how many nodes were in the old CRD (before the user deleted it)
if found, nbNodes := oldCRD.GetDCNodesPerRacksFromName(dcName); found && nbNodes > 0 {
logrus.WithFields(logrus.Fields{"cluster": cc.Name}).
Warningf(topologyChangeRefused+
"You must scale down the DC %s to 0 before deleting it", dcName)
return true, api.ActionCorrectCRDConfig
}
logrus.WithFields(logrus.Fields{"cluster": cc.Name}).Warningf("Removing DC %s", dcName)
//We apply this change to the Cluster status
return rcc.deleteDCObjects(cc, status, oldCRD)
}
return false, ""
}
func (rcc *ReconcileCassandraCluster) deleteDCObjects(cc *api.CassandraCluster,
status *api.CassandraClusterStatus, oldCRD *api.CassandraCluster) (bool, string) {
dcRackNameToDeleteList := cc.FixCassandraRackList(status)
if len(dcRackNameToDeleteList) > 0 {
for _, dcRackNameToDelete := range dcRackNameToDeleteList {
err := rcc.DeleteStatefulSet(cc.Namespace, cc.Name+"-"+dcRackNameToDelete)
if err != nil && !apierrors.IsNotFound(err) {
logrus.WithFields(logrus.Fields{"cluster": cc.Name, "rack": dcRackNameToDelete}).Warnf(
"Can't Delete Statefulset: %v", err)
}
names := []string{
cc.Name + "-" + cc.GetDCFromDCRackName(dcRackNameToDelete), //name-dc
cc.Name + "-" + dcRackNameToDelete, //name-dc-rack
cc.Name + "-" + cc.GetDCFromDCRackName(dcRackNameToDelete) + "-exporter-jmx", //name-dc-exporter-jmx
}
for i := range names {
err = rcc.DeleteService(cc.Namespace, names[i])
if err != nil && !apierrors.IsNotFound(err) {
logrus.WithFields(logrus.Fields{"cluster": cc.Name, "rack": dcRackNameToDelete}).Warnf(
"Can't Delete Service: %v", err)
}
}
}
return true, api.ActionDeleteDC
}
return false, ""
}
//CheckNonAllowedScaleDown goal is to refuse the scaleDown to 0 if there is still replicated data in the
// corresponding DC
func (rcc *ReconcileCassandraCluster) CheckNonAllowedScaleDown(cc *api.CassandraCluster,
status *api.CassandraClusterStatus,
oldCRD *api.CassandraCluster) (bool, string) {
if ok, dcName, dc := cc.FindDCWithNodesTo0(); ok {
logrus.WithFields(logrus.Fields{"cluster": cc.Name}).Infof("Ask ScaleDown to 0 for dc %s", dcName)
//We take the first Rack
rackName := cc.GetRackName(dc, 0)
selector := k8s.MergeLabels(k8s.LabelsForCassandraDCRack(cc, dcName, rackName))
podsList, err := rcc.ListPods(cc.Namespace, selector)
if err != nil || len(podsList.Items) < 1 {
if err != nil {
logrus.WithFields(logrus.Fields{"cluster": cc.Name}).Warningf(
"The Operator has refused the ScaleDown (no pod found). "+
"topology %v restored to %v", cc.Spec.Topology, oldCRD.Spec.Topology)
cc.Spec.Topology = oldCRD.Spec.Topology
return true, api.ActionCorrectCRDConfig
}
//else there is already no pods so it's ok
return false, ""
}
//We take the first available Pod
for _, pod := range podsList.Items {
if pod.Status.Phase != v1.PodRunning || pod.DeletionTimestamp != nil {
continue
}
hostName := fmt.Sprintf("%s.%s", pod.Spec.Hostname, pod.Spec.Subdomain)
logrus.WithFields(logrus.Fields{"cluster": cc.Name}).Debugf("The Operator will ask node %s", hostName)
jolokiaClient, err := NewJolokiaClient(hostName, JolokiaPort, rcc,
cc.Spec.ImageJolokiaSecret, cc.Namespace)
var keyspacesWithData []string
if err == nil {
keyspacesWithData, err = jolokiaClient.HasDataInDC(dcName)
}
if err != nil {
logrus.WithFields(logrus.Fields{"cluster": cc.Name}).Warningf(
"The Operator has refused the ScaleDown (HasDataInDC failed %s). ", err)
cc.Spec.Topology = oldCRD.Spec.Topology
return true, api.ActionCorrectCRDConfig
}
if len(keyspacesWithData) != 0 {
logrus.WithFields(logrus.Fields{"cluster": cc.Name}).Warningf(
"The Operator has refused the ScaleDown. Keyspaces still having data %v", keyspacesWithData)
cc.Spec.Topology = oldCRD.Spec.Topology
return true, api.ActionCorrectCRDConfig
}
logrus.WithFields(logrus.Fields{"cluster": cc.Name}).Warningf(
"Cassandra has no more replicated data on dc %s, we can scale Down to 0", dcName)
return false, ""
}
}
return false, ""
}
//ReconcileRack will try to reconcile cassandra for each of the couple DC/Rack defined in the topology
func (rcc *ReconcileCassandraCluster) ReconcileRack(cc *api.CassandraCluster,
status *api.CassandraClusterStatus) (err error) {
for dc := 0; dc < cc.GetDCSize(); dc++ {
dcName := cc.GetDCName(dc)
for rack := 0; rack < cc.GetRackSize(dc); rack++ {
rackName := cc.GetRackName(dc, rack)
dcRackName := cc.GetDCRackName(dcName, rackName)
if dcRackName == "" {
return fmt.Errorf("name used for DC and/or Rack is not valid")
}
//If we have added a dc/rack to the CRD, we add it to the Status
if _, ok := status.CassandraRackStatus[dcRackName]; !ok {
logrus.WithFields(logrus.Fields{"cluster": cc.Name}).Infof("the DC(%s) and Rack(%s) does not exist, "+
"initialize it in status", dcName, rackName)
cc.InitCassandraRackinStatus(status, dcName, rackName)
//Return will stop operator reconcile loop until next one
//used here to write CassandraClusterStatus properly
return nil
}
dcRackStatus := status.CassandraRackStatus[dcRackName]
if cc.DeletionTimestamp != nil && cc.Spec.DeletePVC {
rcc.DeletePVCs(cc, dcName, rackName)
//Go to next rack
continue
}
Name := cc.Name + "-" + dcRackName
storedStatefulSet, err := rcc.GetStatefulSet(cc.Namespace, Name)
if err != nil {
logrus.WithFields(logrus.Fields{"cluster": cc.Name,
"dc-rack": dcRackName}).Infof("failed to get cassandra's statefulset (%s) %v", Name, err)
} else {
//Update CassandraClusterPhase
rcc.UpdateCassandraRackStatusPhase(cc, dcName, rackName, storedStatefulSet, status)
//Find if there is an Action to execute or to end
rcc.getNextCassandraClusterStatus(cc, dc, rack, dcName, rackName, storedStatefulSet, status)
//If not in Initial State
// Find if we have some Pod Operation to Execute, and execute them
if dcRackStatus.Phase != api.ClusterPhaseInitial {
breakResyncloop, err := rcc.executePodOperation(cc, dcName, rackName, status)
if err != nil {
logrus.WithFields(logrus.Fields{"cluster": cc.Name, "dc-rack": dcRackName,
"err": err}).Error("Executing pod operation failed")
}
//For some Operations, we must NOT update the statefulset until Done.
//So we block until OK
if breakResyncloop {
// If an Action is ongoing on the current Rack,
// we don't want to check or start actions on Next Rack
if dcRackStatus.Phase != api.ClusterPhaseRunning ||
dcRackStatus.CassandraLastAction.Status == api.StatusToDo ||
dcRackStatus.CassandraLastAction.Status == api.StatusOngoing ||
dcRackStatus.CassandraLastAction.Status == api.StatusContinue {
logrus.WithFields(logrus.Fields{"cluster": cc.Name, "dc-rack": dcRackName,
"err": err}).Debug("Waiting Rack to be running before continuing, " +
"we break ReconcileRack Without Updating Statefulset")
return nil
}
logrus.WithFields(logrus.Fields{"cluster": cc.Name, "dc-rack": dcRackName,
"LastActionName": dcRackStatus.CassandraLastAction.Name,
"LastActionStatus": dcRackStatus.CassandraLastAction.Status}).Warning(
"Should Not see this message ;)" +
" Waiting Rack to be running before continuing, we loop on Next Rack, maybe we don't want that")
continue
}
}
}
if err = rcc.ensureCassandraService(cc); err != nil {
logrus.WithFields(logrus.Fields{"cluster": cc.Name}).Errorf("ensureCassandraService Error: %v", err)
}
if err = rcc.ensureCassandraServiceMonitoring(cc, dcName); err != nil {
logrus.WithFields(logrus.Fields{"cluster": cc.Name,
"dc-rack": dcRackName}).Errorf("ensureCassandraServiceMonitoring Error: %v", err)
}
breakLoop, err := rcc.ensureCassandraStatefulSet(cc, status, dcName, dcRackName, dc, rack)
if err != nil {
logrus.WithFields(logrus.Fields{"cluster": cc.Name,
"dc-rack": dcRackName}).Errorf("ensureCassandraStatefulSet Error: %v", err)
}
if cc.Spec.UnlockNextOperation {
//If we enter specific change we remove _unlockNextOperation from Spec
cc.Spec.UnlockNextOperation = false
needUpdate = true
}
if breakLoop {
logrus.WithFields(logrus.Fields{"cluster": cc.Name, "dc-rack": dcRackName,
"err": err}).Debug("We just update Statefulset " +
"we break ReconcileRack")
return nil
}
//If the Phase is not running Then we won't check on Next Racks so we return
//We don't want to make change in 2 racks in a same time
if dcRackStatus.Phase != api.ClusterPhaseRunning ||
(dcRackStatus.CassandraLastAction.Status == api.StatusOngoing ||
dcRackStatus.CassandraLastAction.Status == api.StatusFinalizing) {
logrus.WithFields(logrus.Fields{"cluster": cc.Name,
"dc-rack": dcRackName}).Infof("Waiting Rack to be running before continuing, " +
"we break ReconcileRack after updated statefulset")
return nil
}
}
}
//If cluster is deleted and DeletePVC is set, we can now stop preventing the cluster from being deleted
//because PVCs have been deleted
if cc.DeletionTimestamp != nil && cc.Spec.DeletePVC {
preventClusterDeletion(cc, false)
return rcc.client.Update(context.TODO(), cc)
}
return nil
}
// UpdateCassandraClusterStatusPhase goal is to calculate the Cluster Phase according to StatefulSet Status.
func UpdateCassandraClusterStatusPhase(cc *api.CassandraCluster, status *api.CassandraClusterStatus) {
var setLastClusterActionStatus bool
for dc := 0; dc < cc.GetDCSize(); dc++ {
dcName := cc.GetDCName(dc)
for rack := 0; rack < cc.GetRackSize(dc); rack++ {
rackName := cc.GetRackName(dc, rack)
dcRackName := cc.GetDCRackName(dcName, rackName)
dcRackStatus, exist := status.CassandraRackStatus[dcRackName]
if !exist {
logrus.WithFields(logrus.Fields{"cluster": cc.Name}).Infof("the DC(%s) and Rack(%s) does not exist, "+
"the rack status will be updated in next reconcile", dcName, rackName)
continue
}
// If there is a lastAction ongoing in a Rack we update cluster lastaction accordingly
if dcRackStatus.CassandraLastAction.Status != api.StatusDone {
status.LastClusterActionStatus = dcRackStatus.CassandraLastAction.Status
status.LastClusterAction = dcRackStatus.CassandraLastAction.Name
setLastClusterActionStatus = true
}
//If a rack is not running we return
if dcRackStatus.Phase != api.ClusterPhaseRunning {
status.Phase = dcRackStatus.Phase
if _, ok := cc.Status.CassandraRackStatus[dcRackName]; !ok ||
cc.Status.CassandraRackStatus[dcRackName].Phase != dcRackStatus.Phase {
logrus.WithFields(logrus.Fields{"cluster": cc.Name,
"dc-rack": dcRackName}).Infof("Update Rack Status: %s", dcRackStatus.Phase)
}
return
}
}
}
//If there is no more action in racks, we update cluster
if !setLastClusterActionStatus &&
status.LastClusterActionStatus != api.StatusDone {
logrus.WithFields(logrus.Fields{"cluster": cc.Name}).Infof("Action %s is done!", status.LastClusterAction)
status.LastClusterActionStatus = api.StatusDone
status.Phase = api.ClusterPhaseRunning
}
//If cluster phase is not running, we update it
if status.Phase != api.ClusterPhaseRunning && status.LastClusterActionStatus == api.StatusDone {
logrus.WithFields(logrus.Fields{"cluster": cc.Name}).Infof("Cluster is running")
status.Phase = api.ClusterPhaseRunning
}
return
}
//FlipCassandraClusterUpdateSeedListStatus checks if all racks have the status UpdateSeedList=To-do
//It then sets UpdateSeedList to Ongoing to start the operation
func FlipCassandraClusterUpdateSeedListStatus(cc *api.CassandraCluster, status *api.CassandraClusterStatus) {
//if global status is not yet "Configuring", we skip this one
if status.LastClusterAction == api.ActionUpdateSeedList &&
status.LastClusterActionStatus == api.StatusConfiguring {
var setOperationOngoing = true
//Check if We need to start operation
//all status of all racks must be "configuring"
for dc := 0; dc < cc.GetDCSize(); dc++ {
dcName := cc.GetDCName(dc)
for rack := 0; rack < cc.GetRackSize(dc); rack++ {
rackName := cc.GetRackName(dc, rack)
dcRackName := cc.GetDCRackName(dcName, rackName)
dcRackStatus := status.CassandraRackStatus[dcRackName]
//If not all racks are in "configuring", then we don't flip status to to-do except for initializing rack
if !(dcRackStatus.CassandraLastAction.Name == api.ActionUpdateSeedList &&
dcRackStatus.CassandraLastAction.Status == api.StatusConfiguring) {
//if rack is initializing we allow it to Flip
if dcRackStatus.CassandraLastAction.Name != api.ClusterPhaseInitial {
setOperationOngoing = false
}
break
}
}
}
//If all racks are in "configuring" state, we set all status to ToDo to trigger the operator actions
if setOperationOngoing {
for dc := 0; dc < cc.GetDCSize(); dc++ {
dcName := cc.GetDCName(dc)
for rack := 0; rack < cc.GetRackSize(dc); rack++ {
rackName := cc.GetRackName(dc, rack)
dcRackName := cc.GetDCRackName(dcName, rackName)
dcRackStatus := status.CassandraRackStatus[dcRackName]
logrus.WithFields(logrus.Fields{"cluster": cc.Name,
"dc-rack": dcRackName}).Infof("Update Rack Status UpdateSeedList=ToDo")
dcRackStatus.CassandraLastAction.Name = api.ActionUpdateSeedList
dcRackStatus.CassandraLastAction.Status = api.StatusToDo
}
}
}
}
return
}
| {
logrus.WithFields(logrus.Fields{"cluster": cc.Name, "deletePVC": cc.Spec.DeletePVC,
"finalizers": cc.Finalizers}).Debug("updateDeletePvcStrategy called")
// Remove Finalizers if DeletePVC is not enabled
if !cc.Spec.DeletePVC && len(cc.Finalizers) > 0 {
logrus.WithFields(logrus.Fields{"cluster": cc.Name}).Info("Won't delete PVCs when nodes are removed")
preventClusterDeletion(cc, false)
}
// Add Finalizer if DeletePVC is enabled
if cc.Spec.DeletePVC && len(cc.Finalizers) == 0 {
logrus.WithFields(logrus.Fields{"cluster": cc.Name}).Info("Will delete PVCs when nodes are removed")
preventClusterDeletion(cc, true)
}
} |
render.go | package main
import (
"fmt"
"os"
"plugin"
bootkubeplugin "github.com/kubernetes-sigs/bootkube/pkg/plugin"
"github.com/spf13/cobra"
)
var (
cmdRender = &cobra.Command{
Use: "render",
Short: "Render cluster manifests using the specified plugin",
RunE: runCmdRender,
SilenceUsage: true,
}
renderOpts struct {
assetDir string
plugin string
pluginFlags []string
}
pluginOpts bootkubeplugin.Options
)
func | () {
cmdRoot.AddCommand(cmdRender)
cmdRender.Flags().StringVar(&pluginOpts.AssetDir, "asset-dir", "", "Output path for rendered assets")
cmdRender.Flags().StringVar(&renderOpts.plugin, "plugin", "", "Path to the render plugin")
cmdRender.Flags().StringSliceVar(&renderOpts.pluginFlags, "plugin-flag", []string{}, "The flags to pass to the render plugin")
cobra.MarkFlagRequired(cmdRender.Flags(), "asset-dir")
cobra.MarkFlagRequired(cmdRender.Flags(), "plugin")
}
func runCmdRender(cmd *cobra.Command, args []string) error {
plug, err := plugin.Open(renderOpts.plugin)
if err != nil {
fmt.Println(err)
os.Exit(1)
}
symbol, err := plug.Lookup("Renderer")
if err != nil {
fmt.Println(err)
os.Exit(1)
}
var renderer bootkubeplugin.Renderer
renderer, ok := symbol.(bootkubeplugin.Renderer)
if !ok {
fmt.Println("unexpected type from plugin")
os.Exit(1)
}
return renderer.Render(&pluginOpts, renderOpts.pluginFlags)
}
| init |
md-butane.rs | // Lumol, an extensible molecular simulation engine
// Copyright (C) Lumol's contributors — BSD license
//! Testing molecular dynamics of butane
use lumol::input::Input;
use std::path::Path;
use std::sync::Once;
static START: Once = Once::new();
#[test]
fn bo | {
START.call_once(::env_logger::init);
let path = Path::new(file!()).parent()
.unwrap()
.join("data")
.join("md-butane")
.join("nve.toml");
let system = Input::new(path).unwrap().read_system().unwrap();
assert_eq!(system.molecules().count(), 50);
for molecule in system.molecules() {
assert_eq!(molecule.bonds().len(), 3);
assert_eq!(molecule.angles().len(), 2);
assert_eq!(molecule.dihedrals().len(), 1);
}
}
#[test]
fn constant_energy() {
START.call_once(::env_logger::init);
let path = Path::new(file!()).parent()
.unwrap()
.join("data")
.join("md-butane")
.join("nve.toml");
let mut config = Input::new(path).unwrap().read().unwrap();
let e_initial = config.system.total_energy();
config.simulation.run(&mut config.system, config.nsteps);
let e_final = config.system.total_energy();
assert!(f64::abs((e_initial - e_final) / e_final) < 1e-3);
}
| nds_detection() |
main.go | package main
import (
"context"
"errors"
"fmt"
"time"
"github.com/renbou/dontstress/stresser/lib/compile"
)
var (
errInternal = errors.New("internal error")
errCompilation = errors.New("compilation error")
)
type compilationResult struct {
err error
// Error message if err is not nil
message string
// Path to compiled executable
path string
}
const (
// 128 kb limit on compilation logs
CompilationLogLimit = 128_000
// 12 seconds for compilation should be more than enough
Timeout = time.Second * 12
)
func compileCode(path, lang string) *compilationResult {
compiler, ok := compile.GetCompiler(lang)
if !ok {
return &compilationResult{errCompilation, fmt.Sprintf("%s is not supported yet", lang), ""}
}
ctx, cancel := context.WithTimeout(context.Background(), Timeout)
defer cancel()
execPath, err := compiler.Compile(&compile.Options{
Path: path,
OutputLimit: CompilationLogLimit,
Context: ctx,
})
if err != nil {
// oh boy, here comes the error handling
ce := &compile.CompilationError{}
if errors.As(err, &ce) {
return &compilationResult{errCompilation, ce.Error(), ""}
} else {
return &compilationResult{errInternal, err.Error(), ""}
}
}
return &compilationResult{nil, "", execPath}
}
type CompilationRequest struct {
fileId string
}
func LambdaHandler(cr *CompilationRequest) error |
func main() {
}
| {
} |
read.rs | use crate::aggregations::ScanAggregation;
use crate::mmap::MmapBytesReader;
use crate::parquet::read_impl::read_parquet;
use crate::predicates::PhysicalIoExpr;
use crate::prelude::*;
use crate::RowCount;
use arrow::io::parquet::read;
use polars_core::prelude::*;
use std::io::{Read, Seek};
use std::sync::Arc;
/// Read Apache parquet format into a DataFrame.
#[must_use]
pub struct ParquetReader<R: Read + Seek> {
reader: R,
rechunk: bool,
n_rows: Option<usize>,
columns: Option<Vec<String>>,
projection: Option<Vec<usize>>,
parallel: bool,
row_count: Option<RowCount>,
}
impl<R: MmapBytesReader> ParquetReader<R> {
#[cfg(feature = "lazy")]
// todo! hoist to lazy crate
pub fn finish_with_scan_ops(
mut self,
predicate: Option<Arc<dyn PhysicalIoExpr>>,
aggregate: Option<&[ScanAggregation]>,
projection: Option<&[usize]>,
) -> Result<DataFrame> {
// this path takes predicates and parallelism into account
let metadata = read::read_metadata(&mut self.reader)?;
let schema = read::schema::infer_schema(&metadata)?;
let rechunk = self.rechunk;
read_parquet(
self.reader,
self.n_rows.unwrap_or(usize::MAX),
projection,
&schema,
Some(metadata),
predicate,
aggregate,
self.parallel,
self.row_count,
)
.map(|mut df| {
if rechunk {
df.rechunk();
};
df
})
}
/// Read the parquet file in parallel (default). The single threaded reader consumes less memory.
pub fn read_parallel(mut self, parallel: bool) -> Self {
self.parallel = parallel;
self
}
/// Stop parsing when `n` rows are parsed. By setting this parameter the parquet file will be read
/// sequentially.
pub fn with_n_rows(mut self, num_rows: Option<usize>) -> Self {
self.n_rows = num_rows;
self
}
/// Columns to select/project
pub fn with_columns(mut self, columns: Option<Vec<String>>) -> Self {
self.columns = columns;
self
}
/// Set the reader's column projection. This counts from 0, meaning that
/// `vec![0, 4]` would select the 1st and 5th column.
pub fn with_projection(mut self, projection: Option<Vec<usize>>) -> Self {
self.projection = projection;
self
}
/// Add a `row_count` column.
pub fn with_row_count(mut self, row_count: Option<RowCount>) -> Self {
self.row_count = row_count;
self
}
pub fn schema(mut self) -> Result<Schema> {
let metadata = read::read_metadata(&mut self.reader)?;
let schema = read::infer_schema(&metadata)?;
Ok((&schema.fields).into())
}
}
impl<R: MmapBytesReader> SerReader<R> for ParquetReader<R> {
fn new(reader: R) -> Self {
ParquetReader {
reader,
rechunk: false,
n_rows: None,
columns: None,
projection: None,
parallel: true,
row_count: None,
}
}
fn set_rechunk(mut self, rechunk: bool) -> Self {
self.rechunk = rechunk;
self
}
fn finish(mut self) -> Result<DataFrame> {
let metadata = read::read_metadata(&mut self.reader)?;
let schema = read::schema::infer_schema(&metadata)?;
if let Some(cols) = self.columns |
read_parquet(
self.reader,
self.n_rows.unwrap_or(usize::MAX),
self.projection.as_deref(),
&schema,
Some(metadata),
None,
None,
self.parallel,
self.row_count,
)
.map(|mut df| {
if self.rechunk {
df.rechunk();
}
df
})
}
}
| {
self.projection = Some(columns_to_projection(cols, &schema)?);
} |
unicast_socket.go | // Copyright 2015 Satoshi Konno. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package ssdp
import ()
// A UnicastSocket represents a unicast socket for SSDP. | // NewUnicastSocket returns a new UnicastSocket.
func NewUnicastSocket() *UnicastSocket {
ssdpSock := &UnicastSocket{}
ssdpSock.HTTPUSocket = NewHTTPUSocket()
return ssdpSock
}
// WriteRequest sends the specified request.
func (self *UnicastSocket) WriteRequest(req *Request) (int, error) {
return self.HTTPUSocket.Write(ADDRESS, PORT, req.Bytes())
}
// WriteBytes sends the specified bytes.
func (self *UnicastSocket) WriteBytes(addr string, port int, b []byte) (int, error) {
return self.HTTPUSocket.Write(addr, port, b)
}
// WriteResponse sends the specified response.
func (self *UnicastSocket) WriteResponse(addr string, port int, res *Response) (int, error) {
return self.HTTPUSocket.Write(addr, port, res.Bytes())
} | type UnicastSocket struct {
*HTTPUSocket
}
|
xfmr.py |
import torch
from tqdm import tqdm
from transformers import AutoTokenizer, AutoModel
import logging
from torch.nn import ConstantPad3d, ConstantPad2d
from layers.utils import set_model_device, set_tensor_device
'''
tutorial4 tokenization
https://mccormickml.com/2019/07/22/BERT-fine-tuning/
how to use clinical bert
https://huggingface.co/emilyalsentzer/Bio_ClinicalBERT
align ng character offsets with bert tokenization
https://github.com/LightTag/sequence-labeling-with-transformers/blob/master/sequence_aligner/dataset.py
'''
INPUT_IDS = 'input_ids'
ATTENTION_MASK = 'attention_mask'
OFFSET_MAPPING = 'offset_mapping'
PRETRAINED = "emilyalsentzer/Bio_ClinicalBERT"
def tokenize_documents(documents, \
pretrained=PRETRAINED,
add_special_tokens=True,
max_length=50,
return_attention_mask=True,
return_tensors='pt',
return_offsets_mapping=True,
is_split_into_words=False
):
logging.info("Tokenization using AutoTokenizer")
# Instantiate tokenizer
tokenizer = AutoTokenizer.from_pretrained(pretrained)
# Tokenize all of the sentences and map the tokens to their word IDs.
input_ids = []
mask = []
offsets = []
pbar = tqdm(total=len(documents))
for i, text in enumerate(documents):
# `encode_plus` will:
# (1) Tokenize the sentence.
# (2) Prepend the `[CLS]` token to the start.
# (3) Append the `[SEP]` token to the end.
# (4) Map tokens to their IDs.
# (5) Pad or truncate the sentence to `max_length`
# (6) Create attention masks for [PAD] tokens.
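# Illustrative shape of the result (hypothetical tokens; actual word pieces depend on the
# tokenizer vocabulary): with max_length=8, a sentence like "fever and cough" would come back as
# ids for "[CLS] fever and cough [SEP] [PAD] [PAD] [PAD]" with attention_mask [1, 1, 1, 1, 1, 0, 0, 0].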
encoded_dict = tokenizer.batch_encode_plus(
text, # Sentence to encode.
add_special_tokens = add_special_tokens, # Add '[CLS]' and '[SEP]'
max_length = max_length, # Pad & truncate all sentences.
padding = 'max_length',
truncation = True,
return_attention_mask = return_attention_mask, # Construct attn. masks.
return_tensors = return_tensors, # Return pytorch tensors.
return_offsets_mapping = return_offsets_mapping,
is_split_into_words = is_split_into_words)
input_ids.append(encoded_dict[INPUT_IDS])
mask.append(encoded_dict[ATTENTION_MASK])
offsets_ = encoded_dict[OFFSET_MAPPING].tolist()
offsets_ = [[tuple(token) for token in sentence] for sentence in offsets_]
offsets.append(offsets_)
if i == 0:
logging.info("-"*80)
logging.info("")
logging.info("Returned params:\n{}".format(encoded_dict.keys()))
logging.info("")
logging.info('Input:\n{}'.format(text))
logging.info("")
#logging.info('IDs: {}\n{}'.format(input_ids[0].shape, input_ids[0]))
logging.info('IDs: {}'.format(input_ids[0].shape))
logging.info("")
#logging.info('Attn: {}\n{}'.format(mask[0].shape, mask[0]))
logging.info('Attn: {}'.format(mask[0].shape))
wps = [tokenizer.convert_ids_to_tokens(ids_) for ids_ in input_ids[0].squeeze()]
logging.info("")
logging.info('Tok:\n')
for wps_ in wps[:10]:
logging.info(f'{wps_[:10]} ....')
#logging.info("")
#logging.info('Idx:\n{}'.format(offsets[0]))
#logging.info("")
#logging.info("-"*80)
pbar.update()
pbar.close()
logging.info("")
logging.info('Document count: {}'.format(len(input_ids)))
logging.info("")
return (input_ids, mask, offsets)
def encode_documents(input_ids, mask, \
pretrained=PRETRAINED,
device=None,
train=False):
|
def char2wordpiece(start, end, offsets):
'''
convert character indices to word piece indices
(i.e. document)
Parameters
----------
char_indices: character indices for span
offsets: offsets returned by transformer tokenizer
Returns
-------
word_indices: word piece indices for spans
'''
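# Illustrative example (hypothetical offsets, as returned by a fast tokenizer with
# return_offsets_mapping=True): with offsets = [(0, 0), (0, 3), (3, 7), (7, 8)], the character
# span (3, 7) maps to the word-piece span (2, 3) -- start inclusive, end exclusive.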
start_new = -1
end_new = -1
for index, (start_word, end_word) in enumerate(offsets):
# start_word = character index of word piece start (inclusive)
# end_word = character index of word piece end (exclusive)
# index = index of word peice in sentence
if (start_new == -1) and \
(start >= start_word) and \
(start < end_word):
start_new = index
if (end_new == -1) and \
(end > start_word) and \
(end <= end_word):
# add one so end_new is exclusive
end_new = index + 1
assert start_new != -1
assert end_new != -1
return (start_new, end_new)
def wordpiece2char(start, end, offsets):
'''
convert word piece indices to character indices for sequence of sentences
(i.e. document)
Parameters
----------
word_indices: word piece indices for spans
offsets: offsets returned by transformer tokenizer
Returns
-------
char_indices: character indices per spans
'''
indices = offsets[start:end]
# character index of start
start_new = indices[0][0]
# character index of end
end_new = indices[-1][-1]
return (start_new, end_new)
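# Illustrative example (same hypothetical offsets as above): wordpiece2char(2, 3, offsets)
# returns (3, 7), i.e. the inverse of the char2wordpiece mapping shown earlier.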
def demo():
#loggers = [logging.getLogger(name) for name in logging.root.manager.loggerDict]
#for logger in loggers:#
# logger.setLevel(logging.info)
documents = [['patient is reporting fever and cough.', 'chest x re indicates bilateral infile traits'],
['diffuse lung disease', 'reporting position is addr']]
input_ids, mask, offsets = tokenize_documents(documents, max_length=19)
embedding = encode_documents(input_ids, mask)
| logging.info("Embedding using AutoModel")
model = AutoModel.from_pretrained(pretrained)
if train:
model.train()
else:
model.eval()
set_model_device(model, device)
X = []
masks = []
pbar = tqdm(total=len(input_ids))
assert len(input_ids) == len(mask)
for i, (ids, msk) in enumerate(zip(input_ids, mask)):
ids = set_tensor_device(ids, device)
msk = set_tensor_device(msk, device)
x = model( \
ids,
token_type_ids=None,
attention_mask=msk)[0]
x = x.cpu().detach()
X.append(x)
if i == 1:
logging.info("Encode documents")
#logging.info("-"*80)
#logging.info("")
#logging.info('IDs: {}\n{}'.format(ids.shape, ids))
logging.info('IDs: {}'.format(ids.shape))
#logging.info("")
#logging.info('Mask: {}\n{}'.format(msk.shape, msk))
logging.info('Mask: {}'.format(msk.shape))
#logging.info("")
#logging.info('X: {}\n{}'.format(x.shape, x))
logging.info('X: {}'.format(x.shape))
logging.info('')
#logging.info("")
#logging.info("-"*80)
pbar.update()
pbar.close()
logging.info("")
logging.info('Document count: {}'.format(len(X)))
logging.info("")
return X |
each_group.rs | use nu_engine::{eval_block_with_redirect, CallExt};
use nu_protocol::ast::Call;
use nu_protocol::engine::{CaptureBlock, Command, EngineState, Stack};
use nu_protocol::{
Category, Example, IntoInterruptiblePipelineData, PipelineData, Signature, Span, Spanned,
SyntaxShape, Value,
};
#[derive(Clone)]
pub struct EachGroup;
impl Command for EachGroup {
fn name(&self) -> &str {
"each group"
}
fn signature(&self) -> Signature {
Signature::build("each group")
.required("group_size", SyntaxShape::Int, "the size of each group")
.required(
"block",
SyntaxShape::Block(Some(vec![SyntaxShape::Any])),
"the block to run on each group",
)
.category(Category::Filters)
}
fn usage(&self) -> &str {
"Runs a block on groups of `group_size` rows of a table at a time."
}
fn examples(&self) -> Vec<Example> {
let stream_test_1 = vec![
Value::Int {
val: 3,
span: Span::test_data(),
},
Value::Int {
val: 7,
span: Span::test_data(),
},
];
vec![Example {
example: "echo [1 2 3 4] | each group 2 { $it.0 + $it.1 }",
description: "Echo the sum of each pair",
result: Some(Value::List {
vals: stream_test_1,
span: Span::test_data(),
}),
}]
}
fn run(
&self,
engine_state: &EngineState,
stack: &mut Stack,
call: &Call,
input: PipelineData,
) -> Result<nu_protocol::PipelineData, nu_protocol::ShellError> {
let group_size: Spanned<usize> = call.req(engine_state, stack, 0)?;
let capture_block: CaptureBlock = call.req(engine_state, stack, 1)?;
let ctrlc = engine_state.ctrlc.clone();
//FIXME: add in support for external redirection when engine-q supports it generally
let each_group_iterator = EachGroupIterator {
block: capture_block,
engine_state: engine_state.clone(),
stack: stack.clone(),
group_size: group_size.item,
input: Box::new(input.into_iter()),
span: call.head,
};
Ok(each_group_iterator.into_pipeline_data(ctrlc))
}
}
struct EachGroupIterator {
block: CaptureBlock,
engine_state: EngineState,
stack: Stack,
group_size: usize,
input: Box<dyn Iterator<Item = Value> + Send>,
span: Span,
}
impl Iterator for EachGroupIterator {
type Item = Value;
fn next(&mut self) -> Option<Self::Item> {
let mut group = vec![];
let mut current_count = 0;
loop {
let item = self.input.next();
match item {
Some(v) => {
group.push(v);
current_count += 1;
if current_count >= self.group_size {
break;
}
}
None => break,
}
}
if group.is_empty() {
return None;
}
Some(run_block_on_vec(
group,
self.block.clone(),
self.engine_state.clone(),
self.stack.clone(),
self.span,
))
}
}
pub(crate) fn run_block_on_vec(
input: Vec<Value>,
capture_block: CaptureBlock,
engine_state: EngineState,
stack: Stack,
span: Span,
) -> Value {
let value = Value::List { vals: input, span };
let mut stack = stack.captures_to_stack(&capture_block.captures);
let block = engine_state.get_block(capture_block.block_id);
if let Some(var) = block.signature.get_positional(0) {
if let Some(var_id) = &var.var_id |
}
match eval_block_with_redirect(&engine_state, &mut stack, block, PipelineData::new(span)) {
Ok(pipeline) => pipeline.into_value(span),
Err(error) => Value::Error { error },
}
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn test_examples() {
use crate::test_examples;
test_examples(EachGroup {})
}
}
| {
stack.add_var(*var_id, value);
} |
types.py | from dataclasses import dataclass
from typing import Optional, List
@dataclass
| class RecipeConstraints:
meal: Optional[str] = None
ingredients: Optional[List[str]] = None
@dataclass
class Printable:
title: str = ""
ingredients: str = ""
preparation: str = ""
error_message: Optional[str] = None
warning_message: Optional[str] = None
info_message: Optional[str] = None
@dataclass
class FetchingError(Exception):
message: str = "An error ocurred"
@dataclass
class PrintInterrupt(Exception):
printable: Printable | |
0001_initial.py |
class Migration(migrations.Migration):
initial = True
dependencies = [
('auth', '0009_alter_user_last_name_max_length'),
]
operations = [
migrations.CreateModel(
name='User',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('password', models.CharField(max_length=128, verbose_name='password')),
('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')),
('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')),
('email', models.EmailField(max_length=255, unique=True)),
('name', models.CharField(max_length=255)),
('is_active', models.BooleanField(default=True)),
('is_staff', models.BooleanField(default=False)),
('groups', models.ManyToManyField(blank=True, help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.', related_name='user_set', related_query_name='user', to='auth.Group', verbose_name='groups')),
('user_permissions', models.ManyToManyField(blank=True, help_text='Specific permissions for this user.', related_name='user_set', related_query_name='user', to='auth.Permission', verbose_name='user permissions')),
],
options={
'abstract': False,
},
),
] | # Generated by Django 2.1.15 on 2021-04-08 16:02
from django.db import migrations, models |
|
suite_test.go | package docker_instruction_test
import (
"fmt"
"os"
"testing"
"github.com/prashantv/gostub"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
"github.com/onsi/gomega/gexec"
"github.com/werf/werf/pkg/testing/utils"
)
func TestIntegration(t *testing.T) |
var requiredSuiteTools = []string{"docker"}
var requiredSuiteEnvs []string
var testDirPath string
var werfBinPath string
var stubs = gostub.New()
var _ = SynchronizedBeforeSuite(func() []byte {
computedPathToWerf := utils.ProcessWerfBinPath()
return []byte(computedPathToWerf)
}, func(computedPathToWerf []byte) {
werfBinPath = string(computedPathToWerf)
})
var _ = SynchronizedAfterSuite(func() {}, func() {
gexec.CleanupBuildArtifacts()
})
var _ = BeforeEach(func() {
utils.BeforeEachOverrideWerfProjectName(stubs)
stubs.SetEnv("WERF_STAGES_STORAGE", ":local")
})
var _ = AfterEach(func() {
utils.RunSucceedCommand(
testDirPath,
werfBinPath,
"stages", "purge", "-s", ":local", "--force",
)
stubs.Reset()
})
| {
if !utils.MeetsRequirements(requiredSuiteTools, requiredSuiteEnvs) {
fmt.Println("Missing required tools")
os.Exit(1)
}
RegisterFailHandler(Fail)
RunSpecs(t, "Build/Stapel Image/Docker Instructions Suite")
} |
algorithm.rs | //! Implementation of the first depth search algorithm
use super::variables::{Direction, Position};
use super::messages::END_MESSAGE;
use super::map::Map;
use crossterm::style::Color;
use crossterm::Crossterm;
use super::rand;
use super::rand::distributions::{IndependentSample, Range};
use std::io::{stdout, Write};
use std::{thread, time};
pub struct FirstDepthSearch<'crossterm>
{
direction: Direction,
map: Map,
stack: Vec<Position>,
root_pos: Position,
is_terminated: bool,
crossterm: &'crossterm Crossterm
}
impl<'crossterm> FirstDepthSearch<'crossterm>
{
pub fn new(map: Map, start_pos: Position, crossterm: &'crossterm Crossterm) -> FirstDepthSearch<'crossterm>
{
FirstDepthSearch
{
direction: Direction::Up,
map: map,
stack: Vec::new(),
root_pos: start_pos,
is_terminated: false,
crossterm: crossterm,
}
}
pub fn start(&mut self)
{
self.is_terminated = false;
// push first position on the stack
self.stack.push(self.root_pos);
let mut cursor = self.crossterm.cursor();
cursor.hide();
// loop until there are now items left in the stack.
loop {
if self.stack.len() == 0
{
break;
}
self.choose_random_neighbor();
if self.is_terminated
{
break;
}
self.update_position();
let cell = self.crossterm.paint(" ").on(Color::Blue);
let pos = self.root_pos.clone();
let x = pos.x as u16;
let y = pos.y as u16;
cursor.goto(x,y).print(cell);
::std::io::stdout().flush();
thread::sleep(time::Duration::from_millis(2));
}
}
| fn choose_random_neighbor(&mut self)
{
let mut available_directions: Vec<Direction> = Vec::with_capacity(4);
// check every direction; if the direction is not visited we can add it to the list.
// note that if the y or x is 0 we don't want to subtract because we would get a subtract overflow.
if self.root_pos.y != 0 && !self.map.is_cell_visited(self.root_pos.x, self.root_pos.y - 1)
{
available_directions.push(Direction::Up)
}
if !&self.map.is_cell_visited(self.root_pos.x, self.root_pos.y + 1)
{
available_directions.push(Direction::Down)
}
if self.root_pos.x != 0 && !self.map.is_cell_visited(self.root_pos.x - 1, self.root_pos.y)
{
available_directions.push(Direction::Left)
}
if !&self.map.is_cell_visited(self.root_pos.x + 1, self.root_pos.y)
{
available_directions.push(Direction::Right)
}
let directions_count = available_directions.len();
// if there are no directions left we need to backtrack until we find directions to go to.
if directions_count != 0
{
let step = Range::new(0, directions_count);
let mut rng = rand::thread_rng();
let choice = step.ind_sample(&mut rng);
// set the current direction to the new random generated direction.
self.direction = available_directions[choice];
}
else {
self.find_first_possible_direction();
}
}
/// Find a direction to go to; if there is no direction, pop the current position off the stack to backtrack to the previous position.
fn find_first_possible_direction(&mut self)
{
// if there are no elements left in the stack that means we have visited all cells and we can terminate the program.
if let Some(previous_cell) = &self.stack.pop()
{
// update root pos to previous cell and continue searching for new neighbours
self.root_pos = *previous_cell;
self.choose_random_neighbor();
}
else {
self.is_terminated = true;
}
}
/// update the root position to the new direction we went in
fn update_position(&mut self)
{
match self.direction
{
Direction::Up => self.root_pos.y -= 1,
Direction::Down => self.root_pos.y += 1,
Direction::Left => self.root_pos.x -= 1,
Direction::Right => self.root_pos.x += 1,
_ => panic!()
};
self.map.set_visited(self.root_pos.x, self.root_pos.y);
self.stack.push(self.root_pos);
}
} | /// With this function we choose a random neighbor that we haven't visited yet.
util.rs | use std::fmt;
#[derive(Debug, PartialEq)]
pub struct Error {
pub msg: String,
pub line: u32,
pub col: u32,
}
impl fmt::Display for Error {
fn | (&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(
f,
"Error: {}\nLine: {}, Column: {}\n",
self.msg, self.line, self.col
)
}
}
| fmt |
hwdbg.ts | namespace pxt {
// keep in sync with RefCounted.h in Codal
export const REF_TAG_STRING = 1
export const REF_TAG_BUFFER = 2
export const REF_TAG_IMAGE = 3
export const REF_TAG_NUMBER = 32
export const REF_TAG_ACTION = 33
}
namespace pxt.HWDBG {
import Cloud = pxt.Cloud;
import U = pxt.Util;
import H = pxt.HF2;
const HF2_DBG_GET_GLOBAL_STATE = 0x53fc66e0
const HF2_DBG_RESUME = 0x27a55931
const HF2_DBG_RESTART = 0x1120bd93
const HF2_DBG_GET_STACK = 0x70901510
const HF2_EV_DBG_PAUSED = 0x3692f9fd
const r32 = H.read32
export interface StateInfo {
numGlobals: number;
globalsPtr: number;
}
let isHalted = false
let lastCompileResult: pxtc.CompileResult;
let onHalted: Promise<void>;
let haltHandler: () => void;
let cachedStaticState: StateInfo
let currBreakpoint: pxtc.Breakpoint;
let callInfos: pxt.Map<ExtCallInfo>;
let lastFlash: pxtc.UF2.ShiftedBuffer
let hid: pxt.HF2.Wrapper
interface ExtCallInfo {
from: pxtc.ProcDebugInfo;
to: pxtc.ProcDebugInfo;
stack: number;
}
function taggedSpecialValue(n: number) { return (n << 2) | 2 }
export const taggedUndefined = 0
export const taggedNull = taggedSpecialValue(1)
export const taggedFalse = taggedSpecialValue(2)
export const taggedTrue = taggedSpecialValue(16)
export var postMessage: (msg: pxsim.DebuggerMessage) => void = msg => console.log(msg)
function clearAsync() {
isHalted = false
lastCompileResult = null
cachedStaticState = null
return Promise.resolve()
}
export function decodeValue(n: number): any {
if (n & 1)
return n >> 1
if (n == 0)
return undefined
if (n & 2) {
if (n == taggedNull) return null
if (n == taggedFalse) return false
if (n == taggedTrue) return true
return { tagged: n >> 2 }
}
return { ptr: n }
}
function readMemAsync(addr: number, numbytes: number): Promise<Uint8Array> {
U.assert(!(addr & 3))
U.assert(addr >= 0)
if (addr < 2 * 1024 * 1024) {
// assume these sit in flash
let res = new Uint8Array(numbytes)
addr -= lastFlash.start
U.memcpy(res, 0, lastFlash.buf, addr, numbytes)
return Promise.resolve(res)
}
let maxBytes = hid.maxMsgSize - 32
if (numbytes > maxBytes) {
let promises: Promise<Uint8Array>[] = []
while (numbytes > 0) {
let n = Math.min(maxBytes, numbytes)
promises.push(readMemAsync(addr, n))
numbytes -= n
addr += n
}
return Promise.all(promises)
.then(U.uint8ArrayConcat)
} else {
return hid.readWordsAsync(addr, Math.ceil(numbytes / 4))
.then(rr => {
if (rr.length > numbytes)
return rr.slice(0, numbytes)
else
return rr
})
}
}
export function heapExpandAsync(v: any): Promise<any> {
if (typeof v != "object" || !v) return Promise.resolve(v)
if (typeof v.ptr == "number") {
// there should be no unaligned pointers
if (v.ptr & 3)
return Promise.resolve({ unalignedPtr: v.ptr })
let tag = 0
// 56 bytes of data fit in one HID packet (with 5 bytes of header and 3 bytes of padding)
return readMemAsync(v.ptr, 56)
.then(buf => {
tag = H.read16(buf, 2)
let neededLength = buf.length
if (tag == REF_TAG_STRING || tag == REF_TAG_BUFFER) {
neededLength = H.read16(buf, 4) + 6
} else if (tag == REF_TAG_IMAGE) {
neededLength = H.read16(buf, 4) * H.read16(buf, 8) + 8
} else if (tag == REF_TAG_NUMBER) {
neededLength = 8 + 4
} else {
// TODO
}
if (neededLength > buf.length) {
return readMemAsync(v.ptr + buf.length, neededLength - buf.length)
.then(secondary => U.uint8ArrayConcat([buf, secondary]))
} else if (neededLength < buf.length) {
return buf.slice(0, neededLength)
} else {
return buf
}
})
.then<any>(buf => {
if (tag == REF_TAG_STRING)
return U.uint8ArrayToString(buf.slice(6))
else if (tag == REF_TAG_BUFFER)
return { type: "buffer", data: buf.slice(6) }
else if (tag == REF_TAG_IMAGE)
return {
type: "image",
data: buf.slice(8),
width: H.read16(buf, 4),
height: H.read16(buf, 8),
}
else if (tag == REF_TAG_NUMBER)
return new Float64Array(buf.buffer.slice(4))[0]
else
return {
type: "unknown",
tag: tag,
refcnt: H.read16(buf, 0),
data: buf.slice(4)
}
})
} else {
return Promise.resolve(v)
}
}
export function heapExpandMapAsync(vars: pxsim.Variables) {
let promises: Promise<void>[] = []
for (let k of Object.keys(vars)) {
promises.push(heapExpandAsync(vars[k])
.then((r: any) => {
vars[k] = r
//console.log("set", k, "to", r, "prev", vars[k], "NOW", vars)
}))
}
return Promise.all(promises)
.then(() => {
//console.log("FIN", vars)
})
}
function buildFrames(stack: number[], msg: pxsim.DebuggerBreakpointMessage) {
let currAddr = currBreakpoint.binAddr
let sp = 0
let pi = lastCompileResult.procDebugInfo.filter(p =>
p.codeStartLoc <= currAddr && currAddr <= p.codeEndLoc)[0]
while (true) {
if (!pi)
break // ???
if (pi == lastCompileResult.procDebugInfo[0])
break // main
let bp = findPrevBrkp(currAddr)
let info = U.clone(bp) as any as pxtc.FunctionLocationInfo
info.functionName = pi.name
msg.stackframes.push({
locals: {},
funcInfo: info,
breakpointId: bp.id,
})
let frame = msg.stackframes[msg.stackframes.length - 1]
let idx = 0
for (let l of pi.locals) {
U.assert(l.index == idx++)
frame.locals[l.name] = decodeValue(stack[sp++])
}
currAddr = stack[sp++] & 0x7ffffffe
let ci = callInfos[currAddr + ""]
for (let l of pi.args) {
frame.locals[l.name] = decodeValue(stack[sp + (pi.args.length - 1 - l.index)])
}
if (!ci) break
pi = ci.from
sp += ci.stack - pi.localsMark
}
}
function findPrevBrkp(addr: number) {
let bb = lastCompileResult.breakpoints
let brkMatch = bb[0]
let bestDelta = Infinity
for (let b of bb) {
let delta = addr - b.binAddr
// console.log(`${b.line+1}: addr=${b.binAddr} d=${delta}`)
if (delta >= 0 && delta < bestDelta) {
bestDelta = delta
brkMatch = b
}
}
return brkMatch
}
function corePaused(buf: Uint8Array) {
if (isHalted) return Promise.resolve()
isHalted = true
let msg: pxsim.DebuggerBreakpointMessage
return getHwStateAsync()
.then(st => {
let w = H.decodeU32LE(buf)
let pc = w[0]
let globals: pxsim.Variables = {}
for (let l of lastCompileResult.procDebugInfo[0].locals) {
let gbuf = st.globals
let readV = () => {
switch (l.type) {
case "uint32": return H.read32(gbuf, l.index)
case "int32": return H.read32(gbuf, l.index) | 0
case "uint16": return H.read16(gbuf, l.index)
case "int16": return (H.read16(gbuf, l.index) << 16) >> 16
case "uint8": return gbuf[l.index]
case "int8": return (gbuf[l.index] << 24) >> 24
default: return null
}
}
let v: any = readV()
if (v === null) {
U.assert((l.index & 3) == 0)
v = decodeValue(H.read32(gbuf, l.index))
}
globals[l.name] = v
}
| breakpointId: currBreakpoint.id,
globals: globals,
stackframes: []
}
haltHandler()
return hid.talkAsync(HF2_DBG_GET_STACK)
})
.then(stack => {
buildFrames(H.decodeU32LE(stack), msg);
let maps = [msg.globals].concat(msg.stackframes.map(s => s.locals))
return Promise.map(maps, heapExpandMapAsync)
})
.then(() => postMessage(msg))
}
function clearHalted() {
isHalted = false
onHalted = new Promise<void>((resolve, reject) => {
haltHandler = resolve
})
}
export function startDebugAsync(compileRes: pxtc.CompileResult, hidWr: H.Wrapper) {
hid = hidWr
hid.onEvent(HF2_EV_DBG_PAUSED, corePaused)
return clearAsync()
.then(() => {
lastCompileResult = compileRes
callInfos = {}
let procLookup: pxtc.ProcDebugInfo[] = []
for (let pdi of compileRes.procDebugInfo) {
procLookup[pdi.idx] = pdi
}
for (let pdi of compileRes.procDebugInfo) {
//console.log(pdi)
for (let ci of pdi.calls) {
callInfos[ci.addr + ""] = {
from: pdi,
to: procLookup[ci.procIndex],
stack: ci.stack
}
}
}
})
.then(() => {
let f = lastCompileResult.outfiles[pxtc.BINARY_UF2]
let blockBuf = U.stringToUint8Array(atob(f))
lastFlash = pxtc.UF2.toBin(blockBuf)
let blocks = pxtc.UF2.parseFile(blockBuf)
return hid.reflashAsync(blocks) // this will reset into app at the end
})
.then(() => hid.talkAsync(HF2_DBG_RESTART).catch(e => { }))
.then(() => Promise.delay(200))
.then(() => hid.reconnectAsync())
.then(clearHalted)
.then(waitForHaltAsync)
}
export function handleMessage(msg: pxsim.DebuggerMessage) {
console.log("HWDBGMSG", msg)
if (msg.type != "debugger")
return
let stepInto = false
switch (msg.subtype) {
case 'stepinto':
stepInto = true
case 'stepover':
resumeAsync(stepInto)
break
}
}
export function resumeAsync(into = false) {
return Promise.resolve()
.then(() => hid.talkAsync(HF2_DBG_RESUME, H.encodeU32LE([into ? 1 : 3])))
.then(clearHalted)
}
export interface HwState {
staticState: StateInfo;
globals: Uint8Array;
}
export function waitForHaltAsync() {
if (!onHalted) onHalted = Promise.resolve();
return onHalted
}
function getStaticStateAsync() {
if (cachedStaticState) return Promise.resolve(cachedStaticState)
return hid.talkAsync(HF2_DBG_GET_GLOBAL_STATE)
.then(buf => (cachedStaticState = {
numGlobals: r32(buf, 0),
globalsPtr: r32(buf, 4)
}))
}
export function getHwStateAsync() {
return getStaticStateAsync()
.then(st => hid.readWordsAsync(st.globalsPtr, st.numGlobals))
.then(buf => {
let res: HwState = {
staticState: cachedStaticState,
globals: buf
}
return res
})
}
} | currBreakpoint = findPrevBrkp(pc)
msg = {
type: 'debugger',
subtype: 'breakpoint', |
f_d_c.py | '''
Author: Ashutosh Panigrahi
Year: 2021
Version: 0.0.1
'''
# This piece of code detects faces in a given image (png/jpg or other supported formats).
import __future__
import click
import os
import re
import face_recognition.api as fcrec
import multiprocessing
import sys
import itertools
def print_result(filename, location):
top, right, bottom, left = location
print("{},{},{},{},{}".format(filename, top, right, bottom, left))
def test_image(image_to_check, model, upsample):
unknown_image = fcrec.load_image_file(image_to_check)
face_locations = fcrec.face_locations(unknown_image, number_of_times_to_upsample=upsample, model=model)
for face_location in face_locations:
print_result(image_to_check, face_location)
def | (folder):
return [os.path.join(folder, f) for f in os.listdir(folder) if re.match(r'.*\.(jpg|jpeg|png)', f, flags=re.I)]
def process_images_in_process_pool(images_to_check, number_of_cpus, model, upsample):
if number_of_cpus == -1:
processes = None
else:
processes = number_of_cpus
context = multiprocessing
if "forkserver" in multiprocessing.get_all_start_methods():
context = multiprocessing.get_context("forkserver")
pool = context.Pool(processes=processes)
function_parameters = zip(
images_to_check,
itertools.repeat(model),
itertools.repeat(upsample),
)
pool.starmap(test_image, function_parameters)
@click.command()
@click.argument('image_to_check')
@click.option('--cpus', default=1, help='number of CPU cores to use in parallel. -1 means "use all in system"')
@click.option('--model', default="hog", help='Which face detection model to use. Options are "hog" or "cnn".')
@click.option('--upsample', default=0, help='How many times to upsample the image looking for faces. Higher numbers find smaller faces.')
def main(image_to_check, cpus, model, upsample):
# Multi-core processing only supported on Python 3.4 or greater
if (sys.version_info < (3, 4)) and cpus != 1:
click.echo("WARNING: Multi-processing support requires Python 3.4 or greater. Falling back to single-threaded processing!")
cpus = 1
if os.path.isdir(image_to_check):
if cpus == 1:
[test_image(image_file, model, upsample) for image_file in image_files_in_folder(image_to_check)]
else:
process_images_in_process_pool(image_files_in_folder(image_to_check), cpus, model, upsample)
else:
test_image(image_to_check, model, upsample)
if __name__ == "__main__":
main() | image_files_in_folder |
wrapper.py | import torch
from torch.optim import Optimizer
class OptimWrapper(Optimizer):
# Mixin class that defines convenient functions for writing Optimizer Wrappers
def | (self, optim):
self.optim = optim
def __getstate__(self):
return self.optim.__getstate__()
def __setstate__(self, state):
self.optim.__setstate__(state)
@property
def state(self):
return self.optim.state
@property
def param_groups(self):
return self.optim.param_groups
@param_groups.setter
def param_groups(self, value):
self.optim.param_groups = value
def state_dict(self):
return self.optim.state_dict()
def load_state_dict(self, state_dict):
self.optim.load_state_dict(state_dict)
def zero_grad(self):
self.optim.zero_grad()
def add_param_group(self, param_group):
self.optim.add_param_group(param_group)
@property
def defaults(self):
return self.optim.defaults
@defaults.setter
def defaults(self, defaults):
self.optim.defaults = defaults
@torch.no_grad()
def step(self, closure=None):
self.optim.step(closure=closure)
def __repr__(self):
return "%s(%r)" % (self.__class__.__name__, self.optim) | __init__ |
main.rs | use three_d::*;
fn main() {
let args: Vec<String> = std::env::args().collect();
let window = Window::new(WindowSettings {
title: "Lighting!".to_string(),
min_size: (512, 512),
max_size: Some((1280, 720)),
..Default::default()
})
.unwrap();
let context = window.gl().unwrap();
let mut pipeline = DeferredPipeline::new(&context).unwrap();
let target = vec3(0.0, 0.0, 0.0);
let mut camera = Camera::new_perspective(
&context,
window.viewport().unwrap(),
vec3(2.0, 2.0, 5.0),
target,
vec3(0.0, 1.0, 0.0),
degrees(45.0),
0.1,
1000.0,
)
.unwrap();
let mut gui = three_d::GUI::new(&context).unwrap();
Loader::load(
&["examples/assets/suzanne.obj", "examples/assets/suzanne.mtl"],
move |mut loaded| {
let (monkey_cpu_meshes, monkey_cpu_materials) =
loaded.obj("examples/assets/suzanne.obj").unwrap();
let mut monkey = Mesh::new_with_material(
&context,
&monkey_cpu_meshes[0],
&Material::new(&context, &monkey_cpu_materials[0]).unwrap(),
)
.unwrap();
monkey.cull = CullType::Back;
let mut plane = Mesh::new_with_material(
&context,
&CPUMesh {
positions: vec![ | -10000.0, -1.0, 10000.0, 10000.0, -1.0, 10000.0, 0.0, -1.0, -10000.0,
],
normals: Some(vec![0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0]),
..Default::default()
},
&Material {
color_source: ColorSource::Color(vec4(0.5, 0.7, 0.3, 1.0)),
..Default::default()
},
)
.unwrap();
let ambient_light = AmbientLight {
color: vec3(1.0, 1.0, 1.0),
intensity: 0.2,
};
let mut directional_light0 =
DirectionalLight::new(&context, 1.0, &vec3(1.0, 0.0, 0.0), &vec3(0.0, -1.0, 0.0))
.unwrap();
let mut directional_light1 =
DirectionalLight::new(&context, 1.0, &vec3(0.0, 1.0, 0.0), &vec3(0.0, -1.0, 0.0))
.unwrap();
let mut point_light0 = PointLight::new(
&context,
1.0,
&vec3(0.0, 1.0, 0.0),
&vec3(0.0, 0.0, 0.0),
0.5,
0.05,
0.005,
)
.unwrap();
let mut point_light1 = PointLight::new(
&context,
1.0,
&vec3(1.0, 0.0, 0.0),
&vec3(0.0, 0.0, 0.0),
0.5,
0.05,
0.005,
)
.unwrap();
let mut spot_light = SpotLight::new(
&context,
2.0,
&vec3(0.0, 0.0, 1.0),
&vec3(0.0, 0.0, 0.0),
&vec3(0.0, -1.0, 0.0),
25.0,
0.1,
0.001,
0.0001,
)
.unwrap();
// main loop
let mut shadows_enabled = true;
let mut ambient_enabled = true;
let mut directional_enabled = true;
let mut spot_enabled = true;
let mut point_enabled = true;
window
.render_loop(move |mut frame_input| {
let mut change = frame_input.first_frame;
let mut panel_width = frame_input.viewport.width;
change |= gui
.update(&mut frame_input, |gui_context| {
use three_d::egui::*;
SidePanel::left("side_panel", panel_width as f32).show(
gui_context,
|ui| {
ui.heading("Debug Panel");
ui.label("Surface parameters");
ui.add(
Slider::f32(&mut monkey.material.metallic, 0.0..=1.0)
.text("Monkey Metallic"),
);
ui.add(
Slider::f32(&mut monkey.material.roughness, 0.0..=1.0)
.text("Monkey Roughness"),
);
ui.add(
Slider::f32(&mut plane.material.metallic, 0.0..=1.0)
.text("Plane Metallic"),
);
ui.add(
Slider::f32(&mut plane.material.roughness, 0.0..=1.0)
.text("Plane Roughness"),
);
ui.label("Lighting model");
ui.radio_value(
&mut pipeline.lighting_model,
LightingModel::Phong,
"Phong",
);
ui.radio_value(
&mut pipeline.lighting_model,
LightingModel::Blinn,
"Blinn",
);
ui.radio_value(
&mut pipeline.lighting_model,
LightingModel::Cook(
NormalDistributionFunction::Blinn,
GeometryFunction::SmithSchlickGGX,
),
"Cook (Blinn)",
);
ui.radio_value(
&mut pipeline.lighting_model,
LightingModel::Cook(
NormalDistributionFunction::Beckmann,
GeometryFunction::SmithSchlickGGX,
),
"Cook (Beckmann)",
);
ui.radio_value(
&mut pipeline.lighting_model,
LightingModel::Cook(
NormalDistributionFunction::TrowbridgeReitzGGX,
GeometryFunction::SmithSchlickGGX,
),
"Cook (Trowbridge-Reitz GGX)",
);
ui.label("Debug options");
ui.radio_value(
&mut pipeline.debug_type,
DebugType::NONE,
"None",
);
ui.radio_value(
&mut pipeline.debug_type,
DebugType::POSITION,
"Position",
);
ui.radio_value(
&mut pipeline.debug_type,
DebugType::NORMAL,
"Normal",
);
ui.radio_value(
&mut pipeline.debug_type,
DebugType::COLOR,
"Color",
);
ui.radio_value(
&mut pipeline.debug_type,
DebugType::DEPTH,
"Depth",
);
ui.radio_value(
&mut pipeline.debug_type,
DebugType::DIFFUSE,
"Diffuse",
);
ui.radio_value(
&mut pipeline.debug_type,
DebugType::SPECULAR,
"Specular",
);
ui.radio_value(
&mut pipeline.debug_type,
DebugType::POWER,
"Power",
);
ui.label("Light options");
ui.checkbox(&mut ambient_enabled, "Ambient light");
ui.checkbox(&mut directional_enabled, "Directional lights");
ui.checkbox(&mut spot_enabled, "Spot lights");
ui.checkbox(&mut point_enabled, "Point lights");
if ui.checkbox(&mut shadows_enabled, "Shadows").clicked() {
if !shadows_enabled {
spot_light.clear_shadow_map();
directional_light0.clear_shadow_map();
directional_light1.clear_shadow_map();
}
}
},
);
panel_width = gui_context.used_size().x as u32;
})
.unwrap();
let viewport = Viewport {
x: panel_width as i32,
y: 0,
width: frame_input.viewport.width - panel_width,
height: frame_input.viewport.height,
};
change |= camera.set_viewport(viewport).unwrap();
for event in frame_input.events.iter() {
match event {
Event::MouseMotion {
delta,
handled,
button,
..
} => {
if !handled && *button == Some(MouseButton::Left) {
camera
.rotate_around_with_fixed_up(
&target,
0.1 * delta.0 as f32,
0.1 * delta.1 as f32,
)
.unwrap();
change = true;
}
}
Event::MouseWheel { delta, handled, .. } => {
if !handled {
camera
.zoom_towards(&target, 0.02 * delta.1 as f32, 5.0, 100.0)
.unwrap();
change = true;
}
}
_ => {}
}
}
let time = 0.001 * frame_input.accumulated_time;
let c = time.cos() as f32;
let s = time.sin() as f32;
directional_light0.set_direction(&vec3(-1.0 - c, -1.0, 1.0 + s));
directional_light1.set_direction(&vec3(1.0 + c, -1.0, -1.0 - s));
spot_light.set_position(&vec3(3.0 + c, 5.0 + s, 3.0 - s));
spot_light.set_direction(&-vec3(3.0 + c, 5.0 + s, 3.0 - s));
point_light0.set_position(&vec3(-5.0 * c, 5.0, -5.0 * s));
point_light1.set_position(&vec3(5.0 * c, 5.0, 5.0 * s));
// Draw
if shadows_enabled {
directional_light0
.generate_shadow_map(
&vec3(0.0, 0.0, 0.0),
4.0,
20.0,
1024,
1024,
&[&monkey],
)
.unwrap();
directional_light1
.generate_shadow_map(
&vec3(0.0, 0.0, 0.0),
4.0,
20.0,
1024,
1024,
&[&monkey],
)
.unwrap();
spot_light
.generate_shadow_map(20.0, 1024, &[&monkey])
.unwrap();
}
// Geometry pass
if change {
pipeline.geometry_pass(&camera, &[&monkey, &plane]).unwrap();
}
// Light pass
Screen::write(&context, ClearState::default(), || {
pipeline.light_pass(
&camera,
if ambient_enabled {
Some(&ambient_light)
} else {
None
},
&if directional_enabled {
vec![&directional_light0, &directional_light1]
} else {
vec![]
},
&if spot_enabled {
vec![&spot_light]
} else {
vec![]
},
&if point_enabled {
vec![&point_light0, &point_light1]
} else {
vec![]
},
)?;
gui.render().unwrap();
Ok(())
})
.unwrap();
if args.len() > 1 {
                    // Used to automatically generate screenshots of the examples; can safely be ignored.
FrameOutput {
screenshot: Some(args[1].clone().into()),
exit: true,
..Default::default()
}
} else {
FrameOutput::default()
}
})
.unwrap();
},
);
} | |
quicksort_test.go | package quicksort
import (
"fmt"
"github.com/goris/algorithm-design/quicksort"
"testing"
)
func TestQuickSort(t *testing.T) {
fmt.Println("Testing QuickSort")
var arr []int
var expected []int
arr = []int{3, 9, 8, 4, 6, 10, 2, 5, 7, 1}
expected = []int{1, 2, 3, 4, 5, 6, 7, 8, 9, 10} | if arr[9] != expected[9] {
t.Errorf("expected\n%v\n,got:\n%v", expected, arr)
}
}
/* func TestCountQuickSortComparisonsLeft(t *testing.T) {
fmt.Println("Testing QuickSort")
var arr []int
var expected []int
arr = []int{3,9,8,4,6,10,2,5,7,1}
expected = []int{1,2,3,4,5,6,7,8,9,10}
quicksort.QuickSort(arr, 0, len(arr)-1)
if arr[9] != expected[9] {
t.Errorf("expected\n%v\n,got:\n%v", expected, arr)
}
}
*/ | quicksort.QuickSort(arr, 0, len(arr))
|
conf.py | # -*- coding: utf-8 -*-
#
# AgavePy documentation build configuration file, created by
# sphinx-quickstart on Mon Feb 5 11:08:11 2018.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['sphinx.ext.autodoc',
'sphinx.ext.viewcode']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'AgavePy'
copyright = u'2018- Texas Advanced Computing Center'
author = u'Texas Advanced Computing Center'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = u'1.0.0'
# The full version, including alpha/beta/rc tags.
release = u'1.0.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# Implement
# https://rackerlabs.github.io/docs-rackspace/tools/rtd-tables.html
html_static_path = ['_static']
html_context = {
'css_files': [
'_static/theme_overrides.css', # override wide tables in RTD theme
],
}
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# This is required for the alabaster theme
# refs: http://alabaster.readthedocs.io/en/latest/installation.html#sidebars
html_sidebars = {
'**': [
'about.html',
'navigation.html',
'relations.html', # needs 'show_related': True theme option to display
'searchbox.html'
]
}
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'AgavePydoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '', | # 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'AgavePy.tex', u'AgavePy Documentation',
u'Joe Stubbs, Walter Moreira, Matt Vaughn', 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'agavepy', u'AgavePy Documentation',
[author], 1)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'AgavePy', u'AgavePy Documentation',
author, 'AgavePy', 'One line description of project.',
'Miscellaneous'),
] |
# Latex figure (float) alignment
# |
file.go | package conf
import (
"os"
"github.com/spf13/viper"
)
// InitializeConfigFile will read a file from the provided path
// if it's found, it will read the config with viper.
// If not, it will create and initialize the file with defaults
func InitializeConfigFile(path string) (bool, error) {
viper.SetConfigFile(path)
if _, e := os.Stat(path); os.IsNotExist(e) {
// create the file
emptyFile, err := os.Create(path)
if err != nil { | }
defer emptyFile.Close()
err = viper.WriteConfig()
return true, err
}
err := viper.ReadInConfig()
return false, err
} | return false, err |
lftracker_v1_DzLR5a5Rbkx8BoQ2.js | !function(){"use strict";function a(e){var n=r();return n[e]=n[e]||function(e){return{id:e,plugins:{},getPlugins:function(){var e=[];for(var n in this.plugins)e.push(this.plugins[n]);return e}}}(e),n[e]}function o(e,n){var t=a(e);return t.plugins[n]=t.plugins[n]||{},t.plugins[n]}function r(){var e=l();return e.r=e.r||{},e.r}e()&&(window.ldfdr.registerTracker=function(e,n,t,r,i){var o=a(e);return o.track=n,o.identify=t,o.pageview=r,o.getClientId=i,o},window.ldfdr.setTrackerOption=function(e,n,t){a(e)[n]=t},window.ldfdr.registerPlugin=function(e,n,t){var r=a(e),i=o(e,n);i.init=t,r.pluginsInitialized&&i.init()},window.ldfdr.registerPluginConfig=function(e,n,t){o(e,n).config=t});var f="DzLR5a5Rbkx8BoQ2";function c(e,n){return(n=e.GoogleAnalyticsObject||"ga")&&e[n]?e[n]:null}function l(){return window.ldfdr}function e(){return"undefined"!=typeof window&&void 0!==window.ldfdr}function t(n){var e=Object.values(r());return O(n)?e.map(function(e){return n(e)}):e}function | (){return l().getTracker(f)}function i(){for(var e={clientIds:[],trackingIds:[]},n=function(e){return e?"function"!=typeof e.getAll?[]:e.getAll():[]}(c(window)),t=0;t<n.length;t++){var r=n[t].get("clientId"),i=n[t].get("trackingId");-1===e.clientIds.indexOf(r)&&e.clientIds.push(r),-1===e.trackingIds.indexOf(i)&&e.trackingIds.push(i)}if(!e.clientIds.length){var o=function(){var e=R("_ga");if(e){var n=e.split(".");if(!(n.length<2))return n[n.length-2]+"."+n[n.length-1]}}();o&&e.clientIds.push(o)}return e}function u(){var e=d().foreignCookieSettings,n=[];if(!e)return n;for(var t=function(){for(var e=document.cookie.split(";"),n={},t=0;t<e.length;t++){var r=e[t].split("=");n[(r[0]+"").trim()]=unescape(r.slice(1).join("="))}return n}(),r=Object.keys(t),i=Object.keys(e),o=0;o<i.length;o++)for(var a=i[o],c=e[a],u=0;u<r.length;u++){var f=r[u];if(f.match(c)){var l=t[f];n.push({type:a,value:l})}}return n}function g(){return!0===d().lfaCookieEnabled&&"1"===y.cookie}function s(){var e;return g()?((e=function(e){return function(e,n){var t=x(n);return t?void 0===t[e]?0:t[e]:0}(e,C)||function(e,n){var t=localStorage.getItem(N(e,n));if(null!=t){var r=localStorage.getItem(A(n));null!=r&&(new Date).toISOString()>r&&(t=0)}else t=0;return t}(e,C)}(f))&&function(e,n){return e.substring(0,n.length)===n}(e,S)?_(f,e):e=_(f),e):e=function(e){var n=U((navigator.userAgent||"")+(navigator.platform||"")+JSON.stringify(y)+e).slice(0,16);return S+"."+n+".NC"}(f)}function p(e){var n=d();void 0===n.gaInitRetries&&(n.gaInitRetries=0);var t=1<=n.gaInitRetries;return function(e){return"function"==typeof c(e)&&"function"==typeof c(e).getAll}(window)?e():t?e():(setTimeout(function(){return p(e)},100*Math.pow(2,n.gaInitRetries)),void(n.gaInitRetries+=1))}function v(e,n){p(function(){e=e||{},function(e,n){var t="https://tr.lfeeder.com?sid="+encodeURIComponent(e.scriptId)+"&data="+D(JSON.stringify(e)),r=document.createElement("img");r.width=1,r.height=1,r.src=t,r.onload=function(){O(n)&&n()}}(function(e,n){var t=i(),r=new Date;return n=n||{},{gaTrackingIds:t.trackingIds,gaClientIds:t.clientIds,context:{library:{name:"lftracker",version:"1.11.1"},pageUrl:n.pageUrl||window.location.href,pageTitle:n.pageTitle||document.title,referrer:document.referrer},event:n.eventName||"tracking-event",clientTimestamp:r.toISOString(),clientTimezone:r.getTimezoneOffset(),scriptId:f,cookiesEnabled:g(),anonymizeIp:!1,lfClientId:e,foreignCookies:u(),properties:n.properties||{}}}(s(),e),n)})}function n(e,n){var t=(e=e||{}).email;if(t){var 
r=e.firstName,i=e.lastName,o={email:t};r&&(o.firstName=r),i&&(o.lastName=i),v({eventName:"identify",properties:o},n)}}function m(e,n){var t={eventName:"tracking-event"};(e=e||{}).pageUrl&&(t.pageUrl=e.pageUrl),e.pageTitle&&(t.pageTitle=e.pageTitle),v(t,n)}function w(){return s()}"undefined"!=typeof window&&void 0!==window.ldfdr&&(window.ldfdr=window.ldfdr||{},window.ldfdr.cfg=window.ldfdr.cfg||{},window.ldfdr.setTrackerOption(f,"lfaCookieEnabled",!0),window.ldfdr.setTrackerOption(f,"foreignCookieSettings",{'intercom':'^intercom-id-.*','hubspot':'^hubspotutk$'}),window.ldfdr.registerPluginConfig(f,"file-downloads",{filesEnabled:!1,filesToMatch:/(\.pdf|\.doc|\.docx|\.xls|\.xlsx)$/}),window.ldfdr.registerPluginConfig(f,"auto-identify",{autoIdentifyEnabled:!1})),e()&&l().registerPlugin(f,"auto-identify",function(){var a=500,c=l().getTracker(f);if(c.plugins["auto-identify"].config.autoIdentifyEnabled)for(var e=document.getElementsByTagName("form"),n=0;n<e.length;n++){t(e[n])}function t(r){var i={capture:!0};function o(e){r.removeEventListener("submit",o,i);var n=function(e){for(var n=e.querySelectorAll("input"),t=0;t<n.length;t++){var r=n[t],i=r.type,o=r.value;if(("text"==i||"email"==i)&&o.match(/^[^@\s]+@([^@\s]+\.)+[^@\W]+$/))return{email:o}}return null}(r);if(null!==n){e.preventDefault();var t=setTimeout(function(){r.submit()},a);c.identify(n,function(){clearTimeout(t),r.submit()})}}r.addEventListener("submit",o,i)}}),e()&&l().registerPlugin(f,"file-downloads",function(){var c=500,u=l().getTracker(f),e=u.plugins["file-downloads"].config;if(e.filesEnabled)for(var n=document.getElementsByTagName("a"),t=0;t<n.length;t++){var r=n[t];(r.getAttribute("href")+"").match(e.filesToMatch)&&i(r)}function i(i){var o={capture:!0};function a(e){i.removeEventListener("click",a,o);var n=e.ctrlKey||e.metaKey,t=function(e){var n=function(e){var n=e.replace(/https?\:\/\//,"").split("/"),t=n[n.length-1].replace(/[\?&].*/,"");return 1!=n.length&&t?t:null}(e.href);return{eventName:"file-download",properties:{url:e.href,filename:n},pageUrl:e.href,pageTitle:n}}(i);if(n)u.track(t);else{e.preventDefault();var r=setTimeout(function(){i.click()},c);u.track(t,function(){clearTimeout(r),i.click()})}}i.addEventListener("click",a,o)}}),window.ldfdr.getTracker=function(e,n){var t=r()[e]||null;return O(n)?n(t):t},window.ldfdr.getAll=t,window.ldfdr.track=function(n){t(function(e){e.track(n)})},window.ldfdr.identify=function(n){t(function(e){e.identify(n)})},window.ldfdr.pageview=function(n){t(function(e){e.pageview(n)})};var h,I,k,T,y={},E=63072e6,C="_lfa",S="LF1.1";function b(e){return void 0!==e}function O(e){return"function"==typeof e}function A(e){return e+"_expiry"}function N(e,n){return n+"_"+e}function x(e){var n=R(e);if(!n)return 0;var t=function(e){try{return decodeURIComponent(window.atob(e).split("").map(function(e){return"%"+("00"+e.charCodeAt(0).toString(16)).slice(-2)}).join(""))}catch(e){return 0}}(n);return t?JSON.parse(t):0}function R(e){var n=new RegExp("(^|;)[ ]*"+e+"=([^;]*)").exec(document.cookie);return n?window.decodeURIComponent(n[2]):0}function _(e,n){n=n||function(e){return S+"."+e+"."+(new Date).getTime()}(U((navigator.userAgent||"")+(navigator.platform||"")+JSON.stringify(y)+(new Date).getTime()+Math.random()).slice(0,16));return function(e,n,t,r,i,o,a){var c=x(n)||{};c[e]=t,j(n,t=D(JSON.stringify(c)),r,i,o,a)}(e,C,n,E,"/",function(){var e=window.ldfdr.cfg.cookieDomain;if(e)return e;return 
0==(e=""+document.domain).indexOf("www.")?e.substring(4):e}()),function(e,n,t){localStorage.setItem(N(e,n),t);var r=new Date;r.setTime(r.getTime()+E),localStorage.setItem(A(n),r.toISOString())}(e,C,n),n}function j(e,n,t,r,i,o){var a;t&&(a=new Date).setTime(a.getTime()+t),document.cookie=e+"="+window.encodeURIComponent(n)+(t?";expires="+a.toGMTString():"")+";path="+(r||"/")+(i?";domain="+i:"")+(o?";secure":"")+";SameSite=Lax"}function U(e){function n(e,n){return e<<n|e>>>32-n}function t(e){var n,t="";for(n=7;0<=n;n--)t+=(e>>>4*n&15).toString(16);return t}var r,i,o,a,c,u,f,l,d,g,s=[],p=1732584193,v=4023233417,m=2562383102,w=271733878,h=3285377520,I=[];for(g=(e=function(e){return unescape(window.encodeURIComponent(e))}(e)).length,i=0;i<g-3;i+=4)o=e.charCodeAt(i)<<24|e.charCodeAt(i+1)<<16|e.charCodeAt(i+2)<<8|e.charCodeAt(i+3),I.push(o);switch(3&g){case 0:i=2147483648;break;case 1:i=e.charCodeAt(g-1)<<24|8388608;break;case 2:i=e.charCodeAt(g-2)<<24|e.charCodeAt(g-1)<<16|32768;break;case 3:i=e.charCodeAt(g-3)<<24|e.charCodeAt(g-2)<<16|e.charCodeAt(g-1)<<8|128}for(I.push(i);14!=(15&I.length);)I.push(0);for(I.push(g>>>29),I.push(g<<3&4294967295),r=0;r<I.length;r+=16){for(i=0;i<16;i++)s[i]=I[r+i];for(i=16;i<=79;i++)s[i]=n(s[i-3]^s[i-8]^s[i-14]^s[i-16],1);for(a=p,c=v,u=m,f=w,l=h,i=0;i<=19;i++)d=n(a,5)+(c&u|~c&f)+l+s[i]+1518500249&4294967295,l=f,f=u,u=n(c,30),c=a,a=d;for(i=20;i<=39;i++)d=n(a,5)+(c^u^f)+l+s[i]+1859775393&4294967295,l=f,f=u,u=n(c,30),c=a,a=d;for(i=40;i<=59;i++)d=n(a,5)+(c&u|c&f|u&f)+l+s[i]+2400959708&4294967295,l=f,f=u,u=n(c,30),c=a,a=d;for(i=60;i<=79;i++)d=n(a,5)+(c^u^f)+l+s[i]+3395469782&4294967295,l=f,f=u,u=n(c,30),c=a,a=d;p=p+a&4294967295,v=v+c&4294967295,m=m+u&4294967295,w=w+f&4294967295,h=h+l&4294967295}return(d=t(p)+t(v)+t(m)+t(w)+t(h)).toLowerCase()}function D(e){return window.btoa(encodeURIComponent(e).replace(/%([0-9A-F]{2})/g,function(e,n){return String.fromCharCode("0x"+n)}))}e()&&(!1!==(h="trackingAllowed",l().cfg[h])?function(){!function(){var e,n,t={pdf:"application/pdf",qt:"video/quicktime",realp:"audio/x-pn-realaudio-plugin",wma:"application/x-mplayer2",dir:"application/x-director",fla:"application/x-shockwave-flash",java:"application/x-java-vm",gears:"application/x-googlegears",ag:"application/x-silverlight"};if(!new RegExp("MSIE").test(navigator.userAgent)){if(navigator.mimeTypes&&navigator.mimeTypes.length)for(e in t)Object.prototype.hasOwnProperty.call(t,e)&&(n=navigator.mimeTypes[t[e]],y[e]=n&&n.enabledPlugin?"1":"0");!new RegExp("Edge[ /](\\d+[\\.\\d]+)").test(navigator.userAgent)&&"unknown"!=typeof navigator.javaEnabled&&b(navigator.javaEnabled)&&navigator.javaEnabled()&&(y.java="1"),O(window.GearsFactory)&&(y.gears="1"),y.cookie=function(){if(b(navigator.cookieEnabled))return navigator.cookieEnabled?"1":"0";var e="testcookie";return j(e,"1"),"1"===R(e)?"1":"0"}()}var r=parseInt(screen.width,10),i=parseInt(screen.height,10);y.res=parseInt(r,10)+"x"+parseInt(i,10)}();var e=l().registerTracker(f,v,n,m,w);return function(e){for(var n=e.getPlugins(),t=0;t<n.length;t++){var r=n[t];r.initialized||r.init(),r.initialized=!0}e.pluginsInitialized=!0}(e),function(e){var n=document.createEvent("Event");n.tracker=e,n.initEvent("ldfdr.trackerReady"),document.dispatchEvent(n)}(e),e}().pageview():(T=x(k=C)||{},Object.keys(T).forEach(function(e){localStorage.removeItem(N(e,k))}),localStorage.removeItem(A(k)),I=C,document.cookie=I+"=; expires=Thu, 01 Jan 1970 00:00:00 UTC;"))}(); | d |
operations.rs | #![doc = "generated by AutoRust 0.1.0"]
#![allow(unused_mut)]
#![allow(unused_variables)]
#![allow(unused_imports)]
use crate::models::*;
pub mod reservation {
use crate::models::*;
pub async fn available_scopes(
operation_config: &crate::OperationConfig,
reservation_order_id: &str,
reservation_id: &str,
body: &AvailableScopeRequest,
) -> std::result::Result<Properties, available_scopes::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Capacity/reservationOrders/{}/reservations/{}/availableScopes",
operation_config.base_path(),
reservation_order_id,
reservation_id
);
let mut url = url::Url::parse(url_str).map_err(|source| available_scopes::Error::ParseUrlError { source })?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(|source| available_scopes::Error::GetTokenError { source })?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = azure_core::to_json(body).map_err(|source| available_scopes::Error::SerializeError { source })?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(|source| available_scopes::Error::BuildRequestError { source })?;
let rsp = http_client
.execute_request(req)
.await
.map_err(|source| available_scopes::Error::ExecuteRequestError { source })?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: Properties =
serde_json::from_slice(rsp_body).map_err(|source| available_scopes::Error::DeserializeError {
source,
body: rsp_body.clone(),
})?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: Error = serde_json::from_slice(rsp_body).map_err(|source| available_scopes::Error::DeserializeError {
source,
body: rsp_body.clone(),
})?;
Err(available_scopes::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod available_scopes {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {}", source)]
ParseUrlError { source: url::ParseError },
#[error("Failed to build request: {}", source)]
BuildRequestError { source: http::Error },
#[error("Failed to execute request: {}", source)]
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
#[error("Failed to serialize request body: {}", source)]
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
#[error("Failed to deserialize response body: {}", source)]
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
#[error("Failed to get access token: {}", source)]
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn | (
operation_config: &crate::OperationConfig,
reservation_order_id: &str,
body: &SplitRequest,
) -> std::result::Result<split::Response, split::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Capacity/reservationOrders/{}/split",
operation_config.base_path(),
reservation_order_id
);
let mut url = url::Url::parse(url_str).map_err(|source| split::Error::ParseUrlError { source })?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(|source| split::Error::GetTokenError { source })?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = azure_core::to_json(body).map_err(|source| split::Error::SerializeError { source })?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(|source| split::Error::BuildRequestError { source })?;
let rsp = http_client
.execute_request(req)
.await
.map_err(|source| split::Error::ExecuteRequestError { source })?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: Vec<ReservationResponse> =
serde_json::from_slice(rsp_body).map_err(|source| split::Error::DeserializeError {
source,
body: rsp_body.clone(),
})?;
Ok(split::Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(split::Response::Accepted202),
status_code => {
let rsp_body = rsp.body();
let rsp_value: Error = serde_json::from_slice(rsp_body).map_err(|source| split::Error::DeserializeError {
source,
body: rsp_body.clone(),
})?;
Err(split::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod split {
use crate::{models, models::*};
#[derive(Debug)]
pub enum Response {
Ok200(Vec<ReservationResponse>),
Accepted202,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {}", source)]
ParseUrlError { source: url::ParseError },
#[error("Failed to build request: {}", source)]
BuildRequestError { source: http::Error },
#[error("Failed to execute request: {}", source)]
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
#[error("Failed to serialize request body: {}", source)]
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
#[error("Failed to deserialize response body: {}", source)]
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
#[error("Failed to get access token: {}", source)]
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn merge(
operation_config: &crate::OperationConfig,
reservation_order_id: &str,
body: &MergeRequest,
) -> std::result::Result<merge::Response, merge::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Capacity/reservationOrders/{}/merge",
operation_config.base_path(),
reservation_order_id
);
let mut url = url::Url::parse(url_str).map_err(|source| merge::Error::ParseUrlError { source })?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(|source| merge::Error::GetTokenError { source })?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = azure_core::to_json(body).map_err(|source| merge::Error::SerializeError { source })?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(|source| merge::Error::BuildRequestError { source })?;
let rsp = http_client
.execute_request(req)
.await
.map_err(|source| merge::Error::ExecuteRequestError { source })?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: Vec<ReservationResponse> =
serde_json::from_slice(rsp_body).map_err(|source| merge::Error::DeserializeError {
source,
body: rsp_body.clone(),
})?;
Ok(merge::Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(merge::Response::Accepted202),
status_code => {
let rsp_body = rsp.body();
let rsp_value: Error = serde_json::from_slice(rsp_body).map_err(|source| merge::Error::DeserializeError {
source,
body: rsp_body.clone(),
})?;
Err(merge::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod merge {
use crate::{models, models::*};
#[derive(Debug)]
pub enum Response {
Ok200(Vec<ReservationResponse>),
Accepted202,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {}", source)]
ParseUrlError { source: url::ParseError },
#[error("Failed to build request: {}", source)]
BuildRequestError { source: http::Error },
#[error("Failed to execute request: {}", source)]
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
#[error("Failed to serialize request body: {}", source)]
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
#[error("Failed to deserialize response body: {}", source)]
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
#[error("Failed to get access token: {}", source)]
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn list(
operation_config: &crate::OperationConfig,
reservation_order_id: &str,
) -> std::result::Result<ReservationList, list::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Capacity/reservationOrders/{}/reservations",
operation_config.base_path(),
reservation_order_id
);
let mut url = url::Url::parse(url_str).map_err(|source| list::Error::ParseUrlError { source })?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(|source| list::Error::GetTokenError { source })?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(|source| list::Error::BuildRequestError { source })?;
let rsp = http_client
.execute_request(req)
.await
.map_err(|source| list::Error::ExecuteRequestError { source })?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: ReservationList = serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError {
source,
body: rsp_body.clone(),
})?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: Error = serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError {
source,
body: rsp_body.clone(),
})?;
Err(list::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {}", source)]
ParseUrlError { source: url::ParseError },
#[error("Failed to build request: {}", source)]
BuildRequestError { source: http::Error },
#[error("Failed to execute request: {}", source)]
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
#[error("Failed to serialize request body: {}", source)]
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
#[error("Failed to deserialize response body: {}", source)]
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
#[error("Failed to get access token: {}", source)]
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn get(
operation_config: &crate::OperationConfig,
reservation_id: &str,
reservation_order_id: &str,
expand: Option<&str>,
) -> std::result::Result<ReservationResponse, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Capacity/reservationOrders/{}/reservations/{}",
operation_config.base_path(),
reservation_order_id,
reservation_id
);
let mut url = url::Url::parse(url_str).map_err(|source| get::Error::ParseUrlError { source })?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(|source| get::Error::GetTokenError { source })?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
if let Some(expand) = expand {
url.query_pairs_mut().append_pair("expand", expand);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(|source| get::Error::BuildRequestError { source })?;
let rsp = http_client
.execute_request(req)
.await
.map_err(|source| get::Error::ExecuteRequestError { source })?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: ReservationResponse = serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError {
source,
body: rsp_body.clone(),
})?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: Error = serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError {
source,
body: rsp_body.clone(),
})?;
Err(get::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {}", source)]
ParseUrlError { source: url::ParseError },
#[error("Failed to build request: {}", source)]
BuildRequestError { source: http::Error },
#[error("Failed to execute request: {}", source)]
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
#[error("Failed to serialize request body: {}", source)]
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
#[error("Failed to deserialize response body: {}", source)]
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
#[error("Failed to get access token: {}", source)]
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn update(
operation_config: &crate::OperationConfig,
reservation_order_id: &str,
reservation_id: &str,
parameters: &Patch,
) -> std::result::Result<update::Response, update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Capacity/reservationOrders/{}/reservations/{}",
operation_config.base_path(),
reservation_order_id,
reservation_id
);
let mut url = url::Url::parse(url_str).map_err(|source| update::Error::ParseUrlError { source })?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PATCH);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(|source| update::Error::GetTokenError { source })?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = azure_core::to_json(parameters).map_err(|source| update::Error::SerializeError { source })?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(|source| update::Error::BuildRequestError { source })?;
let rsp = http_client
.execute_request(req)
.await
.map_err(|source| update::Error::ExecuteRequestError { source })?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: ReservationResponse =
serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError {
source,
body: rsp_body.clone(),
})?;
Ok(update::Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(update::Response::Accepted202),
status_code => {
let rsp_body = rsp.body();
let rsp_value: Error = serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError {
source,
body: rsp_body.clone(),
})?;
Err(update::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod update {
use crate::{models, models::*};
#[derive(Debug)]
pub enum Response {
Ok200(ReservationResponse),
Accepted202,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {}", source)]
ParseUrlError { source: url::ParseError },
#[error("Failed to build request: {}", source)]
BuildRequestError { source: http::Error },
#[error("Failed to execute request: {}", source)]
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
#[error("Failed to serialize request body: {}", source)]
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
#[error("Failed to deserialize response body: {}", source)]
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
#[error("Failed to get access token: {}", source)]
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn list_revisions(
operation_config: &crate::OperationConfig,
reservation_id: &str,
reservation_order_id: &str,
) -> std::result::Result<ReservationList, list_revisions::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Capacity/reservationOrders/{}/reservations/{}/revisions",
operation_config.base_path(),
reservation_order_id,
reservation_id
);
let mut url = url::Url::parse(url_str).map_err(|source| list_revisions::Error::ParseUrlError { source })?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(|source| list_revisions::Error::GetTokenError { source })?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(|source| list_revisions::Error::BuildRequestError { source })?;
let rsp = http_client
.execute_request(req)
.await
.map_err(|source| list_revisions::Error::ExecuteRequestError { source })?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: ReservationList =
serde_json::from_slice(rsp_body).map_err(|source| list_revisions::Error::DeserializeError {
source,
body: rsp_body.clone(),
})?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: Error = serde_json::from_slice(rsp_body).map_err(|source| list_revisions::Error::DeserializeError {
source,
body: rsp_body.clone(),
})?;
Err(list_revisions::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_revisions {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {}", source)]
ParseUrlError { source: url::ParseError },
#[error("Failed to build request: {}", source)]
BuildRequestError { source: http::Error },
#[error("Failed to execute request: {}", source)]
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
#[error("Failed to serialize request body: {}", source)]
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
#[error("Failed to deserialize response body: {}", source)]
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
#[error("Failed to get access token: {}", source)]
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn archive(
operation_config: &crate::OperationConfig,
reservation_order_id: &str,
reservation_id: &str,
) -> std::result::Result<(), archive::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Capacity/reservationOrders/{}/reservations/{}/archive",
operation_config.base_path(),
reservation_order_id,
reservation_id
);
let mut url = url::Url::parse(url_str).map_err(|source| archive::Error::ParseUrlError { source })?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(|source| archive::Error::GetTokenError { source })?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(|source| archive::Error::BuildRequestError { source })?;
let rsp = http_client
.execute_request(req)
.await
.map_err(|source| archive::Error::ExecuteRequestError { source })?;
match rsp.status() {
http::StatusCode::OK => Ok(()),
status_code => {
let rsp_body = rsp.body();
let rsp_value: Error = serde_json::from_slice(rsp_body).map_err(|source| archive::Error::DeserializeError {
source,
body: rsp_body.clone(),
})?;
Err(archive::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod archive {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {}", source)]
ParseUrlError { source: url::ParseError },
#[error("Failed to build request: {}", source)]
BuildRequestError { source: http::Error },
#[error("Failed to execute request: {}", source)]
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
#[error("Failed to serialize request body: {}", source)]
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
#[error("Failed to deserialize response body: {}", source)]
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
#[error("Failed to get access token: {}", source)]
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn unarchive(
operation_config: &crate::OperationConfig,
reservation_order_id: &str,
reservation_id: &str,
) -> std::result::Result<(), unarchive::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Capacity/reservationOrders/{}/reservations/{}/unarchive",
operation_config.base_path(),
reservation_order_id,
reservation_id
);
let mut url = url::Url::parse(url_str).map_err(|source| unarchive::Error::ParseUrlError { source })?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(|source| unarchive::Error::GetTokenError { source })?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(|source| unarchive::Error::BuildRequestError { source })?;
let rsp = http_client
.execute_request(req)
.await
.map_err(|source| unarchive::Error::ExecuteRequestError { source })?;
match rsp.status() {
http::StatusCode::OK => Ok(()),
status_code => {
let rsp_body = rsp.body();
let rsp_value: Error = serde_json::from_slice(rsp_body).map_err(|source| unarchive::Error::DeserializeError {
source,
body: rsp_body.clone(),
})?;
Err(unarchive::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod unarchive {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {}", source)]
ParseUrlError { source: url::ParseError },
#[error("Failed to build request: {}", source)]
BuildRequestError { source: http::Error },
#[error("Failed to execute request: {}", source)]
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
#[error("Failed to serialize request body: {}", source)]
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
#[error("Failed to deserialize response body: {}", source)]
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
#[error("Failed to get access token: {}", source)]
GetTokenError { source: azure_core::errors::AzureError },
}
}
}
pub async fn get_catalog(
operation_config: &crate::OperationConfig,
subscription_id: &str,
reserved_resource_type: &str,
location: Option<&str>,
) -> std::result::Result<Vec<Catalog>, get_catalog::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.Capacity/catalogs",
operation_config.base_path(),
subscription_id
);
let mut url = url::Url::parse(url_str).map_err(|source| get_catalog::Error::ParseUrlError { source })?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(|source| get_catalog::Error::GetTokenError { source })?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
url.query_pairs_mut().append_pair("reservedResourceType", reserved_resource_type);
if let Some(location) = location {
url.query_pairs_mut().append_pair("location", location);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(|source| get_catalog::Error::BuildRequestError { source })?;
let rsp = http_client
.execute_request(req)
.await
.map_err(|source| get_catalog::Error::ExecuteRequestError { source })?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: Vec<Catalog> = serde_json::from_slice(rsp_body).map_err(|source| get_catalog::Error::DeserializeError {
source,
body: rsp_body.clone(),
})?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: Error = serde_json::from_slice(rsp_body).map_err(|source| get_catalog::Error::DeserializeError {
source,
body: rsp_body.clone(),
})?;
Err(get_catalog::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get_catalog {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {}", source)]
ParseUrlError { source: url::ParseError },
#[error("Failed to build request: {}", source)]
BuildRequestError { source: http::Error },
#[error("Failed to execute request: {}", source)]
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
#[error("Failed to serialize request body: {}", source)]
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
#[error("Failed to deserialize response body: {}", source)]
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
#[error("Failed to get access token: {}", source)]
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn get_applied_reservation_list(
operation_config: &crate::OperationConfig,
subscription_id: &str,
) -> std::result::Result<AppliedReservations, get_applied_reservation_list::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.Capacity/appliedReservations",
operation_config.base_path(),
subscription_id
);
let mut url = url::Url::parse(url_str).map_err(|source| get_applied_reservation_list::Error::ParseUrlError { source })?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(|source| get_applied_reservation_list::Error::GetTokenError { source })?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(|source| get_applied_reservation_list::Error::BuildRequestError { source })?;
let rsp = http_client
.execute_request(req)
.await
.map_err(|source| get_applied_reservation_list::Error::ExecuteRequestError { source })?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: AppliedReservations =
serde_json::from_slice(rsp_body).map_err(|source| get_applied_reservation_list::Error::DeserializeError {
source,
body: rsp_body.clone(),
})?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: Error =
serde_json::from_slice(rsp_body).map_err(|source| get_applied_reservation_list::Error::DeserializeError {
source,
body: rsp_body.clone(),
})?;
Err(get_applied_reservation_list::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get_applied_reservation_list {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {}", source)]
ParseUrlError { source: url::ParseError },
#[error("Failed to build request: {}", source)]
BuildRequestError { source: http::Error },
#[error("Failed to execute request: {}", source)]
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
#[error("Failed to serialize request body: {}", source)]
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
#[error("Failed to deserialize response body: {}", source)]
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
#[error("Failed to get access token: {}", source)]
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub mod reservation_order {
use crate::models::*;
pub async fn calculate(
operation_config: &crate::OperationConfig,
body: &PurchaseRequest,
) -> std::result::Result<CalculatePriceResponse, calculate::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/providers/Microsoft.Capacity/calculatePrice", operation_config.base_path(),);
let mut url = url::Url::parse(url_str).map_err(|source| calculate::Error::ParseUrlError { source })?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(|source| calculate::Error::GetTokenError { source })?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = azure_core::to_json(body).map_err(|source| calculate::Error::SerializeError { source })?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(|source| calculate::Error::BuildRequestError { source })?;
let rsp = http_client
.execute_request(req)
.await
.map_err(|source| calculate::Error::ExecuteRequestError { source })?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: CalculatePriceResponse =
serde_json::from_slice(rsp_body).map_err(|source| calculate::Error::DeserializeError {
source,
body: rsp_body.clone(),
})?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: Error = serde_json::from_slice(rsp_body).map_err(|source| calculate::Error::DeserializeError {
source,
body: rsp_body.clone(),
})?;
Err(calculate::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod calculate {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {}", source)]
ParseUrlError { source: url::ParseError },
#[error("Failed to build request: {}", source)]
BuildRequestError { source: http::Error },
#[error("Failed to execute request: {}", source)]
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
#[error("Failed to serialize request body: {}", source)]
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
#[error("Failed to deserialize response body: {}", source)]
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
#[error("Failed to get access token: {}", source)]
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn list(operation_config: &crate::OperationConfig) -> std::result::Result<ReservationOrderList, list::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/providers/Microsoft.Capacity/reservationOrders", operation_config.base_path(),);
let mut url = url::Url::parse(url_str).map_err(|source| list::Error::ParseUrlError { source })?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(|source| list::Error::GetTokenError { source })?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(|source| list::Error::BuildRequestError { source })?;
let rsp = http_client
.execute_request(req)
.await
.map_err(|source| list::Error::ExecuteRequestError { source })?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: ReservationOrderList = serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError {
source,
body: rsp_body.clone(),
})?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: Error = serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError {
source,
body: rsp_body.clone(),
})?;
Err(list::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {}", source)]
ParseUrlError { source: url::ParseError },
#[error("Failed to build request: {}", source)]
BuildRequestError { source: http::Error },
#[error("Failed to execute request: {}", source)]
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
#[error("Failed to serialize request body: {}", source)]
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
#[error("Failed to deserialize response body: {}", source)]
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
#[error("Failed to get access token: {}", source)]
GetTokenError { source: azure_core::errors::AzureError },
}
}
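    // Editor's illustrative sketch (not part of the generated client): one way to
    // call the `list` operation defined above. Constructing the `OperationConfig`
    // is assumed to happen elsewhere in the consuming application.
    pub async fn example_list_reservation_orders(
        operation_config: &crate::OperationConfig,
    ) -> std::result::Result<(), list::Error> {
        // `_orders` holds the deserialized `ReservationOrderList` model on success.
        let _orders = list(operation_config).await?;
        Ok(())
    }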
pub async fn get(
operation_config: &crate::OperationConfig,
reservation_order_id: &str,
expand: Option<&str>,
) -> std::result::Result<ReservationOrderResponse, get::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Capacity/reservationOrders/{}",
operation_config.base_path(),
reservation_order_id
);
let mut url = url::Url::parse(url_str).map_err(|source| get::Error::ParseUrlError { source })?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(|source| get::Error::GetTokenError { source })?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
if let Some(expand) = expand {
url.query_pairs_mut().append_pair("$expand", expand);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(|source| get::Error::BuildRequestError { source })?;
let rsp = http_client
.execute_request(req)
.await
.map_err(|source| get::Error::ExecuteRequestError { source })?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: ReservationOrderResponse =
serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError {
source,
body: rsp_body.clone(),
})?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: Error = serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError {
source,
body: rsp_body.clone(),
})?;
Err(get::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {}", source)]
ParseUrlError { source: url::ParseError },
#[error("Failed to build request: {}", source)]
BuildRequestError { source: http::Error },
#[error("Failed to execute request: {}", source)]
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
#[error("Failed to serialize request body: {}", source)]
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
#[error("Failed to deserialize response body: {}", source)]
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
#[error("Failed to get access token: {}", source)]
GetTokenError { source: azure_core::errors::AzureError },
}
}
pub async fn purchase(
operation_config: &crate::OperationConfig,
reservation_order_id: &str,
body: &PurchaseRequest,
) -> std::result::Result<purchase::Response, purchase::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/providers/Microsoft.Capacity/reservationOrders/{}",
operation_config.base_path(),
reservation_order_id
);
let mut url = url::Url::parse(url_str).map_err(|source| purchase::Error::ParseUrlError { source })?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(|source| purchase::Error::GetTokenError { source })?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = azure_core::to_json(body).map_err(|source| purchase::Error::SerializeError { source })?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(|source| purchase::Error::BuildRequestError { source })?;
let rsp = http_client
.execute_request(req)
.await
.map_err(|source| purchase::Error::ExecuteRequestError { source })?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: ReservationOrderResponse =
serde_json::from_slice(rsp_body).map_err(|source| purchase::Error::DeserializeError {
source,
body: rsp_body.clone(),
})?;
Ok(purchase::Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => {
let rsp_body = rsp.body();
let rsp_value: ReservationOrderResponse =
serde_json::from_slice(rsp_body).map_err(|source| purchase::Error::DeserializeError {
source,
body: rsp_body.clone(),
})?;
Ok(purchase::Response::Accepted202(rsp_value))
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: Error = serde_json::from_slice(rsp_body).map_err(|source| purchase::Error::DeserializeError {
source,
body: rsp_body.clone(),
})?;
Err(purchase::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod purchase {
use crate::{models, models::*};
#[derive(Debug)]
pub enum Response {
Ok200(ReservationOrderResponse),
Accepted202(ReservationOrderResponse),
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {}", source)]
ParseUrlError { source: url::ParseError },
#[error("Failed to build request: {}", source)]
BuildRequestError { source: http::Error },
#[error("Failed to execute request: {}", source)]
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
#[error("Failed to serialize request body: {}", source)]
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
#[error("Failed to deserialize response body: {}", source)]
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
#[error("Failed to get access token: {}", source)]
GetTokenError { source: azure_core::errors::AzureError },
}
}
}
pub mod operation {
use crate::models::*;
pub async fn list(operation_config: &crate::OperationConfig) -> std::result::Result<OperationList, list::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/providers/Microsoft.Capacity/operations", operation_config.base_path(),);
let mut url = url::Url::parse(url_str).map_err(|source| list::Error::ParseUrlError { source })?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(|source| list::Error::GetTokenError { source })?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(|source| list::Error::BuildRequestError { source })?;
let rsp = http_client
.execute_request(req)
.await
.map_err(|source| list::Error::ExecuteRequestError { source })?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: OperationList = serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError {
source,
body: rsp_body.clone(),
})?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: Error = serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError {
source,
body: rsp_body.clone(),
})?;
Err(list::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list {
use crate::{models, models::*};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::Error,
},
#[error("Failed to parse request URL: {}", source)]
ParseUrlError { source: url::ParseError },
#[error("Failed to build request: {}", source)]
BuildRequestError { source: http::Error },
#[error("Failed to execute request: {}", source)]
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
#[error("Failed to serialize request body: {}", source)]
SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
#[error("Failed to deserialize response body: {}", source)]
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
#[error("Failed to get access token: {}", source)]
GetTokenError { source: azure_core::errors::AzureError },
}
}
}
| split |
getContentType.go | // *** WARNING: this file was generated by the Pulumi SDK Generator. ***
// *** Do not edit by hand unless you're certain you know what you are doing! ***
package v20210101preview
import (
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
// Content type contract details.
func LookupContentType(ctx *pulumi.Context, args *LookupContentTypeArgs, opts ...pulumi.InvokeOption) (*LookupContentTypeResult, error) {
var rv LookupContentTypeResult
err := ctx.Invoke("azure-native:apimanagement/v20210101preview:getContentType", args, &rv, opts...)
if err != nil |
return &rv, nil
}
type LookupContentTypeArgs struct {
// Content type identifier.
ContentTypeId string `pulumi:"contentTypeId"`
// The name of the resource group.
ResourceGroupName string `pulumi:"resourceGroupName"`
// The name of the API Management service.
ServiceName string `pulumi:"serviceName"`
}
// Content type contract details.
type LookupContentTypeResult struct {
// Content type description.
Description *string `pulumi:"description"`
// Resource ID.
Id string `pulumi:"id"`
// Resource name.
Name string `pulumi:"name"`
// Content type schema.
Schema interface{} `pulumi:"schema"`
// Resource type for API Management resource.
Type string `pulumi:"type"`
// Content type version.
Version *string `pulumi:"version"`
}
| {
return nil, err
} |
0001_initial.py | # Generated by Django 3.2.8 on 2021-10-18 11:53
from django.db import migrations, models
import django.db.models.deletion
class | (migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Product',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.TextField(max_length=500)),
('description', models.TextField(blank=True, max_length=1000)),
('price', models.DecimalField(decimal_places=2, max_digits=6)),
('category', models.CharField(choices=[('CO', 'Coches'), ('MO', 'Motos'), ('MA', 'Moda y Accesorios'), ('IM', 'Immobiliaria'), ('TV', 'TV, Audio y Foto'), ('TE', 'Móviles y Telefonía'), ('IE', 'Informática y Electrónica'), ('DO', 'Deporte y Ocio'), ('BI', 'Bicicletas'), ('CV', 'Consolas y Videojuegos'), ('HJ', 'Hogar y Jardín'), ('ED', 'Electrodomésticos'), ('CU', 'Cine, Libros y Música'), ('NI', 'Niños y Bebés'), ('CC', 'Coleccionismo'), ('CT', 'Construcción y reformas'), ('IN', 'Industria y Agricultura'), ('EM', 'Empleo'), ('SE', 'Servicios'), ('OT', 'Otros')], default='OT', max_length=2)),
],
),
migrations.CreateModel(
name='Image',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('image', models.ImageField(blank=True, upload_to='products/%Y/%m/%d')),
('product', models.ForeignKey(default=None, on_delete=django.db.models.deletion.CASCADE, related_name='images', to='products.product')),
],
),
]
| Migration |
message_test.go | package transactions
import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/theQRL/go-qrl/pkg/core/addressstate"
"github.com/theQRL/go-qrl/pkg/misc"
"github.com/theQRL/go-qrl/test/helper"
)
type TestMessageTransaction struct {
tx *MessageTransaction
}
func NewTestMessageTransaction(message string, fee uint64, xmssPK []byte, masterAddr []byte) *TestMessageTransaction {
tx := CreateMessageTransaction([]byte(message), fee, xmssPK, masterAddr)
return &TestMessageTransaction{tx: tx}
}
func TestCreateMessageTransaction(t *testing.T) {
xmss := helper.GetAliceXMSS(6)
message := "Hello World!!!"
fee := uint64(1)
xmssPK := misc.UCharVectorToBytes(xmss.PK())
tx := CreateMessageTransaction([]byte(message), fee, xmssPK, nil)
assert.NotNil(t, tx)
}
func TestMessageTransaction_AddrFrom(t *testing.T) {
xmss := helper.GetAliceXMSS(6)
message := "Hello World!!!"
fee := uint64(1)
xmssPK := misc.UCharVectorToBytes(xmss.PK())
messageTx := NewTestMessageTransaction(message, fee, xmssPK, nil)
assert.NotNil(t, messageTx.tx)
assert.Equal(t, messageTx.tx.AddrFrom(), misc.UCharVectorToBytes(xmss.Address()))
}
func TestMessageTransaction_MessageHash(t *testing.T) {
xmss := helper.GetAliceXMSS(6)
message := "Hello World!!!"
fee := uint64(1)
xmssPK := misc.UCharVectorToBytes(xmss.PK())
messageTx := NewTestMessageTransaction(message, fee, xmssPK, nil)
assert.NotNil(t, messageTx.tx)
assert.Equal(t, messageTx.tx.MessageHash(), []byte(message))
}
func TestMessageTransaction_GetHashableBytes(t *testing.T) {
xmss := helper.GetAliceXMSS(6)
message := "Hello World!!!"
fee := uint64(1)
xmssPK := misc.UCharVectorToBytes(xmss.PK())
messageTx := NewTestMessageTransaction(message, fee, xmssPK, nil)
hashableBytes := "cd00dc04142a981d9b1a6cf671c76a5f134c1264cf6f9e18048c66a6ba149b16"
assert.NotNil(t, messageTx.tx)
assert.Equal(t, misc.Bin2HStr(messageTx.tx.GetHashableBytes()), hashableBytes)
}
func | (t *testing.T) {
xmss := helper.GetAliceXMSS(6)
message := "Hello World!!!"
fee := uint64(1)
xmssPK := misc.UCharVectorToBytes(xmss.PK())
messageTx := NewTestMessageTransaction(message, fee, xmssPK, nil)
assert.NotNil(t, messageTx.tx)
assert.True(t, messageTx.tx.Validate(false))
// Signed transaction, signature verification should pass
messageTx.tx.Sign(xmss, misc.BytesToUCharVector(messageTx.tx.GetHashableBytes()))
assert.True(t, messageTx.tx.Validate(true))
	// Changed MessageHash to a different value, validation must fail
messageTx.tx.PBData().GetMessage().MessageHash = []byte("ok")
assert.False(t, messageTx.tx.Validate(true))
}
func TestMessageTransaction_Validate2(t *testing.T) {
xmss := helper.GetAliceXMSS(6)
message := "Hello World!!!"
fee := uint64(1)
xmssPK := misc.UCharVectorToBytes(xmss.PK())
messageTx := NewTestMessageTransaction(message, fee, xmssPK, nil)
assert.NotNil(t, messageTx.tx)
messageTx.tx.Sign(xmss, misc.BytesToUCharVector(messageTx.tx.GetHashableBytes()))
assert.True(t, messageTx.tx.Validate(true))
	// Random transaction hash, validation must fail
messageTx.tx.PBData().TransactionHash = []byte{0, 1, 5}
assert.False(t, messageTx.tx.Validate(true))
}
func TestMessageTransaction_ValidateCustom(t *testing.T) {
xmss := helper.GetAliceXMSS(6)
message := ""
fee := uint64(1)
xmssPK := misc.UCharVectorToBytes(xmss.PK())
messageTx := NewTestMessageTransaction(message, fee, xmssPK, nil)
// Transaction must be nil, as message length is 0
assert.Nil(t, messageTx.tx)
}
func TestMessageTransaction_ValidateCustom2(t *testing.T) {
xmss := helper.GetAliceXMSS(6)
message := make([]byte, 80)
for i := 0; i < len(message); i++ {
message[i] = 0
}
fee := uint64(1)
xmssPK := misc.UCharVectorToBytes(xmss.PK())
messageTx := NewTestMessageTransaction(string(message), fee, xmssPK, nil)
// Transaction must not be nil, as the message size is still within limit
assert.NotNil(t, messageTx.tx)
}
func TestMessageTransaction_ValidateCustom3(t *testing.T) {
xmss := helper.GetAliceXMSS(6)
message := make([]byte, 81)
for i := 0; i < len(message); i++ {
message[i] = 0
}
fee := uint64(1)
xmssPK := misc.UCharVectorToBytes(xmss.PK())
messageTx := NewTestMessageTransaction(string(message), fee, xmssPK, nil)
// Transaction must be nil, as message length is more than the threshold
assert.Nil(t, messageTx.tx)
}
func TestMessageTransaction_ValidateExtended(t *testing.T) {
xmss := helper.GetAliceXMSS(6)
message := "hello"
fee := uint64(1)
xmssPK := misc.UCharVectorToBytes(xmss.PK())
messageTx := NewTestMessageTransaction(message, fee, xmssPK, nil)
assert.NotNil(t, messageTx.tx)
addrFromState := addressstate.GetDefaultAddressState(misc.UCharVectorToBytes(xmss.Address()))
messageTx.tx.Sign(xmss, misc.BytesToUCharVector(messageTx.tx.GetHashableBytes()))
// Since balance is 0, validation should fail as required fee is 1
assert.False(t, messageTx.tx.ValidateExtended(addrFromState, addrFromState))
// Added balance
addrFromState.AddBalance(1)
assert.True(t, messageTx.tx.ValidateExtended(addrFromState, addrFromState))
}
func TestMessageTransaction_ValidateExtended2(t *testing.T) {
xmss := helper.GetAliceXMSS(6) // Master XMSS
masterAddress := misc.UCharVectorToBytes(xmss.Address())
xmss2 := helper.GetBobXMSS(6) // Slave XMSS
message := "hello"
fee := uint64(1)
xmssPK := misc.UCharVectorToBytes(xmss2.PK())
messageTx := NewTestMessageTransaction(message, fee, xmssPK, masterAddress)
assert.NotNil(t, messageTx.tx)
addrFromState := addressstate.GetDefaultAddressState(misc.UCharVectorToBytes(xmss.Address()))
addrFromState.AddBalance(1)
addrFromPKState := addressstate.GetDefaultAddressState(misc.UCharVectorToBytes(xmss2.Address()))
messageTx.tx.Sign(xmss2, misc.BytesToUCharVector(messageTx.tx.GetHashableBytes()))
// Slave is not registered, validation must fail
assert.False(t, messageTx.tx.ValidateExtended(addrFromState, addrFromPKState))
addrFromState.AddSlavePKSAccessType(misc.UCharVectorToBytes(xmss2.PK()), 0)
assert.True(t, messageTx.tx.ValidateExtended(addrFromState, addrFromPKState))
}
func TestMessageTransaction_ValidateExtended3(t *testing.T) {
/*
	Test for signing a message transaction via a slave with a used OTS key
*/
xmss := helper.GetAliceXMSS(6) // Master XMSS
masterAddress := misc.UCharVectorToBytes(xmss.Address())
xmss2 := helper.GetBobXMSS(6) // Slave XMSS
message := "hello"
fee := uint64(1)
xmssPK := misc.UCharVectorToBytes(xmss2.PK())
messageTx := NewTestMessageTransaction(message, fee, xmssPK, masterAddress)
assert.NotNil(t, messageTx.tx)
addrFromState := addressstate.GetDefaultAddressState(misc.UCharVectorToBytes(xmss.Address()))
addrFromState.AddBalance(1)
addrFromState.AddSlavePKSAccessType(misc.UCharVectorToBytes(xmss2.PK()), 0) // Adding slave
addrFromPKState := addressstate.GetDefaultAddressState(misc.UCharVectorToBytes(xmss2.Address()))
messageTx.tx.Sign(xmss2, misc.BytesToUCharVector(messageTx.tx.GetHashableBytes()))
assert.True(t, messageTx.tx.ValidateExtended(addrFromState, addrFromPKState))
addrFromPKState.SetOTSKey(0) // Marked ots key 0 as used
	// Signed with a used OTS key, validation must fail
assert.False(t, messageTx.tx.ValidateExtended(addrFromState, addrFromPKState))
xmss.SetOTSIndex(10)
messageTx.tx.Sign(xmss, misc.BytesToUCharVector(messageTx.tx.GetHashableBytes()))
assert.True(t, messageTx.tx.ValidateExtended(addrFromState, addrFromPKState))
addrFromPKState.SetOTSKey(10) // Marked ots key 10 as used
	// Signed with a used OTS key, validation must fail
assert.False(t, messageTx.tx.ValidateExtended(addrFromState, addrFromPKState))
}
func TestMessageTransaction_ValidateExtended4(t *testing.T) {
/*
	Test for signing a message transaction without a slave, using a used OTS key
*/
xmss := helper.GetAliceXMSS(6) // Master XMSS
message := "hello"
fee := uint64(1)
xmssPK := misc.UCharVectorToBytes(xmss.PK())
messageTx := NewTestMessageTransaction(message, fee, xmssPK, nil)
assert.NotNil(t, messageTx.tx)
addrFromState := addressstate.GetDefaultAddressState(misc.UCharVectorToBytes(xmss.Address()))
addrFromState.AddBalance(1)
messageTx.tx.Sign(xmss, misc.BytesToUCharVector(messageTx.tx.GetHashableBytes()))
assert.True(t, messageTx.tx.ValidateExtended(addrFromState, addrFromState))
addrFromState.SetOTSKey(0) // Marked ots key 0 as used
	// Signed with a used OTS key, validation must fail
assert.False(t, messageTx.tx.ValidateExtended(addrFromState, addrFromState))
xmss.SetOTSIndex(10)
messageTx.tx.Sign(xmss, misc.BytesToUCharVector(messageTx.tx.GetHashableBytes()))
assert.True(t, messageTx.tx.ValidateExtended(addrFromState, addrFromState))
addrFromState.SetOTSKey(10) // Marked ots key 10 as used
	// Signed with a used OTS key, validation must fail
assert.False(t, messageTx.tx.ValidateExtended(addrFromState, addrFromState))
}
func TestMessageTransaction_ApplyStateChanges(t *testing.T) {
xmss := helper.GetAliceXMSS(6) // Master XMSS
message := "hello"
fee := uint64(1)
initialBalance := uint64(10000000)
xmssPK := misc.UCharVectorToBytes(xmss.PK())
messageTx := NewTestMessageTransaction(message, fee, xmssPK, nil)
assert.NotNil(t, messageTx.tx)
messageTx.tx.Sign(xmss, misc.BytesToUCharVector(messageTx.tx.GetHashableBytes()))
addressesState := make(map[string]*addressstate.AddressState)
messageTx.tx.SetAffectedAddress(addressesState)
assert.Len(t, addressesState, 1)
addressesState[xmss.QAddress()] = addressstate.GetDefaultAddressState(misc.UCharVectorToBytes(xmss.Address()))
// Initializing balance
addressesState[xmss.QAddress()].PBData().Balance = initialBalance
messageTx.tx.ApplyStateChanges(addressesState)
assert.Equal(t, addressesState[xmss.QAddress()].Balance(), initialBalance-fee)
}
func TestMessageTransaction_RevertStateChanges(t *testing.T) {
xmss := helper.GetAliceXMSS(6) // Master XMSS
message := "hello"
fee := uint64(1)
initialBalance := uint64(10000000)
xmssPK := misc.UCharVectorToBytes(xmss.PK())
messageTx := NewTestMessageTransaction(message, fee, xmssPK, nil)
assert.NotNil(t, messageTx.tx)
messageTx.tx.Sign(xmss, misc.BytesToUCharVector(messageTx.tx.GetHashableBytes()))
addressesState := make(map[string]*addressstate.AddressState)
messageTx.tx.SetAffectedAddress(addressesState)
assert.Len(t, addressesState, 1)
addressesState[xmss.QAddress()] = addressstate.GetDefaultAddressState(misc.UCharVectorToBytes(xmss.Address()))
// Initializing balance
addressesState[xmss.QAddress()].PBData().Balance = initialBalance
messageTx.tx.ApplyStateChanges(addressesState)
assert.Equal(t, addressesState[xmss.QAddress()].Balance(), initialBalance-fee)
messageTx.tx.RevertStateChanges(addressesState)
assert.Equal(t, addressesState[xmss.QAddress()].Balance(), initialBalance)
}
func TestMessageTransaction_SetAffectedAddress(t *testing.T) {
xmss := helper.GetAliceXMSS(6) // Master XMSS
message := "hello"
fee := uint64(1)
xmssPK := misc.UCharVectorToBytes(xmss.PK())
messageTx := NewTestMessageTransaction(message, fee, xmssPK, nil)
assert.NotNil(t, messageTx.tx)
addressesState := make(map[string]*addressstate.AddressState)
messageTx.tx.SetAffectedAddress(addressesState)
assert.Len(t, addressesState, 1)
assert.Contains(t, addressesState, xmss.QAddress())
}
func TestMessageTransaction_FromPBData(t *testing.T) {
xmss := helper.GetAliceXMSS(6) // Master XMSS
message := "hello"
fee := uint64(1)
xmssPK := misc.UCharVectorToBytes(xmss.PK())
messageTx := NewTestMessageTransaction(message, fee, xmssPK, nil)
assert.NotNil(t, messageTx.tx)
pbdata := messageTx.tx.PBData()
tx2 := MessageTransaction{}
tx2.FromPBdata(*pbdata)
assert.Equal(t, pbdata, tx2.PBData())
	// Test to ensure FromPBData doesn't use a reference object to initialize tx.data
messageTx.tx.PBData().Fee = 10
assert.Equal(t, messageTx.tx.Fee(), uint64(10))
assert.NotEqual(t, tx2.Fee(), messageTx.tx.Fee())
// A random Message Txn
tx3 := MessageTransaction{}
assert.NotEqual(t, pbdata, tx3.PBData())
}
| TestMessageTransaction_Validate |
annotation.d.ts | import { IPdfWrapper } from './../../interfaces/i-pdf-wrapper';
import { IPdfPrimitive } from './../../interfaces/i-pdf-primitives';
import { PdfColor } from './../graphics/pdf-color';
import { RectangleF } from './../drawing/pdf-drawing';
import { PdfPage } from './../pages/pdf-page';
import { PdfPageBase } from './../pages/pdf-page-base';
import { PdfDictionary } from './../primitives/pdf-dictionary';
import { DictionaryProperties } from './../input-output/pdf-dictionary-properties';
import { PdfBrush } from './../graphics/brushes/pdf-brush';
import { PdfFont } from './../graphics/fonts/pdf-font';
import { PdfStringFormat } from './../graphics/fonts/pdf-string-format';
/**
* `PdfAnnotation` class represents the base class for annotation objects.
* @private
*/
export declare abstract class PdfAnnotation implements IPdfWrapper {
/**
* Specifies the Internal variable to store fields of `PdfDictionaryProperties`.
* @private
*/
protected dictionaryProperties: DictionaryProperties;
/**
* `Color` of the annotation
* @private
*/
private pdfColor;
/**
* `Bounds` of the annotation.
* @private
*/
private rectangle;
/**
* Parent `page` of the annotation.
* @private
*/
private pdfPage;
/**
* `Brush of the text` of the annotation.
* @private
*/
private textBrush;
/**
* `Font of the text` of the annotation.
* @private
*/
private textFont;
/**
* `StringFormat of the text` of the annotation.
* @private
*/
private format;
/**
* `Text` of the annotation.
* @private
*/
private content;
/**
* Internal variable to store `dictionary`.
* @private
*/
private pdfDictionary;
/**
     * To specify the `Inner color` with which to fill the annotation
* @private
*/
private internalColor;
/**
* `opacity or darkness` of the annotation.
* @private
*/
private darkness;
/**
* `Color` of the annotation
* @private
*/
color: PdfColor;
/**
     * To specify the `Inner color` with which to fill the annotation
* @private
*/ | * @private
*/
bounds: RectangleF;
/**
* Parent `page` of the annotation.
* @private
*/
readonly page: PdfPage;
/**
     * To specify the `Font of the text` in the annotation.
* @private
*/
font: PdfFont;
/**
     * To specify the `StringFormat of the text` in the annotation.
* @private
*/
stringFormat: PdfStringFormat;
/**
     * To specify the `Brush of the text` in the annotation.
* @private
*/
brush: PdfBrush;
/**
* `Text` of the annotation.
* @private
*/
text: string;
/**
* Internal variable to store `dictionary`.
*/
dictionary: PdfDictionary;
/**
* Object initialization for `Annotation` class
* @private
*/
constructor();
constructor(bounds: RectangleF);
/**
* `Initialize` the annotation event handler and specifies the type of the annotation.
* @private
*/
protected initialize(): void;
/**
* Sets related `page` of the annotation.
* @private
*/
setPage(page: PdfPageBase): void;
/**
* Handles the `BeginSave` event of the Dictionary.
* @private
*/
beginSave(): void;
/**
* `Saves` an annotation.
* @private
*/
protected save(): void;
/**
* Gets the `element`.
* @private
*/
readonly element: IPdfPrimitive;
} | innerColor: PdfColor;
/**
* `bounds` of the annotation. |
struct.go | package disgord
import (
"encoding/json"
"fmt"
"time"
)
// common functionality/types used by struct_*.go files goes here
// Copier holds the CopyOverTo method which copies all its content from one
// struct to another. Note that this requires a deep copy.
// Useful when overwriting already existing content in the cache to reduce GC pressure.
type Copier interface {
CopyOverTo(other interface{}) error
}
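// Editor's sketch (illustrative only, not part of the original file): a minimal
// type satisfying Copier, to show the deep-copy contract described above.
type exampleNote struct {
	Text string
}

// CopyOverTo copies the note's content into another *exampleNote.
func (n *exampleNote) CopyOverTo(other interface{}) error {
	dst, ok := other.(*exampleNote)
	if !ok {
		return fmt.Errorf("CopyOverTo: expected *exampleNote, got %T", other)
	}
	dst.Text = n.Text
	return nil
}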
func | (message string) *ErrorUnsupportedType {
return &ErrorUnsupportedType{
info: message,
}
}
type ErrorUnsupportedType struct {
info string
}
func (eut *ErrorUnsupportedType) Error() string {
return eut.info
}
// DiscordUpdater holds the Update method for updating any given Discord struct
// (fetch the latest content). If you only want to keep up to date with the
// cache use the UpdateFromCache method.
// TODO: change param type for UpdateFromCache once caching is implemented
//type DiscordUpdater interface {
// Update(session Session)
// UpdateFromCache(session Session)
//}
// DiscordSaver holds the SaveToDiscord method for sending changes to the
// Discord API over REST.
// If you change any of the values and want to notify Discord about your change,
// use the Save method to send a REST request (assuming that the struct values
// can be updated).
//
// NOTE! If the struct has a snowflake/ID, it will update content. But if the
// snowflake is missing/not set, it will create content (if possible,
// otherwise you will get an error)
type discordSaver interface {
saveToDiscord(session Session) error
}
// DiscordDeleter holds the DeleteFromDiscord method which deletes a given
// object from the Discord servers.
type discordDeleter interface {
deleteFromDiscord(session Session) error
}
// DeepCopier holds the DeepCopy method which creates and returns a deep copy of
// any struct.
type DeepCopier interface {
DeepCopy() interface{}
}
// Discord types
// helperTypes: timestamp, levels, etc.
// discordTimeFormat to be able to correctly convert timestamps back into json,
// we need the micro timestamp with an addition at the ending.
// time.RFC3331 does not yield an output similar to the discord timestamp input, the date is however correct.
const timestampFormat = "2006-01-02T15:04:05.000000+00:00"
type Marshaler interface {
MarshalJSON() ([]byte, error)
}
type Unmarshaler interface {
UnmarshalJSON(data []byte) error
}
type Timestamp time.Time
// error: https://stackoverflow.com/questions/28464711/go-strange-json-hyphen-unmarshall-error
func (t Timestamp) MarshalJSON() ([]byte, error) {
	// wrap in double quotes for valid JSON parsing
jsonReady := fmt.Sprintf("\"%s\"", t.String())
return []byte(jsonReady), nil
}
func (t *Timestamp) UnmarshalJSON(data []byte) error {
var ts time.Time
err := json.Unmarshal(data, &ts)
if err != nil {
return err
}
*t = Timestamp(ts)
return nil
}
// String converts the timestamp into a Discord-formatted timestamp; time.RFC3339 does not suffice
func (t Timestamp) String() string {
return t.Time().Format(timestampFormat)
}
// Time converts the Timestamp into a time.Time type.
func (t Timestamp) Time() time.Time {
return time.Time(t)
}
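// Editor's sketch (illustrative only, not part of the original file): a JSON
// round trip of Timestamp using the codec defined above; only packages already
// imported by this file are used.
func exampleTimestampRoundTrip() (string, bool) {
	ts := Timestamp(time.Date(2018, 10, 1, 12, 30, 0, 0, time.UTC))
	raw, _ := json.Marshal(ts) // quoted Discord-style string with microsecond precision
	var parsed Timestamp
	_ = json.Unmarshal(raw, &parsed)
	return string(raw), parsed.Time().Equal(ts.Time())
}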
// -----------
// levels
// ExplicitContentFilterLvl ...
// https://discordapp.com/developers/docs/resources/guild#guild-object-explicit-content-filter-level
type ExplicitContentFilterLvl uint
func (ecfl *ExplicitContentFilterLvl) Disabled() bool {
return *ecfl == 0
}
func (ecfl *ExplicitContentFilterLvl) MembersWithoutRoles() bool {
return *ecfl == 1
}
func (ecfl *ExplicitContentFilterLvl) AllMembers() bool {
return *ecfl == 2
}
// MFA ...
// https://discordapp.com/developers/docs/resources/guild#guild-object-mfa-level
type MFALvl uint
func (mfal *MFALvl) None() bool {
return *mfal == 0
}
func (mfal *MFALvl) Elevated() bool {
return *mfal == 1
}
// Verification ...
// https://discordapp.com/developers/docs/resources/guild#guild-object-verification-level
type VerificationLvl uint
// None unrestricted
func (vl *VerificationLvl) None() bool {
return *vl == 0
}
// Low must have verified email on account
func (vl *VerificationLvl) Low() bool {
return *vl == 1
}
// Medium must be registered on Discord for longer than 5 minutes
func (vl *VerificationLvl) Medium() bool {
return *vl == 2
}
// High (╯°□°)╯︵ ┻━┻ - must be a member of the server for longer than 10 minutes
func (vl *VerificationLvl) High() bool {
return *vl == 3
}
// VeryHigh ┻━┻ミヽ(ಠ益ಠ)ノ彡┻━┻ - must have a verified phone number
func (vl *VerificationLvl) VeryHigh() bool {
return *vl == 4
}
// DefaultMessageNotification ...
// https://discordapp.com/developers/docs/resources/guild#guild-object-default-message-notification-level
type DefaultMessageNotificationLvl uint
func (dmnl *DefaultMessageNotificationLvl) AllMessages() bool {
return *dmnl == 0
}
func (dmnl *DefaultMessageNotificationLvl) OnlyMentions() bool {
return *dmnl == 1
}
func (dmnl *DefaultMessageNotificationLvl) Equals(v uint) bool {
return uint(*dmnl) == v
}
| NewErrorUnsupportedType |
test_kylin_cube.py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import unittest
from datetime import datetime
from unittest.mock import MagicMock, patch
from airflow.exceptions import AirflowException
from airflow.models import TaskInstance
from airflow.models.dag import DAG
from airflow.providers.apache.kylin.operators.kylin_cube import KylinCubeOperator
from airflow.utils import timezone
DEFAULT_DATE = timezone.datetime(2020, 1, 1)
class TestKylinCubeOperator(unittest.TestCase):
_config = {
'kylin_conn_id': 'kylin_default',
'project': 'learn_kylin',
'cube': 'kylin_sales_cube',
'command': 'build',
'start_time': datetime(2012, 1, 2, 0, 0).strftime("%s") + '000',
'end_time': datetime(2012, 1, 3, 0, 0).strftime("%s") + '000',
}
cube_command = ['fullbuild', 'build', 'merge', 'refresh',
'delete', 'build_streaming', 'merge_streaming', 'refresh_streaming',
'disable', 'enable', 'purge', 'clone', 'drop']
build_response = {"uuid": "c143e0e4-ac5f-434d-acf3-46b0d15e3dc6"}
def setUp(self):
args = {
'owner': 'airflow',
'start_date': DEFAULT_DATE
}
self.dag = DAG('test_dag_id', default_args=args)
@patch('airflow.providers.apache.kylin.operators.kylin_cube.KylinHook')
def test_execute(self, mock_hook):
operator = KylinCubeOperator(
task_id='kylin_task',
dag=self.dag,
**self._config
)
hook = MagicMock()
hook.invoke_command = ['fullbuild', 'build', 'merge', 'refresh',
'delete', 'build_streaming', 'merge_streaming', 'refresh_streaming',
'disable', 'enable', 'purge', 'clone', 'drop']
mock_hook.return_value = hook
mock_hook.cube_run.return_value = {}
self.assertIsNotNone(operator)
self.assertEqual(self._config['kylin_conn_id'], operator.kylin_conn_id)
self.assertEqual(self._config['project'], operator.project)
self.assertEqual(self._config['cube'], operator.cube)
self.assertEqual(self._config['command'], operator.command)
self.assertEqual(self._config['start_time'], operator.start_time)
self.assertEqual(self._config['end_time'], operator.end_time)
operator.execute(None)
mock_hook.assert_called_once_with(
kylin_conn_id=self._config['kylin_conn_id'],
project=self._config['project'],
dsn=None
)
mock_hook.return_value.cube_run.assert_called_once_with('kylin_sales_cube',
'build',
end=datetime(2012, 1, 3, 0, 0),
name=None,
offset_end=None,
offset_start=None,
start=datetime(2012, 1, 2, 0, 0))
@patch('airflow.providers.apache.kylin.operators.kylin_cube.KylinHook')
def test_execute_build(self, mock_hook):
operator = KylinCubeOperator(
is_track_job=True,
timeout=5,
interval=1,
task_id='kylin_task',
dag=self.dag,
**self._config
)
hook = MagicMock()
hook.invoke_command = self.cube_command
hook.cube_run.return_value = self.build_response
hook.get_job_status.side_effect = ["RUNNING", "RUNNING", "FINISHED"]
mock_hook.return_value = hook
self.assertEqual(operator.execute(None)['uuid'], "c143e0e4-ac5f-434d-acf3-46b0d15e3dc6")
@patch('airflow.providers.apache.kylin.operators.kylin_cube.KylinHook')
def test_execute_build_status_error(self, mock_hook):
operator = KylinCubeOperator(
is_track_job=True,
timeout=5,
interval=1,
task_id='kylin_task',
dag=self.dag,
**self._config
)
hook = MagicMock()
hook.invoke_command = self.cube_command
hook.cube_run.return_value = self.build_response
hook.get_job_status.return_value = "ERROR"
mock_hook.return_value = hook
self.assertRaises(AirflowException, operator.execute, None)
@patch('airflow.providers.apache.kylin.operators.kylin_cube.KylinHook')
def test_execute_build_time_out_error(self, mock_hook):
operator = KylinCubeOperator(
is_track_job=True,
timeout=5,
interval=1,
task_id='kylin_task', | dag=self.dag,
**self._config
)
hook = MagicMock()
hook.invoke_command = self.cube_command
hook.cube_run.return_value = self.build_response
hook.get_job_status.return_value = "RUNNING"
mock_hook.return_value = hook
self.assertRaises(AirflowException, operator.execute, None)
def test_render_template(self):
operator = KylinCubeOperator(
task_id="kylin_build_1",
kylin_conn_id='kylin_default',
project="{{ params.project }}",
cube="{{ params.cube }}",
command="{{ params.command }}",
start_time="{{ params.start_time }}",
end_time="{{ params.end_time }}",
is_track_job=True,
dag=self.dag,
params={
'project': 'learn_kylin',
'cube': 'kylin_sales_cube',
'command': 'build',
'start_time': '1483200000000',
'end_time': '1483286400000',
},
)
ti = TaskInstance(operator, DEFAULT_DATE)
ti.render_templates()
self.assertEqual('learn_kylin', getattr(operator, 'project'))
self.assertEqual('kylin_sales_cube', getattr(operator, 'cube'))
self.assertEqual('build', getattr(operator, 'command'))
self.assertEqual('1483200000000', getattr(operator, 'start_time'))
self.assertEqual('1483286400000', getattr(operator, 'end_time')) | |
isNil.test.js | expect(isNil(void 0)).to.be.true;
expect(isNil(undefined)).to.be.true;
expect(isNil(false)).to.be.false;
expect(isNil(0)).to.be.false;
expect(isNil([])).to.be.false;
}); | it('basic', function() {
expect(isNil(null)).to.be.true; |
|
assertResultIsNotAValue.ts | import { AssertionFailed } from '../../errors'; |
// eslint-disable-next-line @typescript-eslint/naming-convention
const assertResultIsNotAValue = function <TValue, TError extends Error>(
actual: Result<TValue, TError>
): Result<undefined, AssertionFailed> {
if (!actual.hasValue()) {
return value();
}
return error(new AssertionFailed({
message: 'The result is a value.',
actual: prettyPrint(dispel(actual))
}));
};
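// Editor's sketch (illustrative only, not part of the original file): typical use
// of the helper; `value` is the defekt constructor imported by this file.
// eslint-disable-next-line @typescript-eslint/no-unused-vars
const exampleUsage = function (): boolean {
  const checked = assertResultIsNotAValue(value('unexpected'));

  // The argument carried a value, so `checked` wraps an AssertionFailed error
  // and hasValue() returns false here.
  return checked.hasValue();
};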
export {
assertResultIsNotAValue
}; | import { dispel } from '../../dispel/dispel';
import { prettyPrint } from '../../prettyPrint/typeAware/prettyPrint';
import { error, Result, value } from 'defekt'; |
util.py | import numpy as np
import librosa
# import pdb
import wget
local_config = {
'batch_size': 64,
'load_size': 22050*20,
'phase': 'extract'
}
def get_audio(audio_link):
file_name = audio_link.split('/')[-1]
save_location = "/Users/sanjitjain/projects/soundnet_tf/data/"
wget.download(audio_link, save_location + file_name + ".mp3")
return str(save_location+file_name)
def load_from_link(link, config=local_config):
audio_path = get_audio(link)
audio_path = "/Users/sanjitjain/projects/soundnet_tf/data/tame_impala.mp3"
sound_sample, _ = load_audio(audio_path)
audio = preprocess(sound_sample, config)
return audio
def load_from_list(name_list, config=local_config):
assert len(name_list) == config['batch_size'], \
"The length of name_list({})[{}] is not the same as batch_size[{}]".format(
name_list[0], len(name_list), config['batch_size'])
audios = np.zeros([config['batch_size'], config['load_size'], 1, 1])
for idx, audio_path in enumerate(name_list):
sound_sample, _ = load_audio(audio_path)
audios[idx] = preprocess(sound_sample, config)
return audios
def load_from_txt(txt_name, config=local_config):
with open(txt_name, 'r') as handle:
txt_list = handle.read().splitlines()
audios = []
for idx, audio_path in enumerate(txt_list):
sound_sample, _ = load_audio(audio_path)
audios.append(preprocess(sound_sample, config))
return audios
# NOTE: Load an audio as the same format in soundnet
# 1. Keep original sample rate (which conflicts their own paper)
# 2. Use first channel in multiple channels
# 3. Keep range in [-256, 256]
def load_audio(audio_path, sr=None):
# By default, librosa will resample the signal to 22050Hz(sr=None). And range in (-1., 1.)
sound_sample, sr = librosa.load(audio_path, sr=sr, mono=False)
return sound_sample, sr
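# Editor's sketch (illustrative only, not part of the original file): how
# load_audio and preprocess are typically combined; the path below is a
# hypothetical placeholder.
def example_extract_input(audio_path="/tmp/example.mp3"):
    sound_sample, _ = load_audio(audio_path)
    # Returns an array shaped (1, N, 1, 1), ready to feed the SoundNet graph.
    return preprocess(sound_sample, local_config)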
def preprocess(raw_audio, config=local_config):
# Select first channel (mono)
if len(raw_audio.shape) > 1:
raw_audio = raw_audio[0]
# Make range [-256, 256]
raw_audio *= 256.0
# Make minimum length available
length = config['load_size']
if length > raw_audio.shape[0]: | if config['phase'] != 'extract':
raw_audio = raw_audio[:length]
# Check conditions
assert len(raw_audio.shape) == 1, "It seems this audio contains two channels, we only need the first channel"
assert np.max(raw_audio) <= 256, "It seems this audio contains signal that exceeds 256"
assert np.min(raw_audio) >= -256, "It seems this audio contains signal that exceeds -256"
# Shape to 1 x DIM x 1 x 1
raw_audio = np.reshape(raw_audio, [1, -1, 1, 1])
    return raw_audio.copy() | raw_audio = np.tile(raw_audio, length // raw_audio.shape[0] + 1)
# Make equal training length |
describe_gtm_logs.go | package alidns
//Licensed under the Apache License, Version 2.0 (the "License");
//you may not use this file except in compliance with the License.
//You may obtain a copy of the License at
//
//http://www.apache.org/licenses/LICENSE-2.0
//
//Unless required by applicable law or agreed to in writing, software
//distributed under the License is distributed on an "AS IS" BASIS,
//WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//See the License for the specific language governing permissions and
//limitations under the License.
//
// Code generated by Alibaba Cloud SDK Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.
import (
"github.com/aliyun/alibaba-cloud-sdk-go/sdk/requests"
"github.com/aliyun/alibaba-cloud-sdk-go/sdk/responses"
)
// DescribeGtmLogs invokes the alidns.DescribeGtmLogs API synchronously
func (client *Client) DescribeGtmLogs(request *DescribeGtmLogsRequest) (response *DescribeGtmLogsResponse, err error) {
response = CreateDescribeGtmLogsResponse()
err = client.DoAction(request, response)
return
}
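// Editor's sketch (illustrative only, not part of the generated file): a typical
// synchronous call. NewClientWithAccessKey is assumed to be the client
// constructor generated elsewhere in this package; the region, credentials and
// instance ID below are placeholders.
func exampleDescribeGtmLogs() (*DescribeGtmLogsResponse, error) {
	client, err := NewClientWithAccessKey("cn-hangzhou", "<accessKeyId>", "<accessKeySecret>")
	if err != nil {
		return nil, err
	}
	request := CreateDescribeGtmLogsRequest()
	request.InstanceId = "gtm-example-instance" // hypothetical GTM instance ID
	return client.DescribeGtmLogs(request)
}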
// DescribeGtmLogsWithChan invokes the alidns.DescribeGtmLogs API asynchronously
func (client *Client) DescribeGtmLogsWithChan(request *DescribeGtmLogsRequest) (<-chan *DescribeGtmLogsResponse, <-chan error) {
responseChan := make(chan *DescribeGtmLogsResponse, 1)
errChan := make(chan error, 1)
err := client.AddAsyncTask(func() {
defer close(responseChan)
defer close(errChan)
response, err := client.DescribeGtmLogs(request)
if err != nil {
errChan <- err
} else {
responseChan <- response
}
})
if err != nil {
errChan <- err
close(responseChan)
close(errChan)
}
return responseChan, errChan
}
// DescribeGtmLogsWithCallback invokes the alidns.DescribeGtmLogs API asynchronously
func (client *Client) DescribeGtmLogsWithCallback(request *DescribeGtmLogsRequest, callback func(response *DescribeGtmLogsResponse, err error)) <-chan int {
result := make(chan int, 1)
err := client.AddAsyncTask(func() {
var response *DescribeGtmLogsResponse
var err error
defer close(result)
response, err = client.DescribeGtmLogs(request)
callback(response, err)
result <- 1
})
if err != nil {
defer close(result)
callback(nil, err)
result <- 0
}
return result
}
// DescribeGtmLogsRequest is the request struct for api DescribeGtmLogs
type DescribeGtmLogsRequest struct {
*requests.RpcRequest
StartTimestamp requests.Integer `position:"Query" name:"StartTimestamp"`
PageNumber requests.Integer `position:"Query" name:"PageNumber"`
EndTimestamp requests.Integer `position:"Query" name:"EndTimestamp"`
InstanceId string `position:"Query" name:"InstanceId"`
UserClientIp string `position:"Query" name:"UserClientIp"`
PageSize requests.Integer `position:"Query" name:"PageSize"`
Lang string `position:"Query" name:"Lang"`
Keyword string `position:"Query" name:"Keyword"`
}
// DescribeGtmLogsResponse is the response struct for api DescribeGtmLogs
type DescribeGtmLogsResponse struct {
*responses.BaseResponse
RequestId string `json:"RequestId" xml:"RequestId"`
TotalItems int `json:"TotalItems" xml:"TotalItems"`
TotalPages int `json:"TotalPages" xml:"TotalPages"`
PageSize int `json:"PageSize" xml:"PageSize"`
PageNumber int `json:"PageNumber" xml:"PageNumber"`
Logs Logs `json:"Logs" xml:"Logs"`
}
// CreateDescribeGtmLogsRequest creates a request to invoke DescribeGtmLogs API
func CreateDescribeGtmLogsRequest() (request *DescribeGtmLogsRequest) {
request = &DescribeGtmLogsRequest{
RpcRequest: &requests.RpcRequest{},
}
request.InitWithApiInfo("Alidns", "2015-01-09", "DescribeGtmLogs", "alidns", "openAPI")
request.Method = requests.POST
return
}
// CreateDescribeGtmLogsResponse creates a response to parse from DescribeGtmLogs response
func | () (response *DescribeGtmLogsResponse) {
response = &DescribeGtmLogsResponse{
BaseResponse: &responses.BaseResponse{},
}
return
}
| CreateDescribeGtmLogsResponse |
chaincode.go | package chaincode
import ( | "github.com/spf13/cobra"
"io"
)
func NewChaincodeCmd(stdOut io.Writer, stdErr io.Writer) *cobra.Command {
consortiumCmd := &cobra.Command{
Use: "chaincode",
}
consortiumCmd.AddCommand(newChaincodeInstallCMD(stdOut, stdErr))
consortiumCmd.AddCommand(newChaincodeQueryInstalledCMD(stdOut, stdErr))
consortiumCmd.AddCommand(newChaincodeApproveCMD(stdOut, stdErr))
consortiumCmd.AddCommand(newChaincodeCommitCMD(stdOut, stdErr))
consortiumCmd.AddCommand(newQueryChaincodeCMD(stdOut, stdErr))
consortiumCmd.AddCommand(newInvokeChaincodeCMD(stdOut, stdErr))
return consortiumCmd
} | |
changelog.go | /*
Copyright 2020 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package changelog
import (
"bufio"
"bytes"
"encoding/json"
"fmt"
"os"
"path/filepath"
"strings"
"github.com/blang/semver"
"github.com/pkg/errors"
"github.com/sirupsen/logrus"
"k8s.io/release/pkg/notes/options"
"sigs.k8s.io/release-sdk/git"
"sigs.k8s.io/release-utils/util"
)
// Options are the main settings for generating the changelog.
type Options struct {
RepoPath string
Tag string
Branch string
Bucket string
Tars string
Images string
HTMLFile string
JSONFile string
RecordDir string
ReplayDir string
CVEDataDir string
CloneCVEMaps bool
Dependencies bool
}
// Changelog can be used to generate the changelog for a release.
type Changelog struct {
options *Options
impl
}
// New creates a new Changelog instance.
func New(opts *Options) *Changelog {
return &Changelog{
options: opts,
impl: &defaultImpl{},
}
}
// SetImpl can be used to set the internal implementation.
func (c *Changelog) SetImpl(impl impl) {
c.impl = impl
}
// Run starts the changelog generation.
func (c *Changelog) Run() error {
tag, err := c.impl.TagStringToSemver(c.options.Tag)
if err != nil {
return errors.Wrapf(err, "parse tag %s", c.options.Tag)
}
// Automatically set the branch to a release branch if not provided
branch := c.options.Branch
if branch == "" {
branch = fmt.Sprintf("release-%d.%d", tag.Major, tag.Minor)
}
logrus.Infof("Using release branch %s", branch)
logrus.Infof("Using local repository path %s", c.options.RepoPath)
repo, err := c.impl.OpenRepo(c.options.RepoPath)
if err != nil {
return errors.Wrapf(err,
"open expected k/k repository %q", c.options.RepoPath,
)
}
if currentBranch, err := c.impl.CurrentBranch(repo); err == nil {
logrus.Infof("We're currently on branch: %s", currentBranch)
}
remoteBranch := git.Remotify(branch)
head, err := c.impl.RevParseTag(repo, remoteBranch)
if err != nil {
return errors.Wrap(err, "get latest branch commit")
}
logrus.Infof("Found latest %s commit %s", remoteBranch, head)
var markdown, jsonStr, startRev, endRev string
if tag.Patch == 0 {
if len(tag.Pre) == 0 { // nolint:gocritic // a switch case would not make it better
// Still create the downloads table
downloadsTable := &bytes.Buffer{}
startTag := util.SemverToTagString(semver.Version{
Major: tag.Major, Minor: tag.Minor - 1, Patch: 0,
})
startRev = startTag
endRev = head
if err := c.impl.CreateDownloadsTable(
downloadsTable, c.options.Bucket, c.options.Tars,
c.options.Images, startRev, c.options.Tag,
); err != nil {
return errors.Wrapf(err, "create downloads table")
}
// New final minor versions should have remote release notes
markdown, jsonStr, err = c.lookupRemoteReleaseNotes(branch)
markdown = downloadsTable.String() + markdown
} else if tag.Pre[0].String() == "alpha" && tag.Pre[1].VersionNum == 1 {
// v1.x.0-alpha.1 releases use the previous minor as start commit.
// Those are usually the first releases being cut on master after
// the previous final has been released.
startRev = util.SemverToTagString(semver.Version{
Major: tag.Major, Minor: tag.Minor - 1, Patch: 0,
})
logrus.Infof("Using previous minor %s as start tag", startRev)
// The end tag does not yet exist which means that we stick to
// the current HEAD as end revision.
endRev = head
markdown, jsonStr, err = c.generateReleaseNotes(branch, startRev, endRev)
} else {
// New minor alpha, beta and rc releases get generated notes
latestTags, tErr := c.impl.LatestGitHubTagsPerBranch()
if tErr != nil {
return errors.Wrap(tErr, "get latest GitHub tags")
}
if startTag, ok := latestTags[branch]; ok {
logrus.Infof("Found start tag %s", startTag)
// The end tag does not yet exist which means that we stick to
// the current HEAD as end revision.
startRev = startTag
endRev = head
markdown, jsonStr, err = c.generateReleaseNotes(branch, startRev, endRev)
} else {
return errors.Errorf(
"no latest tag available for branch %s", branch,
)
}
}
} else {
if c.options.CloneCVEMaps {
cveDir, err := c.impl.CloneCVEData()
if err != nil {
return errors.Wrap(err, "getting cve data maps")
}
c.options.CVEDataDir = cveDir
}
// A patch version, let’s just use the previous patch
startTag := util.SemverToTagString(semver.Version{
Major: tag.Major, Minor: tag.Minor, Patch: tag.Patch - 1,
})
startRev = startTag
endRev = head
markdown, jsonStr, err = c.generateReleaseNotes(branch, startTag, endRev)
}
if err != nil {
return errors.Wrap(err, "generate release notes")
}
if c.options.Dependencies {
logrus.Info("Generating dependency changes")
deps, err := c.impl.DependencyChanges(startRev, endRev)
if err != nil {
return errors.Wrap(err, "generate dependency changes")
}
markdown += strings.Repeat(nl, 2) + deps
}
logrus.Info("Generating TOC")
toc, err := c.impl.GenerateTOC(markdown)
if err != nil {
return errors.Wrap(err, "generate table of contents")
}
// Restore the currently checked out branch
currentBranch, err := c.impl.CurrentBranch(repo)
if err != nil {
return errors.Wrap(err, "get current branch")
}
if currentBranch != "" {
defer func() {
if err := c.impl.Checkout(repo, currentBranch); err != nil {
logrus.Errorf("Restore branch %s: %v", currentBranch, err)
}
}()
}
logrus.Infof("Checking out %s branch", git.DefaultBranch)
if err := c.impl.Checkout(repo, git.DefaultBranch); err != nil {
return errors.Wrapf(err, "checkout %s branch", git.DefaultBranch)
}
logrus.Info("Writing markdown")
if err := c.writeMarkdown(repo, toc, markdown, tag); err != nil {
return errors.Wrap(err, "write markdown")
}
logrus.Info("Writing HTML")
if err := c.writeHTML(tag, markdown); err != nil {
return errors.Wrap(err, "write HTML")
}
logrus.Info("Writing JSON")
if err := c.writeJSON(tag, jsonStr); err != nil {
return errors.Wrap(err, "write JSON")
}
logrus.Info("Committing changes")
return errors.Wrap(
c.commitChanges(repo, branch, tag),
"commit changes",
)
}
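// generateReleaseNotes gathers the release notes between startRev and endRev
// on the given branch and renders them as markdown and as a JSON string.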
func (c *Changelog) generateReleaseNotes(
branch, startRev, endRev string,
) (markdown, jsonStr string, err error) {
logrus.Info("Generating release notes")
notesOptions := options.New()
notesOptions.Branch = branch
notesOptions.StartRev = startRev
notesOptions.EndSHA = endRev
notesOptions.RepoPath = c.options.RepoPath
notesOptions.ReleaseBucket = c.options.Bucket
notesOptions.ReleaseTars = c.options.Tars
notesOptions.Debug = logrus.StandardLogger().Level >= logrus.DebugLevel
notesOptions.RecordDir = c.options.RecordDir
notesOptions.ReplayDir = c.options.ReplayDir
notesOptions.Pull = false
notesOptions.AddMarkdownLinks = true
if c.options.CVEDataDir != "" {
notesOptions.MapProviderStrings = append(
notesOptions.MapProviderStrings, c.options.CVEDataDir,
)
}
if err := c.impl.ValidateAndFinish(notesOptions); err != nil {
return "", "", errors.Wrap(err, "validating notes options")
}
releaseNotes, err := c.impl.GatherReleaseNotes(notesOptions)
if err != nil {
return "", "", errors.Wrapf(err, "gather release notes")
}
doc, err := c.impl.NewDocument(releaseNotes, startRev, c.options.Tag)
if err != nil {
return "", "", errors.Wrapf(err, "create release note document")
}
releaseNotesJSON, err := json.MarshalIndent(releaseNotes.ByPR(), "", " ")
if err != nil {
return "", "", errors.Wrapf(err, "build release notes JSON")
}
markdown, err = c.impl.RenderMarkdownTemplate(
doc, c.options.Bucket, c.options.Tars, c.options.Images,
options.GoTemplateInline+releaseNotesTemplate,
)
if err != nil {
return "", "", errors.Wrapf(err, "render release notes to markdown")
}
return markdown, string(releaseNotesJSON), nil
}
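// writeMarkdown writes the release notes into the repository's CHANGELOG
// markdown file, creating the file if it does not exist or prepending the new
// notes and regenerating the table of contents if it does.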
func (c *Changelog) writeMarkdown(
repo *git.Repo, toc, markdown string, tag semver.Version,
) error {
changelogPath := filepath.Join(
c.impl.RepoDir(repo),
markdownChangelogFilename(tag),
)
writeFile := func(t, m string) error {
return c.impl.WriteFile(
changelogPath,
[]byte(addTocMarkers(t)+"\n"+strings.TrimSpace(m)),
os.FileMode(0o644),
)
}
// No changelog exists, simply write the content to a new one
if _, err := c.impl.Stat(changelogPath); os.IsNotExist(err) {
logrus.Infof("Changelog %q does not exist, creating it", changelogPath)
if err := c.adaptChangelogReadmeFile(repo, tag); err != nil {
return errors.Wrap(err, "adapt changelog readme")
}
return writeFile(toc, markdown)
}
// Changelog seems to exist, prepend the notes and re-generate the TOC
logrus.Infof("Adding new content to changelog file %s ", changelogPath)
content, err := c.impl.ReadFile(changelogPath)
if err != nil {
return errors.Wrap(err, "read changelog file")
}
tocEndIndex := bytes.Index(content, []byte(TocEnd))
if tocEndIndex < 0 {
return errors.Errorf(
"find table of contents end marker `%s` in %q",
TocEnd, changelogPath,
)
}
mergedMarkdown := fmt.Sprintf(
"%s\n%s", markdown, string(content[(len(TocEnd)+tocEndIndex):]),
)
mergedTOC, err := c.impl.GenerateTOC(mergedMarkdown)
if err != nil {
return errors.Wrap(err, "generate table of contents")
}
return errors.Wrap(
writeFile(mergedTOC, mergedMarkdown),
"write merged markdown",
)
}
func (c *Changelog) htmlChangelogFilename(tag semver.Version) string {
if c.options.HTMLFile != "" {
return c.options.HTMLFile
}
return changelogFilename(tag, "html")
}
func (c *Changelog) jsonChangelogFilename(tag semver.Version) string {
if c.options.JSONFile != "" {
return c.options.JSONFile
}
return changelogFilename(tag, "json")
}
func markdownChangelogReadme() string {
return filepath.Join(RepoChangelogDir, "README.md")
}
func markdownChangelogFilename(tag semver.Version) string {
return filepath.Join(RepoChangelogDir, changelogFilename(tag, "md"))
}
func changelogFilename(tag semver.Version, ext string) string {
| func addTocMarkers(toc string) string {
return fmt.Sprintf("%s\n\n%s\n%s\n", tocStart, toc, TocEnd)
}
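// writeHTML renders the markdown notes to HTML, wraps them in the HTML
// template and writes the result to the configured (or default) HTML file.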
func (c *Changelog) writeHTML(tag semver.Version, markdown string) error {
content := &bytes.Buffer{}
if err := c.impl.MarkdownToHTML(markdown, content); err != nil {
return errors.Wrap(err, "render HTML from markdown")
}
t, err := c.impl.ParseHTMLTemplate(htmlTemplate)
if err != nil {
return errors.Wrap(err, "parse HTML template")
}
output := bytes.Buffer{}
if err := c.impl.TemplateExecute(t, &output, struct {
Title, Content string
}{util.SemverToTagString(tag), content.String()}); err != nil {
return errors.Wrap(err, "execute HTML template")
}
absOutputPath, err := c.impl.Abs(c.htmlChangelogFilename(tag))
if err != nil {
return errors.Wrap(err, "get absolute file path")
}
logrus.Infof("Writing HTML file to %s", absOutputPath)
return errors.Wrap(
c.impl.WriteFile(absOutputPath, output.Bytes(), os.FileMode(0o644)),
"write template",
)
}
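// writeJSON writes the release notes JSON string to the configured (or
// default) JSON changelog file.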
func (c *Changelog) writeJSON(tag semver.Version, jsonStr string) error {
absOutputPath, err := c.impl.Abs(c.jsonChangelogFilename(tag))
if err != nil {
return errors.Wrap(err, "get absolute file path")
}
logrus.Infof("Writing JSON file to %s", absOutputPath)
return errors.Wrap(
c.impl.WriteFile(absOutputPath, []byte(jsonStr), os.FileMode(0o644)),
"write JSON",
)
}
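// lookupRemoteReleaseNotes fetches the release notes draft (markdown and
// JSON) for the branch from the kubernetes/sig-release repository, falling
// back to an empty JSON document if the JSON draft cannot be retrieved.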
func (c *Changelog) lookupRemoteReleaseNotes(
branch string,
) (markdownStr, jsonStr string, err error) {
logrus.Info("Assuming new minor release, fetching remote release notes")
remoteBase := fmt.Sprintf(
"https://raw.githubusercontent.com/kubernetes/sig-release/%s/"+
"releases/%s/release-notes/", git.DefaultBranch, branch,
)
// Retrieve the markdown version
remoteMarkdown := remoteBase + "release-notes-draft.md"
markdownStr, err = c.impl.GetURLResponse(remoteMarkdown)
if err != nil {
return "", "", errors.Wrapf(err,
"fetch release notes markdown from remote: %s", remoteMarkdown,
)
}
logrus.Infof("Found remote release notes markdown on: %s", remoteMarkdown)
// Retrieve the JSON version
remoteJSON := remoteBase + "release-notes-draft.json"
jsonStr, err = c.impl.GetURLResponse(remoteJSON)
if err != nil {
logrus.Warnf(
"Unable to fetch release notes JSON from remote %s: %v",
remoteJSON, err,
)
// Fallback in case we're not able to retrieve a JSON draft.
jsonStr = "{}"
}
logrus.Infof("Found remote release notes JSON on: %s", remoteJSON)
return markdownStr, jsonStr, nil
}
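// commitChanges adds and commits the changelog files on the default branch
// and, for release branches, checks out the branch, drops obsolete changelog
// files on first official releases and commits the updated changelog there.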
func (c *Changelog) commitChanges(
repo *git.Repo, branch string, tag semver.Version,
) error {
// main branch modifications
releaseChangelog := markdownChangelogFilename(tag)
changelogReadme := markdownChangelogReadme()
changelogFiles := []string{
releaseChangelog,
changelogReadme,
}
for _, filename := range changelogFiles {
logrus.Infof("Adding %s to repository", filename)
if err := c.impl.Add(repo, filename); err != nil {
return errors.Wrapf(err, "add file %s to repository", filename)
}
}
logrus.Info("Committing changes to main branch in repository")
if err := c.impl.Commit(repo, fmt.Sprintf(
"CHANGELOG: Update directory for %s release", util.SemverToTagString(tag),
)); err != nil {
return errors.Wrap(err, "committing changes into repository")
}
if branch != git.DefaultBranch {
logrus.Infof("Checking out %s branch", branch)
// Release branch modifications
if err := c.impl.Checkout(repo, branch); err != nil {
return errors.Wrapf(err, "checking out release branch %s", branch)
}
		// Remove all other changelog files if we’re on the first official release
if tag.Patch == 0 && len(tag.Pre) == 0 {
pattern := filepath.Join(RepoChangelogDir, "CHANGELOG-*.md")
logrus.Infof("Removing unnecessary %s files", pattern)
if err := c.impl.Rm(repo, true, pattern); err != nil {
return errors.Wrapf(err, "removing %s files", pattern)
}
}
logrus.Info("Checking out changelog from main branch")
if err := c.impl.Checkout(
repo, git.DefaultBranch, releaseChangelog,
); err != nil {
return errors.Wrap(err, "check out main branch changelog")
}
logrus.Info("Committing changes to release branch in repository")
if err := c.impl.Commit(repo, fmt.Sprintf(
"Update %s for %s", releaseChangelog, util.SemverToTagString(tag),
)); err != nil {
return errors.Wrap(err, "committing changes into repository")
}
}
return nil
}
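// adaptChangelogReadmeFile inserts a link to the new changelog file at the
// top of the list in the changelog README.md.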
func (c *Changelog) adaptChangelogReadmeFile(
repo *git.Repo, tag semver.Version,
) error {
targetFile := filepath.Join(repo.Dir(), RepoChangelogDir, "README.md")
readme, err := c.impl.ReadFile(targetFile)
if err != nil {
return errors.Wrap(err, "read changelog README.md")
}
cf := filepath.Base(markdownChangelogFilename(tag))
const listPrefix = "- "
changelogEntry := fmt.Sprintf("%s[%s](./%s)", listPrefix, cf, cf)
scanner := bufio.NewScanner(bytes.NewReader(readme))
res := []string{}
inserted := false
for scanner.Scan() {
line := scanner.Text()
if !inserted && strings.HasPrefix(line, listPrefix) {
res = append(res, changelogEntry)
inserted = true
}
res = append(res, line)
}
if err := c.impl.WriteFile(
targetFile, []byte(strings.Join(res, nl)+nl), os.FileMode(0o644)); err != nil {
return errors.Wrap(err, "write changelog README.md")
}
return nil
}
| return fmt.Sprintf("CHANGELOG-%d.%d.%s", tag.Major, tag.Minor, ext)
}
|
mixins.py | import abc
from typing import Callable
from typing import Iterator
from typing import List
from typing import Optional
from xsdata.codegen.models import Class
from xsdata.models.config import GeneratorConfig
from xsdata.utils.constants import return_true
class ContainerInterface(metaclass=abc.ABCMeta):
"""Wrap a list of classes and expose a simple api for easy access and
process."""
config: GeneratorConfig
@abc.abstractmethod
def iterate(self) -> Iterator[Class]:
"""Create an iterator for the class map values."""
@abc.abstractmethod
def find(self, qname: str, condition: Callable = return_true) -> Optional[Class]:
"""Search by qualified name for a specific class with an optional
condition callable."""
@abc.abstractmethod | @abc.abstractmethod
def add(self, item: Class):
"""Add class item to the container."""
@abc.abstractmethod
def extend(self, items: List[Class]):
"""Add a list of classes the container."""
class HandlerInterface(metaclass=abc.ABCMeta):
"""Class handler interface."""
@abc.abstractmethod
def process(self, target: Class):
"""Process the given target class.""" | def find_inner(self, source: Class, qname: str) -> Class:
"""Search by qualified name for a specific inner class or fail."""
|
TestOperationalLayerEnsembleVissim.py | import os
import sys
import ctypes
import platform
import numpy as np
from random import gauss
import win32com.client as com
def get_acceleration(lead_veh_acceleration,lead_veh_id,lead_veh_rel_velocity,lead_veh_type,timestep,
veh_id,veh_setspeed,veh_type,veh_used_distance_headway,veh_used_rel_vel,veh_velocity,
veh_distance_headway,prev_veh_executed_acceleration,
prev_veh_cc_setpoint,prev_veh_cruisecontrol_acceleration):
|
Vissim = com.gencache.EnsureDispatch("Vissim.Vissim")
GlosaNetworkPath='D:\\Projects\\ENSEMBLE\\Vissim_networks\\Pipeline'#'L:\\UserData\\Kingsley\\Ensemble'
#'L:\\UserData\\Kingsley\\SafeDriving'
#'L:\\UserData\\Kingsley\\Ensemble'#'C:\\Users\\Public\\Documents\\GLOSA\\GlosaTrafficLight'
Filename= os.path.join(GlosaNetworkPath, 'Pipeline.inpx')
#os.path.join(GlosaNetworkPath, 'KnooppuntZonzeelBackup.inpx') #os.path.join(GlosaNetworkPath, 'GlosaTestNetwork2.inpx')
#os.path.join(GlosaNetworkPath, 'TestNetwork.inpx')
flag_read_additionally = False # you can read network(elements) additionally, in this case set "flag_read_additionally" to true
Vissim.LoadNet(Filename, flag_read_additionally)
## Load a Layout:
Filename = os.path.join(GlosaNetworkPath, 'Pipeline.layx')
#os.path.join(GlosaNetworkPath, 'KnooppuntZonzeelBackup.layx')
#os.path.join(GlosaNetworkPath, 'TestNetwork.layx')
#os.path.join(GlosaNetworkPath, 'KnooppuntZonzeelBackup.layx')#os.path.join(GlosaNetworkPath, 'GlosaTestNetwork2.layx')
Vissim.LoadLayout(Filename)
End_of_simulation = 6000 # simulation second [s]
Simulation_Resolution = 10 # simulation second [s]
Number_Runs=4
Simulation_Period=300
Vissim.Simulation.SetAttValue('SimRes', Simulation_Resolution)
Vissim.Simulation.SetAttValue('NumRuns', Number_Runs)
Vissim.Simulation.SetAttValue('SimPeriod', Simulation_Period)
#UDA6
#Vissim.Net.UserDefinedAttributes.AddUserDefinedDataAttribute(6,'Vehicle','vehAcceleration','vehAcceleration',2,0)
#Vissim.Net.UserDefinedAttributes.ItemByKey(6).SetAttValue('DefValue',-1)
#UDA6
Vissim.Net.UserDefinedAttributes.AddUserDefinedDataAttribute(3,'Vehicle','COM_cruise_control_Ac','COM_cruise_control_Ac',2,0)
Vissim.Net.UserDefinedAttributes.ItemByKey(3).SetAttValue('DefValue',-1)
#UDA
Vissim.Net.UserDefinedAttributes.AddUserDefinedDataAttribute(4,'Vehicle','COM_cc_setpoint','COM_cc_setpoint',2,0)
Vissim.Net.UserDefinedAttributes.ItemByKey(4).SetAttValue('DefValue',-1)
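# Helper: return the COM object of the vehicle's current leader, or -1 if
# there is no leading vehicle (or it cannot be resolved by its key).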
def get_leader_info(Vehicle):
lead_veh_id = Vehicle.AttValue('LeadTargNo')
lead_veh_type = Vehicle.AttValue('LeadTargType')
if lead_veh_type == 'VEHICLE' and lead_veh_id != None:
try:
front_vehicle = Vissim.Net.Vehicles.ItemByKey(lead_veh_id)
except:
front_vehicle = -1
else:
front_vehicle=-1
return front_vehicle
#prev_veh_cc_setpoint = np.zeros(number_of_vehicles)
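# Main simulation loop: each step, iterate over the platooning vehicles
# (vehicle type 210), collect ego and leader state, call the operational
# layer and write the resulting acceleration and setpoints back to VISSIM.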
for i in range(6000):
for Vehicle in Vissim.Net.Vehicles.FilteredBy("[VEHTYPE\\NO]=210"):
lead_veh=get_leader_info(Vehicle)
if lead_veh!=-1 and (Vehicle.AttValue('Lane')==lead_veh.AttValue('Lane')):
#simulation info
timestep=Vehicle.AttValue('SimSec')
#Ego Info
print((Vehicle.AttValue('Lane')))
veh_id=Vehicle.AttValue('No')
veh_setspeed=Vehicle.AttValue('DesSpeed')/3.6
veh_type=Vehicle.AttValue('VehType\\No')
veh_used_distance_headway=Vehicle.AttValue('FollowDistNet')
veh_used_rel_vel=(Vehicle.AttValue('Speed')-lead_veh.AttValue('Speed'))/3.6
veh_velocity=Vehicle.AttValue('Speed')/3.6
veh_distance_headway=Vehicle.AttValue('FollowDistNet')
prev_veh_executed_acceleration = Vehicle.AttValue('COM_Ac')
prev_veh_cc_setpoint = Vehicle.AttValue('COM_cc_setpoint')
prev_veh_cruisecontrol_acceleration=Vehicle.AttValue('COM_cruise_control_Ac')
#veh_executed_acceleration=a_prev[veh_id]
#veh_cc_setpoint=prev_veh_cc_setpoint[veh_id]
# Leader Info
lead_veh_acceleration=lead_veh.AttValue('Acceleration')
lead_veh_id=lead_veh.AttValue('No')
lead_veh_rel_velocity=(Vehicle.AttValue('Speed')-lead_veh.AttValue('Speed'))/3.6
lead_veh_type=lead_veh.AttValue('VehType\\No')
curr_veh_executed_acceleration,curr_veh_cc_set_point,curr_veh_cruisecontrol_acceleration=get_acceleration(lead_veh_acceleration,lead_veh_id,lead_veh_rel_velocity,lead_veh_type,timestep,
veh_id,veh_setspeed,veh_type,veh_used_distance_headway,veh_used_rel_vel,veh_velocity,
veh_distance_headway,prev_veh_executed_acceleration,
prev_veh_cc_setpoint,prev_veh_cruisecontrol_acceleration)
# a=call vehicle_model(curr_veh_executed_acceleration)
write_file('file_variables.txt')
Vehicle.SetAttValue('COM_Ac',curr_veh_executed_acceleration)
#Vehicle.SetAttValue('COM_Ac', a)
Vehicle.SetAttValue('COM_At',3)
Vehicle.SetAttValue('COM_cc_setpoint', curr_veh_cc_set_point)
Vehicle.SetAttValue('COM_cruise_control_Ac', curr_veh_cruisecontrol_acceleration)
else:
continue
Vissim.Simulation.RunSingleStep()
| if platform.system() == 'Windows':
print('Running on win')
filepath = "L:\\UserData\\Kingsley\\PythonEnsembleTestBed"
file_dll = os.path.join(filepath, 'OperationalDLL.dll')
#file_dll = './OperationalDLL.dll'
elif platform.system() == 'Darwin':
print('Running on mac')
file_dll = 'OperationalDLL.dylib'
else:
print('System not supported')
sys.exit()
# Load operational DLL
lib = None
try:
lib = ctypes.cdll.LoadLibrary(file_dll)
except:
print('Error: DLL file could not be found')
quit()
# Set input values: Write value's for current vehicle, in current timestep
curr_lead_veh_acceleration = ctypes.c_double(lead_veh_acceleration) #2.0
curr_lead_veh_id = ctypes.c_long(lead_veh_id) #40
curr_lead_veh_rel_velocity = ctypes.c_double(lead_veh_rel_velocity ) #-1.0
curr_lead_veh_type = ctypes.c_long(lead_veh_type) #10
curr_timestep = ctypes.c_double(timestep) #55.0
curr_ts_length = ctypes.c_double(0.1)
curr_veh_id = ctypes.c_long(veh_id) #10
curr_veh_setspeed = ctypes.c_double(veh_setspeed) #88/3.6
curr_veh_type = ctypes.c_long(veh_type) #10
curr_veh_controller_in_use = ctypes.c_long(1) # from tactical layer 1=ACC,2=CACC
curr_veh_ACC_h = ctypes.c_double(1.6)
curr_veh_CACC_h = ctypes.c_double(0.6)
curr_veh_used_distance_headway = ctypes.c_double(veh_used_distance_headway)#20.0
curr_veh_used_rel_vel = ctypes.c_double(veh_used_rel_vel) #-1.0
curr_veh_velocity = ctypes.c_double(veh_velocity) #85/3.6
curr_veh_autonomous_operational_warning = ctypes.c_long(10)
curr_veh_platooning_max_acceleration = ctypes.c_double(2.0)
prev_veh_cc_setpoint = ctypes.c_double(prev_veh_cc_setpoint)
prev_veh_cruisecontrol_acceleration = ctypes.c_double(prev_veh_cruisecontrol_acceleration)
prev_veh_distance_headway = ctypes.c_double(veh_distance_headway) #20.0
prev_veh_executed_acceleration = ctypes.c_double(prev_veh_executed_acceleration) #-2.0
# Define variables for return values: These are placeholders, no action required
veh_autonomous_operational_acceleration = ctypes.c_double(1)
veh_autonomous_operational_mixingmode = ctypes.c_long(1)
veh_autonomous_operational_warning = ctypes.c_double(1)
veh_cc_setpoint = ctypes.c_double(1)
veh_cruisecontrol_acceleration = ctypes.c_double(1)
success = ctypes.c_int(0)
print("Now call the OL itself...")
# Call operational controller
lib.operational_controller(
curr_lead_veh_acceleration,
curr_lead_veh_id,
curr_lead_veh_rel_velocity,
curr_lead_veh_type,
curr_timestep,
curr_ts_length,
curr_veh_id,
curr_veh_setspeed,
curr_veh_type,
curr_veh_controller_in_use,
curr_veh_ACC_h,
curr_veh_CACC_h,
curr_veh_used_distance_headway,
curr_veh_used_rel_vel,
curr_veh_velocity,
curr_veh_autonomous_operational_warning,
curr_veh_platooning_max_acceleration,
prev_veh_cc_setpoint,
prev_veh_cruisecontrol_acceleration,
prev_veh_distance_headway,
prev_veh_executed_acceleration,
ctypes.byref(veh_autonomous_operational_acceleration),
ctypes.byref(veh_autonomous_operational_mixingmode),
ctypes.byref(veh_autonomous_operational_warning),
ctypes.byref(veh_cc_setpoint),
ctypes.byref(veh_cruisecontrol_acceleration),
ctypes.byref(success))
# Print the return values
if success.value > 0:
veh_acceleration=veh_autonomous_operational_acceleration.value
#print(veh_autonomous_operational_mixingmode.value)
#print(veh_autonomous_operational_warning.value)
veh_cc_set_point=veh_cc_setpoint.value
veh_cruise_control_acceleration=veh_cruisecontrol_acceleration.value
else:
veh_acceleration=-999
        veh_cc_set_point=-999
veh_cruise_control_acceleration=-999
print('An error occurred while calling DLL')
    return veh_acceleration,veh_cc_set_point,veh_cruise_control_acceleration
trainer.py | from matplotlib.pyplot import show
import torch
from torch.autograd import Variable
from torch.cuda.amp import GradScaler, autocast
import numpy as np
from sklearn.metrics import roc_auc_score
from callbacks.cb_handler import CallbackHandler
from callbacks.cb_base import BaseCB
from callbacks.cb_lr_patch_clf import LR_SchedCB_patch
from callbacks.cb_lr_full_clf import LR_SchedCB_full
from callbacks.cb_lr_2views_clf import LR_SchedCB_2views
from callbacks.cb_lr_w_cyc_cos import LR_SchedCB_W_Cyc_Cos
from callbacks.cb_lr_w_cos import LR_SchedCB_W_Cos
from callbacks.cb_auc import AUC_CB
# from parallel import DataParallelModel, DataParallelCriterion
from util.util import show_auc, calc_auc_desv
parallel = False
# TODO: delete (APAGAR)
import cv2
# Accuracy
def acc(y_hat, labels):
""" Default accuracy """
    # for parallel
if len(y_hat) > 1 and parallel:
y_hat = torch.cat(y_hat)
return (torch.argmax(y_hat, dim=1) == labels).float().sum()
class Trainer():
"""
Many possible configurations for Trainer
config = {
'num_epochs': NUM_EPOCHS,
'batch_size': MINI_BATCH,
'name': 'example',
'title': 'Cats & Dogs Classifier',
'save_last': True, # optional: Save last model (default=False)
'save_best': True, # optional: Save best models (ACC, {AUC}) (default=True)
        'stable_metric': N, # optional: extend epochs to wait N epochs with no metric change (e.g. AUC)
'save_checkpoints': N, # Save checkpoint each N epochs
'features': ['auc'], # optional: features like auc stats or some scheduler (if none default:optim)
'save_path': folder, # if want to save artifacts in other place (eg.cloud)
'show_plots': False, # if want to show plots
'make_plots': False, # if want to disable plots
        'cv_k': (number), # iteration number if using Cross Validation
}
"""
def __init__(self, model, train_dataloader, val_dataloader,
loss_criterion, optimizer, optimizer_args,
device, config):
self.model = model
self.device = device
self.loss_criterion = loss_criterion
# parts of config are only retrieved in callbacks
self.epochs = int(config['num_epochs']) if 'num_epochs' in config else 10
self.mini_batch = int(config['batch_size']) if 'batch_size' in config else 1
self.first_epoch = int(config['start_epoch']) if 'start_epoch' in config else 1
self.stable_metric = int(config['stable_metric']) if 'stable_metric' in config else False
self.name = config['name'] if 'name' in config else 'default'
self.title = config['title'] if 'title' in config else 'Classifier'
self.features = config['features'] if 'features' in config else []
self.make_plots = config['make_plots'] if 'make_plots' in config else True
if train_dataloader:
self.train_dataloader = train_dataloader
else:
return
self.val_dataloader = val_dataloader
self.optimizer = optimizer
self.optimizer_args = optimizer_args
print(self.title)
# Load Callbacks for this session
callbacks = [BaseCB(self.name, self.title, config)]
for feat in self.features:
if feat == 'auc':
callbacks.append(AUC_CB(self.name, config))
if feat == 'lr_step_full':
callbacks.append(LR_SchedCB_full())
if feat == 'lr_step_patch':
callbacks.append(LR_SchedCB_patch())
if feat == 'lr_step_2views':
callbacks.append(LR_SchedCB_2views())
if feat == 'lr_warmup_cos':
callbacks.append(LR_SchedCB_W_Cos())
if feat == 'lr_warmup_cyc_cos':
callbacks.append(LR_SchedCB_W_Cyc_Cos())
if feat == 'LR_SchedCB_W_Cos':
callbacks.append(LR_SchedCB_W_Cos())
self.cb = CallbackHandler(callbacks)
def train_and_validate(self, **kwargs):
"""
        Main train and validate function that runs the main loop (fit).
        Receives all parameters and feeds the callback system.
        Loops through the epochs and executes the PyTorch forward pass, loss
        computation, backpropagation and optimization (gradient calculation).
        Returns the trained model.
"""
calc_acc = kwargs.get('accuracy') if kwargs.get('accuracy') else acc
input_dict = kwargs.get('input_dict') if kwargs.get('input_dict') else []
if not self.cb.begin_train_val(self.epochs, self.model, self.train_dataloader,
self.val_dataloader, self.mini_batch, self.optimizer):
return
self.cb.update_loss(self.loss_criterion, calc_acc)
device = self.device
for epoch in range(self.first_epoch, self.epochs+1):
self.model.train()
train_loss, train_acc = 0.0, 0.0
val_loss, val_acc = 0.0, 0.0
if not self.cb.begin_epoch(epoch): return # noqa: E701
optim = self.cb.update_LR(epoch, self.model, self.optimizer, self.optimizer_args)
if optim: self.optimizer = optim
# Train loop
for _, (inputs, labels) in enumerate(self.train_dataloader):
if isinstance(inputs, dict):
for key in input_dict:
inputs[key] = inputs[key].to(device)
else:
inputs = Variable(inputs.to(device))
labels = Variable(labels.to(device))
# inserting MIXUP handling
res = self.cb.begin_batch(inputs, labels)
if res: inputs, labels, self.loss_criterion, calc_acc = res
self.optimizer.zero_grad() # clean existing gradients
outputs = self.model(inputs) # forward pass
loss = self.loss_criterion(outputs, labels) # compute loss
if parallel:
loss = loss.mean() # list in this case
loss.backward() # backprop the gradients
self.optimizer.step() # update parameters
train_loss += loss.item() * labels.size(0) # inputs.size(0) == mini_batch size
train_acc += calc_acc(outputs, labels).item()
self.cb.after_step(labels.size(0), labels, outputs)
# validation - no gradient tracking needed
with torch.no_grad():
self.model.eval()
self.cb.begin_val()
# validation loop
for _, (inputs, labels) in enumerate(self.val_dataloader):
if isinstance(inputs, dict):
for key in input_dict:
inputs[key] = inputs[key].to(device)
else:
inputs = Variable(inputs.to(device))
labels = Variable(labels.to(device))
outputs = self.model(inputs) # forward pass
loss = self.loss_criterion(outputs, labels) # compute loss
if parallel:
loss = loss.mean()
val_loss += loss.item() * labels.size(0) # inputs.size(0) == mini_batch size
val_acc += calc_acc(outputs, labels).item()
self.cb.after_step_val(labels.size(0), labels, outputs)
self.cb.after_epoch(self.model, train_acc, train_loss, val_acc, val_loss)
self.cb.after_train_val()
return self.model
def train_and_validate_amp(self, **kwargs):
"""
        Mixed precision (automatic) version of train_and_validate.
        Uses FP16 and FP32 in the main loop with PyTorch Automatic Mixed Precision.
        In simple tests it used 75% of the memory in 66% of the time: less memory and faster.
        Sometimes it just doesn't work and gets worse, e.g. for resnest...
"""
assert torch.__version__ >= '1.6.0', "[Mixed precision] Please use PyTorch 1.6.0+"
print('Using AMP')
calc_acc = kwargs.get('accuracy') if kwargs.get('accuracy') else acc
input_dict = kwargs.get('input_dict') if kwargs.get('input_dict') else []
if not self.cb.begin_train_val(self.epochs, self.model, self.train_dataloader,
self.val_dataloader, self.mini_batch, self.optimizer):
|
# Creates a GradScaler once at the beginning of training.
scaler = GradScaler()
device = self.device
# for epoch in range(self.first_epoch, self.epochs+1):
        epoch = self.first_epoch # support for "wait N epochs after best metric"
last_epoch = self.epochs
while epoch <= last_epoch:
self.model.train()
train_loss, train_acc = 0.0, 0.0
val_loss, val_acc = 0.0, 0.0
if not self.cb.begin_epoch(epoch): return # noqa: E701
optim = self.cb.update_LR(epoch, self.model, self.optimizer, self.optimizer_args)
if optim: self.optimizer = optim
# Train loop
for _, (inputs, labels) in enumerate(self.train_dataloader):
if isinstance(inputs, dict):
for key in input_dict:
inputs[key] = inputs[key].to(device)
else:
inputs = Variable(inputs.to(device))
labels = Variable(labels.to(device))
self.optimizer.zero_grad() # clean existing gradients
# Runs the forward pass with autocasting.
with autocast():
outputs = self.model(inputs) # forward pass
loss = self.loss_criterion(outputs, labels) # compute loss
if parallel:
loss = loss.mean() # list in this case
scaler.scale(loss).backward() # backward() on scaled loss for scaled gradients.
scaler.step(self.optimizer) # update parameters
scaler.update() # Updates the scale for next iteration.
train_loss += loss.item() * labels.size(0) # == mini_batch size
train_acc += calc_acc(outputs, labels).item()
self.cb.after_step(labels.size(0), labels, outputs)
# validation - no gradient tracking needed
with torch.no_grad():
self.model.eval()
# validation loop
for _, (inputs, labels) in enumerate(self.val_dataloader):
if isinstance(inputs, dict):
for key in input_dict:
inputs[key] = inputs[key].to(device)
else:
inputs = Variable(inputs.to(device))
labels = Variable(labels.to(device))
outputs = self.model(inputs) # forward pass
loss = self.loss_criterion(outputs, labels) # compute loss
if parallel:
loss = loss.mean()
val_loss += loss.item() * labels.size(0) # == mini_batch size
val_acc += calc_acc(outputs, labels).item()
self.cb.after_step_val(labels.size(0), labels, outputs)
self.cb.after_epoch(self.model, train_acc, train_loss, val_acc, val_loss)
epoch += 1
# print('-', self.cb.best_metric_epoch[self.cb.metric_name[-1]], last_epoch)
            # If using a stable metric, training will stop earlier: after
            # stable_metric epochs with no improvement in the selected validation metric
# last_epoch = self.epochs if not self.stable_metric else max(self.epochs, self.cb.best_metric_epoch[self.cb.metric_name[-1]] + self.stable_metric)
# for metric in self.cb.metric_name:
# print(metric)
last_epoch = self.epochs if not self.stable_metric else min(self.epochs, self.cb.best_metric_epoch[self.cb.metric_name[-1]] + self.stable_metric)
self.cb.after_train_val()
values = [self.cb.best_metric, self.cb.best_metric_epoch, self.cb.elapsed_mins,
self.cb.metric_name, self.cb.loss_plot, self.cb.metric_plot,
self.cb.best_model_file]
return values
def run_test(self, test_dataloader, model_type, **kwargs):
""" Run test from test_dataloader according to model_type.
if model_type = 'normal' : use last saved model
if model_type = 'best' : use best model
Uses: loss function from Trainer
Input: test_dataloader
"""
calc_acc = kwargs.get('accuracy') if kwargs.get('accuracy') else acc
quiet = kwargs.get('quiet') if kwargs.get('quiet') else False
if model_type == 'normal':
model = self.cb.last_model
elif model_type == 'best':
model = self.cb.best_model
elif model_type == 'bootstrap':
model = self.model
test_acc, test_loss = 0., 0.
batch_val_counter = 0
device = self.device
with torch.no_grad():
model.eval()
# validation loop
for _, (inputs, labels) in enumerate(test_dataloader):
if isinstance(inputs, dict):
for key in ['CC', 'MLO']:
inputs[key] = inputs[key].to(device)
labels = Variable(labels.to(device))
else:
inputs = Variable(inputs.to(device))
labels = Variable(labels.to(device))
outputs = model(inputs) # forward pass
loss = self.loss_criterion(outputs, labels) # compute loss
if parallel:
loss = loss.mean()
test_loss += loss.item() * labels.size(0)
test_acc += calc_acc(outputs, labels).item()
batch_val_counter += labels.size(0)
# Find average test loss and test accuracy
avg_test_loss = test_loss/batch_val_counter
avg_test_acc = test_acc/batch_val_counter
if not quiet:
print(f'Model: {model_type} - Test accuracy : {avg_test_acc:.5f}' +
f' Test loss : {avg_test_loss:.5f}')
return avg_test_acc
def run_test_auc(self, test_dataloader, model_type, **kwargs):
""" Run test from test_dataloader, calculating AUC and ROC curve
According to model_type:
if model_type = 'normal' : use last saved model
if model_type = 'best' : use best model
        If we are running test inference only, the model can be passed through kwargs.
Uses: loss function from Trainer
Input: test_dataloader
"""
calc_acc = kwargs.get('accuracy') if kwargs.get('accuracy') else acc
model = kwargs.get('model') if kwargs.get('model') else None
show_results = kwargs.get('show_results') if kwargs.get('show_results') else False
m_positive = kwargs.get('m') if kwargs.get('m') else False
n_negative = kwargs.get('n') if kwargs.get('n') else False
if model is None:
if model_type == 'normal':
model = self.cb.last_model
elif model_type == 'best':
model = self.cb.best_model
elif model_type == 'test':
model = self.model
elif model_type == 'bootstrap':
model = self.model
test_acc, test_loss = 0., 0.
batch_val_counter = 0
y_hat_auc, label_auc = [], []
device = self.device
with torch.no_grad():
model.eval()
# validation loop
for _, (inputs, labels) in enumerate(test_dataloader):
if isinstance(inputs, dict):
for key in ['CC', 'MLO']:
inputs[key] = inputs[key].to(device)
labels = Variable(labels.to(device))
else:
inputs = Variable(inputs.to(device))
labels = Variable(labels.to(device))
outputs = model(inputs) # forward pass
loss = self.loss_criterion(outputs, labels) # compute loss
test_loss += loss.item() * labels.size(0)
# calculate acc
test_acc += calc_acc(outputs, labels).item()
batch_val_counter += labels.size(0)
# Store auc for malignant
label_auc = np.append(label_auc, labels.cpu().detach().numpy())
y_hat_auc = np.append(y_hat_auc, torch.softmax(outputs, dim=1)[:, 1].cpu().detach().numpy())
# enter show result mode
if self.mini_batch == 1 and show_results:
print(f'{labels.item()} {torch.softmax(outputs, dim=1)[:, 1].item():.3f}')
# Find average test loss and test accuracy
avg_test_loss = test_loss/batch_val_counter
avg_test_acc = test_acc/batch_val_counter
print(f"Model: {model_type} - Test accuracy : {avg_test_acc:.3f}" +
f" Test loss : {avg_test_loss:.4f}", end='')
# calculate AUC TEST
auc_mal_val = roc_auc_score(label_auc.ravel(), y_hat_auc.ravel())
# print(f' AUC Malignant: {auc_mal_val:.4f}', end='')
if m_positive and n_negative:
auc_final = f'{auc_mal_val:.4f}±{calc_auc_desv(m_positive, n_negative, auc_mal_val):.4f}'
# print(f'±{calc_auc_desv(m_positive, n_negative, auc_mal_val):.4f}')
print(f' AUC Malignant: {auc_final}')
else:
auc_final = f'{auc_mal_val:.4f}'
print(f' AUC Malignant: {auc_final}')
# print()
if self.make_plots:
show_auc(label_auc, y_hat_auc, self.title, show_plt=False)
# return auc_mal_val
return auc_final
# Not fully tested yet (2021-05)
    # it seems to be working - maybe integrate into a single function as above
    # and use kwargs to indicate that it is test-data-aug?
def run_test_data_aug_auc(self, test_dataloader, model_type, **kwargs):
""" Run test from test_dataloader, calculating AUC and ROC curve
--> Using test-data augmentation: rotation 0°, 90°, 180°, 270°
        --> All rotated samples will be inferred and the AUC will consider all of them.
According to model_type:
if model_type = 'normal' : use last saved model
if model_type = 'best' : use best model
        If we are running test inference only, the model can be passed through kwargs.
Uses: loss function from Trainer
Input: test_dataloader
"""
calc_acc = kwargs.get('accuracy') if kwargs.get('accuracy') else acc
model = kwargs.get('model') if kwargs.get('model') else None
if model is None:
if model_type == 'normal':
model = self.cb.last_model
elif model_type == 'best':
model = self.cb.best_model
elif model_type == 'test':
model = self.model
test_acc, test_loss = 0., 0.
batch_val_counter = 0
y_hat_auc, label_auc = [], []
device = self.device
with torch.no_grad():
model.eval()
# validation loop
for _, (inputs, labels) in enumerate(test_dataloader):
for rot in range(0,4):
# print(rot, inputs.shape)
inputs = torch.rot90(inputs, rot, [2, 3])
# inputs = Variable(inputs.to(device))
# labels = Variable(labels.to(device))
# print(counter, rot, inputs.shape)
inputs = Variable(inputs.to(device))
labels = Variable(labels.to(device))
# img = inputs.cpu().detach().numpy()
# img = img.transpose(0,2,3,1)
# print(img[0, :, :, 0:3].shape)
# cv2.imwrite('thrash/test-aug_'+str(rot)+'.png', img[0, :, :, 0:3]*65535)
outputs = model(inputs) # forward pass
loss = self.loss_criterion(outputs, labels) # compute loss
test_loss += loss.item() * labels.size(0)
# calculate acc
test_acc += calc_acc(outputs, labels).item()
batch_val_counter += labels.size(0)
# Store auc for malignant
label_auc = np.append(label_auc, labels.cpu().detach().numpy())
y_hat_auc = np.append(y_hat_auc, torch.softmax(outputs, dim=1)[:, 1].cpu().detach().numpy())
# enter show result mode
if self.mini_batch == 1:
print(f'{labels.item()} {torch.softmax(outputs, dim=1)[:, 1].item():.3f}')
print('batch_val_counter ', batch_val_counter)
# Find average test loss and test accuracy
avg_test_loss = test_loss/batch_val_counter
avg_test_acc = test_acc/batch_val_counter
print(f"Model: {model_type} - Test accuracy : {avg_test_acc:.3f}" +
f" Test loss : {avg_test_loss:.4f}", end='')
# calculate AUC TEST
auc_mal_val = roc_auc_score(label_auc.ravel(), y_hat_auc.ravel())
print(f' AUC Malignant: {auc_mal_val:.4f}')
if self.make_plots:
show_auc(label_auc, y_hat_auc, self.title, show_plt=False)
return auc_mal_val
| return |
test_fixtures.py | # Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import fixtures as fx
import futurist
import mock
from oslo_config import cfg
from oslo_db import exception as db_exc
from oslo_log import log as logging
from oslo_utils.fixture import uuidsentinel as uuids
from oslo_utils import timeutils
from oslo_utils import uuidutils
from oslotest import output
import sqlalchemy
import testtools
from nova.compute import rpcapi as compute_rpcapi
from nova import conductor
from nova import context
from nova.db.sqlalchemy import api as session
from nova import exception
from nova.network import neutron as neutron_api
from nova import objects
from nova.objects import base as obj_base
from nova.objects import service as service_obj
from nova import test
from nova.tests import fixtures
from nova.tests.unit import conf_fixture
from nova.tests.unit import fake_instance
from nova import utils
CONF = cfg.CONF
class TestLogging(testtools.TestCase):
def test_default_logging(self):
stdlog = self.useFixture(fixtures.StandardLogging())
root = logging.getLogger()
# there should be a null handler as well at DEBUG
self.assertEqual(2, len(root.handlers), root.handlers)
log = logging.getLogger(__name__)
log.info("at info")
log.debug("at debug")
self.assertIn("at info", stdlog.logger.output)
self.assertNotIn("at debug", stdlog.logger.output)
# broken debug messages should still explode, even though we
# aren't logging them in the regular handler
self.assertRaises(TypeError, log.debug, "this is broken %s %s", "foo")
# and, ensure that one of the terrible log messages isn't
# output at info
warn_log = logging.getLogger('migrate.versioning.api')
warn_log.info("warn_log at info, should be skipped")
warn_log.error("warn_log at error")
self.assertIn("warn_log at error", stdlog.logger.output)
self.assertNotIn("warn_log at info", stdlog.logger.output)
def test_debug_logging(self):
self.useFixture(fx.EnvironmentVariable('OS_DEBUG', '1'))
stdlog = self.useFixture(fixtures.StandardLogging())
root = logging.getLogger()
# there should no longer be a null handler
self.assertEqual(1, len(root.handlers), root.handlers)
log = logging.getLogger(__name__)
log.info("at info")
log.debug("at debug")
self.assertIn("at info", stdlog.logger.output)
self.assertIn("at debug", stdlog.logger.output)
class TestOSAPIFixture(testtools.TestCase):
@mock.patch('nova.objects.Service.get_by_host_and_binary')
@mock.patch('nova.objects.Service.create')
def test_responds_to_version(self, mock_service_create, mock_get):
"""Ensure the OSAPI server responds to calls sensibly."""
self.useFixture(output.CaptureOutput())
self.useFixture(fixtures.StandardLogging())
self.useFixture(conf_fixture.ConfFixture())
self.useFixture(fixtures.RPCFixture('nova.test'))
api = self.useFixture(fixtures.OSAPIFixture()).api
# request the API root, which provides us the versions of the API
resp = api.api_request('/', strip_version=True)
self.assertEqual(200, resp.status_code, resp.content)
# request a bad root url, should be a 404
#
# NOTE(sdague): this currently fails, as it falls into the 300
# dispatcher instead. This is a bug. The test case is left in
# here, commented out until we can address it.
#
# resp = api.api_request('/foo', strip_version=True)
# self.assertEqual(resp.status_code, 400, resp.content)
# request a known bad url, and we should get a 404
resp = api.api_request('/foo')
self.assertEqual(404, resp.status_code, resp.content)
class TestDatabaseFixture(testtools.TestCase):
def test_fixture_reset(self):
# because this sets up reasonable db connection strings
self.useFixture(conf_fixture.ConfFixture())
self.useFixture(fixtures.Database())
engine = session.get_engine()
conn = engine.connect()
result = conn.execute("select * from instance_types")
rows = result.fetchall()
self.assertEqual(0, len(rows), "Rows %s" % rows)
# insert a 6th instance type, column 5 below is an int id
# which has a constraint on it, so if new standard instance
# types are added you have to bump it.
conn.execute("insert into instance_types VALUES "
"(NULL, NULL, NULL, 't1.test', 6, 4096, 2, 0, NULL, '87'"
", 1.0, 40, 0, 0, 1, 0)")
result = conn.execute("select * from instance_types")
rows = result.fetchall()
self.assertEqual(1, len(rows), "Rows %s" % rows)
# reset by invoking the fixture again
#
# NOTE(sdague): it's important to reestablish the db
# connection because otherwise we have a reference to the old
# in mem db.
self.useFixture(fixtures.Database())
conn = engine.connect()
result = conn.execute("select * from instance_types")
rows = result.fetchall()
self.assertEqual(0, len(rows), "Rows %s" % rows)
def test_api_fixture_reset(self):
# This sets up reasonable db connection strings
self.useFixture(conf_fixture.ConfFixture())
self.useFixture(fixtures.Database(database='api'))
engine = session.get_api_engine()
conn = engine.connect()
result = conn.execute("select * from cell_mappings")
rows = result.fetchall()
self.assertEqual(0, len(rows), "Rows %s" % rows)
uuid = uuidutils.generate_uuid()
conn.execute("insert into cell_mappings (uuid, name) VALUES "
"('%s', 'fake-cell')" % (uuid,))
result = conn.execute("select * from cell_mappings")
rows = result.fetchall()
self.assertEqual(1, len(rows), "Rows %s" % rows)
# reset by invoking the fixture again
#
# NOTE(sdague): it's important to reestablish the db
# connection because otherwise we have a reference to the old
# in mem db.
self.useFixture(fixtures.Database(database='api'))
conn = engine.connect()
result = conn.execute("select * from cell_mappings")
rows = result.fetchall()
self.assertEqual(0, len(rows), "Rows %s" % rows)
def test_fixture_cleanup(self):
# because this sets up reasonable db connection strings
self.useFixture(conf_fixture.ConfFixture())
fix = fixtures.Database()
self.useFixture(fix)
# manually do the cleanup that addCleanup will do
fix.cleanup()
# ensure the db contains nothing
engine = session.get_engine()
conn = engine.connect()
schema = "".join(line for line in conn.connection.iterdump())
self.assertEqual(schema, "BEGIN TRANSACTION;COMMIT;")
def test_api_fixture_cleanup(self):
# This sets up reasonable db connection strings
self.useFixture(conf_fixture.ConfFixture())
fix = fixtures.Database(database='api')
self.useFixture(fix)
# No data inserted by migrations so we need to add a row
engine = session.get_api_engine()
conn = engine.connect()
uuid = uuidutils.generate_uuid()
conn.execute("insert into cell_mappings (uuid, name) VALUES "
"('%s', 'fake-cell')" % (uuid,))
result = conn.execute("select * from cell_mappings")
rows = result.fetchall()
self.assertEqual(1, len(rows), "Rows %s" % rows)
# Manually do the cleanup that addCleanup will do
fix.cleanup()
# Ensure the db contains nothing
engine = session.get_api_engine()
conn = engine.connect()
schema = "".join(line for line in conn.connection.iterdump())
self.assertEqual("BEGIN TRANSACTION;COMMIT;", schema)
class TestDatabaseAtVersionFixture(testtools.TestCase):
def test_fixture_schema_version(self):
self.useFixture(conf_fixture.ConfFixture())
# In/after 317 aggregates did have uuid
self.useFixture(fixtures.DatabaseAtVersion(318))
engine = session.get_engine()
engine.connect()
meta = sqlalchemy.MetaData(engine)
aggregate = sqlalchemy.Table('aggregates', meta, autoload=True)
self.assertTrue(hasattr(aggregate.c, 'uuid'))
# Before 317, aggregates had no uuid
self.useFixture(fixtures.DatabaseAtVersion(316))
engine = session.get_engine()
engine.connect()
meta = sqlalchemy.MetaData(engine)
aggregate = sqlalchemy.Table('aggregates', meta, autoload=True)
self.assertFalse(hasattr(aggregate.c, 'uuid'))
engine.dispose()
def test_fixture_after_database_fixture(self):
self.useFixture(conf_fixture.ConfFixture())
self.useFixture(fixtures.Database())
self.useFixture(fixtures.DatabaseAtVersion(318))
class TestDefaultFlavorsFixture(testtools.TestCase):
@mock.patch("nova.objects.flavor.Flavor._send_notification")
def test_flavors(self, mock_send_notification):
self.useFixture(conf_fixture.ConfFixture())
self.useFixture(fixtures.Database())
self.useFixture(fixtures.Database(database='api'))
engine = session.get_api_engine()
conn = engine.connect()
result = conn.execute("select * from flavors")
rows = result.fetchall()
self.assertEqual(0, len(rows), "Rows %s" % rows)
self.useFixture(fixtures.DefaultFlavorsFixture())
result = conn.execute("select * from flavors")
rows = result.fetchall()
self.assertEqual(6, len(rows), "Rows %s" % rows)
class TestIndirectionAPIFixture(testtools.TestCase):
def test_indirection_api(self):
# Should initially be None
self.assertIsNone(obj_base.NovaObject.indirection_api)
# make sure the fixture correctly sets the value
fix = fixtures.IndirectionAPIFixture('foo')
self.useFixture(fix)
self.assertEqual('foo', obj_base.NovaObject.indirection_api)
# manually do the cleanup that addCleanup will do
fix.cleanup()
# ensure the initial value is restored
self.assertIsNone(obj_base.NovaObject.indirection_api)
class TestSpawnIsSynchronousFixture(testtools.TestCase):
def test_spawn_patch(self):
orig_spawn = utils.spawn_n
fix = fixtures.SpawnIsSynchronousFixture()
self.useFixture(fix)
self.assertNotEqual(orig_spawn, utils.spawn_n)
def test_spawn_passes_through(self):
self.useFixture(fixtures.SpawnIsSynchronousFixture())
tester = mock.MagicMock()
utils.spawn_n(tester.function, 'foo', bar='bar')
tester.function.assert_called_once_with('foo', bar='bar')
def test_spawn_return_has_wait(self):
self.useFixture(fixtures.SpawnIsSynchronousFixture())
gt = utils.spawn(lambda x: '%s' % x, 'foo')
foo = gt.wait()
self.assertEqual('foo', foo)
def test_spawn_n_return_has_wait(self):
self.useFixture(fixtures.SpawnIsSynchronousFixture())
gt = utils.spawn_n(lambda x: '%s' % x, 'foo')
foo = gt.wait()
self.assertEqual('foo', foo)
def test_spawn_has_link(self):
self.useFixture(fixtures.SpawnIsSynchronousFixture())
gt = utils.spawn(mock.MagicMock)
passed_arg = 'test'
call_count = []
def fake(thread, param):
self.assertEqual(gt, thread)
self.assertEqual(passed_arg, param)
call_count.append(1)
gt.link(fake, passed_arg)
self.assertEqual(1, len(call_count))
def test_spawn_n_has_link(self):
self.useFixture(fixtures.SpawnIsSynchronousFixture())
gt = utils.spawn_n(mock.MagicMock)
passed_arg = 'test'
call_count = []
def fake(thread, param):
self.assertEqual(gt, thread)
self.assertEqual(passed_arg, param)
call_count.append(1)
gt.link(fake, passed_arg)
self.assertEqual(1, len(call_count))
class TestSynchronousThreadPoolExecutorFixture(testtools.TestCase):
def test_submit_passes_through(self):
self.useFixture(fixtures.SynchronousThreadPoolExecutorFixture())
tester = mock.MagicMock()
executor = futurist.GreenThreadPoolExecutor()
future = executor.submit(tester.function, 'foo', bar='bar')
tester.function.assert_called_once_with('foo', bar='bar')
result = future.result()
self.assertEqual(tester.function.return_value, result)
class TestBannedDBSchemaOperations(testtools.TestCase):
def test_column(self):
column = sqlalchemy.Column()
with fixtures.BannedDBSchemaOperations(['Column']):
self.assertRaises(exception.DBNotAllowed,
column.drop)
self.assertRaises(exception.DBNotAllowed,
column.alter)
def test_table(self):
table = sqlalchemy.Table()
with fixtures.BannedDBSchemaOperations(['Table']):
self.assertRaises(exception.DBNotAllowed,
table.drop)
self.assertRaises(exception.DBNotAllowed,
table.alter)
class TestAllServicesCurrentFixture(testtools.TestCase):
@mock.patch('nova.objects.Service._db_service_get_minimum_version')
def test_services_current(self, mock_db):
mock_db.return_value = {'nova-compute': 123}
self.assertEqual(123, service_obj.Service.get_minimum_version(
None, 'nova-compute'))
mock_db.assert_called_once_with(None, ['nova-compute'],
use_slave=False)
mock_db.reset_mock()
compute_rpcapi.LAST_VERSION = 123
self.useFixture(fixtures.AllServicesCurrent())
self.assertIsNone(compute_rpcapi.LAST_VERSION)
self.assertEqual(service_obj.SERVICE_VERSION,
service_obj.Service.get_minimum_version(
None, 'nova-compute'))
self.assertFalse(mock_db.called)
class TestNoopConductorFixture(testtools.TestCase):
@mock.patch('nova.conductor.api.ComputeTaskAPI.resize_instance')
def test_task_api_not_called(self, mock_resize):
self.useFixture(fixtures.NoopConductorFixture())
conductor.ComputeTaskAPI().resize_instance()
self.assertFalse(mock_resize.called)
@mock.patch('nova.conductor.api.API.wait_until_ready')
def test_api_not_called(self, mock_wait):
self.useFixture(fixtures.NoopConductorFixture())
conductor.API().wait_until_ready()
self.assertFalse(mock_wait.called)
class TestSingleCellSimpleFixture(testtools.TestCase):
def test_single_cell(self):
self.useFixture(fixtures.SingleCellSimple())
cml = objects.CellMappingList.get_all(None)
self.assertEqual(1, len(cml))
def test_target_cell(self):
self.useFixture(fixtures.SingleCellSimple())
with context.target_cell(mock.sentinel.context, None) as c:
self.assertIs(mock.sentinel.context, c)
class TestWarningsFixture(test.TestCase): | """Creating an oslo.versionedobject with an invalid UUID value for a
UUIDField should raise an exception.
"""
valid_migration_kwargs = {
"created_at": timeutils.utcnow().replace(microsecond=0),
"updated_at": None,
"deleted_at": None,
"deleted": False,
"id": 123,
"uuid": uuids.migration,
"source_compute": "compute-source",
"dest_compute": "compute-dest",
"source_node": "node-source",
"dest_node": "node-dest",
"dest_host": "host-dest",
"old_instance_type_id": 42,
"new_instance_type_id": 84,
"instance_uuid": "fake-uuid",
"status": "migrating",
"migration_type": "resize",
"hidden": False,
"memory_total": 123456,
"memory_processed": 12345,
"memory_remaining": 111111,
"disk_total": 234567,
"disk_processed": 23456,
"disk_remaining": 211111,
}
# this shall not throw FutureWarning
objects.migration.Migration(**valid_migration_kwargs)
invalid_migration_kwargs = copy.deepcopy(valid_migration_kwargs)
invalid_migration_kwargs["uuid"] = "fake_id"
self.assertRaises(FutureWarning, objects.migration.Migration,
**invalid_migration_kwargs)
class TestDownCellFixture(test.TestCase):
def test_fixture(self):
# The test setup creates two cell mappings (cell0 and cell1) by
# default. Let's first list servers across all cells while they are
# "up" to make sure that works as expected. We'll create a single
# instance in cell1.
ctxt = context.get_admin_context()
cell1 = self.cell_mappings[test.CELL1_NAME]
with context.target_cell(ctxt, cell1) as cctxt:
inst = fake_instance.fake_instance_obj(cctxt)
if 'id' in inst:
delattr(inst, 'id')
inst.create()
# Now list all instances from all cells (should get one back).
results = context.scatter_gather_all_cells(
ctxt, objects.InstanceList.get_all)
self.assertEqual(2, len(results))
self.assertEqual(0, len(results[objects.CellMapping.CELL0_UUID]))
self.assertEqual(1, len(results[cell1.uuid]))
# Now do the same but with the DownCellFixture which should result
# in exception results from both cells.
with fixtures.DownCellFixture():
results = context.scatter_gather_all_cells(
ctxt, objects.InstanceList.get_all)
self.assertEqual(2, len(results))
for result in results.values():
self.assertIsInstance(result, db_exc.DBError)
def test_fixture_when_explicitly_passing_down_cell_mappings(self):
# The test setup creates two cell mappings (cell0 and cell1) by
# default. We'll create one instance per cell and pass cell0 as
# the down cell. We should thus get db_exc.DBError for cell0 and
# correct InstanceList object from cell1.
ctxt = context.get_admin_context()
cell0 = self.cell_mappings['cell0']
cell1 = self.cell_mappings['cell1']
with context.target_cell(ctxt, cell0) as cctxt:
inst1 = fake_instance.fake_instance_obj(cctxt)
if 'id' in inst1:
delattr(inst1, 'id')
inst1.create()
with context.target_cell(ctxt, cell1) as cctxt:
inst2 = fake_instance.fake_instance_obj(cctxt)
if 'id' in inst2:
delattr(inst2, 'id')
inst2.create()
with fixtures.DownCellFixture([cell0]):
results = context.scatter_gather_all_cells(
ctxt, objects.InstanceList.get_all)
self.assertEqual(2, len(results))
for cell_uuid, result in results.items():
if cell_uuid == cell0.uuid:
self.assertIsInstance(result, db_exc.DBError)
else:
self.assertIsInstance(result, objects.InstanceList)
self.assertEqual(1, len(result))
self.assertEqual(inst2.uuid, result[0].uuid)
def test_fixture_for_an_individual_down_cell_targeted_call(self):
# We have cell0 and cell1 by default in the setup. We try targeting
# both cells. We should get a db error for the down cell and
# the correct result for the up cell.
ctxt = context.get_admin_context()
cell0 = self.cell_mappings['cell0']
cell1 = self.cell_mappings['cell1']
with context.target_cell(ctxt, cell0) as cctxt:
inst1 = fake_instance.fake_instance_obj(cctxt)
if 'id' in inst1:
delattr(inst1, 'id')
inst1.create()
with context.target_cell(ctxt, cell1) as cctxt:
inst2 = fake_instance.fake_instance_obj(cctxt)
if 'id' in inst2:
delattr(inst2, 'id')
inst2.create()
def dummy_tester(ctxt, cell_mapping, uuid):
with context.target_cell(ctxt, cell_mapping) as cctxt:
return objects.Instance.get_by_uuid(cctxt, uuid)
# Scenario A: We do not pass any down cells; the fixture automatically
# assumes the targeted cell is down, whether it is cell0 or cell1.
with fixtures.DownCellFixture():
self.assertRaises(
db_exc.DBError, dummy_tester, ctxt, cell1, inst2.uuid)
# Scenario B: We pass cell0 as the down cell.
with fixtures.DownCellFixture([cell0]):
self.assertRaises(
db_exc.DBError, dummy_tester, ctxt, cell0, inst1.uuid)
# Scenario C: We get the correct result from the up cell
# when targeted.
result = dummy_tester(ctxt, cell1, inst2.uuid)
self.assertEqual(inst2.uuid, result.uuid)
class TestNeutronFixture(test.NoDBTestCase):
def setUp(self):
super(TestNeutronFixture, self).setUp()
self.neutron = self.useFixture(fixtures.NeutronFixture(self))
def test_list_ports_with_resource_request_non_admin_client(self):
ctxt = context.get_context()
client = neutron_api.get_client(ctxt)
ports = client.list_ports(ctxt)['ports']
port_id = self.neutron.port_with_resource_request['id']
ports = [port for port in ports if port_id == port['id']]
self.assertIsNone(ports[0]['resource_request'])
def test_list_ports_with_resource_request_admin_client(self):
ctxt = context.get_admin_context()
client = neutron_api.get_client(ctxt)
ports = client.list_ports(ctxt)['ports']
port_id = self.neutron.port_with_resource_request['id']
ports = [port for port in ports if port_id == port['id']]
self.assertIsNotNone(ports[0]['resource_request']) | def test_invalid_uuid_errors(self): |
util.go | // Copyright 2020, OpenTelemetry Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package awsemfexporter
import (
"fmt"
"sort"
"strings"
"time"
"go.opentelemetry.io/collector/consumer/pdata"
"go.opentelemetry.io/collector/translator/conventions"
"go.uber.org/zap"
)
var patternKeyToAttributeMap = map[string]string{
"ClusterName": "aws.ecs.cluster.name",
"TaskId": "aws.ecs.task.id",
}
func replacePatterns(s string, attrMap pdata.AttributeMap, logger *zap.Logger) string {
for key := range patternKeyToAttributeMap {
s = replacePatternWithResource(s, key, attrMap, logger)
}
return s
}
func replacePatternWithResource(s, patternKey string, attrMap pdata.AttributeMap, logger *zap.Logger) string {
pattern := "{" + patternKey + "}"
if strings.Contains(s, pattern) {
if value, ok := attrMap.Get(patternKey); ok {
return replace(s, pattern, value, logger)
} else if value, ok := attrMap.Get(patternKeyToAttributeMap[patternKey]); ok {
return replace(s, pattern, value, logger)
} else {
logger.Debug("No resource attribute found for pattern " + pattern)
return strings.Replace(s, pattern, "undefined", -1)
}
}
return s
}
func replace(s, pattern string, value pdata.AttributeValue, logger *zap.Logger) string {
if value.StringVal() == "" {
logger.Debug("Empty resource attribute value found for pattern " + pattern)
return strings.Replace(s, pattern, "undefined", -1)
}
return strings.Replace(s, pattern, value.StringVal(), -1)
}
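// Illustrative sketch (not part of the exporter; the template and attribute
// values are hypothetical): given a log group name template
// "/aws/ecs/{ClusterName}" and a resource attribute map containing either
// "ClusterName" or "aws.ecs.cluster.name" set to "my-cluster",
// replacePatterns returns "/aws/ecs/my-cluster". If neither attribute is
// present, or its value is empty, the placeholder is replaced with
// "undefined" and a debug message is logged.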
// getNamespace retrieves namespace for given set of metrics from user config.
func getNamespace(rm *pdata.ResourceMetrics, namespace string) string {
if len(namespace) == 0 {
serviceName, svcNameOk := rm.Resource().Attributes().Get(conventions.AttributeServiceName)
serviceNamespace, svcNsOk := rm.Resource().Attributes().Get(conventions.AttributeServiceNamespace)
if svcNameOk && svcNsOk && serviceName.Type() == pdata.AttributeValueSTRING && serviceNamespace.Type() == pdata.AttributeValueSTRING {
namespace = fmt.Sprintf("%s/%s", serviceNamespace.StringVal(), serviceName.StringVal())
} else if svcNameOk && serviceName.Type() == pdata.AttributeValueSTRING {
namespace = serviceName.StringVal()
} else if svcNsOk && serviceNamespace.Type() == pdata.AttributeValueSTRING {
namespace = serviceNamespace.StringVal()
}
}
if len(namespace) == 0 {
namespace = defaultNamespace
}
return namespace
}
// getLogInfo retrieves the log group and log stream names from a given set of metrics.
func getLogInfo(rm *pdata.ResourceMetrics, cWNamespace string, config *Config) (logGroup, logStream string) {
if cWNamespace != "" {
logGroup = fmt.Sprintf("/metrics/%s", cWNamespace)
}
// Override log group/stream if specified in config. However, in this case, the customer won't have the correlation experience
if len(config.LogGroupName) > 0 {
logGroup = replacePatterns(config.LogGroupName, rm.Resource().Attributes(), config.logger)
}
if len(config.LogStreamName) > 0 {
logStream = replacePatterns(config.LogStreamName, rm.Resource().Attributes(), config.logger)
}
return
}
// dedupDimensions removes duplicated dimension sets from the given dimensions.
// Prerequisite: each dimension set is already sorted
func dedupDimensions(dimensions [][]string) (deduped [][]string) |
// dimensionRollup creates rolled-up dimensions from the metric's label set.
// The returned dimensions are sorted in alphabetical order within each dimension set
func dimensionRollup(dimensionRollupOption string, labels map[string]string) [][]string {
var rollupDimensionArray [][]string
dimensionZero := make([]string, 0)
instrLibName, hasOTelKey := labels[oTellibDimensionKey]
if hasOTelKey {
// If OTel key exists in labels, add it as a zero dimension but remove it
// temporarily from labels as it is not an original label
dimensionZero = []string{oTellibDimensionKey}
delete(labels, oTellibDimensionKey)
}
if dimensionRollupOption == zeroAndSingleDimensionRollup {
//"Zero" dimension rollup
if len(labels) > 0 {
rollupDimensionArray = append(rollupDimensionArray, dimensionZero)
}
}
if dimensionRollupOption == zeroAndSingleDimensionRollup || dimensionRollupOption == singleDimensionRollupOnly {
//"One" dimension rollup
for labelName := range labels {
dimSet := append(dimensionZero, labelName)
sort.Strings(dimSet)
rollupDimensionArray = append(rollupDimensionArray, dimSet)
}
}
// Add back OTel key to labels if it was removed
if hasOTelKey {
labels[oTellibDimensionKey] = instrLibName
}
return rollupDimensionArray
}
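// Illustrative sketch (hypothetical labels, not part of the exporter): with
// dimensionRollupOption set to zeroAndSingleDimensionRollup and labels
// {"a": "1", "b": "2"} plus the OTel library label, the returned sets are
// [[oTellibDimensionKey], [oTellibDimensionKey, "a"], [oTellibDimensionKey, "b"]],
// each single-rollup set sorted alphabetically; without the OTel label the
// zero-dimension set is simply empty.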
// unixNanoToMilliseconds converts a timestamp in nanoseconds to milliseconds.
func unixNanoToMilliseconds(timestamp pdata.Timestamp) int64 {
return int64(uint64(timestamp) / uint64(time.Millisecond))
}
| {
seen := make(map[string]bool)
for _, dimSet := range dimensions {
key := strings.Join(dimSet, ",")
// Only add dimension set if not a duplicate
if _, ok := seen[key]; !ok {
deduped = append(deduped, dimSet)
seen[key] = true
}
}
return
} |
_wkb.py | """
This code is a variation of geomet: https://github.com/geomet/geomet
It has been modified under the Apache 2.0 license to fit the needs of the
Esri JSON specification as defined here: https://developers.arcgis.com/documentation/common-data-types/geometry-objects.htm
"""
import binascii
import struct
from ._utils import block_splitter
from ._utils import take
from ._utils import as_bin_str
from ._utils import flatten_multi_dim
from itertools import chain
#: '\x00': The first byte of any WKB string. Indicates big endian byte
#: ordering for the data.
BIG_ENDIAN = b'\x00'
#: '\x01': The first byte of any WKB string. Indicates little endian byte
#: ordering for the data.
LITTLE_ENDIAN = b'\x01'
#: High byte in a 4-byte geometry type field to indicate that a 4-byte SRID
#: field follows.
SRID_FLAG = b'\x20'
#: Mapping of GeoJSON geometry types to the "2D" 4-byte binary string
#: representation for WKB. "2D" indicates that the geometry is 2-dimensional,
#: X and Y components.
#: NOTE: Byte ordering is big endian.
WKB_2D = {
'Point': b'\x00\x00\x00\x01',
'LineString': b'\x00\x00\x00\x02',
'Polygon': b'\x00\x00\x00\x03',
'MultiPoint': b'\x00\x00\x00\x04',
'MultiLineString': b'\x00\x00\x00\x05',
'MultiPolygon': b'\x00\x00\x00\x06',
'GeometryCollection': b'\x00\x00\x00\x07',
}
#: Mapping of GeoJSON geometry types to the "Z" 4-byte binary string
#: representation for WKB. "Z" indicates that the geometry is 3-dimensional,
#: with X, Y, and Z components.
#: NOTE: Byte ordering is big endian.
WKB_Z = {
'Point': b'\x00\x00\x03\xe9',
'LineString': b'\x00\x00\x03\xea',
'Polygon': b'\x00\x00\x03\xeb',
'MultiPoint': b'\x00\x00\x03\xec',
'MultiLineString': b'\x00\x00\x03\xed',
'MultiPolygon': b'\x00\x00\x03\xee',
'GeometryCollection': b'\x00\x00\x03\xef',
}
#: Mapping of GeoJSON geometry types to the "M" 4-byte binary string
#: representation for WKB. "M" indicates that the geometry is 2-dimensional,
#: with X, Y, and M ("Measure") components.
#: NOTE: Byte ordering is big endian.
WKB_M = {
'Point': b'\x00\x00\x07\xd1',
'LineString': b'\x00\x00\x07\xd2',
'Polygon': b'\x00\x00\x07\xd3',
'MultiPoint': b'\x00\x00\x07\xd4',
'MultiLineString': b'\x00\x00\x07\xd5',
'MultiPolygon': b'\x00\x00\x07\xd6',
'GeometryCollection': b'\x00\x00\x07\xd7',
}
#: Mapping of GeoJSON geometry types to the "ZM" 4-byte binary string
#: representation for WKB. "ZM" indicates that the geometry is 4-dimensional,
#: with X, Y, Z, and M ("Measure") components.
#: NOTE: Byte ordering is big endian.
WKB_ZM = {
'Point': b'\x00\x00\x0b\xb9',
'LineString': b'\x00\x00\x0b\xba',
'Polygon': b'\x00\x00\x0b\xbb',
'MultiPoint': b'\x00\x00\x0b\xbc',
'MultiLineString': b'\x00\x00\x0b\xbd',
'MultiPolygon': b'\x00\x00\x0b\xbe',
'GeometryCollection': b'\x00\x00\x0b\xbf',
}
#: Mapping of dimension types to maps of GeoJSON geometry type -> 4-byte binary
#: string representation for WKB.
_WKB = {
'2D': WKB_2D,
'Z': WKB_Z,
'M': WKB_M,
'ZM': WKB_ZM,
}
#: Mapping from binary geometry type (as a 4-byte binary string) to GeoJSON
#: geometry type.
#: NOTE: Byte ordering is big endian.
_BINARY_TO_GEOM_TYPE = dict(
chain(*((reversed(x) for x in wkb_map.items())
for wkb_map in _WKB.values()))
)
_INT_TO_DIM_LABEL = {2: '2D', 3: 'Z', 4: 'ZM'}
def _get_geom_type(type_bytes):
"""Get the GeoJSON geometry type label from a WKB type byte string.
:param type_bytes:
4 byte string in big endian byte order containing a WKB type number.
It may also contain a "has SRID" flag in the high byte (the first byte,
since this is big endian byte order), indicated as 0x20. If the SRID
flag is not set, the high byte will always be null (0x00).
:returns:
3-tuple of the GeoJSON geometry type label, the bytes representing the
geometry type, and a separate "has SRID" flag. If the input
`type_bytes` contains an SRID flag, it will be removed.
>>> # Z Point, with SRID flag
>>> _get_geom_type(b'\\x20\\x00\\x03\\xe9') == (
... 'Point', b'\\x00\\x00\\x03\\xe9', True)
True
>>> # 2D MultiLineString, without SRID flag
>>> _get_geom_type(b'\\x00\\x00\\x00\\x05') == (
... 'MultiLineString', b'\\x00\\x00\\x00\\x05', False)
True
"""
# slice off the high byte, which may contain the SRID flag
high_byte = type_bytes[0]
high_byte = bytes([high_byte])
has_srid = high_byte == b'\x20'
if has_srid:
# replace the high byte with a null byte
type_bytes = as_bin_str(b'\x00' + type_bytes[1:])
else:
type_bytes = as_bin_str(type_bytes)
# look up the geometry type
geom_type = _BINARY_TO_GEOM_TYPE.get(type_bytes)
return geom_type, type_bytes, has_srid
def | (obj, dest_file):
"""
Dump GeoJSON-like `dict` to WKB and write it to the `dest_file`.
:param dict obj:
A GeoJSON-like dictionary. It must have at least the keys 'type' and
'coordinates'.
:param dest_file:
Open and writable file-like object.
"""
dest_file.write(dumps(obj))
def load(source_file, wkid=4326):
"""
Load an EsriJSON `dict` object from a ``source_file`` containing WKB (as a
byte string).
:param source_file:
Open and readable file-like object.
:returns:
An EsriJSON `dict` representing the geometry read from the file.
"""
return loads(source_file.read(), wkid=wkid)
def dumps(obj, big_endian=False):
"""
Dump an EsriJSON-like `dict` to a WKB string.
:param dict obj:
An EsriJSON-like `dict` object.
:param bool big_endian:
Defaults to `False`. If `True`, data values in the generated WKB will
be represented using big endian byte order. Else, little endian.
:returns:
A WKB binary string representing the ``obj``.
"""
def lu_geom(ks):
if 'point' in ks:
return "Point"
elif 'paths' in ks:
return "MultiLineString"
elif 'x' in ks:
return "Point"
elif 'rings' in ks:
return "MultiPolygon"
elif 'points' in ks:
return "MultiPoint"
geom_type = lu_geom(obj.keys())
meta = obj.get('meta', {})
exporter = _dumps_registry.get(geom_type)
if exporter is None:
_unsupported_geom_type(geom_type)
return exporter(obj, big_endian, meta)
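# Illustrative usage sketch (not part of the module; the coordinate values
# are arbitrary). Dumping an Esri point produces a little-endian WKB Point
# record by default:
#
#   >>> pt = {'x': -122.4, 'y': 37.8, 'spatialReference': {'wkid': 4326}}
#   >>> wkb = dumps(pt)
#   >>> wkb[:1] == LITTLE_ENDIAN
#   True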
def loads(string, wkid=4326):
"""
Construct an EsriJSON `dict` from WKB (`string`).
:param str string:
WKB string.
:param int wkid:
The srid of the coordinate system. The default is 4326.
"""
string = iter(string)
endianness = as_bin_str(take(1, string))
if endianness == BIG_ENDIAN:
big_endian = True
elif endianness == LITTLE_ENDIAN:
big_endian = False
else:
raise ValueError("Invalid endian byte: '0x%s'. Expected 0x00 or 0x01"
% binascii.hexlify(endianness.encode()).decode())
endian_token = '>' if big_endian else '<'
# type_bytes = string[1:5]
type_bytes = as_bin_str(take(4, string))
if not big_endian:
# To identify the type, order the type bytes in big endian:
type_bytes = type_bytes[::-1]
geom_type, type_bytes, has_srid = _get_geom_type(type_bytes)
srid = None
if has_srid:
srid_field = as_bin_str(take(4, string))
[srid] = struct.unpack('%si' % endian_token, srid_field)
# data_bytes = string[5:] # FIXME: This won't work for GeometryCollections
data_bytes = string
importer = _loads_registry_esri.get(geom_type)
if importer is None:
_unsupported_geom_type(geom_type)
data_bytes = iter(data_bytes)
result = importer(big_endian, type_bytes, data_bytes, wkid)
if has_srid:
# As mentioned in the docstring above, include both approaches to
# indicating the SRID.
result['meta'] = {'srid': int(srid)}
result['crs'] = {
'type': 'name',
'properties': {'name': 'EPSG%s' % srid},
}
return result
def _unsupported_geom_type(geom_type):
raise ValueError("Unsupported geometry type '%s'" % geom_type)
# TODO: don't default meta to None
def _header_bytefmt_byteorder(geom_type, num_dims, big_endian, meta=None):
"""
Utility function to get the WKB header (endian byte + type header), byte
format string, and byte order string.
"""
dim = _INT_TO_DIM_LABEL.get(num_dims)
if dim is None:
pass # TODO: raise
type_byte_str = _WKB[dim][geom_type]
srid = meta.get('srid')
if srid is not None:
# Add the srid flag
type_byte_str = SRID_FLAG + type_byte_str[1:]
if big_endian:
header = BIG_ENDIAN
byte_fmt = b'>'
byte_order = '>'
else:
header = LITTLE_ENDIAN
byte_fmt = b'<'
byte_order = '<'
# reverse the byte ordering for little endian
type_byte_str = type_byte_str[::-1]
header += type_byte_str
if srid is not None:
srid = int(srid)
if big_endian:
srid_header = struct.pack('>i', srid)
else:
srid_header = struct.pack('<i', srid)
header += srid_header
byte_fmt += b'd' * num_dims
return header, byte_fmt, byte_order
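# Illustrative sketch of the header produced above (not part of the module):
# for a 2D Point with no SRID and big_endian=False, the returned values are
# header == b'\x01' + b'\x01\x00\x00\x00' (endian byte plus the reversed
# type bytes), byte_fmt == b'<dd' and byte_order == '<'.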
def _dump_point(obj, big_endian, meta):
"""
Dump an EsriJSON-like `dict` to a point WKB string.
:param dict obj:
EsriJSON-like `dict` object.
:param bool big_endian:
If `True`, data values in the generated WKB will be represented using
big endian byte order. Else, little endian.
:param dict meta:
Metadata associated with the GeoJSON object. Currently supported
metadata:
- srid: Used to support EWKT/EWKB. For example, ``meta`` equal to
``{'srid': '4326'}`` indicates that the geometry is defined using
Extended WKT/WKB and that it bears a Spatial Reference System
Identifier of 4326. This ID will be encoded into the resulting
binary.
Any other meta data objects will simply be ignored by this function.
:returns:
A WKB binary string representing the Point ``obj``.
"""
coords = [obj['x'], obj['y']]
num_dims = len(coords)
wkb_string, byte_fmt, _ = _header_bytefmt_byteorder(
'Point', num_dims, big_endian, meta
)
wkb_string += struct.pack(byte_fmt, *coords)
return wkb_string
def _dump_linestring(obj, big_endian, meta):
"""
Dump a GeoJSON-like `dict` to a linestring WKB string.
Input parameters and output are similar to :func:`_dump_point`.
"""
coords = obj['coordinates']
vertex = coords[0]
# Infer the number of dimensions from the first vertex
num_dims = len(vertex)
wkb_string, byte_fmt, byte_order = _header_bytefmt_byteorder(
'LineString', num_dims, big_endian, meta
)
# append number of vertices in linestring
wkb_string += struct.pack('%sl' % byte_order, len(coords))
for vertex in coords:
wkb_string += struct.pack(byte_fmt, *vertex)
return wkb_string
def _dump_polygon(obj, big_endian, meta):
"""
Dump a GeoJSON-like `dict` to a polygon WKB string.
Input parameters and output are similar to :func:`_dump_point`.
"""
coords = obj['coordinates']
vertex = coords[0][0]
# Infer the number of dimensions from the first vertex
num_dims = len(vertex)
wkb_string, byte_fmt, byte_order = _header_bytefmt_byteorder(
'Polygon', num_dims, big_endian, meta
)
# number of rings:
wkb_string += struct.pack('%sl' % byte_order, len(coords))
for ring in coords:
# number of verts in this ring:
wkb_string += struct.pack('%sl' % byte_order, len(ring))
for vertex in ring:
wkb_string += struct.pack(byte_fmt, *vertex)
return wkb_string
def _dump_multipoint(obj, big_endian, meta):
"""
Dump a GeoJSON-like `dict` to a multipoint WKB string.
Input parameters and output are similar to :func:`_dump_point`.
"""
coords = obj['points']
vertex = coords[0]
num_dims = len(vertex)
wkb_string, byte_fmt, byte_order = _header_bytefmt_byteorder(
'MultiPoint', num_dims, big_endian, meta
)
point_type = _WKB[_INT_TO_DIM_LABEL.get(num_dims)]['Point']
if big_endian:
point_type = BIG_ENDIAN + point_type
else:
point_type = LITTLE_ENDIAN + point_type[::-1]
wkb_string += struct.pack('%sl' % byte_order, len(coords))
for vertex in coords:
# POINT type strings
wkb_string += point_type
wkb_string += struct.pack(byte_fmt, *vertex)
return wkb_string
def _dump_multilinestring(obj, big_endian, meta):
"""
Dump a GeoJSON-like `dict` to a multilinestring WKB string.
Input parameters and output are similar to :func:`_dump_point`.
"""
coords = obj['paths']
vertex = coords[0][0]
num_dims = len(vertex)
wkb_string, byte_fmt, byte_order = _header_bytefmt_byteorder(
'MultiLineString', num_dims, big_endian, meta
)
ls_type = _WKB[_INT_TO_DIM_LABEL.get(num_dims)]['LineString']
if big_endian:
ls_type = BIG_ENDIAN + ls_type
else:
ls_type = LITTLE_ENDIAN + ls_type[::-1]
# append the number of linestrings
wkb_string += struct.pack('%sl' % byte_order, len(coords))
for linestring in coords:
wkb_string += ls_type
# append the number of vertices in each linestring
wkb_string += struct.pack('%sl' % byte_order, len(linestring))
for vertex in linestring:
wkb_string += struct.pack(byte_fmt, *vertex)
return wkb_string
def _dump_multipolygon(obj, big_endian, meta):
"""
Dump a GeoJSON-like `dict` to a multipolygon WKB string.
Input parameters and output are similar to :func:`_dump_point`.
"""
coords = [obj['rings']]
vertex = coords[0][0][0]
num_dims = len(vertex)
wkb_string, byte_fmt, byte_order = _header_bytefmt_byteorder(
'MultiPolygon', num_dims, big_endian, meta
)
poly_type = _WKB[_INT_TO_DIM_LABEL.get(num_dims)]['Polygon']
if big_endian:
poly_type = BIG_ENDIAN + poly_type
else:
poly_type = LITTLE_ENDIAN + poly_type[::-1]
# append the number of polygons
wkb_string += struct.pack('%sl' % byte_order, len(coords))
for polygon in coords:
# append polygon header
wkb_string += poly_type
# append the number of rings in this polygon
wkb_string += struct.pack('%sl' % byte_order, len(polygon))
for ring in polygon:
# append the number of vertices in this ring
wkb_string += struct.pack('%sl' % byte_order, len(ring))
for vertex in ring:
wkb_string += struct.pack(byte_fmt, *vertex)
return wkb_string
def _dump_geometrycollection(obj, big_endian, meta):
# TODO: handle empty collections
geoms = obj['geometries']
# determine the dimensionality (2d, 3d, 4d) of the collection
# by sampling the first geometry
first_geom = geoms[0]
rest = geoms[1:]
first_wkb = dumps(first_geom, big_endian=big_endian)
first_type = first_wkb[1:5]
if not big_endian:
first_type = first_type[::-1]
if first_type in WKB_2D.values():
num_dims = 2
elif first_type in WKB_Z.values():
num_dims = 3
elif first_type in WKB_ZM.values():
num_dims = 4
wkb_string, byte_fmt, byte_order = _header_bytefmt_byteorder(
'GeometryCollection', num_dims, big_endian, meta
)
# append the number of geometries
wkb_string += struct.pack('%sl' % byte_order, len(geoms))
wkb_string += first_wkb
for geom in rest:
wkb_string += dumps(geom, big_endian=big_endian)
return wkb_string
def _load_point_esri(big_endian, type_bytes, data_bytes, wkid):
"""
Convert byte data for a Point to an EsriJSON `dict`.
:param bool big_endian:
If `True`, interpret the ``data_bytes`` in big endian order, else
little endian.
:param str type_bytes:
4-byte integer (as a binary string) indicating the geometry type
(Point) and the dimensions (2D, Z, M or ZM). For consistency, these
bytes are expected to always be in big endian order, regardless of the
value of ``big_endian``.
:param str data_bytes:
Coordinate data in a binary string.
:returns:
EsriJSON `dict` representing the Point geometry.
"""
endian_token = '>' if big_endian else '<'
if type_bytes == WKB_2D['Point']:
coords = struct.unpack('%sdd' % endian_token,
as_bin_str(take(16, data_bytes)))
elif type_bytes == WKB_Z['Point']:
coords = struct.unpack('%sddd' % endian_token,
as_bin_str(take(24, data_bytes)))
elif type_bytes == WKB_M['Point']:
# NOTE: The use of XYM type geometries is quite rare. In the interest
# of removing ambiguity, we will treat all XYM geometries as XYZM when
# generating the GeoJSON. A default Z value of `0.0` will be given in
# this case.
coords = list(struct.unpack('%sddd' % endian_token,
as_bin_str(take(24, data_bytes))))
coords.insert(2, 0.0)
elif type_bytes == WKB_ZM['Point']:
coords = struct.unpack('%sdddd' % endian_token,
as_bin_str(take(32, data_bytes)))
return { 'x': coords[0], 'y': coords[1],
"spatialReference" : {'wkid' : wkid}}
def _load_linestring_esri(big_endian, type_bytes, data_bytes, wkid):
"""converts wkb to esri json"""
endian_token = '>' if big_endian else '<'
is_m = False
if type_bytes in WKB_2D.values():
num_dims = 2
elif type_bytes in WKB_Z.values():
num_dims = 3
elif type_bytes in WKB_M.values():
num_dims = 3
is_m = True
elif type_bytes in WKB_ZM.values():
num_dims = 4
coords = []
[num_verts] = struct.unpack('%sl' % endian_token,
as_bin_str(take(4, data_bytes)))
while True:
vert_wkb = as_bin_str(take(8 * num_dims, data_bytes))
fmt = '%s' + 'd' * num_dims
vert = list(struct.unpack(fmt % endian_token, vert_wkb))
if is_m:
vert.insert(2, 0.0)
coords.append(vert)
if len(coords) == num_verts:
break
return dict(paths=[list(coords)], spatialReference={'wkid' : wkid})
def _load_polygon_esri(big_endian, type_bytes, data_bytes, wkid):
"""converts wkb to esri json"""
endian_token = '>' if big_endian else '<'
data_bytes = iter(data_bytes)
is_m = False
if type_bytes in WKB_2D.values():
num_dims = 2
elif type_bytes in WKB_Z.values():
num_dims = 3
elif type_bytes in WKB_M.values():
num_dims = 3
is_m = True
elif type_bytes in WKB_ZM.values():
num_dims = 4
coords = []
[num_rings] = struct.unpack('%sl' % endian_token,
as_bin_str(take(4, data_bytes)))
while True:
ring = []
[num_verts] = struct.unpack('%sl' % endian_token,
as_bin_str(take(4, data_bytes)))
verts_wkb = as_bin_str(take(8 * num_verts * num_dims, data_bytes))
verts = block_splitter(verts_wkb, 8)
verts = (b''.join(bytes([y]) for y in x) for x in verts)
for vert_wkb in block_splitter(verts, num_dims):
values = [struct.unpack('%sd' % endian_token, x)[0]
for x in vert_wkb]
if is_m:
values.insert(2, 0.0)
ring.append(values)
coords.append(ring)
if len(coords) == num_rings:
break
return dict(rings=coords, spatialReference={'wkid' : wkid})
def _load_multipoint_esri(big_endian, type_bytes, data_bytes, wkid):
"""converts wkb to esri json"""
endian_token = '>' if big_endian else '<'
data_bytes = iter(data_bytes)
is_m = False
if type_bytes in WKB_2D.values():
num_dims = 2
elif type_bytes in WKB_Z.values():
num_dims = 3
elif type_bytes in WKB_M.values():
num_dims = 3
is_m = True
elif type_bytes in WKB_ZM.values():
num_dims = 4
if is_m:
dim = 'M'
else:
dim = _INT_TO_DIM_LABEL[num_dims]
coords = []
[num_points] = struct.unpack('%sl' % endian_token,
as_bin_str(take(4, data_bytes)))
while True:
point_endian = as_bin_str(take(1, data_bytes))
point_type = as_bin_str(take(4, data_bytes))
values = struct.unpack('%s%s' % (endian_token, 'd' * num_dims),
as_bin_str(take(8 * num_dims, data_bytes)))
values = list(values)
if is_m:
values.insert(2, 0.0)
if big_endian:
assert point_endian == BIG_ENDIAN
assert point_type == _WKB[dim]['Point']
else:
assert point_endian == LITTLE_ENDIAN
assert point_type[::-1] == _WKB[dim]['Point']
coords.append(list(values))
if len(coords) == num_points:
break
return dict(points=coords, spatialReference={'wkid' : wkid})
def _load_multilinestring_esri(big_endian, type_bytes, data_bytes, wkid):
"""converts wkb to esri json"""
endian_token = '>' if big_endian else '<'
data_bytes = iter(data_bytes)
is_m = False
if type_bytes in WKB_2D.values():
num_dims = 2
elif type_bytes in WKB_Z.values():
num_dims = 3
elif type_bytes in WKB_M.values():
num_dims = 3
is_m = True
elif type_bytes in WKB_ZM.values():
num_dims = 4
if is_m:
dim = 'M'
else:
dim = _INT_TO_DIM_LABEL[num_dims]
[num_ls] = struct.unpack('%sl' % endian_token,
as_bin_str(take(4, data_bytes)))
coords = []
while True:
ls_endian = as_bin_str(take(1, data_bytes))
ls_type = as_bin_str(take(4, data_bytes))
if big_endian:
assert ls_endian == BIG_ENDIAN
assert ls_type == _WKB[dim]['LineString']
else:
assert ls_endian == LITTLE_ENDIAN
assert ls_type[::-1] == _WKB[dim]['LineString']
[num_verts] = struct.unpack('%sl' % endian_token,
as_bin_str(take(4, data_bytes)))
num_values = num_dims * num_verts
values = struct.unpack(endian_token + 'd' * num_values,
as_bin_str(take(8 * num_values, data_bytes)))
values = list(block_splitter(values, num_dims))
if is_m:
for v in values:
v.insert(2, 0.0)
coords.append(values)
if len(coords) == num_ls:
break
return dict(paths=coords, spatialReference={'wkid' : wkid})
def _load_multipolygon_esri(big_endian, type_bytes, data_bytes, wkid):
"""converts wkb to esri json"""
endian_token = '>' if big_endian else '<'
is_m = False
if type_bytes in WKB_2D.values():
num_dims = 2
elif type_bytes in WKB_Z.values():
num_dims = 3
elif type_bytes in WKB_M.values():
num_dims = 3
is_m = True
elif type_bytes in WKB_ZM.values():
num_dims = 4
if is_m:
dim = 'M'
else:
dim = _INT_TO_DIM_LABEL[num_dims]
[num_polys] = struct.unpack('%sl' % endian_token,
as_bin_str(take(4, data_bytes)))
coords = []
while True:
polygon = []
poly_endian = as_bin_str(take(1, data_bytes))
poly_type = as_bin_str(take(4, data_bytes))
if big_endian:
assert poly_endian == BIG_ENDIAN
assert poly_type == _WKB[dim]['Polygon']
else:
assert poly_endian == LITTLE_ENDIAN
assert poly_type[::-1] == _WKB[dim]['Polygon']
[num_rings] = struct.unpack('%sl' % endian_token,
as_bin_str(take(4, data_bytes)))
for _ in range(num_rings):
ring = []
[num_verts] = struct.unpack('%sl' % endian_token,
as_bin_str(take(4, data_bytes)))
for _ in range(num_verts):
vert_wkb = as_bin_str(take(8 * num_dims, data_bytes))
fmt = '%s' + 'd' * num_dims
vert = list(struct.unpack(fmt % endian_token, vert_wkb))
if is_m:
vert.insert(2, 0.0)
ring.append(vert)
polygon.append(ring)
coords.append(polygon)
if len(coords) == num_polys:
break
return dict(rings=[coord[0] for coord in coords],
spatialReference={'wkid' : wkid})
def _check_dimensionality(geom, num_dims):
def first_geom(gc):
for g in gc['geometries']:
if not g['type'] == 'GeometryCollection':
return g
first_vert = {
'Point': lambda x: x['coordinates'],
'LineString': lambda x: x['coordinates'][0],
'Polygon': lambda x: x['coordinates'][0][0],
'MultiLineString': lambda x: x['coordinates'][0][0],
'MultiPolygon': lambda x: x['coordinates'][0][0][0],
'GeometryCollection': first_geom,
}
if not len(first_vert[geom['type']](geom)) == num_dims:
error = 'Cannot mix dimensionality in a geometry'
raise Exception(error)
def _load_geometrycollection(big_endian, type_bytes, data_bytes):
endian_token = '>' if big_endian else '<'
is_m = False
if type_bytes in WKB_2D.values():
num_dims = 2
elif type_bytes in WKB_Z.values():
num_dims = 3
elif type_bytes in WKB_M.values():
num_dims = 3
is_m = True
elif type_bytes in WKB_ZM.values():
num_dims = 4
geometries = []
[num_geoms] = struct.unpack('%sl' % endian_token,
as_bin_str(take(4, data_bytes)))
while True:
geometry = loads(data_bytes)
if is_m:
_check_dimensionality(geometry, 4)
else:
_check_dimensionality(geometry, num_dims)
# TODO(LB): Add type assertions for the geometry; collections should
# not mix 2d, 3d, 4d, etc.
geometries.append(geometry)
if len(geometries) == num_geoms:
break
return dict(type='GeometryCollection', geometries=geometries)
_dumps_registry = {
'Point': _dump_point,
'LineString': _dump_linestring,
'Polygon': _dump_polygon,
'MultiPoint': _dump_multipoint,
'MultiLineString': _dump_multilinestring,
'MultiPolygon': _dump_multipolygon,
'GeometryCollection': _dump_geometrycollection,
}
_loads_registry_esri = {
'Point': _load_point_esri,
'LineString': _load_linestring_esri,
'Polygon': _load_polygon_esri,
'MultiPoint': _load_multipoint_esri,
'MultiLineString': _load_multilinestring_esri,
'MultiPolygon': _load_multipolygon_esri
}
| dump |
docker_util.py | # -*- coding: utf-8 -*- #
# Copyright 2020 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utility for interacting with `artifacts docker` command group."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import re
from apitools.base.py import exceptions as api_exceptions
from googlecloudsdk.api_lib.artifacts import exceptions as ar_exceptions
from googlecloudsdk.api_lib.util import common_args
from googlecloudsdk.api_lib.util import waiter
from googlecloudsdk.command_lib.artifacts import containeranalysis_util as ca_util
from googlecloudsdk.command_lib.artifacts import requests as ar_requests
from googlecloudsdk.command_lib.artifacts import util as ar_util
from googlecloudsdk.core import log
from googlecloudsdk.core import properties
from googlecloudsdk.core import resources
from googlecloudsdk.core.console import console_io
ARTIFACTREGISTRY_API_NAME = "artifactregistry"
_INVALID_IMAGE_PATH_ERROR = """Invalid Docker string.
A valid Docker repository has the format of
LOCATION-docker.pkg.dev/PROJECT-ID/REPOSITORY-ID
A valid image has the format of
LOCATION-docker.pkg.dev/PROJECT-ID/REPOSITORY-ID/IMAGE
"""
_INVALID_DEFAULT_DOCKER_STRING_ERROR = (
"""Fail to construct Docker string from config values:
core/project: {project}, artifacts/location: {location}, artifacts/repository: {repo}
A valid Docker repository has the format of
LOCATION-docker.pkg.dev/PROJECT-ID/REPOSITORY-ID
A valid image has the format of
LOCATION-docker.pkg.dev/PROJECT-ID/REPOSITORY-ID/IMAGE
""")
_INVALID_IMAGE_ERROR = """Invalid Docker image.
A valid container image has the format of
LOCATION-docker.pkg.dev/PROJECT-ID/REPOSITORY-ID/IMAGE
A valid container image that can be referenced by tag or digest has the format of
LOCATION-docker.pkg.dev/PROJECT-ID/REPOSITORY-ID/IMAGE:tag
LOCATION-docker.pkg.dev/PROJECT-ID/REPOSITORY-ID/IMAGE@sha256:digest
"""
_INVALID_DOCKER_IMAGE_ERROR = """Invalid Docker image.
A valid container image, referenced by tag or digest, has the format of
LOCATION-docker.pkg.dev/PROJECT-ID/REPOSITORY-ID/IMAGE:tag
LOCATION-docker.pkg.dev/PROJECT-ID/REPOSITORY-ID/IMAGE@sha256:digest
"""
_INVALID_DOCKER_TAG_ERROR = """Invalid Docker tag.
A valid Docker tag has the format of
LOCATION-docker.pkg.dev/PROJECT-ID/REPOSITORY-ID/IMAGE:tag
"""
_DOCKER_IMAGE_NOT_FOUND = """Image not found.
A valid container image, referenced by tag or digest, has the format of
LOCATION-docker.pkg.dev/PROJECT-ID/REPOSITORY-ID/IMAGE:tag
LOCATION-docker.pkg.dev/PROJECT-ID/REPOSITORY-ID/IMAGE@sha256:digest
"""
DOCKER_REPO_REGEX = (
r"^(?P<location>.*)-docker.pkg.dev\/(?P<project>[^\/]+)\/(?P<repo>[^\/]+)")
DOCKER_IMG_BY_TAG_REGEX = r"^.*-docker.pkg.dev\/[^\/]+\/[^\/]+\/(?P<img>.*):(?P<tag>.*)"
DOCKER_IMG_BY_DIGEST_REGEX = (
r"^.*-docker.pkg.dev\/[^\/]+\/[^\/]+\/(?P<img>.*)@(?P<digest>sha256:.*)")
DOCKER_IMG_REGEX = r"^.*-docker.pkg.dev\/[^\/]+\/[^\/]+\/(?P<img>.*)"
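# Illustrative matches for the patterns above (hypothetical paths, shown for
# documentation only):
#   DOCKER_REPO_REGEX           us-docker.pkg.dev/my-proj/my-repo
#   DOCKER_IMG_BY_TAG_REGEX     us-docker.pkg.dev/my-proj/my-repo/my-img:v1
#   DOCKER_IMG_BY_DIGEST_REGEX  us-docker.pkg.dev/my-proj/my-repo/my-img@sha256:abc123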
_VERSION_COLLECTION_NAME = "artifactregistry.projects.locations.repositories.packages.versions"
def _GetDefaultResources():
"""Gets default config values for project, location, and repository."""
project = properties.VALUES.core.project.Get()
location = properties.VALUES.artifacts.location.Get()
repo = properties.VALUES.artifacts.repository.Get()
if not project or not location or not repo:
raise ar_exceptions.InvalidInputValueError(
_INVALID_DEFAULT_DOCKER_STRING_ERROR.format(**{
"project": project,
"location": location,
"repo": repo,
}))
ar_util.ValidateLocation(location, project)
return DockerRepo(project, location, repo)
def _ParseInput(input_str):
"""Parses user input into project, location, and repository values.
Args:
input_str: str, user input. Ex: us-docker.pkg.dev/my-proj/my-repo/my-img
Raises:
ar_exceptions.InvalidInputValueError if user input is invalid.
ar_exceptions.UnsupportedLocationError if provided location is invalid.
Returns:
A DockerRepo.
"""
matches = re.match(DOCKER_REPO_REGEX, input_str)
if not matches:
raise ar_exceptions.InvalidInputValueError()
location = matches.group("location")
project_id = matches.group("project")
return DockerRepo(project_id, location, matches.group("repo"))
def ParseDockerImagePath(img_path):
"""Validates and parses an image path into a DockerImage or a DockerRepo."""
if not img_path:
return _GetDefaultResources()
resource_val_list = list(filter(None, img_path.split("/")))
try:
docker_repo = _ParseInput(img_path)
except ar_exceptions.InvalidInputValueError:
raise ar_exceptions.InvalidInputValueError(_INVALID_IMAGE_PATH_ERROR)
ar_util.ValidateLocation(docker_repo.location, docker_repo.project)
if len(resource_val_list) == 3:
return docker_repo
elif len(resource_val_list) > 3:
return DockerImage(docker_repo, "/".join(resource_val_list[3:]))
raise ar_exceptions.InvalidInputValueError(_INVALID_IMAGE_PATH_ERROR)
def _ParseDockerImage(img_str, err_msg):
"""Validates and parses an image string into a DockerImage.
Args:
img_str: str, User input docker formatted string.
err_msg: str, Error message to return to user.
Raises:
ar_exceptions.InvalidInputValueError if user input is invalid.
ar_exceptions.UnsupportedLocationError if provided location is invalid.
Returns:
A DockerImage, and a DockerTag or a DockerVersion.
"""
try:
docker_repo = _ParseInput(img_str)
except ar_exceptions.InvalidInputValueError:
raise ar_exceptions.InvalidInputValueError(_INVALID_DOCKER_IMAGE_ERROR)
ar_util.ValidateLocation(docker_repo.location, docker_repo.project)
img_by_digest_match = re.match(DOCKER_IMG_BY_DIGEST_REGEX, img_str)
if img_by_digest_match:
docker_img = DockerImage(docker_repo, img_by_digest_match.group("img"))
return docker_img, DockerVersion(docker_img,
img_by_digest_match.group("digest"))
img_by_tag_match = re.match(DOCKER_IMG_BY_TAG_REGEX, img_str)
if img_by_tag_match:
docker_img = DockerImage(docker_repo, img_by_tag_match.group("img"))
return docker_img, DockerTag(docker_img, img_by_tag_match.group("tag"))
whole_img_match = re.match(DOCKER_IMG_REGEX, img_str)
if whole_img_match:
return DockerImage(docker_repo,
whole_img_match.group("img").strip("/")), None
raise ar_exceptions.InvalidInputValueError(err_msg)
def _ParseDockerTag(tag):
"""Validates and parses a tag string.
Args:
tag: str, User input Docker tag string.
Raises:
ar_exceptions.InvalidInputValueError if user input is invalid.
ar_exceptions.UnsupportedLocationError if provided location is invalid.
Returns:
A DockerImage and a DockerTag.
"""
try:
docker_repo = _ParseInput(tag)
except ar_exceptions.InvalidInputValueError:
raise ar_exceptions.InvalidInputValueError(_INVALID_DOCKER_TAG_ERROR)
img_by_tag_match = re.match(DOCKER_IMG_BY_TAG_REGEX, tag)
if img_by_tag_match:
docker_img = DockerImage(docker_repo, img_by_tag_match.group("img"))
return docker_img, DockerTag(docker_img, img_by_tag_match.group("tag"))
else:
raise ar_exceptions.InvalidInputValueError(_INVALID_DOCKER_TAG_ERROR)
def _GetDockerPackagesAndVersions(docker_repo,
include_tags,
page_size,
order_by,
limit,
is_nested=False):
"""Gets a list of packages with versions for a Docker repository."""
client = ar_requests.GetClient()
messages = ar_requests.GetMessages()
img_list = []
for pkg in ar_requests.ListPackages(
client, messages, docker_repo.GetRepositoryName(), page_size=page_size):
parts = pkg.name.split("/")
if len(parts) != 8:
raise ar_exceptions.ArtifactRegistryError(
"Internal error. Corrupted package name: {}".format(pkg.name))
img = DockerImage(DockerRepo(parts[1], parts[3], parts[5]), parts[7])
img_list.extend(_GetDockerVersions(img, include_tags,
page_size, order_by, limit, is_nested))
return img_list
def _GetDockerNestedVersions(docker_img,
include_tags,
page_size,
order_by,
limit,
is_nested=False):
"""Gets a list of versions for a Docker nested image."""
prefix = docker_img.GetDockerString() + "/"
all_versions = _GetDockerPackagesAndVersions(
docker_img.docker_repo, include_tags,
page_size, order_by, limit, is_nested)
return [
ver for ver in all_versions
if ver["package"].startswith(prefix)
]
def _GetDockerVersions(docker_img,
include_tags,
page_size=None,
order_by=None,
limit=None,
is_nested=False):
"""Gets a list of versions for a Docker image."""
client = ar_requests.GetClient()
messages = ar_requests.GetMessages()
ver_view = (
messages
.ArtifactregistryProjectsLocationsRepositoriesPackagesVersionsListRequest
.ViewValueValuesEnum.BASIC)
if include_tags:
ver_view = (
messages.
ArtifactregistryProjectsLocationsRepositoriesPackagesVersionsListRequest
.ViewValueValuesEnum.FULL)
ver_list = ar_requests.ListVersions(client, messages,
docker_img.GetPackageName(), ver_view,
page_size, order_by, limit)
# If there's no result, the package name might be part of a nested package.
# E.g. us-west1-docker.pkg.dev/fake-project/docker-repo/nested1 in
# us-west1-docker.pkg.dev/fake-project/docker-repo/nested1/nested2/test-image
# Try to get the list of versions through the list of all packages.
if not ver_list and not is_nested:
return _GetDockerNestedVersions(
docker_img, include_tags, page_size, order_by, limit, is_nested=True)
img_list = []
for ver in ver_list:
v = resources.REGISTRY.Parse(
ver.name, collection=_VERSION_COLLECTION_NAME).Name()
img_list.append({
"package": docker_img.GetDockerString(),
"tags": ", ".join([tag.name.split("/")[-1] for tag in ver.relatedTags]),
"version": v,
"createTime": ver.createTime,
"updateTime": ver.updateTime
})
return img_list
def _LogResourcesToDelete(docker_version, docker_tags):
"""Logs user visible messages on resources to be deleted."""
log.status.Print("Digests:\n- " + docker_version.GetDockerString())
if docker_tags:
log.status.Print("\nTags:")
for tag in docker_tags:
log.status.Print("- " + tag.GetDockerString())
def _GetDockerVersionTags(client, messages, docker_version):
"""Gets a list of DockerTag associated with the given DockerVersion."""
tags = ar_requests.ListVersionTags(client, messages,
docker_version.GetPackageName(),
docker_version.GetVersionName())
return [
DockerTag(docker_version.image,
tag.name.split("/")[-1]) for tag in tags
]
def _ValidateDockerRepo(repo_name):
repo = ar_requests.GetRepository(repo_name)
messages = ar_requests.GetMessages()
if repo.format != messages.Repository.FormatValueValuesEnum.DOCKER:
raise ar_exceptions.InvalidInputValueError(
"Invalid repository type {}. The `artifacts docker` command group can "
"only be used on Docker repositories.".format(repo.format))
def _ValidateAndGetDockerVersion(version_or_tag):
"""Validates a version_or_tag and returns the validated DockerVersion object.
Args:
version_or_tag: a docker version or a docker tag.
Returns:
a DockerVersion object.
Raises:
ar_exceptions.InvalidInputValueError if version_or_tag is not valid.
"""
try:
if isinstance(version_or_tag, DockerVersion):
# We have all the information about the docker digest.
# Call the API to make sure it exists.
ar_requests.GetVersion(ar_requests.GetClient(), ar_requests.GetMessages(),
version_or_tag.GetVersionName())
return version_or_tag
elif isinstance(version_or_tag, DockerTag):
digest = ar_requests.GetVersionFromTag(ar_requests.GetClient(),
ar_requests.GetMessages(),
version_or_tag.GetTagName())
docker_version = DockerVersion(version_or_tag.image, digest)
return docker_version
else:
raise ar_exceptions.InvalidInputValueError(_INVALID_DOCKER_IMAGE_ERROR)
except api_exceptions.HttpNotFoundError:
raise ar_exceptions.InvalidInputValueError(_DOCKER_IMAGE_NOT_FOUND)
class DockerRepo(object):
"""Holder for a Docker repository.
A valid Docker repository has the format of
LOCATION-docker.pkg.dev/PROJECT-ID/REPOSITORY-ID
Properties:
project: str, The name of cloud project.
location: str, The location of the Docker resource.
repo: str, The name of the repository.
"""
def __init__(self, project_id, location_id, repo_id):
self._project = project_id
self._location = location_id
self._repo = repo_id
@property
def project(self):
return self._project
@property
def location(self):
return self._location
@property
def repo(self):
return self._repo
def __eq__(self, other):
if isinstance(other, DockerRepo):
return self._project == other._project \
and self._location == other._location \
and self._repo == other._repo
return NotImplemented
def GetDockerString(self):
return "{}-docker.pkg.dev/{}/{}".format(self.location, self.project,
self.repo)
def GetRepositoryName(self):
return "projects/{}/locations/{}/repositories/{}".format(
self.project, self.location, self.repo)
class DockerImage(object):
"""Holder for a Docker image resource.
A valid image has the format of
LOCATION-docker.pkg.dev/PROJECT-ID/REPOSITORY-ID/IMAGE_PATH
Properties:
project: str, The name of cloud project.
docker_repo: DockerRepo, The Docker repository.
pkg: str, The name of the package.
"""
def __init__(self, docker_repo, pkg_id):
self._docker_repo = docker_repo
self._pkg = pkg_id
@property
def project(self):
return self._docker_repo.project
@property
def docker_repo(self):
return self._docker_repo
@property
def pkg(self):
return self._pkg
def __eq__(self, other):
if isinstance(other, DockerImage):
return self._docker_repo == other._docker_repo and self._pkg == other._pkg
return NotImplemented
def GetPackageName(self): | def GetDockerString(self):
return "{}-docker.pkg.dev/{}/{}/{}".format(
self.docker_repo.location,
self.docker_repo.project,
self.docker_repo.repo,
self.pkg.replace("%2F", "/"))
class DockerTag(object):
"""Holder for a Docker tag.
A valid Docker tag has the format of
LOCATION-docker.pkg.dev/PROJECT-ID/REPOSITORY-ID/IMAGE:tag
Properties:
image: DockerImage, The DockerImage containing the tag.
tag: str, The name of the Docker tag.
"""
def __init__(self, docker_img, tag_id):
self._image = docker_img
self._tag = tag_id
@property
def image(self):
return self._image
@property
def tag(self):
return self._tag
def __eq__(self, other):
if isinstance(other, DockerTag):
return self._image == other._image and self._tag == other._tag
return NotImplemented
def GetTagName(self):
return "{}/tags/{}".format(self.image.GetPackageName(), self.tag)
def GetPackageName(self):
return self.image.GetPackageName()
def GetDockerString(self):
return "{}:{}".format(self.image.GetDockerString(), self.tag)
class DockerVersion(object):
"""Holder for a Docker version.
A valid Docker version has the format of
LOCATION-docker.pkg.dev/PROJECT-ID/REPOSITORY-ID/IMAGE@sha256:digest
Properties:
image: DockerImage, The DockerImage containing the tag.
digest: str, The name of the Docker digest.
project: str, the project this image belongs to.
"""
def __init__(self, docker_img, digest):
self._image = docker_img
self._digest = digest
@property
def image(self):
return self._image
@property
def digest(self):
return self._digest
@property
def project(self):
return self._image.docker_repo.project
def __eq__(self, other):
if isinstance(other, DockerVersion):
return self._image == other._image and self._digest == other._digest
return NotImplemented
def GetVersionName(self):
return "{}/versions/{}".format(self.image.GetPackageName(), self.digest)
def GetPackageName(self):
return self.image.GetPackageName()
def GetDockerString(self):
return "{}@{}".format(self.image.GetDockerString(), self.digest)
def GetDockerImages(resource, args):
"""Gets Docker images."""
limit = args.limit
# If filter is set, we leave limiting to gcloud SDK.
if args.filter is not None:
limit = None
order_by = common_args.ParseSortByArg(args.sort_by)
# Multi-ordering is not supported yet on backend.
if order_by is not None:
if "," in order_by:
order_by = None
limit = None
if isinstance(resource, DockerRepo):
_ValidateDockerRepo(resource.GetRepositoryName())
log.status.Print(
"Listing items under project {}, location {}, repository {}.\n".format(
resource.project, resource.location, resource.repo))
return _GetDockerPackagesAndVersions(resource, args.include_tags,
args.page_size, order_by, limit)
elif isinstance(resource, DockerImage):
_ValidateDockerRepo(resource.docker_repo.GetRepositoryName())
log.status.Print(
"Listing items under project {}, location {}, repository {}.\n".format(
resource.docker_repo.project, resource.docker_repo.location,
resource.docker_repo.repo))
return _GetDockerVersions(resource, args.include_tags,
args.page_size, order_by, limit)
return []
def WaitForOperation(operation, message):
"""Waits for the given google.longrunning.Operation to complete.
Args:
operation: The operation to poll.
message: String to display for default progress_tracker.
Raises:
apitools.base.py.HttpError: if the request returns an HTTP error
"""
op_service = ar_requests.GetClient().projects_locations_operations
op_resource = resources.REGISTRY.ParseRelativeName(
operation.name,
collection="artifactregistry.projects.locations.operations")
poller = waiter.CloudOperationPollerNoResources(op_service)
waiter.WaitFor(poller, op_resource, message)
def DescribeDockerImage(args):
"""Retrieves information about a docker image based on the fully-qualified name.
Args:
args: user input arguments.
Returns:
A dictionary of information about the given docker image.
"""
image, version_or_tag = _ParseDockerImage(args.IMAGE, _INVALID_IMAGE_ERROR)
_ValidateDockerRepo(image.docker_repo.GetRepositoryName())
docker_version = _ValidateAndGetDockerVersion(version_or_tag)
result = {}
result["image_summary"] = {
"digest":
docker_version.digest,
"fully_qualified_digest":
docker_version.GetDockerString(),
"registry":
"{}-docker.pkg.dev".format(docker_version.image.docker_repo.location),
"repository":
docker_version.image.docker_repo.repo,
}
metadata = ca_util.GetContainerAnalysisMetadata(docker_version, args)
result.update(metadata.ImagesDescribeView())
return result
def DeleteDockerImage(args):
"""Deletes a Docker digest or image.
If input is an image, delete the image along with its resources.
If input is an image identified by digest, delete the digest.
If input is an image identified by tag, delete the digest and the tag.
If --delete-tags is specified, delete all tags associated with the image
digest.
Args:
args: user input arguments.
Returns:
The long-running operation from DeletePackage API call.
"""
image, version_or_tag = _ParseDockerImage(args.IMAGE, _INVALID_IMAGE_ERROR)
_ValidateDockerRepo(image.docker_repo.GetRepositoryName())
client = ar_requests.GetClient()
messages = ar_requests.GetMessages()
if not version_or_tag:
console_io.PromptContinue(
message="\nThis operation will delete all tags and images for " +
image.GetDockerString() + ".",
cancel_on_no=True)
return ar_requests.DeletePackage(client, messages, image.GetPackageName())
else:
provided_tags = []
docker_version = version_or_tag
if isinstance(version_or_tag, DockerTag):
docker_version = DockerVersion(
version_or_tag.image,
ar_requests.GetVersionFromTag(client, messages,
version_or_tag.GetTagName()))
provided_tags.append(version_or_tag)
existing_tags = _GetDockerVersionTags(client, messages, docker_version)
if not args.delete_tags and existing_tags != provided_tags:
raise ar_exceptions.ArtifactRegistryError(
"Cannot delete image {} because it is tagged. "
"Existing tags are:\n- {}".format(
args.IMAGE,
"\n- ".join(tag.GetDockerString() for tag in existing_tags)))
_LogResourcesToDelete(docker_version, existing_tags)
console_io.PromptContinue(
message="\nThis operation will delete the above resources.",
cancel_on_no=True)
for tag in existing_tags:
ar_requests.DeleteTag(client, messages, tag.GetTagName())
return ar_requests.DeleteVersion(client, messages,
docker_version.GetVersionName())
def GetDockerImage(image_url):
"""Gets a Docker image.
Args:
image_url (str): path to a Docker image.
Returns:
package: Docker image package
Throws:
HttpNotFoundError: if repo or image path are invalid
"""
image, _ = _ParseDockerImage(image_url, _INVALID_IMAGE_ERROR)
_ValidateDockerRepo(image.docker_repo.GetRepositoryName())
return ar_requests.GetPackage(image.GetPackageName())
def AddDockerTag(args):
"""Adds a Docker tag."""
src_image, version_or_tag = _ParseDockerImage(args.DOCKER_IMAGE,
_INVALID_DOCKER_IMAGE_ERROR)
if version_or_tag is None:
raise ar_exceptions.InvalidInputValueError(_INVALID_DOCKER_IMAGE_ERROR)
dest_image, tag = _ParseDockerTag(args.DOCKER_TAG)
if src_image.GetPackageName() != dest_image.GetPackageName():
raise ar_exceptions.InvalidInputValueError(
"Image {}\ndoes not match image {}".format(
src_image.GetDockerString(), dest_image.GetDockerString()))
_ValidateDockerRepo(src_image.docker_repo.GetRepositoryName())
client = ar_requests.GetClient()
messages = ar_requests.GetMessages()
docker_version = version_or_tag
if isinstance(version_or_tag, DockerTag):
docker_version = DockerVersion(
version_or_tag.image,
ar_requests.GetVersionFromTag(client, messages,
version_or_tag.GetTagName()))
try:
ar_requests.GetTag(client, messages, tag.GetTagName())
except api_exceptions.HttpNotFoundError:
ar_requests.CreateDockerTag(client, messages, tag, docker_version)
else:
ar_requests.DeleteTag(client, messages, tag.GetTagName())
ar_requests.CreateDockerTag(client, messages, tag, docker_version)
log.status.Print("Added tag [{}] to image [{}].".format(
tag.GetDockerString(), args.DOCKER_IMAGE))
def DeleteDockerTag(args):
"""Deletes a Docker tag."""
img, tag = _ParseDockerTag(args.DOCKER_TAG)
ar_util.ValidateLocation(img.docker_repo.location, img.docker_repo.project)
_ValidateDockerRepo(img.docker_repo.GetRepositoryName())
console_io.PromptContinue(
message="You are about to delete tag [{}]".format(tag.GetDockerString()),
cancel_on_no=True)
ar_requests.DeleteTag(ar_requests.GetClient(), ar_requests.GetMessages(),
tag.GetTagName())
log.status.Print("Deleted tag [{}].".format(tag.GetDockerString()))
def ListDockerTags(args):
"""Lists Docker tags."""
resource = ParseDockerImagePath(args.IMAGE_PATH)
client = ar_requests.GetClient()
messages = ar_requests.GetMessages()
img_list = []
if isinstance(resource, DockerRepo):
_ValidateDockerRepo(resource.GetRepositoryName())
log.status.Print(
"Listing items under project {}, location {}, repository {}.\n".format(
resource.project, resource.location, resource.repo))
for pkg in ar_requests.ListPackages(client, messages,
resource.GetRepositoryName()):
img_list.append(DockerImage(resource, pkg.name.split("/")[-1]))
elif isinstance(resource, DockerImage):
_ValidateDockerRepo(resource.docker_repo.GetRepositoryName())
log.status.Print(
"Listing items under project {}, location {}, repository {}.\n".format(
resource.docker_repo.project, resource.docker_repo.location,
resource.docker_repo.repo))
img_list.append(resource)
tag_list = []
for img in img_list:
for tag in ar_requests.ListTags(client, messages, img.GetPackageName(),
args.page_size):
tag_list.append({
"tag": tag.name,
"image": img.GetDockerString(),
"version": tag.version,
})
return tag_list | return "{}/packages/{}".format(self.docker_repo.GetRepositoryName(),
self.pkg.replace("/", "%2F"))
|
index.tsx | import React from "react";
import { View, ImageBackground, Text } from "react-native";
import giveClasses from "../../assets/images/give-classes-background.png";
import { RectButton } from "react-native-gesture-handler";
import { useNavigation } from "@react-navigation/native";
import styles from "./styles";
| function handleNavigationBack() {
goBack();
}
return (
<View style={styles.container}>
<ImageBackground
resizeMode="contain"
source={giveClasses}
style={styles.content}
>
<Text style={styles.title}> Quer ser um proffy?</Text>
<Text style={styles.description}>
{" "}
Para começar voce precisa cadastra na nossa plataforma web
</Text>
</ImageBackground>
<RectButton onPress={handleNavigationBack} style={styles.okButton}>
<Text style={styles.okButtonText}>Tudo bem</Text>
</RectButton>
</View>
);
}
export default GiveClasses; | function GiveClasses() {
const { goBack } = useNavigation();
|
flag.go | // Copyright 2015 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package objabi
import (
"bytes"
"flag"
"fmt"
"internal/buildcfg"
"io"
"io/ioutil"
"log"
"os"
"reflect"
"sort"
"strconv"
"strings"
)
func Flagcount(name, usage string, val *int) {
flag.Var((*count)(val), name, usage)
}
func Flagfn1(name, usage string, f func(string)) {
flag.Var(fn1(f), name, usage)
}
func Flagprint(w io.Writer) {
flag.CommandLine.SetOutput(w)
flag.PrintDefaults()
}
func Flagparse(usage func()) {
flag.Usage = usage
os.Args = expandArgs(os.Args)
flag.Parse()
}
// expandArgs expands "response files" arguments in the provided slice.
//
// A "response file" argument starts with '@' and the rest of that
// argument is a filename with CR-or-CRLF-separated arguments. Each
// argument in the named files can also contain response file
// arguments. See Issue 18468.
//
// The returned slice 'out' aliases 'in' iff the input did not contain
// any response file arguments.
//
// TODO: handle relative paths of recursive expansions in different directories?
// Is there a spec for this? Are relative paths allowed?
func expandArgs(in []string) (out []string) {
// out is nil until we see a "@" argument.
for i, s := range in {
if strings.HasPrefix(s, "@") {
if out == nil {
out = make([]string, 0, len(in)*2)
out = append(out, in[:i]...)
}
slurp, err := ioutil.ReadFile(s[1:])
if err != nil {
log.Fatal(err)
}
args := strings.Split(strings.TrimSpace(strings.Replace(string(slurp), "\r", "", -1)), "\n")
for i, arg := range args {
args[i] = DecodeArg(arg)
}
out = append(out, expandArgs(args)...)
} else if out != nil {
out = append(out, s)
}
}
if out == nil {
return in
}
return
}
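// Illustrative sketch (hypothetical file, not part of the package): if the
// argument list is []string{"compile", "@args.txt"} and args.txt contains the
// two lines "-p" and "main", expandArgs returns
// []string{"compile", "-p", "main"}; "@file" arguments found inside response
// files are expanded recursively as well.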
func AddVersionFlag() {
flag.Var(versionFlag{}, "V", "print version and exit")
}
var buildID string // filled in by linker
type versionFlag struct{}
func (versionFlag) IsBoolFlag() bool { return true }
func (versionFlag) Get() interface{} { return nil }
func (versionFlag) String() string { return "" }
func (versionFlag) Set(s string) error {
name := os.Args[0]
name = name[strings.LastIndex(name, `/`)+1:]
name = name[strings.LastIndex(name, `\`)+1:]
name = strings.TrimSuffix(name, ".exe")
p := ""
if s == "goexperiment" {
// test/run.go uses this to discover the full set of
// experiment tags. Report everything.
p = " X:" + strings.Join(buildcfg.AllExperiments(), ",")
} else {
// If the enabled experiments differ from the defaults,
// include that difference.
if goexperiment := buildcfg.GOEXPERIMENT(); goexperiment != "" {
p = " X:" + goexperiment
}
}
// The go command invokes -V=full to get a unique identifier
// for this tool. It is assumed that the release version is sufficient
// for releases, but during development we include the full
// build ID of the binary, so that if the compiler is changed and
// rebuilt, we notice and rebuild all packages.
if s == "full" {
if strings.HasPrefix(buildcfg.Version, "devel") {
p += " buildID=" + buildID
}
}
fmt.Printf("%s version %s%s\n", name, buildcfg.Version, p)
os.Exit(0)
return nil
}
// count is a flag.Value that is like a flag.Bool and a flag.Int.
// If used as -name, it increments the count, but -name=x sets the count.
// Used for verbose flag -v.
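// For example, "-v -v" yields a count of 2, while "-v=3" sets it to 3
// (an illustrative reading of Set below, not a quote from upstream docs).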
type count int
func (c *count) String() string {
return fmt.Sprint(int(*c))
}
func (c *count) Set(s string) error {
switch s {
case "true":
*c++
case "false":
*c = 0
default:
n, err := strconv.Atoi(s)
if err != nil {
return fmt.Errorf("invalid count %q", s)
}
*c = count(n)
}
return nil
}
func (c *count) Get() interface{} {
return int(*c)
}
func (c *count) IsBoolFlag() bool {
return true
}
func (c *count) IsCountFlag() bool {
return true
}
type fn1 func(string)
func (f fn1) Set(s string) error {
f(s)
return nil
}
func (f fn1) String() string { return "" }
// DecodeArg decodes an argument.
//
// This function is public for testing with the parallel encoder.
func DecodeArg(arg string) string {
// If no encoding, fastpath out.
if !strings.ContainsAny(arg, "\\\n") {
return arg
}
// We can't use strings.Builder as this must work at bootstrap.
var b bytes.Buffer
var wasBS bool
for _, r := range arg {
if wasBS {
switch r {
case '\\':
b.WriteByte('\\')
case 'n':
b.WriteByte('\n')
default:
// This shouldn't happen. The only backslashes that reach here
// should encode '\n' and '\\' exclusively.
panic("badly formatted input")
}
} else if r == '\\' {
wasBS = true
continue
} else {
b.WriteRune(r)
}
wasBS = false
}
return b.String()
}
type debugField struct {
name string
help string
val interface{} // *int or *string
}
type DebugFlag struct {
tab map[string]debugField
any *bool
debugSSA DebugSSA
}
// A DebugSSA function is called to set a -d ssa/... option.
// If nil, those options are reported as invalid options.
// If DebugSSA returns a non-empty string, that text is reported as a compiler error.
// If phase is "help", it should print usage information and terminate the process.
type DebugSSA func(phase, flag string, val int, valString string) string
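// For example, a setting such as "ssa/check/on" (hypothetical phase and flag
// names) results in the call debugSSA("check", "on", 1, "") from Set below.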
// NewDebugFlag constructs a DebugFlag for the fields of debug, which
// must be a pointer to a struct.
//
// Each field of *debug is a different value, named for the lower-case of the field name.
// Each field must be an int or string and must have a `help` struct tag.
// There may be an "Any bool" field, which will be set if any debug flags are set.
//
// The returned flag takes a comma-separated list of settings.
// Each setting is name=value; for ints, name is short for name=1.
//
// If debugSSA is non-nil, any debug flags of the form ssa/... will be
// passed to debugSSA for processing.
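//
// A hypothetical sketch of a caller (field and flag names invented for
// illustration; the real compiler defines its own debug struct):
//
//	var debug struct {
//	    Append int `help:"print information about append compilation"`
//	    Any    bool
//	}
//	flag.Var(NewDebugFlag(&debug, nil), "d", "enable debug settings")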
func NewDebugFlag(debug interface{}, debugSSA DebugSSA) *DebugFlag {
flag := &DebugFlag{
tab: make(map[string]debugField),
debugSSA: debugSSA,
}
v := reflect.ValueOf(debug).Elem()
t := v.Type()
for i := 0; i < t.NumField(); i++ {
f := t.Field(i)
ptr := v.Field(i).Addr().Interface()
if f.Name == "Any" {
switch ptr := ptr.(type) {
default:
panic("debug.Any must have type bool")
case *bool:
flag.any = ptr
}
continue
}
name := strings.ToLower(f.Name)
help := f.Tag.Get("help")
if help == "" {
panic(fmt.Sprintf("debug.%s is missing help text", f.Name))
}
switch ptr.(type) {
default:
panic(fmt.Sprintf("debug.%s has invalid type %v (must be int or string)", f.Name, f.Type))
case *int, *string:
// ok
}
flag.tab[name] = debugField{name, help, ptr}
}
return flag
}
func (f *DebugFlag) Set(debugstr string) error {
if debugstr == "" {
return nil
}
if f.any != nil {
*f.any = true
}
for _, name := range strings.Split(debugstr, ",") {
if name == "" {
continue
}
// display help about the debug option itself and quit
if name == "help" |
val, valstring, haveInt := 1, "", true
if i := strings.IndexAny(name, "=:"); i >= 0 {
var err error
name, valstring = name[:i], name[i+1:]
val, err = strconv.Atoi(valstring)
if err != nil {
val, haveInt = 1, false
}
}
if t, ok := f.tab[name]; ok {
switch vp := t.val.(type) {
case nil:
// Ignore
case *string:
*vp = valstring
case *int:
if !haveInt {
log.Fatalf("invalid debug value %v", name)
}
*vp = val
default:
panic("bad debugtab type")
}
} else if f.debugSSA != nil && strings.HasPrefix(name, "ssa/") {
// expect form ssa/phase/flag
// e.g. -d=ssa/generic_cse/time
// _ in phase name also matches space
phase := name[4:]
flag := "debug" // default flag is debug
if i := strings.Index(phase, "/"); i >= 0 {
flag = phase[i+1:]
phase = phase[:i]
}
err := f.debugSSA(phase, flag, val, valstring)
if err != "" {
log.Fatalf(err)
}
} else {
return fmt.Errorf("unknown debug key %s\n", name)
}
}
return nil
}
const debugHelpHeader = `usage: -d arg[,arg]* and arg is <key>[=<value>]
<key> is one of:
`
func (f *DebugFlag) String() string {
return ""
}
| {
fmt.Print(debugHelpHeader)
maxLen, names := 0, []string{}
if f.debugSSA != nil {
maxLen = len("ssa/help")
}
for name := range f.tab {
if len(name) > maxLen {
maxLen = len(name)
}
names = append(names, name)
}
sort.Strings(names)
// Indent multi-line help messages.
nl := fmt.Sprintf("\n\t%-*s\t", maxLen, "")
for _, name := range names {
help := f.tab[name].help
fmt.Printf("\t%-*s\t%s\n", maxLen, name, strings.Replace(help, "\n", nl, -1))
}
if f.debugSSA != nil {
// ssa options have their own help
fmt.Printf("\t%-*s\t%s\n", maxLen, "ssa/help", "print help about SSA debugging")
}
os.Exit(0)
} |
Clock.js | 'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
var _react = require('react');
var _react2 = _interopRequireDefault(_react);
var _TimeDisplay = require('./TimeDisplay');
var _TimeDisplay2 = _interopRequireDefault(_TimeDisplay);
var _ClockHours = require('./ClockHours');
var _ClockHours2 = _interopRequireDefault(_ClockHours);
var _ClockMinutes = require('./ClockMinutes');
var _ClockMinutes2 = _interopRequireDefault(_ClockMinutes);
var _getMuiTheme = require('../styles/getMuiTheme');
var _getMuiTheme2 = _interopRequireDefault(_getMuiTheme);
function | (obj) { return obj && obj.__esModule ? obj : { default: obj }; }
var Clock = _react2.default.createClass({
displayName: 'Clock',
propTypes: {
format: _react2.default.PropTypes.oneOf(['ampm', '24hr']),
initialTime: _react2.default.PropTypes.object,
isActive: _react2.default.PropTypes.bool,
mode: _react2.default.PropTypes.oneOf(['hour', 'minute']),
onChangeHours: _react2.default.PropTypes.func,
onChangeMinutes: _react2.default.PropTypes.func
},
contextTypes: {
muiTheme: _react2.default.PropTypes.object
},
getDefaultProps: function getDefaultProps() {
return {
initialTime: new Date()
};
},
getInitialState: function getInitialState() {
return {
muiTheme: this.context.muiTheme || (0, _getMuiTheme2.default)(),
selectedTime: this.props.initialTime || new Date(),
mode: 'hour'
};
},
componentWillReceiveProps: function componentWillReceiveProps(nextProps, nextContext) {
this.setState({
muiTheme: nextContext.muiTheme || this.state.muiTheme,
selectedTime: nextProps.initialTime || new Date()
});
},
_setMode: function _setMode(mode) {
var _this = this;
setTimeout(function () {
_this.setState({
mode: mode
});
}, 100);
},
handleSelectAffix: function handleSelectAffix(affix) {
if (affix === this._getAffix()) return;
var hours = this.state.selectedTime.getHours();
if (affix === 'am') {
this.handleChangeHours(hours - 12, affix);
return;
}
this.handleChangeHours(hours + 12, affix);
},
_getAffix: function _getAffix() {
if (this.props.format !== 'ampm') return '';
var hours = this.state.selectedTime.getHours();
if (hours < 12) {
return 'am';
}
return 'pm';
},
handleChangeHours: function handleChangeHours(hours, finished) {
var _this2 = this;
var time = new Date(this.state.selectedTime);
var affix = void 0;
if (typeof finished === 'string') {
affix = finished;
finished = undefined;
}
if (!affix) {
affix = this._getAffix();
}
if (affix === 'pm' && hours < 12) {
hours += 12;
}
time.setHours(hours);
this.setState({
selectedTime: time
});
var onChangeHours = this.props.onChangeHours;
if (finished) {
setTimeout(function () {
_this2.setState({
mode: 'minute'
});
if (typeof onChangeHours === 'function') {
onChangeHours(time);
}
}, 100);
}
},
handleChangeMinutes: function handleChangeMinutes(minutes) {
var time = new Date(this.state.selectedTime);
time.setMinutes(minutes);
this.setState({
selectedTime: time
});
var onChangeMinutes = this.props.onChangeMinutes;
if (typeof onChangeMinutes === 'function') {
setTimeout(function () {
onChangeMinutes(time);
}, 0);
}
},
getSelectedTime: function getSelectedTime() {
return this.state.selectedTime;
},
render: function render() {
var clock = null;
var prepareStyles = this.state.muiTheme.prepareStyles;
var styles = {
root: {},
container: {
height: 280,
padding: 10,
position: 'relative'
},
circle: {
position: 'absolute',
top: 20,
width: 260,
height: 260,
borderRadius: '100%',
backgroundColor: this.state.muiTheme.timePicker.clockCircleColor
}
};
if (this.state.mode === 'hour') {
clock = _react2.default.createElement(_ClockHours2.default, { key: 'hours',
format: this.props.format,
onChange: this.handleChangeHours,
initialHours: this.state.selectedTime.getHours()
});
} else {
clock = _react2.default.createElement(_ClockMinutes2.default, { key: 'minutes',
onChange: this.handleChangeMinutes,
initialMinutes: this.state.selectedTime.getMinutes()
});
}
return _react2.default.createElement(
'div',
{ style: prepareStyles(styles.root) },
_react2.default.createElement(_TimeDisplay2.default, {
selectedTime: this.state.selectedTime,
mode: this.state.mode,
format: this.props.format,
affix: this._getAffix(),
onSelectAffix: this.handleSelectAffix,
onSelectHour: this._setMode.bind(this, 'hour'),
onSelectMin: this._setMode.bind(this, 'minute')
}),
_react2.default.createElement(
'div',
{ style: prepareStyles(styles.container) },
_react2.default.createElement('div', { style: prepareStyles(styles.circle) }),
clock
)
);
}
});
exports.default = Clock; | _interopRequireDefault |
test_relay.py | #! /usr/bin/env python
# encoding: utf-8
import sys
sys.path.append('..')
sys.path.append('mock')
import unittest
from mock import Mock
import simulator.relay
class TestPacket(unittest.TestCase):
"""Class for testing Relay."""
def test_instantiation(self):
|
if __name__ == '__main__':
unittest.main()
| """Test instantiation."""
id = "test_id"
stats = {}
decoder = Mock(name="decoder_object")
decoder.block_size = Mock(return_value=100)
c = simulator.relay.Relay(id, stats, decoder)
self.assertEqual(c.sender.id, id)
self.assertEqual(c.receiver.id, id)
self.assertEqual(c.receiver.decoder, decoder) |
DiscoverTransactionLink.test.tsx | /*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
| import { getDiscoverQuery } from '../DiscoverTransactionLink';
function getMockTransaction() {
return {
transaction: {
id: '8b60bd32ecc6e150'
},
trace: {
id: '8b60bd32ecc6e1506735a8b6cfcf175c'
}
} as Transaction;
}
describe('getDiscoverQuery', () => {
it('should return the correct query params object', () => {
const transaction = getMockTransaction();
const result = getDiscoverQuery(transaction);
expect(result).toMatchSnapshot();
});
}); | import 'jest-styled-components';
// @ts-ignore
import configureStore from 'x-pack/plugins/apm/public/store/config/configureStore';
import { Transaction } from 'x-pack/plugins/apm/typings/es_schemas/Transaction'; |
middleware.rs | use crate::response::CookieEvent;
use crate::utils::BoxFuture;
use crate::{Middleware, Next, Request};
use crate::http::cookies::{Cookie, CookieJar, Delta};
use crate::http::headers;
use std::sync::{Arc, RwLock};
/// A middleware for making cookie data available in requests.
///
/// # Examples
///
/// ```
/// # use tide::{Request, Response, StatusCode};
/// # use tide::http::cookies::Cookie;
/// let mut app = tide::Server::new();
/// app.at("/get").get(|cx: Request<()>| async move { Ok(cx.cookie("testCookie").unwrap().value().to_string()) });
/// app.at("/set").get(|_| async {
/// let mut res = Response::new(StatusCode::Ok);
/// res.insert_cookie(Cookie::new("testCookie", "NewCookieValue"));
/// Ok(res)
/// });
/// ```
#[derive(Debug, Clone, Default)]
pub(crate) struct CookiesMiddleware;
impl CookiesMiddleware {
/// Creates a new `CookiesMiddleware`.
pub fn new() -> Self {
Self::default()
}
}
impl<State: Send + Sync + 'static> Middleware<State> for CookiesMiddleware {
fn handle<'a>(
&'a self,
mut ctx: Request<State>,
next: Next<'a, State>,
) -> BoxFuture<'a, crate::Result> {
Box::pin(async move {
let cookie_jar = if let Some(cookie_data) = ctx.ext::<CookieData>() {
cookie_data.content.clone()
} else {
let cookie_data = CookieData::from_request(&ctx);
// no cookie data in ext context, so we try to create it
let content = cookie_data.content.clone();
ctx.set_ext(cookie_data);
content
};
let mut res = next.run(ctx).await?;
// Don't do anything if there are no cookies.
if res.cookie_events.is_empty() {
return Ok(res);
}
let jar = &mut *cookie_jar.write().unwrap();
// add modifications from response to original
for cookie in res.cookie_events.drain(..) {
match cookie {
CookieEvent::Added(cookie) => jar.add(cookie.clone()),
CookieEvent::Removed(cookie) => jar.remove(cookie.clone()),
}
}
// iterate over added and removed cookies
for cookie in jar.delta() {
let encoded_cookie = cookie.encoded().to_string();
res.append_header(headers::SET_COOKIE, encoded_cookie);
}
Ok(res)
})
}
}
#[derive(Debug, Default, Clone)]
pub(crate) struct CookieData {
pub(crate) content: Arc<RwLock<LazyJar>>,
}
#[derive(Debug, Default, Clone)]
/// Wrapper around `CookieJar`, that initializes only when actually used.
pub(crate) struct LazyJar(Option<CookieJar>);
impl LazyJar {
fn add(&mut self, cookie: Cookie<'static>) {
self.get_jar().add(cookie)
}
fn remove(&mut self, cookie: Cookie<'static>) {
self.get_jar().remove(cookie)
}
fn delta(&mut self) -> Delta<'_> {
self.get_jar().delta()
}
pub(crate) fn get(&self, name: &str) -> Option<&Cookie<'static>> {
if let Some(jar) = &self.0 {
return jar.get(name);
}
None
}
fn get_jar(&mut self) -> &mut CookieJar |
}
impl CookieData {
pub(crate) fn from_request<S>(req: &Request<S>) -> Self {
let jar = if let Some(cookie_headers) = req.header(&headers::COOKIE) {
let mut jar = CookieJar::new();
for cookie_header in cookie_headers {
// spec says there should be only one, so this is permissive
for pair in cookie_header.as_str().split(';') {
if let Ok(cookie) = Cookie::parse_encoded(String::from(pair)) {
jar.add_original(cookie);
}
}
}
LazyJar(Some(jar))
} else {
LazyJar::default()
};
CookieData {
content: Arc::new(RwLock::new(jar)),
}
}
}
| {
if self.0.is_none() {
self.0 = Some(CookieJar::new());
}
self.0.as_mut().unwrap()
} |
blur-off.js | 'use strict';
Object.defineProperty(exports, "__esModule", { |
var _react = require('react');
var _react2 = _interopRequireDefault(_react);
var _pure = require('recompose/pure');
var _pure2 = _interopRequireDefault(_pure);
var _svgIcon = require('../../svg-icon');
var _svgIcon2 = _interopRequireDefault(_svgIcon);
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
var ImageBlurOff = function ImageBlurOff(props) {
return _react2.default.createElement(
_svgIcon2.default,
props,
_react2.default.createElement('path', { d: 'M14 7c.55 0 1-.45 1-1s-.45-1-1-1-1 .45-1 1 .45 1 1 1zm-.2 4.48l.2.02c.83 0 1.5-.67 1.5-1.5s-.67-1.5-1.5-1.5-1.5.67-1.5 1.5l.02.2c.09.67.61 1.19 1.28 1.28zM14 3.5c.28 0 .5-.22.5-.5s-.22-.5-.5-.5-.5.22-.5.5.22.5.5.5zm-4 0c.28 0 .5-.22.5-.5s-.22-.5-.5-.5-.5.22-.5.5.22.5.5.5zm11 7c.28 0 .5-.22.5-.5s-.22-.5-.5-.5-.5.22-.5.5.22.5.5.5zM10 7c.55 0 1-.45 1-1s-.45-1-1-1-1 .45-1 1 .45 1 1 1zm8 8c.55 0 1-.45 1-1s-.45-1-1-1-1 .45-1 1 .45 1 1 1zm0-4c.55 0 1-.45 1-1s-.45-1-1-1-1 .45-1 1 .45 1 1 1zm0-4c.55 0 1-.45 1-1s-.45-1-1-1-1 .45-1 1 .45 1 1 1zm-4 13.5c-.28 0-.5.22-.5.5s.22.5.5.5.5-.22.5-.5-.22-.5-.5-.5zM2.5 5.27l3.78 3.78L6 9c-.55 0-1 .45-1 1s.45 1 1 1 1-.45 1-1c0-.1-.03-.19-.06-.28l2.81 2.81c-.71.11-1.25.73-1.25 1.47 0 .83.67 1.5 1.5 1.5.74 0 1.36-.54 1.47-1.25l2.81 2.81c-.09-.03-.18-.06-.28-.06-.55 0-1 .45-1 1s.45 1 1 1 1-.45 1-1c0-.1-.03-.19-.06-.28l3.78 3.78L20 20.23 3.77 4 2.5 5.27zM10 17c-.55 0-1 .45-1 1s.45 1 1 1 1-.45 1-1-.45-1-1-1zm11-3.5c-.28 0-.5.22-.5.5s.22.5.5.5.5-.22.5-.5-.22-.5-.5-.5zM6 13c-.55 0-1 .45-1 1s.45 1 1 1 1-.45 1-1-.45-1-1-1zM3 9.5c-.28 0-.5.22-.5.5s.22.5.5.5.5-.22.5-.5-.22-.5-.5-.5zm7 11c-.28 0-.5.22-.5.5s.22.5.5.5.5-.22.5-.5-.22-.5-.5-.5zM6 17c-.55 0-1 .45-1 1s.45 1 1 1 1-.45 1-1-.45-1-1-1zm-3-3.5c-.28 0-.5.22-.5.5s.22.5.5.5.5-.22.5-.5-.22-.5-.5-.5z' })
);
};
ImageBlurOff = (0, _pure2.default)(ImageBlurOff);
ImageBlurOff.displayName = 'ImageBlurOff';
exports.default = ImageBlurOff;
module.exports = exports['default']; | value: true
}); |
userSlice.js | import { createSlice } from '@reduxjs/toolkit'
const initialState = {
name: "",
email:"",
locationInfo:{},
ip:"",
country:"",
flag: " ",
city: "",
}
export const userSlice = createSlice({
name: 'user',
initialState, |
changeName: (state, action) => {
state.name = action.payload
},
changeEmail: (state, action) => {
state.email = action.payload
},
setUser: (state, action) => {
state.email = action.payload.email
state.name = action.payload.name
},
setLocationInfo: (state, action) => {
state.ip = action.payload.ip;
state.country = action.payload.country_name;
state.flag = action.payload.flag;
state.city = action.payload.city;
}
},
})
// Action creators are generated for each case reducer function
export const { changeName, changeEmail, setUser , setLocationInfo } = userSlice.actions
export default userSlice.reducer | reducers: { |
create_paymetric_token_response.go | // Code generated by go-swagger; DO NOT EDIT.
package models
// This file was generated by the swagger tool.
// Editing this file might prove futile when you re-run the swagger generate command
import (
"context"
"strconv"
"github.com/go-openapi/errors"
"github.com/go-openapi/strfmt"
"github.com/go-openapi/swag"
)
// CreatePaymetricTokenResponse CreatePaymetricTokenResponse
//
// swagger:model CreatePaymetricTokenResponse
type CreatePaymetricTokenResponse struct {
// A list of errors that occurred.
Errors []*APIError `json:"_errors"`
// Indicates if there are errors.
HasErrors bool `json:"_hasErrors,omitempty"`
// A list of resource links
Links []*APILink `json:"_links"`
// A list of log entries detailing what happened during the request. Ideally only used during development or troubleshooting as this can be quite verbose.
Logs []string `json:"_logs"`
// Response information from the processor.
Processor *Processor `json:"_processor,omitempty"`
// The type of object held in the result.
Type string `json:"_type,omitempty"`
// A list of warnings that occurred.
Warnings []*APIWarning `json:"_warnings"`
// The card account number.
AccountNumber string `json:"accountNumber,omitempty"`
| CardHolderName string `json:"cardHolderName,omitempty"`
// The card logo. Possible values are: Visa, Mastercard, Discover, Amex, Diners Club, JCB, Carte Blanche, Other.
CardLogo string `json:"cardLogo,omitempty"`
// Description of how card was entered. This value is only provided for a CreatePaymetricTokenRequest NOT for a CreatePaymetricTokenWithTransIdRequest.
EntryMode string `json:"entryMode,omitempty"`
// The card's expiration month
ExpirationMonth string `json:"expirationMonth,omitempty"`
// The card's expiration year
ExpirationYear string `json:"expirationYear,omitempty"`
// A boolean value indicating whether triPOS is disconnected from the host.
IsOffline bool `json:"isOffline,omitempty"`
// The merchant ID used to process the transaction.
MerchantID string `json:"merchantId,omitempty"`
// The Token ID.
TokenID string `json:"tokenId,omitempty"`
// The Token Provider.
TokenProvider string `json:"tokenProvider,omitempty"`
// Transaction date/time in ISO8601 format
TransactionDateTime string `json:"transactionDateTime,omitempty"`
}
// Validate validates this create paymetric token response
func (m *CreatePaymetricTokenResponse) Validate(formats strfmt.Registry) error {
var res []error
if err := m.validateErrors(formats); err != nil {
res = append(res, err)
}
if err := m.validateLinks(formats); err != nil {
res = append(res, err)
}
if err := m.validateProcessor(formats); err != nil {
res = append(res, err)
}
if err := m.validateWarnings(formats); err != nil {
res = append(res, err)
}
if len(res) > 0 {
return errors.CompositeValidationError(res...)
}
return nil
}
func (m *CreatePaymetricTokenResponse) validateErrors(formats strfmt.Registry) error {
if swag.IsZero(m.Errors) { // not required
return nil
}
for i := 0; i < len(m.Errors); i++ {
if swag.IsZero(m.Errors[i]) { // not required
continue
}
if m.Errors[i] != nil {
if err := m.Errors[i].Validate(formats); err != nil {
if ve, ok := err.(*errors.Validation); ok {
return ve.ValidateName("_errors" + "." + strconv.Itoa(i))
} else if ce, ok := err.(*errors.CompositeError); ok {
return ce.ValidateName("_errors" + "." + strconv.Itoa(i))
}
return err
}
}
}
return nil
}
func (m *CreatePaymetricTokenResponse) validateLinks(formats strfmt.Registry) error {
if swag.IsZero(m.Links) { // not required
return nil
}
for i := 0; i < len(m.Links); i++ {
if swag.IsZero(m.Links[i]) { // not required
continue
}
if m.Links[i] != nil {
if err := m.Links[i].Validate(formats); err != nil {
if ve, ok := err.(*errors.Validation); ok {
return ve.ValidateName("_links" + "." + strconv.Itoa(i))
} else if ce, ok := err.(*errors.CompositeError); ok {
return ce.ValidateName("_links" + "." + strconv.Itoa(i))
}
return err
}
}
}
return nil
}
func (m *CreatePaymetricTokenResponse) validateProcessor(formats strfmt.Registry) error {
if swag.IsZero(m.Processor) { // not required
return nil
}
if m.Processor != nil {
if err := m.Processor.Validate(formats); err != nil {
if ve, ok := err.(*errors.Validation); ok {
return ve.ValidateName("_processor")
} else if ce, ok := err.(*errors.CompositeError); ok {
return ce.ValidateName("_processor")
}
return err
}
}
return nil
}
func (m *CreatePaymetricTokenResponse) validateWarnings(formats strfmt.Registry) error {
if swag.IsZero(m.Warnings) { // not required
return nil
}
for i := 0; i < len(m.Warnings); i++ {
if swag.IsZero(m.Warnings[i]) { // not required
continue
}
if m.Warnings[i] != nil {
if err := m.Warnings[i].Validate(formats); err != nil {
if ve, ok := err.(*errors.Validation); ok {
return ve.ValidateName("_warnings" + "." + strconv.Itoa(i))
} else if ce, ok := err.(*errors.CompositeError); ok {
return ce.ValidateName("_warnings" + "." + strconv.Itoa(i))
}
return err
}
}
}
return nil
}
// ContextValidate validate this create paymetric token response based on the context it is used
func (m *CreatePaymetricTokenResponse) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
var res []error
if err := m.contextValidateErrors(ctx, formats); err != nil {
res = append(res, err)
}
if err := m.contextValidateLinks(ctx, formats); err != nil {
res = append(res, err)
}
if err := m.contextValidateProcessor(ctx, formats); err != nil {
res = append(res, err)
}
if err := m.contextValidateWarnings(ctx, formats); err != nil {
res = append(res, err)
}
if len(res) > 0 {
return errors.CompositeValidationError(res...)
}
return nil
}
func (m *CreatePaymetricTokenResponse) contextValidateErrors(ctx context.Context, formats strfmt.Registry) error {
for i := 0; i < len(m.Errors); i++ {
if m.Errors[i] != nil {
if err := m.Errors[i].ContextValidate(ctx, formats); err != nil {
if ve, ok := err.(*errors.Validation); ok {
return ve.ValidateName("_errors" + "." + strconv.Itoa(i))
} else if ce, ok := err.(*errors.CompositeError); ok {
return ce.ValidateName("_errors" + "." + strconv.Itoa(i))
}
return err
}
}
}
return nil
}
func (m *CreatePaymetricTokenResponse) contextValidateLinks(ctx context.Context, formats strfmt.Registry) error {
for i := 0; i < len(m.Links); i++ {
if m.Links[i] != nil {
if err := m.Links[i].ContextValidate(ctx, formats); err != nil {
if ve, ok := err.(*errors.Validation); ok {
return ve.ValidateName("_links" + "." + strconv.Itoa(i))
} else if ce, ok := err.(*errors.CompositeError); ok {
return ce.ValidateName("_links" + "." + strconv.Itoa(i))
}
return err
}
}
}
return nil
}
func (m *CreatePaymetricTokenResponse) contextValidateProcessor(ctx context.Context, formats strfmt.Registry) error {
if m.Processor != nil {
if err := m.Processor.ContextValidate(ctx, formats); err != nil {
if ve, ok := err.(*errors.Validation); ok {
return ve.ValidateName("_processor")
} else if ce, ok := err.(*errors.CompositeError); ok {
return ce.ValidateName("_processor")
}
return err
}
}
return nil
}
func (m *CreatePaymetricTokenResponse) contextValidateWarnings(ctx context.Context, formats strfmt.Registry) error {
for i := 0; i < len(m.Warnings); i++ {
if m.Warnings[i] != nil {
if err := m.Warnings[i].ContextValidate(ctx, formats); err != nil {
if ve, ok := err.(*errors.Validation); ok {
return ve.ValidateName("_warnings" + "." + strconv.Itoa(i))
} else if ce, ok := err.(*errors.CompositeError); ok {
return ce.ValidateName("_warnings" + "." + strconv.Itoa(i))
}
return err
}
}
}
return nil
}
// MarshalBinary interface implementation
func (m *CreatePaymetricTokenResponse) MarshalBinary() ([]byte, error) {
if m == nil {
return nil, nil
}
return swag.WriteJSON(m)
}
// UnmarshalBinary interface implementation
func (m *CreatePaymetricTokenResponse) UnmarshalBinary(b []byte) error {
var res CreatePaymetricTokenResponse
if err := swag.ReadJSON(b, &res); err != nil {
return err
}
*m = res
return nil
} | // The card bin value. This value is only provided for a CreatePaymetricTokenRequest NOT for a CreatePaymetricTokenWithTransIdRequest.
BinValue string `json:"binValue,omitempty"`
// The card holder name on the card. This value is only provided for a CreatePaymetricTokenRequest NOT for a CreatePaymetricTokenWithTransIdRequest. If the card is keyed for a CreatePaymetricTokenRequest this value will be empty. |
_app.tsx | import React, { FC } from 'react';
import { AppProps } from 'next/app';
import Head from 'next/head';
import { RecoilRoot } from 'recoil';
import { useTranslation } from 'react-i18next';
import { ThemeProvider } from 'styled-components';
import WithAppContainers from 'containers';
import theme from 'styles/theme';
import Layout from 'sections/shared/Layout';
import AppLayout from 'sections/shared/Layout/AppLayout';
import { MediaContextProvider } from 'styles/media';
import { QueryClient, QueryClientProvider } from 'react-query';
import { ReactQueryDevtools } from 'react-query/devtools';
import { DEFAULT_REQUEST_REFRESH_INTERVAL } from 'constants/defaults';
import { SynthetixQueryContextProvider, createQueryContext } from '@synthetixio/queries';
import SystemStatus from 'sections/shared/SystemStatus';
import 'styles/main.css';
import '@reach/dialog/styles.css';
import 'slick-carousel/slick/slick.css';
import 'slick-carousel/slick/slick-theme.css';
import 'tippy.js/dist/tippy.css';
import '../i18n';
import Connector from 'containers/Connector';
const queryClient = new QueryClient({
defaultOptions: {
queries: {
refetchInterval: DEFAULT_REQUEST_REFRESH_INTERVAL,
},
},
});
const InnerApp: FC<AppProps> = ({ Component, pageProps }) => {
const { provider, signer, network } = Connector.useContainer();
return (
<>
<SynthetixQueryContextProvider
value={
provider && network
? createQueryContext({
provider: provider,
signer: signer || undefined,
networkId: network!.id,
})
: createQueryContext({
networkId: null,
})
}
>
<Layout>
<SystemStatus>
<AppLayout>
<Component {...pageProps} />
</AppLayout>
</SystemStatus>
</Layout>
<ReactQueryDevtools />
</SynthetixQueryContextProvider>
</>
);
};
const App: FC<AppProps> = (props) => {
const { t } = useTranslation();
return (
<>
<Head>
<meta charSet="utf-8" />
<meta name="viewport" content="width=device-width, initial-scale=1" />
<meta name="description" content={t('meta.description')} />
{/* open graph */}
<meta property="og:url" content="https://staking.synthetix.io/" />
<meta property="og:type" content="website" />
<meta property="og:title" content={t('meta.og.title')} />
<meta property="og:description" content={t('meta.description')} />
<meta property="og:image" content="/images/staking-facebook.jpg" />
<meta property="og:image:alt" content={t('meta.og.title')} />
<meta property="og:site_name" content={t('meta.og.site-name')} />
{/* twitter */}
<meta name="twitter:card" content="summary_large_image" />
<meta name="twitter:site" content="@synthetix_io" />
<meta name="twitter:creator" content="@synthetix_io" />
<meta name="twitter:image" content="/images/staking-twitter.jpg" />
<meta name="twitter:url" content="https://staking.synthetix.io" />
<link rel="icon" href="/images/favicon.ico" /> | <script
dangerouslySetInnerHTML={{
__html: `
var _paq = window._paq = window._paq || [];
/* tracker methods like "setCustomDimension" should be called before "trackPageView" */
_paq.push(['trackPageView']);
_paq.push(['enableLinkTracking']);
(function() {
var u="https://analytics.synthetix.io/";
_paq.push(['setTrackerUrl', u+'matomo.php']);
_paq.push(['setSiteId', '3']);
var d=document, g=d.createElement('script'), s=d.getElementsByTagName('script')[0];
g.async=true; g.src=u+'matomo.js'; s.parentNode.insertBefore(g,s);
})();
`,
}}
/>
</Head>
<ThemeProvider theme={theme}>
<RecoilRoot>
<QueryClientProvider client={queryClient} contextSharing={true}>
<WithAppContainers>
<MediaContextProvider>
<InnerApp {...props} />
</MediaContextProvider>
</WithAppContainers>
</QueryClientProvider>
</RecoilRoot>
</ThemeProvider>
</>
);
};
export default App; |
{/* matomo */} |
ddl.rs | // Copyright 2018 sqlparser-rs contributors. All rights reserved.
// Copyright Materialize, Inc. and contributors. All rights reserved.
//
// This file is derived from the sqlparser-rs project, available at
// https://github.com/andygrove/sqlparser-rs. It was incorporated
// directly into Materialize on December 21, 2019.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License in the LICENSE file at the
// root of this repository, or online at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! AST types specific to CREATE/ALTER variants of [crate::ast::Statement]
//! (commonly referred to as Data Definition Language, or DDL)
use std::fmt;
use std::path::PathBuf;
use enum_kinds::EnumKind;
use crate::ast::display::{self, AstDisplay, AstFormatter};
use crate::ast::{AstInfo, DataType, Expr, Ident, SqlOption, UnresolvedObjectName, WithOption};
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum | {
File(PathBuf),
Inline(String),
}
impl AstDisplay for Schema {
fn fmt<W: fmt::Write>(&self, f: &mut AstFormatter<W>) {
match self {
Self::File(path) => {
f.write_str("SCHEMA FILE '");
f.write_node(&display::escape_single_quote_string(
&path.display().to_string(),
));
f.write_str("'");
}
Self::Inline(inner) => {
f.write_str("SCHEMA '");
f.write_node(&display::escape_single_quote_string(inner));
f.write_str("'");
}
}
}
}
impl_display!(Schema);
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum AvroSchema<T: AstInfo> {
Csr {
csr_connector: CsrConnectorAvro<T>,
},
InlineSchema {
schema: Schema,
with_options: Vec<WithOption>,
},
}
impl<T: AstInfo> AstDisplay for AvroSchema<T> {
fn fmt<W: fmt::Write>(&self, f: &mut AstFormatter<W>) {
match self {
Self::Csr { csr_connector } => {
f.write_node(csr_connector);
}
Self::InlineSchema {
schema,
with_options,
} => {
f.write_str("USING ");
schema.fmt(f);
if !with_options.is_empty() {
f.write_str(" WITH (");
f.write_node(&display::comma_separated(with_options));
f.write_str(")");
}
}
}
}
}
impl_display_t!(AvroSchema);
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum ProtobufSchema<T: AstInfo> {
Csr {
csr_connector: CsrConnectorProto<T>,
},
InlineSchema {
message_name: String,
schema: Schema,
},
}
impl<T: AstInfo> AstDisplay for ProtobufSchema<T> {
fn fmt<W: fmt::Write>(&self, f: &mut AstFormatter<W>) {
match self {
Self::Csr { csr_connector } => {
f.write_node(csr_connector);
}
Self::InlineSchema {
message_name,
schema,
} => {
f.write_str("MESSAGE '");
f.write_node(&display::escape_single_quote_string(message_name));
f.write_str("' USING ");
f.write_str(schema);
}
}
}
}
impl_display_t!(ProtobufSchema);
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct CsrConnectorAvro<T: AstInfo> {
pub url: String,
pub seed: Option<CsrSeed>,
pub with_options: Vec<SqlOption<T>>,
}
impl<T: AstInfo> AstDisplay for CsrConnectorAvro<T> {
fn fmt<W: fmt::Write>(&self, f: &mut AstFormatter<W>) {
f.write_str("USING CONFLUENT SCHEMA REGISTRY '");
f.write_node(&display::escape_single_quote_string(&self.url));
f.write_str("'");
if let Some(seed) = &self.seed {
f.write_str(" ");
f.write_node(seed);
}
if !self.with_options.is_empty() {
f.write_str(" WITH (");
f.write_node(&display::comma_separated(&self.with_options));
f.write_str(")");
}
}
}
impl_display_t!(CsrConnectorAvro);
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct CsrConnectorProto<T: AstInfo> {
pub url: String,
pub seed: Option<CsrSeedCompiledOrLegacy>,
pub with_options: Vec<SqlOption<T>>,
}
impl<T: AstInfo> AstDisplay for CsrConnectorProto<T> {
fn fmt<W: fmt::Write>(&self, f: &mut AstFormatter<W>) {
f.write_str("USING CONFLUENT SCHEMA REGISTRY '");
f.write_node(&display::escape_single_quote_string(&self.url));
f.write_str("'");
if let Some(seed) = &self.seed {
f.write_str(" ");
f.write_node(seed);
}
if !self.with_options.is_empty() {
f.write_str(" WITH (");
f.write_node(&display::comma_separated(&self.with_options));
f.write_str(")");
}
}
}
impl_display_t!(CsrConnectorProto);
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct CsrSeed {
pub key_schema: Option<String>,
pub value_schema: String,
}
impl AstDisplay for CsrSeed {
fn fmt<W: fmt::Write>(&self, f: &mut AstFormatter<W>) {
f.write_str("SEED");
if let Some(key_schema) = &self.key_schema {
f.write_str(" KEY SCHEMA '");
f.write_node(&display::escape_single_quote_string(key_schema));
f.write_str("'");
}
f.write_str(" VALUE SCHEMA '");
f.write_node(&display::escape_single_quote_string(&self.value_schema));
f.write_str("'");
}
}
impl_display!(CsrSeed);
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum CsrSeedCompiledOrLegacy {
Compiled(CsrSeedCompiled),
// Starting with version 0.9.13, Legacy should only be found when reading
// from the catalog and should be transformed during migration.
Legacy(CsrSeed),
}
impl AstDisplay for CsrSeedCompiledOrLegacy {
fn fmt<W: fmt::Write>(&self, f: &mut AstFormatter<W>) {
match self {
CsrSeedCompiledOrLegacy::Compiled(c) => f.write_node(c),
CsrSeedCompiledOrLegacy::Legacy(l) => f.write_node(l),
}
}
}
impl_display!(CsrSeedCompiledOrLegacy);
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct CsrSeedCompiled {
pub key: Option<CsrSeedCompiledEncoding>,
pub value: CsrSeedCompiledEncoding,
}
impl AstDisplay for CsrSeedCompiled {
fn fmt<W: fmt::Write>(&self, f: &mut AstFormatter<W>) {
f.write_str("SEED COMPILED");
if let Some(key) = &self.key {
f.write_str(" KEY ");
f.write_node(key);
}
f.write_str(" VALUE ");
f.write_node(&self.value);
}
}
impl_display!(CsrSeedCompiled);
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct CsrSeedCompiledEncoding {
// Hex encoded string.
pub schema: String,
pub message_name: String,
}
impl AstDisplay for CsrSeedCompiledEncoding {
fn fmt<W: fmt::Write>(&self, f: &mut AstFormatter<W>) {
f.write_str(" SCHEMA '");
f.write_str(&display::escape_single_quote_string(&self.schema));
f.write_str("' MESSAGE '");
f.write_str(&self.message_name);
f.write_str("'");
}
}
impl_display!(CsrSeedCompiledEncoding);
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum CreateSourceFormat<T: AstInfo> {
None,
/// `CREATE SOURCE .. FORMAT`
Bare(Format<T>),
/// `CREATE SOURCE .. KEY FORMAT .. VALUE FORMAT`
///
/// Also the destination for the legacy `ENVELOPE UPSERT FORMAT ...`
KeyValue {
key: Format<T>,
value: Format<T>,
},
}
impl<T: AstInfo> AstDisplay for CreateSourceFormat<T> {
fn fmt<W: fmt::Write>(&self, f: &mut AstFormatter<W>) {
match self {
CreateSourceFormat::None => {}
CreateSourceFormat::Bare(format) => {
f.write_str(" FORMAT ");
f.write_node(format)
}
CreateSourceFormat::KeyValue { key, value } => {
f.write_str(" KEY FORMAT ");
f.write_node(key);
f.write_str(" VALUE FORMAT ");
f.write_node(value);
}
}
}
}
impl_display_t!(CreateSourceFormat);
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum Format<T: AstInfo> {
Bytes,
Avro(AvroSchema<T>),
Protobuf(ProtobufSchema<T>),
Regex(String),
Csv {
columns: CsvColumns,
delimiter: char,
},
Json,
Text,
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum CsvColumns {
/// `WITH count COLUMNS`
Count(usize),
/// `WITH HEADER (ident, ...)?`: `names` is empty if there are no names specified
Header { names: Vec<Ident> },
}
impl AstDisplay for CsvColumns {
fn fmt<W: fmt::Write>(&self, f: &mut AstFormatter<W>) {
match self {
CsvColumns::Count(n) => {
f.write_str(n);
f.write_str(" COLUMNS")
}
CsvColumns::Header { names } => {
f.write_str("HEADER");
if !names.is_empty() {
f.write_str(" (");
f.write_node(&display::comma_separated(&names));
f.write_str(")");
}
}
}
}
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum SourceIncludeMetadataType {
Key,
Timestamp,
Partition,
Topic,
Offset,
}
impl AstDisplay for SourceIncludeMetadataType {
fn fmt<W: fmt::Write>(&self, f: &mut AstFormatter<W>) {
match self {
SourceIncludeMetadataType::Key => f.write_str("KEY"),
SourceIncludeMetadataType::Timestamp => f.write_str("TIMESTAMP"),
SourceIncludeMetadataType::Partition => f.write_str("PARTITION"),
SourceIncludeMetadataType::Topic => f.write_str("TOPIC"),
SourceIncludeMetadataType::Offset => f.write_str("OFFSET"),
}
}
}
impl_display!(SourceIncludeMetadataType);
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct SourceIncludeMetadata {
pub ty: SourceIncludeMetadataType,
pub alias: Option<Ident>,
}
impl AstDisplay for SourceIncludeMetadata {
fn fmt<W: fmt::Write>(&self, f: &mut AstFormatter<W>) {
f.write_node(&self.ty);
if let Some(alias) = &self.alias {
f.write_str(" AS ");
f.write_node(alias);
}
}
}
impl_display!(SourceIncludeMetadata);
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum Envelope {
None,
Debezium(DbzMode),
Upsert,
CdcV2,
}
impl AstDisplay for Envelope {
fn fmt<W: fmt::Write>(&self, f: &mut AstFormatter<W>) {
match self {
Self::None => {
// this is unreachable as long as the default is None, but include it in case we ever change that
f.write_str("NONE");
}
Self::Debezium(mode) => {
f.write_str("DEBEZIUM");
f.write_node(mode);
}
Self::Upsert => {
f.write_str("UPSERT");
}
Self::CdcV2 => {
f.write_str("MATERIALIZE");
}
}
}
}
impl_display!(Envelope);
impl<T: AstInfo> AstDisplay for Format<T> {
fn fmt<W: fmt::Write>(&self, f: &mut AstFormatter<W>) {
match self {
Self::Bytes => f.write_str("BYTES"),
Self::Avro(inner) => {
f.write_str("AVRO ");
f.write_node(inner);
}
Self::Protobuf(inner) => {
f.write_str("PROTOBUF ");
f.write_node(inner);
}
Self::Regex(regex) => {
f.write_str("REGEX '");
f.write_node(&display::escape_single_quote_string(regex));
f.write_str("'");
}
Self::Csv { columns, delimiter } => {
f.write_str("CSV WITH ");
f.write_node(columns);
if *delimiter != ',' {
f.write_str(" DELIMITED BY '");
f.write_node(&display::escape_single_quote_string(&delimiter.to_string()));
f.write_str("'");
}
}
Self::Json => f.write_str("JSON"),
Self::Text => f.write_str("TEXT"),
}
}
}
impl_display_t!(Format);
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum Compression {
Gzip,
None,
}
impl AstDisplay for Compression {
fn fmt<W: fmt::Write>(&self, f: &mut AstFormatter<W>) {
match self {
Self::Gzip => f.write_str("GZIP"),
Self::None => f.write_str("NONE"),
}
}
}
impl_display!(Compression);
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum DbzMode {
/// `ENVELOPE DEBEZIUM` with no suffix
Plain,
/// `ENVELOPE DEBEZIUM UPSERT`
Upsert,
}
impl AstDisplay for DbzMode {
fn fmt<W: fmt::Write>(&self, f: &mut AstFormatter<W>) {
match self {
Self::Plain => f.write_str(""),
Self::Upsert => f.write_str(" UPSERT"),
}
}
}
impl_display!(DbzMode);
#[derive(Debug, Clone, PartialEq, Eq, Hash, EnumKind)]
#[enum_kind(ConnectorType)]
pub enum CreateSourceConnector {
File {
path: String,
compression: Compression,
},
Kafka {
broker: String,
topic: String,
key: Option<Vec<Ident>>,
},
Kinesis {
arn: String,
},
/// Avro Object Container File
AvroOcf {
path: String,
},
S3 {
/// The arguments to `DISCOVER OBJECTS USING`: `BUCKET SCAN` or `SQS NOTIFICATIONS`
key_sources: Vec<S3KeySource>,
/// The argument to the MATCHING clause: `MATCHING 'a/**/*.json'`
pattern: Option<String>,
compression: Compression,
},
Postgres {
/// The postgres connection string
conn: String,
/// The name of the publication to sync
publication: String,
/// The replication slot name that will be created upstream
slot: Option<String>,
},
PubNub {
/// PubNub's subscribe key
subscribe_key: String,
/// The PubNub channel to subscribe to
channel: String,
},
}
impl AstDisplay for CreateSourceConnector {
fn fmt<W: fmt::Write>(&self, f: &mut AstFormatter<W>) {
match self {
CreateSourceConnector::File { path, compression } => {
f.write_str("FILE '");
f.write_node(&display::escape_single_quote_string(path));
f.write_str("'");
f.write_str(" COMPRESSION ");
f.write_node(compression);
}
CreateSourceConnector::Kafka { broker, topic, key } => {
f.write_str("KAFKA BROKER '");
f.write_node(&display::escape_single_quote_string(broker));
f.write_str("'");
f.write_str(" TOPIC '");
f.write_node(&display::escape_single_quote_string(topic));
f.write_str("'");
if let Some(key) = key.as_ref() {
f.write_str(" KEY (");
f.write_node(&display::comma_separated(&key));
f.write_str(")");
}
}
CreateSourceConnector::Kinesis { arn } => {
f.write_str("KINESIS ARN '");
f.write_node(&display::escape_single_quote_string(arn));
f.write_str("'");
}
CreateSourceConnector::AvroOcf { path } => {
f.write_str("AVRO OCF '");
f.write_node(&display::escape_single_quote_string(path));
f.write_str("'");
}
CreateSourceConnector::S3 {
key_sources,
pattern,
compression,
} => {
f.write_str("S3 DISCOVER OBJECTS");
if let Some(pattern) = pattern {
f.write_str(" MATCHING '");
f.write_str(&display::escape_single_quote_string(pattern));
f.write_str("'");
}
f.write_str(" USING");
f.write_node(&display::comma_separated(key_sources));
f.write_str(" COMPRESSION ");
f.write_node(compression);
}
CreateSourceConnector::Postgres {
conn,
publication,
slot,
} => {
f.write_str("POSTGRES CONNECTION '");
f.write_str(&display::escape_single_quote_string(conn));
f.write_str("' PUBLICATION '");
f.write_str(&display::escape_single_quote_string(publication));
if let Some(slot) = slot {
f.write_str("' SLOT '");
f.write_str(&display::escape_single_quote_string(slot));
}
f.write_str("'");
}
CreateSourceConnector::PubNub {
subscribe_key,
channel,
} => {
f.write_str("PUBNUB SUBSCRIBE KEY '");
f.write_str(&display::escape_single_quote_string(subscribe_key));
f.write_str("' CHANNEL '");
f.write_str(&display::escape_single_quote_string(channel));
f.write_str("'");
}
}
}
}
impl_display!(CreateSourceConnector);
impl<T: AstInfo> From<&CreateSinkConnector<T>> for ConnectorType {
fn from(connector: &CreateSinkConnector<T>) -> ConnectorType {
match connector {
CreateSinkConnector::Kafka { .. } => ConnectorType::Kafka,
CreateSinkConnector::AvroOcf { .. } => ConnectorType::AvroOcf,
}
}
}
#[derive(Debug, Clone, PartialEq, Eq, Hash, EnumKind)]
#[enum_kind(CreateSinkConnectorKind)]
pub enum CreateSinkConnector<T: AstInfo> {
Kafka {
broker: String,
topic: String,
key: Option<Vec<Ident>>,
consistency: Option<KafkaConsistency<T>>,
},
/// Avro Object Container File
AvroOcf { path: String },
}
impl<T: AstInfo> AstDisplay for CreateSinkConnector<T> {
fn fmt<W: fmt::Write>(&self, f: &mut AstFormatter<W>) {
match self {
CreateSinkConnector::Kafka {
broker,
topic,
key,
consistency,
} => {
f.write_str("KAFKA BROKER '");
f.write_node(&display::escape_single_quote_string(broker));
f.write_str("'");
f.write_str(" TOPIC '");
f.write_node(&display::escape_single_quote_string(topic));
f.write_str("'");
if let Some(key) = key.as_ref() {
f.write_str(" KEY (");
f.write_node(&display::comma_separated(&key));
f.write_str(")");
}
if let Some(consistency) = consistency.as_ref() {
f.write_node(consistency);
}
}
CreateSinkConnector::AvroOcf { path } => {
f.write_str("AVRO OCF '");
f.write_node(&display::escape_single_quote_string(path));
f.write_str("'");
}
}
}
}
impl_display_t!(CreateSinkConnector);
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct KafkaConsistency<T: AstInfo> {
pub topic: String,
pub topic_format: Option<Format<T>>,
}
impl<T: AstInfo> AstDisplay for KafkaConsistency<T> {
fn fmt<W: fmt::Write>(&self, f: &mut AstFormatter<W>) {
f.write_str(" CONSISTENCY (TOPIC '");
f.write_node(&display::escape_single_quote_string(&self.topic));
f.write_str("'");
if let Some(format) = self.topic_format.as_ref() {
f.write_str(" FORMAT ");
f.write_node(format);
}
f.write_str(")");
}
}
impl_display_t!(KafkaConsistency);
/// Information about upstream Postgres tables used for replication sources
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct PgTable<T: AstInfo> {
/// The name of the table to sync
pub name: UnresolvedObjectName,
/// The name for the table in Materialize
pub alias: T::ObjectName,
/// The expected column schema of the synced table
pub columns: Vec<ColumnDef<T>>,
}
impl<T: AstInfo> AstDisplay for PgTable<T> {
fn fmt<W: fmt::Write>(&self, f: &mut AstFormatter<W>) {
f.write_node(&self.name);
f.write_str(" AS ");
f.write_str(self.alias.to_ast_string());
if !self.columns.is_empty() {
f.write_str(" (");
f.write_node(&display::comma_separated(&self.columns));
f.write_str(")");
}
}
}
impl_display_t!(PgTable);
/// The key sources specified in the S3 source's `DISCOVER OBJECTS` clause.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum S3KeySource {
/// `SCAN BUCKET '<bucket>'`
Scan { bucket: String },
/// `SQS NOTIFICATIONS '<queue-name>'`
SqsNotifications { queue: String },
}
impl AstDisplay for S3KeySource {
fn fmt<W: fmt::Write>(&self, f: &mut AstFormatter<W>) {
match self {
S3KeySource::Scan { bucket } => {
f.write_str(" BUCKET SCAN '");
f.write_str(&display::escape_single_quote_string(bucket));
f.write_str("'");
}
S3KeySource::SqsNotifications { queue } => {
f.write_str(" SQS NOTIFICATIONS '");
f.write_str(&display::escape_single_quote_string(queue));
f.write_str("'");
}
}
}
}
impl_display!(S3KeySource);
/// A table-level constraint, specified in a `CREATE TABLE` or an
/// `ALTER TABLE ADD <constraint>` statement.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum TableConstraint<T: AstInfo> {
/// `[ CONSTRAINT <name> ] { PRIMARY KEY | UNIQUE } (<columns>)`
Unique {
name: Option<Ident>,
columns: Vec<Ident>,
/// Whether this is a `PRIMARY KEY` or just a `UNIQUE` constraint
is_primary: bool,
},
/// A referential integrity constraint (`[ CONSTRAINT <name> ] FOREIGN KEY (<columns>)
/// REFERENCES <foreign_table> (<referred_columns>)`)
ForeignKey {
name: Option<Ident>,
columns: Vec<Ident>,
foreign_table: UnresolvedObjectName,
referred_columns: Vec<Ident>,
},
/// `[ CONSTRAINT <name> ] CHECK (<expr>)`
Check {
name: Option<Ident>,
expr: Box<Expr<T>>,
},
}
impl<T: AstInfo> AstDisplay for TableConstraint<T> {
fn fmt<W: fmt::Write>(&self, f: &mut AstFormatter<W>) {
match self {
TableConstraint::Unique {
name,
columns,
is_primary,
} => {
f.write_node(&display_constraint_name(name));
if *is_primary {
f.write_str("PRIMARY KEY ");
} else {
f.write_str("UNIQUE ");
}
f.write_str("(");
f.write_node(&display::comma_separated(columns));
f.write_str(")");
}
TableConstraint::ForeignKey {
name,
columns,
foreign_table,
referred_columns,
} => {
f.write_node(&display_constraint_name(name));
f.write_str("FOREIGN KEY (");
f.write_node(&display::comma_separated(columns));
f.write_str(") REFERENCES ");
f.write_node(foreign_table);
f.write_str("(");
f.write_node(&display::comma_separated(referred_columns));
f.write_str(")");
}
TableConstraint::Check { name, expr } => {
f.write_node(&display_constraint_name(name));
f.write_str("CHECK (");
f.write_node(&expr);
f.write_str(")");
}
}
}
}
impl_display_t!(TableConstraint);
/// A key constraint, specified in a `CREATE SOURCE`.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum KeyConstraint {
// PRIMARY KEY (<columns>) NOT ENFORCED
PrimaryKeyNotEnforced { columns: Vec<Ident> },
}
impl AstDisplay for KeyConstraint {
fn fmt<W: fmt::Write>(&self, f: &mut AstFormatter<W>) {
match self {
KeyConstraint::PrimaryKeyNotEnforced { columns } => {
f.write_str("PRIMARY KEY ");
f.write_str("(");
f.write_node(&display::comma_separated(columns));
f.write_str(") ");
f.write_str("NOT ENFORCED");
}
}
}
}
impl_display!(KeyConstraint);
/// SQL column definition
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct ColumnDef<T: AstInfo> {
pub name: Ident,
pub data_type: DataType<T>,
pub collation: Option<UnresolvedObjectName>,
pub options: Vec<ColumnOptionDef<T>>,
}
impl<T: AstInfo> AstDisplay for ColumnDef<T> {
fn fmt<W: fmt::Write>(&self, f: &mut AstFormatter<W>) {
f.write_node(&self.name);
f.write_str(" ");
f.write_node(&self.data_type);
for option in &self.options {
f.write_str(" ");
f.write_node(option);
}
}
}
impl_display_t!(ColumnDef);
/// An optionally-named `ColumnOption`: `[ CONSTRAINT <name> ] <column-option>`.
///
/// Note that implementations are substantially more permissive than the ANSI
/// specification on what order column options can be presented in, and whether
/// they are allowed to be named. The specification distinguishes between
/// constraints (NOT NULL, UNIQUE, PRIMARY KEY, and CHECK), which can be named
/// and can appear in any order, and other options (DEFAULT, GENERATED), which
/// cannot be named and must appear in a fixed order. PostgreSQL, however,
/// allows preceding any option with `CONSTRAINT <name>`, even those that are
/// not really constraints, like NULL and DEFAULT. MSSQL is less permissive,
/// allowing DEFAULT, UNIQUE, PRIMARY KEY and CHECK to be named, but not NULL or
/// NOT NULL constraints (the last of which is in violation of the spec).
///
/// For maximum flexibility, we don't distinguish between constraint and
/// non-constraint options, lumping them all together under the umbrella of
/// "column options," and we allow any column option to be named.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct ColumnOptionDef<T: AstInfo> {
pub name: Option<Ident>,
pub option: ColumnOption<T>,
}
impl<T: AstInfo> AstDisplay for ColumnOptionDef<T> {
fn fmt<W: fmt::Write>(&self, f: &mut AstFormatter<W>) {
f.write_node(&display_constraint_name(&self.name));
f.write_node(&self.option);
}
}
impl_display_t!(ColumnOptionDef);
/// `ColumnOption`s are modifiers that follow a column definition in a `CREATE
/// TABLE` statement.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum ColumnOption<T: AstInfo> {
/// `NULL`
Null,
/// `NOT NULL`
NotNull,
/// `DEFAULT <restricted-expr>`
Default(Expr<T>),
/// `{ PRIMARY KEY | UNIQUE }`
Unique {
is_primary: bool,
},
/// A referential integrity constraint (`[FOREIGN KEY REFERENCES
/// <foreign_table> (<referred_columns>)`).
ForeignKey {
foreign_table: UnresolvedObjectName,
referred_columns: Vec<Ident>,
},
// `CHECK (<expr>)`
Check(Expr<T>),
}
impl<T: AstInfo> AstDisplay for ColumnOption<T> {
fn fmt<W: fmt::Write>(&self, f: &mut AstFormatter<W>) {
use ColumnOption::*;
match self {
Null => f.write_str("NULL"),
NotNull => f.write_str("NOT NULL"),
Default(expr) => {
f.write_str("DEFAULT ");
f.write_node(expr);
}
Unique { is_primary } => {
if *is_primary {
f.write_str("PRIMARY KEY");
} else {
f.write_str("UNIQUE");
}
}
ForeignKey {
foreign_table,
referred_columns,
} => {
f.write_str("REFERENCES ");
f.write_node(foreign_table);
f.write_str(" (");
f.write_node(&display::comma_separated(referred_columns));
f.write_str(")");
}
Check(expr) => {
f.write_str("CHECK (");
f.write_node(expr);
f.write_str(")");
}
}
}
}
impl_display_t!(ColumnOption);
fn display_constraint_name<'a>(name: &'a Option<Ident>) -> impl AstDisplay + 'a {
struct ConstraintName<'a>(&'a Option<Ident>);
impl<'a> AstDisplay for ConstraintName<'a> {
fn fmt<W>(&self, f: &mut AstFormatter<W>)
where
W: fmt::Write,
{
if let Some(name) = self.0 {
f.write_str("CONSTRAINT ");
f.write_node(name);
f.write_str(" ");
}
}
}
ConstraintName(name)
}
| Schema |
_dynamics_deprecated.py | import warnings
import numpy as np
from .utils_moments import moments
from .velocity import velocity, ss_estimation
from .utils import (
get_mapper,
get_valid_bools,
get_data_for_kin_params_estimation,
get_U_S_for_velocity_estimation,
)
from .utils import set_velocity, set_param_ss, set_param_kinetic
from .moments import moment_model
# incorporate the model selection code soon
def | (
adata,
tkey=None,
filter_gene_mode="final",
mode="moment",
use_smoothed=True,
group=None,
protein_names=None,
experiment_type=None,
assumption_mRNA=None,
assumption_protein="ss",
NTR_vel=True,
concat_data=False,
log_unnormalized=True,
one_shot_method="combined",
):
"""Inclusive model of expression dynamics considers splicing, metabolic labeling and protein translation. It supports
learning high-dimensional velocity vector samples for droplet based (10x, inDrop, drop-seq, etc), scSLAM-seq, NASC-seq
sci-fate, scNT-seq or cite-seq datasets.
Parameters
----------
adata: :class:`~anndata.AnnData`
AnnData object.
tkey: `str` or None (default: None)
The column key for the time label of cells in .obs. Used for either "steady_state" or non-"steady_state" mode or `moment`
mode with labeled data.
filter_gene_mode: `str` (default: `final`)
The string for indicating which mode (one of, {'final', 'basic', 'no'}) of gene filter will be used.
    mode: `str` (default: `moment`)
        String indicating which estimation mode will be used. This parameter should be used in conjunction with assumption_mRNA.
* Available options when the `assumption_mRNA` is 'ss' include:
        (1) 'linear_regression': The canonical method from the seminal RNA velocity paper based on deterministic ordinary
differential equations;
(2) 'gmm': The new generalized methods of moments from us that is based on master equations, similar to the
"moment" mode in the excellent scvelo package;
(3) 'negbin': The new method from us that models steady state RNA expression as a negative binomial distribution,
        also built upon master equations.
Note that all those methods require using extreme data points (except negbin) for the estimation. Extreme data points
are defined as the data from cells where the expression of unspliced / spliced or new / total RNA, etc. are in the
        top or bottom 5%, for example. `linear_regression` only considers the mean of RNA species (based on the deterministic
        ordinary differential equations) while moment based methods (`gmm`, `negbin`) consider both the first moment (mean) and
second moment (uncentered variance) of RNA species (based on the stochastic master equations).
* Available options when the `assumption_mRNA` is 'kinetic' include:
(1) 'deterministic': The method based on deterministic ordinary differential equations;
(2) 'stochastic' or `moment`: The new method from us that is based on master equations;
        Note that the `kinetic` model implicitly assumes the `experiment_type` is not `conventional`. Thus `deterministic`,
`stochastic` (equivalent to `moment`) models are only possible for the labeling experiments.
A "model_selection" mode will be supported soon in which alpha, beta and gamma will be modeled as a function of time.
use_smoothed: `bool` (default: `True`)
Whether to use the smoothed data when calculating velocity for each gene. `use_smoothed` is only relevant when
mode is `linear_regression` (and experiment_type and assumption_mRNA correspond to `conventional` and `ss` implicitly).
group: `str` or None (default: `None`)
The column key/name that identifies the grouping information (for example, clusters that correspond to different cell types)
            of cells. This will be used to estimate group-specific (i.e. cell-type-specific) kinetic parameters.
protein_names: `List`
            A list of gene names that correspond to the rows of the measured proteins in the `X_protein` of the `obsm` attribute.
The names have to be included in the adata.var.index.
experiment_type: `str`
single cell RNA-seq experiment type. Available options are:
(1) 'conventional': conventional single-cell RNA-seq experiment;
(2) 'deg': chase/degradation experiment;
(3) 'kin': pulse/synthesis/kinetics experiment;
(4) 'one-shot': one-shot kinetic experiment.
assumption_mRNA: `str`
Parameter estimation assumption for mRNA. Available options are:
(1) 'ss': pseudo steady state;
(2) 'kinetic' or None: degradation and kinetic data without steady state assumption.
            If no labeling data exists, assumption_mRNA will automatically be set to 'ss'. For a one-shot experiment, assumption_mRNA
            is set to None. However, we will use the steady state assumption to estimate parameters alpha and gamma either by a deterministic
            linear regression or the first-order decay approach in line with the sci-fate paper.
assumption_protein: `str`
Parameter estimation assumption for protein. Available options are:
(1) 'ss': pseudo steady state;
NTR_vel: `bool` (default: `True`)
Whether to use NTR (new/total ratio) velocity for labeling datasets.
concat_data: `bool` (default: `False`)
            Whether to concatenate data before estimation. If your data is a list of matrices for each time point, this needs to be set to True.
log_unnormalized: `bool` (default: `True`)
Whether to log transform the unnormalized data.
Returns
-------
adata: :class:`~anndata.AnnData`
            An updated AnnData object with estimated kinetic parameters and inferred velocity included.
"""
if (
"use_for_dynamics" not in adata.var.columns
and "pass_basic_filter" not in adata.var.columns
):
filter_gene_mode = "no"
valid_ind = get_valid_bools(adata, filter_gene_mode)
if mode == "moment" or (
use_smoothed and len([i for i in adata.layers.keys() if i.startswith("M_")]) < 2
):
if experiment_type == "kin":
use_smoothed = False
else:
moments(adata)
valid_adata = adata[:, valid_ind].copy()
    if group is not None and group in adata.obs.columns:
_group = adata.obs[group].unique()
else:
_group = ["_all_cells"]
for cur_grp in _group:
if cur_grp == "_all_cells":
kin_param_pre = ""
cur_cells_bools = np.ones(valid_adata.shape[0], dtype=bool)
subset_adata = valid_adata[cur_cells_bools]
else:
kin_param_pre = group + "_" + cur_grp + "_"
cur_cells_bools = (valid_adata.obs[group] == cur_grp).values
subset_adata = valid_adata[cur_cells_bools]
(
U,
Ul,
S,
Sl,
P,
US,
S2,
t,
normalized,
has_splicing,
has_labeling,
has_protein,
ind_for_proteins,
assumption_mRNA,
exp_type,
) = get_data_for_kin_params_estimation(
subset_adata,
mode,
use_smoothed,
tkey,
protein_names,
experiment_type,
log_unnormalized,
NTR_vel,
)
if exp_type is not None:
if experiment_type != exp_type:
warnings.warn(
"dynamo detects the experiment type of your data as {}, but your input experiment_type "
"is {}".format(exp_type, experiment_type)
)
experiment_type = exp_type
assumption_mRNA = (
"ss" if exp_type == "conventional" and mode == "deterministic" else None
)
NTR_vel = False
if mode == "moment" and experiment_type not in ["conventional", "kin"]:
"""
# temporially convert to deterministic mode as moment mode for one-shot,
degradation and other types of labeling experiment is ongoing."""
mode = "deterministic"
if mode == "deterministic" or (
experiment_type != "kin" and mode == "moment"
):
est = ss_estimation(
U=U,
Ul=Ul,
S=S,
Sl=Sl,
P=P,
US=US,
S2=S2,
t=t,
ind_for_proteins=ind_for_proteins,
experiment_type=experiment_type,
assumption_mRNA=assumption_mRNA,
assumption_protein=assumption_protein,
concat_data=concat_data,
)
with warnings.catch_warnings():
warnings.simplefilter("ignore")
if experiment_type in ["one-shot", "one_shot"]:
est.fit(one_shot_method=one_shot_method)
else:
est.fit()
alpha, beta, gamma, eta, delta = est.parameters.values()
U, S = get_U_S_for_velocity_estimation(
subset_adata,
use_smoothed,
has_splicing,
has_labeling,
log_unnormalized,
NTR_vel,
)
vel = velocity(estimation=est)
vel_U = vel.vel_u(U)
vel_S = vel.vel_s(U, S)
vel_P = vel.vel_p(S, P)
adata = set_velocity(
adata,
vel_U,
vel_S,
vel_P,
_group,
cur_grp,
cur_cells_bools,
valid_ind,
ind_for_proteins,
)
adata = set_param_ss(
adata,
est,
alpha,
beta,
gamma,
eta,
delta,
experiment_type,
_group,
cur_grp,
kin_param_pre,
valid_ind,
ind_for_proteins,
)
elif mode == "moment":
adata, Est, t_ind = moment_model(
adata, subset_adata, _group, cur_grp, log_unnormalized, tkey
)
t_ind += 1
params, costs = Est.fit()
a, b, alpha_a, alpha_i, beta, gamma = (
params[:, 0],
params[:, 1],
params[:, 2],
params[:, 3],
params[:, 4],
params[:, 5],
)
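            # fbar below is the weighted average of the active-state (x_a) and inactive-state (x_i)
            # values, with weights b / (a + b) and a / (a + b) given by the switching rates a and b
            # estimated above; it collapses alpha_a and alpha_i into a single effective alpha.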
def fbar(x_a, x_i, a, b):
return b / (a + b) * x_a + a / (a + b) * x_i
alpha = fbar(alpha_a, alpha_i, a, b)[:, None]
params = {"alpha": alpha, "beta": beta, "gamma": gamma, "t": t}
vel = velocity(**params)
U, S = get_U_S_for_velocity_estimation(
subset_adata,
use_smoothed,
has_splicing,
has_labeling,
log_unnormalized,
NTR_vel,
)
vel_U = vel.vel_u(U)
vel_S = vel.vel_s(U, S)
vel_P = vel.vel_p(S, P)
adata = set_velocity(
adata,
vel_U,
vel_S,
vel_P,
_group,
cur_grp,
cur_cells_bools,
valid_ind,
ind_for_proteins,
)
adata = set_param_kinetic(
adata,
alpha,
a,
b,
alpha_a,
alpha_i,
beta,
gamma,
kin_param_pre,
_group,
cur_grp,
valid_ind,
)
# add protein related parameters in the moment model below:
elif mode == "model_selection":
warnings.warn("Not implemented yet.")
    if group is not None and group in adata.obs.columns:
uns_key = group + "_dynamics"
else:
uns_key = "dynamics"
if has_splicing and has_labeling:
adata.layers['X_U'], adata.layers['X_S'] = adata.layers['X_uu'] + adata.layers['X_ul'], adata.layers['X_su'] + adata.layers['X_sl']
adata.uns[uns_key] = {
"t": t,
"group": group,
"asspt_mRNA": assumption_mRNA,
"experiment_type": experiment_type,
"normalized": normalized,
"mode": mode,
"has_splicing": has_splicing,
"has_labeling": has_labeling,
"has_protein": has_protein,
"use_smoothed": use_smoothed,
"NTR_vel": NTR_vel,
"log_unnormalized": log_unnormalized,
}
return adata
| _dynamics |
privacy.go | // Copyright 2019-present Facebook Inc. All rights reserved.
// This source code is licensed under the Apache 2.0 license found
// in the LICENSE file in the root directory of this source tree.
// Code generated by entc, DO NOT EDIT.
package privacy
import (
"context"
"fmt"
"github.com/storskegg/ent/entc/integration/privacy/ent"
"github.com/storskegg/ent/entql"
"github.com/storskegg/ent/privacy"
)
var (
// Allow may be returned by rules to indicate that the policy
// evaluation should terminate with an allow decision.
Allow = privacy.Allow
// Deny may be returned by rules to indicate that the policy
	// evaluation should terminate with a deny decision.
Deny = privacy.Deny
// Skip may be returned by rules to indicate that the policy
// evaluation should continue to the next rule.
Skip = privacy.Skip
)
// Allowf returns a formatted wrapped Allow decision.
func Allowf(format string, a ...interface{}) error {
return fmt.Errorf(format+": %w", append(a, Allow)...)
}
// Denyf returns a formatted wrapped Deny decision.
func | (format string, a ...interface{}) error {
return fmt.Errorf(format+": %w", append(a, Deny)...)
}
// Skipf returns a formatted wrapped Skip decision.
func Skipf(format string, a ...interface{}) error {
return fmt.Errorf(format+": %w", append(a, Skip)...)
}
// DecisionContext creates a new context from the given parent context with
// a policy decision attached to it.
func DecisionContext(parent context.Context, decision error) context.Context {
return privacy.DecisionContext(parent, decision)
}
// DecisionFromContext retrieves the policy decision from the context.
func DecisionFromContext(ctx context.Context) (error, bool) {
return privacy.DecisionFromContext(ctx)
}
type (
// QueryRule defines the interface deciding whether a
// query is allowed and optionally modify it.
QueryRule = privacy.QueryRule
// QueryPolicy combines multiple query rules into a single policy.
QueryPolicy = privacy.QueryPolicy
)
// QueryRuleFunc type is an adapter to allow the use of
// ordinary functions as query rules.
type QueryRuleFunc func(context.Context, ent.Query) error
// EvalQuery returns f(ctx, q).
func (f QueryRuleFunc) EvalQuery(ctx context.Context, q ent.Query) error {
return f(ctx, q)
}
type (
// MutationRule defines the interface which decides whether a
// mutation is allowed and optionally modifies it.
MutationRule = privacy.MutationRule
// MutationPolicy combines multiple mutation rules into a single policy.
MutationPolicy = privacy.MutationPolicy
)
// MutationRuleFunc type is an adapter which allows the use of
// ordinary functions as mutation rules.
type MutationRuleFunc func(context.Context, ent.Mutation) error
// EvalMutation returns f(ctx, m).
func (f MutationRuleFunc) EvalMutation(ctx context.Context, m ent.Mutation) error {
return f(ctx, m)
}
// Policy groups query and mutation policies.
type Policy struct {
Query QueryPolicy
Mutation MutationPolicy
}
// EvalQuery forwards evaluation to the query policy.
func (policy Policy) EvalQuery(ctx context.Context, q ent.Query) error {
return policy.Query.EvalQuery(ctx, q)
}
// EvalMutation forwards evaluation to the mutation policy.
func (policy Policy) EvalMutation(ctx context.Context, m ent.Mutation) error {
return policy.Mutation.EvalMutation(ctx, m)
}
// QueryMutationRule is an interface which groups query and mutation rules.
type QueryMutationRule interface {
QueryRule
MutationRule
}
// AlwaysAllowRule returns a rule that returns an allow decision.
func AlwaysAllowRule() QueryMutationRule {
return fixedDecision{Allow}
}
// AlwaysDenyRule returns a rule that returns a deny decision.
func AlwaysDenyRule() QueryMutationRule {
return fixedDecision{Deny}
}
type fixedDecision struct {
decision error
}
func (f fixedDecision) EvalQuery(context.Context, ent.Query) error {
return f.decision
}
func (f fixedDecision) EvalMutation(context.Context, ent.Mutation) error {
return f.decision
}
type contextDecision struct {
eval func(context.Context) error
}
// ContextQueryMutationRule creates a query/mutation rule from a context eval func.
func ContextQueryMutationRule(eval func(context.Context) error) QueryMutationRule {
return contextDecision{eval}
}
func (c contextDecision) EvalQuery(ctx context.Context, _ ent.Query) error {
return c.eval(ctx)
}
func (c contextDecision) EvalMutation(ctx context.Context, _ ent.Mutation) error {
return c.eval(ctx)
}
// OnMutationOperation evaluates the given rule only on a given mutation operation.
func OnMutationOperation(rule MutationRule, op ent.Op) MutationRule {
return MutationRuleFunc(func(ctx context.Context, m ent.Mutation) error {
if m.Op().Is(op) {
return rule.EvalMutation(ctx, m)
}
return Skip
})
}
// DenyMutationOperationRule returns a rule denying the specified mutation operation.
func DenyMutationOperationRule(op ent.Op) MutationRule {
rule := MutationRuleFunc(func(_ context.Context, m ent.Mutation) error {
return Denyf("ent/privacy: operation %s is not allowed", m.Op())
})
return OnMutationOperation(rule, op)
}
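// Hedged illustration (not part of the generated code): in an ent schema definition, the rules in
// this package are typically composed into a Policy, where `privacy` refers to this generated
// package; the Task schema and the chosen ops below are assumptions used only as an example.
//
//	func (Task) Policy() ent.Policy {
//		return privacy.Policy{
//			Mutation: privacy.MutationPolicy{
//				privacy.DenyMutationOperationRule(ent.OpDelete | ent.OpDeleteOne),
//				privacy.AlwaysAllowRule(),
//			},
//			Query: privacy.QueryPolicy{
//				privacy.AlwaysAllowRule(),
//			},
//		}
//	}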
// The TaskQueryRuleFunc type is an adapter to allow the use of ordinary
// functions as a query rule.
type TaskQueryRuleFunc func(context.Context, *ent.TaskQuery) error
// EvalQuery returns f(ctx, q).
func (f TaskQueryRuleFunc) EvalQuery(ctx context.Context, q ent.Query) error {
if q, ok := q.(*ent.TaskQuery); ok {
return f(ctx, q)
}
return Denyf("ent/privacy: unexpected query type %T, expect *ent.TaskQuery", q)
}
// The TaskMutationRuleFunc type is an adapter to allow the use of ordinary
// functions as a mutation rule.
type TaskMutationRuleFunc func(context.Context, *ent.TaskMutation) error
// EvalMutation calls f(ctx, m).
func (f TaskMutationRuleFunc) EvalMutation(ctx context.Context, m ent.Mutation) error {
if m, ok := m.(*ent.TaskMutation); ok {
return f(ctx, m)
}
return Denyf("ent/privacy: unexpected mutation type %T, expect *ent.TaskMutation", m)
}
// The TeamQueryRuleFunc type is an adapter to allow the use of ordinary
// functions as a query rule.
type TeamQueryRuleFunc func(context.Context, *ent.TeamQuery) error
// EvalQuery returns f(ctx, q).
func (f TeamQueryRuleFunc) EvalQuery(ctx context.Context, q ent.Query) error {
if q, ok := q.(*ent.TeamQuery); ok {
return f(ctx, q)
}
return Denyf("ent/privacy: unexpected query type %T, expect *ent.TeamQuery", q)
}
// The TeamMutationRuleFunc type is an adapter to allow the use of ordinary
// functions as a mutation rule.
type TeamMutationRuleFunc func(context.Context, *ent.TeamMutation) error
// EvalMutation calls f(ctx, m).
func (f TeamMutationRuleFunc) EvalMutation(ctx context.Context, m ent.Mutation) error {
if m, ok := m.(*ent.TeamMutation); ok {
return f(ctx, m)
}
return Denyf("ent/privacy: unexpected mutation type %T, expect *ent.TeamMutation", m)
}
// The UserQueryRuleFunc type is an adapter to allow the use of ordinary
// functions as a query rule.
type UserQueryRuleFunc func(context.Context, *ent.UserQuery) error
// EvalQuery returns f(ctx, q).
func (f UserQueryRuleFunc) EvalQuery(ctx context.Context, q ent.Query) error {
if q, ok := q.(*ent.UserQuery); ok {
return f(ctx, q)
}
return Denyf("ent/privacy: unexpected query type %T, expect *ent.UserQuery", q)
}
// The UserMutationRuleFunc type is an adapter to allow the use of ordinary
// functions as a mutation rule.
type UserMutationRuleFunc func(context.Context, *ent.UserMutation) error
// EvalMutation calls f(ctx, m).
func (f UserMutationRuleFunc) EvalMutation(ctx context.Context, m ent.Mutation) error {
if m, ok := m.(*ent.UserMutation); ok {
return f(ctx, m)
}
return Denyf("ent/privacy: unexpected mutation type %T, expect *ent.UserMutation", m)
}
type (
// Filter is the interface that wraps the Where function
// for filtering nodes in queries and mutations.
Filter interface {
// Where applies a filter on the executed query/mutation.
Where(entql.P)
}
// The FilterFunc type is an adapter that allows the use of ordinary
// functions as filters for query and mutation types.
FilterFunc func(context.Context, Filter) error
)
// EvalQuery calls f(ctx, q) if the query implements the Filter interface, otherwise it is denied.
func (f FilterFunc) EvalQuery(ctx context.Context, q ent.Query) error {
fr, err := queryFilter(q)
if err != nil {
return err
}
return f(ctx, fr)
}
// EvalMutation calls f(ctx, m) if the mutation implements the Filter interface, otherwise it is denied.
func (f FilterFunc) EvalMutation(ctx context.Context, m ent.Mutation) error {
fr, err := mutationFilter(m)
if err != nil {
return err
}
return f(ctx, fr)
}
var _ QueryMutationRule = FilterFunc(nil)
func queryFilter(q ent.Query) (Filter, error) {
switch q := q.(type) {
case *ent.TaskQuery:
return q.Filter(), nil
case *ent.TeamQuery:
return q.Filter(), nil
case *ent.UserQuery:
return q.Filter(), nil
default:
return nil, Denyf("ent/privacy: unexpected query type %T for query filter", q)
}
}
func mutationFilter(m ent.Mutation) (Filter, error) {
switch m := m.(type) {
case *ent.TaskMutation:
return m.Filter(), nil
case *ent.TeamMutation:
return m.Filter(), nil
case *ent.UserMutation:
return m.Filter(), nil
default:
return nil, Denyf("ent/privacy: unexpected mutation type %T for mutation filter", m)
}
}
| Denyf |
app.po.ts | import { browser, element, by } from 'protractor';
export class FooPage {
navigateTo() {
return browser.get('/');
}
getParagraphText() {
return element(by.css('app-root h1')).getText();
} | } |
|
certificates.go | /*
Copyright 2017 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package e2e
import (
"crypto/x509"
"crypto/x509/pkix"
"encoding/pem"
"time"
"k8s.io/api/certificates/v1beta1"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/util/wait"
"k8s.io/client-go/util/cert"
v1beta1client "k8s.io/kubernetes/pkg/client/clientset_generated/clientset/typed/certificates/v1beta1"
"k8s.io/kubernetes/test/e2e/framework"
. "github.com/onsi/ginkgo"
)
var _ = framework.KubeDescribe("Certificates API", func() {
f := framework.NewDefaultFramework("certificates")
It("should support building a client with a CSR", func() {
const commonName = "tester-csr"
pk, err := cert.NewPrivateKey()
framework.ExpectNoError(err)
pkder := x509.MarshalPKCS1PrivateKey(pk)
pkpem := pem.EncodeToMemory(&pem.Block{
Type: "RSA PRIVATE KEY",
Bytes: pkder,
})
csrb, err := cert.MakeCSR(pk, &pkix.Name{CommonName: commonName, Organization: []string{"system:masters"}}, nil, nil)
framework.ExpectNoError(err)
csr := &v1beta1.CertificateSigningRequest{
ObjectMeta: metav1.ObjectMeta{
GenerateName: commonName + "-",
},
Spec: v1beta1.CertificateSigningRequestSpec{
Request: csrb,
Usages: []v1beta1.KeyUsage{
v1beta1.UsageSigning,
v1beta1.UsageKeyEncipherment,
v1beta1.UsageClientAuth,
},
},
}
csrs := f.ClientSet.CertificatesV1beta1().CertificateSigningRequests()
framework.Logf("creating CSR")
csr, err = csrs.Create(csr)
framework.ExpectNoError(err)
csrName := csr.Name
framework.Logf("approving CSR")
framework.ExpectNoError(wait.Poll(5*time.Second, time.Minute, func() (bool, error) {
csr.Status.Conditions = []v1beta1.CertificateSigningRequestCondition{
{
Type: v1beta1.CertificateApproved,
Reason: "E2E",
Message: "Set from an e2e test",
},
}
csr, err = csrs.UpdateApproval(csr)
if err != nil {
csr, _ = csrs.Get(csrName, metav1.GetOptions{})
framework.Logf("err updating approval: %v", err)
return false, nil
}
return true, nil
}))
framework.Logf("waiting for CSR to be signed")
framework.ExpectNoError(wait.Poll(5*time.Second, time.Minute, func() (bool, error) {
			csr, err = csrs.Get(csrName, metav1.GetOptions{})
if err != nil {
return false, err
}
if len(csr.Status.Certificate) == 0 |
return true, nil
}))
framework.Logf("testing the client")
rcfg, err := framework.LoadConfig()
framework.ExpectNoError(err)
rcfg.TLSClientConfig.CertData = csr.Status.Certificate
rcfg.TLSClientConfig.KeyData = pkpem
rcfg.TLSClientConfig.CertFile = ""
rcfg.BearerToken = ""
rcfg.AuthProvider = nil
rcfg.Username = ""
rcfg.Password = ""
newClient, err := v1beta1client.NewForConfig(rcfg)
framework.ExpectNoError(err)
framework.ExpectNoError(newClient.CertificateSigningRequests().Delete(csrName, nil))
})
})
| {
framework.Logf("csr not signed yet")
return false, nil
} |
ImmutableQuery.d.ts | import { SelectedFilter } from "./SelectedFilter";
export declare type SourceFilterType = string | Array<string> | boolean;
export declare class ImmutableQuery {
index: any;
query: any;
static defaultIndex: any;
constructor(index?: any);
buildQuery(): void;
hasFilters(): boolean;
hasFiltersOrQuery(): boolean;
addQuery(query: any): ImmutableQuery;
setQueryString(queryString: any): ImmutableQuery;
getQueryString(): any;
addSelectedFilter(selectedFilter: SelectedFilter): ImmutableQuery;
addSelectedFilters(selectedFilters: Array<SelectedFilter>): ImmutableQuery; | setAggs(aggs: any): ImmutableQuery;
getFilters(keys?: any[]): any;
_getFilters(keys: any, method: any): any;
getFiltersWithKeys(keys: any): any;
getFiltersWithoutKeys(keys: any): any;
setSize(size: number): ImmutableQuery;
setSort(sort: any): ImmutableQuery;
setSource(_source: SourceFilterType): ImmutableQuery;
setHighlight(highlight: any): ImmutableQuery;
getSize(): any;
setFrom(from: number): ImmutableQuery;
getFrom(): any;
getPage(): number;
deepUpdate(key: any, ob: any): ImmutableQuery;
setSuggestions(suggestions: any): ImmutableQuery;
update(updateDef: any): ImmutableQuery;
getJSON(): any;
printJSON(): void;
} | getSelectedFilters(): any;
addAnonymousFilter(bool: any): ImmutableQuery;
addFilter(key: any, filter: any): ImmutableQuery; |
home.component.ts | import { Component, OnInit } from '@angular/core';
import { FormControl } from '@angular/forms';
import { Observable } from 'rxjs/Observable';
import { Router } from "@angular/router";
import { environment } from '../../../environments/environment';
declare function playAudio(): void;
declare function stopAudio(): void;
@Component({
selector: 'app-home',
templateUrl: './home.component.html',
styleUrls: ['./home.component.scss']
})
export class | implements OnInit {
public Reservation: boolean = false;
myControl: FormControl = new FormControl();
options = [
'One',
'Two',
'Three'
];
filteredOptions: Observable<string[]>;
constructor(public router: Router) { }
ngOnInit() {
this.filteredOptions = this.myControl.valueChanges
}
filter(val: string): string[] {
return this.options.filter(option =>
option.toLowerCase().indexOf(val.toLowerCase()) === 0);
}
goToLogin() {
this.router.navigate(['/login']);
playAudio();
console.log('login clicked');
setTimeout(function () {
stopAudio();
}, 100);
}
showResrvation() {
this.Reservation = true;
}
hideResrvation() {
this.Reservation = false;
}
foods = [
{ value: 'steak-0', viewValue: 'Steak' },
{ value: 'pizza-1', viewValue: 'Pizza' },
{ value: 'tacos-2', viewValue: 'Tacos' }
];
}
| HomeComponent |
DeleteReadinessCheckCommand.ts | import { getSerdePlugin } from "@aws-sdk/middleware-serde";
import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http";
import { Command as $Command } from "@aws-sdk/smithy-client";
import {
FinalizeHandlerArguments,
Handler,
HandlerExecutionContext,
HttpHandlerOptions as __HttpHandlerOptions,
MetadataBearer as __MetadataBearer,
MiddlewareStack,
SerdeContext as __SerdeContext,
} from "@aws-sdk/types";
import { DeleteReadinessCheckRequest } from "../models/models_0";
import {
deserializeAws_restJson1DeleteReadinessCheckCommand,
serializeAws_restJson1DeleteReadinessCheckCommand,
} from "../protocols/Aws_restJson1";
import {
Route53RecoveryReadinessClientResolvedConfig,
ServiceInputTypes,
ServiceOutputTypes,
} from "../Route53RecoveryReadinessClient";
export interface DeleteReadinessCheckCommandInput extends DeleteReadinessCheckRequest {}
export interface DeleteReadinessCheckCommandOutput extends __MetadataBearer {}
/**
* Deletes an existing Readiness Check.
* @example
* Use a bare-bones client and the command you need to make an API call.
* ```javascript
* import { Route53RecoveryReadinessClient, DeleteReadinessCheckCommand } from "@aws-sdk/client-route53-recovery-readiness"; // ES Modules import
* // const { Route53RecoveryReadinessClient, DeleteReadinessCheckCommand } = require("@aws-sdk/client-route53-recovery-readiness"); // CommonJS import
* const client = new Route53RecoveryReadinessClient(config);
* const command = new DeleteReadinessCheckCommand(input);
* const response = await client.send(command);
* ```
*
* @see {@link DeleteReadinessCheckCommandInput} for command's `input` shape.
* @see {@link DeleteReadinessCheckCommandOutput} for command's `response` shape.
* @see {@link Route53RecoveryReadinessClientResolvedConfig | config} for command's `input` shape.
*
*/
export class | extends $Command<
DeleteReadinessCheckCommandInput,
DeleteReadinessCheckCommandOutput,
Route53RecoveryReadinessClientResolvedConfig
> {
// Start section: command_properties
// End section: command_properties
constructor(readonly input: DeleteReadinessCheckCommandInput) {
// Start section: command_constructor
super();
// End section: command_constructor
}
/**
* @internal
*/
resolveMiddleware(
clientStack: MiddlewareStack<ServiceInputTypes, ServiceOutputTypes>,
configuration: Route53RecoveryReadinessClientResolvedConfig,
options?: __HttpHandlerOptions
): Handler<DeleteReadinessCheckCommandInput, DeleteReadinessCheckCommandOutput> {
this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize));
const stack = clientStack.concat(this.middlewareStack);
const { logger } = configuration;
const clientName = "Route53RecoveryReadinessClient";
const commandName = "DeleteReadinessCheckCommand";
const handlerExecutionContext: HandlerExecutionContext = {
logger,
clientName,
commandName,
inputFilterSensitiveLog: DeleteReadinessCheckRequest.filterSensitiveLog,
outputFilterSensitiveLog: (output: any) => output,
};
const { requestHandler } = configuration;
return stack.resolve(
(request: FinalizeHandlerArguments<any>) =>
requestHandler.handle(request.request as __HttpRequest, options || {}),
handlerExecutionContext
);
}
private serialize(input: DeleteReadinessCheckCommandInput, context: __SerdeContext): Promise<__HttpRequest> {
return serializeAws_restJson1DeleteReadinessCheckCommand(input, context);
}
private deserialize(output: __HttpResponse, context: __SerdeContext): Promise<DeleteReadinessCheckCommandOutput> {
return deserializeAws_restJson1DeleteReadinessCheckCommand(output, context);
}
// Start section: command_body_extra
// End section: command_body_extra
}
| DeleteReadinessCheckCommand |
calendar-component.js | /*jslint nomen: true*/
/*global define*/
define(function (require) {
'use strict';
var _ = require('underscore'),
BaseComponent = require('oroui/js/app/components/base/component'),
Calendar = require('orocalendar/js/calendar-view'),
EventCollection = require('orocalendar/js/calendar/event/collection'),
ConnectionCollection = require('orocalendar/js/calendar/connection/collection');
/**
* Creates calendar
*/
var CalendarComponent = BaseComponent.extend({
/**
* @type {orocalendar.js.calendar}
*/
calendar: null,
/**
* @type {EventCollection}
*/
eventCollection: null,
/**
* @type {ConnectionCollection}
*/
connectionCollection: null,
/**
* @constructor
* @param {Object} options
*/
initialize: function (options) {
this.options = options;
if (!this.options.el) {
this.options.el = this.options._sourceElement;
}
this.eventCollection = new EventCollection(JSON.parse(this.options.eventsItemsJson)); | delete this.options.connectionsItemsJson;
this.prepareOptions();
this.renderCalendar();
},
prepareOptions: function () {
var options = this.options;
options.collection = this.eventCollection;
options.scrollToCurrentTime = true;
options.connectionsOptions.collection = this.connectionCollection;
options.eventsOptions.date = options.date;
options.eventsOptions.header = {
left: options.eventsOptions.leftHeader || '',
center: options.eventsOptions.centerHeader || '',
right: options.eventsOptions.rightHeader || ''
};
_.extend(options.eventsOptions, options.calendarOptions);
delete options.calendarOptions;
delete options.date;
delete options.eventsOptions.centerHeader;
delete options.eventsOptions.leftHeader;
delete options.eventsOptions.rightHeader;
},
renderCalendar: function () {
this.calendar = new Calendar(this.options);
this.calendar.render();
}
});
return CalendarComponent;
}); | this.connectionCollection = new ConnectionCollection(JSON.parse(this.options.connectionsItemsJson));
delete this.options.eventsItemsJson; |
prompt.rs | use crate::common::*;
pub(crate) fn | (message: &str) -> Result<String> {
eprint!("{} › ", Style::new().apply_to(message).bold());
let mut input = String::new();
io::stdout().flush()?;
io::stdin().read_line(&mut input)?;
Ok(input.as_str().to_lowercase().trim().to_owned())
}
| prompt |
deployment.go | package testserver
import (
"github.com/pkg/errors"
"github.com/kumahq/kuma/test/framework"
)
type DeploymentOpts struct {
Name string
Namespace string
Mesh string
WithStatefulSet bool
ServiceAccount string
Args []string
Replicas int32
WaitingToBeReady bool
}
func | () DeploymentOpts {
return DeploymentOpts{
Mesh: "default",
Args: []string{},
Name: "test-server",
Namespace: framework.TestNamespace,
Replicas: 1,
WaitingToBeReady: true,
}
}
type DeploymentOptsFn = func(*DeploymentOpts)
func WithMesh(mesh string) DeploymentOptsFn {
return func(opts *DeploymentOpts) {
opts.Mesh = mesh
}
}
func WithName(name string) DeploymentOptsFn {
return func(opts *DeploymentOpts) {
opts.Name = name
}
}
func WithNamespace(namespace string) DeploymentOptsFn {
return func(opts *DeploymentOpts) {
opts.Namespace = namespace
}
}
func WithReplicas(n int32) DeploymentOptsFn {
return func(opts *DeploymentOpts) {
opts.Replicas = n
}
}
func WithStatefulSet(apply bool) DeploymentOptsFn {
return func(opts *DeploymentOpts) {
opts.WithStatefulSet = apply
}
}
func WithServiceAccount(serviceAccountName string) DeploymentOptsFn {
return func(opts *DeploymentOpts) {
opts.ServiceAccount = serviceAccountName
}
}
func WithoutWaitingToBeReady() DeploymentOptsFn {
return func(opts *DeploymentOpts) {
opts.WaitingToBeReady = false
}
}
func WithArgs(args ...string) DeploymentOptsFn {
return func(opts *DeploymentOpts) {
opts.Args = args
}
}
type TestServer interface {
}
type Deployment interface {
framework.Deployment
TestServer
}
func Install(fn ...DeploymentOptsFn) framework.InstallFunc {
opts := DefaultDeploymentOpts()
for _, f := range fn {
f(&opts)
}
return func(cluster framework.Cluster) error {
var deployment Deployment
switch cluster.(type) {
case *framework.K8sCluster:
deployment = &k8SDeployment{
opts: opts,
}
default:
return errors.New("invalid cluster")
}
return cluster.Deploy(deployment)
}
}
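// Hedged usage sketch (not part of the original file): a test typically builds the install function
// from the options above and applies it to a cluster from the framework; the argument values and the
// `cluster` variable below are assumptions used only for illustration.
//
//	installFn := Install(
//		WithName("test-server"),
//		WithNamespace("kuma-test"),
//		WithArgs("echo", "--instance", "test-server"),
//	)
//	err := installFn(cluster) // cluster is a framework.Cluster provided by the test setup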
| DefaultDeploymentOpts |
0019_allow_null_precision.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.12 on 2018-04-20 18:17
from __future__ import unicode_literals
from django.db import migrations
import mep.accounts.models
class Migration(migrations.Migration):
| dependencies = [
('accounts', '0018_merge_20180418_1607'),
]
operations = [
migrations.AlterField(
model_name='borrow',
name='end_date_precision',
field=mep.accounts.partial_date.DatePrecisionField(blank=True, null=True),
),
migrations.AlterField(
model_name='borrow',
name='start_date_precision',
field=mep.accounts.partial_date.DatePrecisionField(blank=True, null=True),
),
] |
|
sqllogictest.rs | // Copyright 2022 RisingLight Project Authors. Licensed under Apache-2.0.
//! RisingLight sqllogictest
use libtest_mimic::{run_tests, Arguments, Outcome, Test};
use risinglight_sqllogictest::{test_disk, test_mem};
use tokio::runtime::Runtime;
fn main() {
const PATTERN: &str = "../sql/**/[!_]*.slt"; // ignore files start with '_'
const MEM_BLOCKLIST: &[&str] = &["statistics.slt"];
const DISK_BLOCKLIST: &[&str] = &[];
let paths = glob::glob(PATTERN).expect("failed to find test files");
let args = Arguments::from_args();
let mut tests = vec![];
for entry in paths {
let path = entry.expect("failed to read glob entry");
let subpath = path.strip_prefix("../sql").unwrap().to_str().unwrap();
if !MEM_BLOCKLIST.iter().any(|p| subpath.contains(p)) {
tests.push(Test {
name: format!(
"mem_{}",
subpath.strip_suffix(".slt").unwrap().replace('/', "_")
),
kind: "".into(),
is_ignored: false,
is_bench: false,
data: ("mem", subpath.to_string()),
});
}
if !DISK_BLOCKLIST.iter().any(|p| subpath.contains(p)) {
tests.push(Test {
name: format!(
"disk_{}",
subpath.strip_suffix(".slt").unwrap().replace('/', "_")
),
kind: "".into(),
is_ignored: false,
is_bench: false,
data: ("disk", subpath.to_string()),
});
}
}
if tests.is_empty() {
panic!(
"no test found for sqllogictest! pwd: {:?}",
std::env::current_dir().unwrap()
);
}
fn build_runtime() -> Runtime |
run_tests(&args, tests, |test| match &test.data {
("mem", case) => {
build_runtime().block_on(test_mem(case));
Outcome::Passed
}
("disk", case) => {
build_runtime().block_on(test_disk(case));
Outcome::Passed
}
_ => unreachable!(),
})
.exit();
}
| {
tokio::runtime::Builder::new_current_thread()
.enable_all()
.build()
.unwrap()
} |
test.rs | use assert_cmd::prelude::*;
use std::process::Command;
#[test]
fn arg_only_10() -> Result<(), Box<dyn std::error::Error>> {
let mut cmd = Command::cargo_bin("rtimer")?;
cmd.arg("10");
cmd.assert().success();
Ok(())
}
| let mut cmd = Command::cargo_bin("rtimer")?;
cmd.arg("5").arg("sec");
cmd.assert().success();
Ok(())
}
#[test]
fn arg_5_sec_with_progress() -> Result<(), Box<dyn std::error::Error>> {
let mut cmd = Command::cargo_bin("rtimer")?;
cmd.arg("5").arg("sec").arg("-p").arg("01");
cmd.assert().success();
Ok(())
} | #[test]
fn arg_5_sec() -> Result<(), Box<dyn std::error::Error>> { |
upstream_test.go | //go:build !windows
// +build !windows
package server
import (
"archive/tar"
"compress/gzip"
"context"
"io"
"io/ioutil"
"log"
"math/rand"
"os"
"path/filepath"
"testing"
"github.com/loft-sh/devspace/helper/remote"
"github.com/loft-sh/devspace/helper/util"
"github.com/pkg/errors"
)
var pool = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789%(&)°=?!§ _:$%&/()"
// Generate a random string of A-Z chars with len = l
func random(l int) []byte {
bytes := make([]byte, l)
for i := 0; i < l; i++ {
bytes[i] = pool[rand.Intn(len(pool))]
}
return bytes
}
type testFile struct {
Data []byte
Children map[string]testFile
}
var fileStructure = testFile{
Children: map[string]testFile{
"test.txt": {
Data: random(10),
},
"emptydir": {
Children: map[string]testFile{},
},
"emptydir2": {
Children: map[string]testFile{},
},
"dir1": {
Children: map[string]testFile{
"dir1-child": {
Children: map[string]testFile{
"test": {
Data: random(100),
},
"test-123": {
Data: []byte{},
},
},
},
},
},
},
}
var overwriteFileStructure = testFile{
Children: map[string]testFile{
"test.txt": {
Data: random(10),
},
},
}
func compareFiles(dir string, file testFile) error {
files, err := ioutil.ReadDir(dir)
if err != nil {
return err
}
if len(file.Children) != len(files) {
return errors.Errorf("dir %s expected %d children, got %d", dir, len(file.Children), len(files))
}
// check
for childName, child := range file.Children {
found := false
for _, f := range files {
if f.Name() == childName {
if f.IsDir() != (child.Children != nil) {
return errors.Errorf("child %s in dir %s: real isDir %v != expected isDir %v", childName, dir, f.IsDir(), child.Children != nil)
}
if child.Data != nil {
data, err := ioutil.ReadFile(filepath.Join(dir, f.Name()))
if err != nil {
return err
}
if string(data) != string(child.Data) {
return errors.Errorf("child %s in dir %s: expected data %s, got %s", childName, dir, string(child.Data), string(data))
}
}
if child.Children != nil {
err := compareFiles(filepath.Join(dir, childName), child)
if err != nil {
return err
}
}
found = true
break
}
}
if found == false {
return errors.Errorf("dir %s: path %s not found", dir, childName)
}
}
return nil
}
func createFiles(dir string, file testFile) error {
| func TestUpstreamServer(t *testing.T) {
fromDir, err := ioutil.TempDir("", "")
if err != nil {
t.Fatal(err)
}
toDir, err := ioutil.TempDir("", "")
if err != nil {
t.Fatal(err)
}
defer os.RemoveAll(fromDir)
defer os.RemoveAll(toDir)
err = createFiles(fromDir, fileStructure)
if err != nil {
t.Fatal(err)
}
err = createFiles(toDir, overwriteFileStructure)
if err != nil {
t.Fatal(err)
}
// Create Upload Tar
// Open tar
r, w, err := os.Pipe()
if err != nil {
t.Fatal(err)
}
// Use compression
gw := gzip.NewWriter(w)
tarWriter := tar.NewWriter(gw)
writtenFiles := make(map[string]bool)
err = recursiveTar(fromDir, "", writtenFiles, tarWriter, false)
if err != nil {
t.Fatal(err)
}
// Close writer
tarWriter.Close()
gw.Close()
w.Close()
log.Println("Wrote tar")
// Upload tar with client
clientReader, clientWriter := io.Pipe()
serverReader, serverWriter := io.Pipe()
go func() {
err := StartUpstreamServer(serverReader, clientWriter, &UpstreamOptions{
UploadPath: toDir,
ExludePaths: nil,
ExitOnClose: false,
})
if err != nil {
panic(err)
}
}()
conn, err := util.NewClientConnection(clientReader, serverWriter)
if err != nil {
t.Fatal(err)
}
client := remote.NewUpstreamClient(conn)
uploadClient, err := client.Upload(context.Background())
if err != nil {
t.Fatal(err)
}
log.Println("Created server and client")
// Upload file
buf := make([]byte, 16*1024)
for {
n, err := r.Read(buf)
if n > 0 {
err := uploadClient.Send(&remote.Chunk{
Content: buf[:n],
})
if err != nil {
t.Fatal(err)
}
}
if err == io.EOF {
_, err := uploadClient.CloseAndRecv()
if err != nil {
t.Fatal(err)
}
break
} else if err != nil {
t.Fatal(err)
}
}
log.Println("Uploaded tar")
err = compareFiles(toDir, fileStructure)
if err != nil {
t.Fatal(err)
}
removeClient, err := client.Remove(context.Background())
if err != nil {
t.Fatal(err)
}
for path := range fileStructure.Children {
_ = removeClient.Send(&remote.Paths{
Paths: []string{path, path},
})
}
_, err = removeClient.CloseAndRecv()
if err != nil {
t.Fatal(err)
}
// Check if toDir is empty
files, err := ioutil.ReadDir(toDir)
if err != nil {
t.Fatal(err)
}
if len(files) > 0 {
t.Fatalf("Expected empty toDir, but still has %d entries", len(files))
}
}
| for name, child := range file.Children {
if child.Children != nil {
err := os.Mkdir(filepath.Join(dir, name), 0755)
if err != nil {
return err
}
err = createFiles(filepath.Join(dir, name), child)
if err != nil {
return err
}
} else {
err := ioutil.WriteFile(filepath.Join(dir, name), child.Data, 0666)
if err != nil {
return err
}
}
}
return nil
}
|
api_op_ListNamedQueries.go | // Code generated by private/model/cli/gen-api/main.go. DO NOT EDIT.
package athena
import (
"context"
"github.com/aws/aws-sdk-go-v2/aws"
"github.com/aws/aws-sdk-go-v2/internal/awsutil"
)
type ListNamedQueriesInput struct {
_ struct{} `type:"structure"`
// The maximum number of queries to return in this request.
MaxResults *int64 `type:"integer"`
// The token that specifies where to start pagination if a previous request
// was truncated.
NextToken *string `min:"1" type:"string"`
// The name of the workgroup from which the named queries are returned. If a
// workgroup is not specified, the saved queries for the primary workgroup are
// returned.
WorkGroup *string `type:"string"`
}
// String returns the string representation
func (s ListNamedQueriesInput) String() string {
return awsutil.Prettify(s)
}
// Validate inspects the fields of the type to determine if they are valid.
func (s *ListNamedQueriesInput) Validate() error {
invalidParams := aws.ErrInvalidParams{Context: "ListNamedQueriesInput"}
if s.NextToken != nil && len(*s.NextToken) < 1 {
invalidParams.Add(aws.NewErrParamMinLen("NextToken", 1))
}
if invalidParams.Len() > 0 {
return invalidParams
}
return nil
}
type ListNamedQueriesOutput struct {
_ struct{} `type:"structure"`
// The list of unique query IDs.
NamedQueryIds []string `min:"1" type:"list"`
// A token to be used by the next request if this request is truncated.
NextToken *string `min:"1" type:"string"`
}
// String returns the string representation
func (s ListNamedQueriesOutput) String() string {
return awsutil.Prettify(s)
}
const opListNamedQueries = "ListNamedQueries"
// ListNamedQueriesRequest returns a request value for making API operation for
// Amazon Athena.
//
// Provides a list of available query IDs only for queries saved in the specified
// workgroup. Requires that you have access to the workgroup. If a workgroup
// is not specified, lists the saved queries for the primary workgroup.
//
// For code samples using the AWS SDK for Java, see Examples and Code Samples
// (http://docs.aws.amazon.com/athena/latest/ug/code-samples.html) in the Amazon
// Athena User Guide.
//
// // Example sending a request using ListNamedQueriesRequest.
// req := client.ListNamedQueriesRequest(params)
// resp, err := req.Send(context.TODO())
// if err == nil {
// fmt.Println(resp)
// }
//
// Please also see https://docs.aws.amazon.com/goto/WebAPI/athena-2017-05-18/ListNamedQueries
func (c *Client) ListNamedQueriesRequest(input *ListNamedQueriesInput) ListNamedQueriesRequest {
op := &aws.Operation{
Name: opListNamedQueries,
HTTPMethod: "POST",
HTTPPath: "/",
Paginator: &aws.Paginator{
InputTokens: []string{"NextToken"},
OutputTokens: []string{"NextToken"},
LimitToken: "MaxResults",
TruncationToken: "",
},
}
if input == nil {
input = &ListNamedQueriesInput{}
}
req := c.newRequest(op, input, &ListNamedQueriesOutput{})
return ListNamedQueriesRequest{Request: req, Input: input, Copy: c.ListNamedQueriesRequest}
}
// ListNamedQueriesRequest is the request type for the
// ListNamedQueries API operation.
type ListNamedQueriesRequest struct {
*aws.Request
Input *ListNamedQueriesInput
Copy func(*ListNamedQueriesInput) ListNamedQueriesRequest
}
// Send marshals and sends the ListNamedQueries API request.
func (r ListNamedQueriesRequest) Send(ctx context.Context) (*ListNamedQueriesResponse, error) {
r.Request.SetContext(ctx)
err := r.Request.Send()
if err != nil {
return nil, err
}
resp := &ListNamedQueriesResponse{
ListNamedQueriesOutput: r.Request.Data.(*ListNamedQueriesOutput),
response: &aws.Response{Request: r.Request},
}
return resp, nil
}
// NewListNamedQueriesPaginator returns a paginator for ListNamedQueries.
// Use Next method to get the next page, and CurrentPage to get the current
// response page from the paginator. Next will return false, if there are
// no more pages, or an error was encountered.
//
// Note: This operation can generate multiple requests to a service.
//
// // Example iterating over pages.
// req := client.ListNamedQueriesRequest(input)
//    p := athena.NewListNamedQueriesPaginator(req)
//
// for p.Next(context.TODO()) {
// page := p.CurrentPage()
// }
//
// if err := p.Err(); err != nil {
// return err
// }
//
func NewListNamedQueriesPaginator(req ListNamedQueriesRequest) ListNamedQueriesPaginator |
// ListNamedQueriesPaginator is used to paginate the request. This can be done by
// calling Next and CurrentPage.
type ListNamedQueriesPaginator struct {
aws.Pager
}
func (p *ListNamedQueriesPaginator) CurrentPage() *ListNamedQueriesOutput {
return p.Pager.CurrentPage().(*ListNamedQueriesOutput)
}
// ListNamedQueriesResponse is the response type for the
// ListNamedQueries API operation.
type ListNamedQueriesResponse struct {
*ListNamedQueriesOutput
response *aws.Response
}
// SDKResponseMetdata returns the response metadata for the
// ListNamedQueries request.
func (r *ListNamedQueriesResponse) SDKResponseMetdata() *aws.Response {
return r.response
}
| {
return ListNamedQueriesPaginator{
Pager: aws.Pager{
NewRequest: func(ctx context.Context) (*aws.Request, error) {
var inCpy *ListNamedQueriesInput
if req.Input != nil {
tmp := *req.Input
inCpy = &tmp
}
newReq := req.Copy(inCpy)
newReq.SetContext(ctx)
return newReq.Request, nil
},
},
}
} |
go-destroy-session.go | package main
import (
"fmt"
"net/http"
"net/http/cgi"
)
func main() | {
cgi.Serve(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
header := w.Header()
cookie := http.Cookie{Name: "username", Value: "None"} // create cookie
http.SetCookie(w, &cookie) // set cookie to response
header.Set("Cache-Control", "no-cache")
header.Set("Content-Type", "text/html")
fmt.Fprintf(w, "<html><head><title>Go Session Destroyed</title></head><body><h1>Go Session Destroyed</h1><hr/>")
fmt.Fprintf(w, "<a href=\"/hw2/go-cgiform.html\">Back to the Go CGI Form</a><br/>")
fmt.Fprintf(w, "<a href=\"/cgi-bin/go/go-sessions-1.cgi\">Session Page 1</a><br />")
fmt.Fprintf(w, "<a href=\"/cgi-bin/go/go-sessions-2.cgi\">Session Page 2</a><br/>")
fmt.Fprintf(w, "</body>")
fmt.Fprintf(w, "</html>")
}))
} |
|
HeaderSearch.js | import React from 'react';
import { useRouter } from 'next/router';
import { FontAwesomeIcon } from '@fortawesome/react-fontawesome';
import { faSearch } from '@fortawesome/free-solid-svg-icons';
import HeaderSearchModal from './HeaderSearchStyle';
import InputSearch from './searchBox';
function HeaderSearch() {
const router = useRouter();
const [modalVisibility, setModalVisibility] = React.useState(false);
const handleKeyPress = (e) => {
if (e.key === 'Enter') {
setModalVisibility(false);
const url = `/search/${e.target.value}`;
router.push(url);
}
};
return (
<div className="header__search--container">
<FontAwesomeIcon
className="search__icon" icon={faSearch}
onClick={() => setModalVisibility(true)} | style={{ top: 0 }}
visible={modalVisibility}
onOk={() => setModalVisibility(false)}
onCancel={() => setModalVisibility(false)}
footer={null}
className="header__search--modal"
width="100%"
closable={false}
destroyOnClose={true}
>
<FontAwesomeIcon className="search__icon" icon={faSearch} />
<InputSearch
handleKeyPress={handleKeyPress}
/>
<button className="ant-modal-close" onClick={() => setModalVisibility(false)}>
<span className="ant-modal-close-x"></span>
</button>
</HeaderSearchModal>
</div>
);
}
export default HeaderSearch; | />
<HeaderSearchModal |
tests.rs | // This file is part of Substrate.
// Copyright (C) 2020 Parity Technologies (UK) Ltd.
// SPDX-License-Identifier: Apache-2.0
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use crate::*;
use codec::{Encode, Decode};
use frame_support::{
assert_ok, impl_outer_origin, parameter_types,
weights::Weight,
};
use sp_core::{
H256,
offchain::{OffchainExt, TransactionPoolExt, testing},
sr25519::Signature,
testing::KeyStore,
traits::KeystoreExt,
};
use sp_runtime::{
Perbill, RuntimeAppPublic,
testing::{Header, TestXt},
traits::{
BlakeTwo256, IdentityLookup, Extrinsic as ExtrinsicT,
IdentifyAccount, Verify,
},
};
impl_outer_origin! {
pub enum Origin for Test where system = frame_system {}
}
// For testing the module, we construct most of a mock runtime. This means
// first constructing a configuration type (`Test`) which `impl`s each of the
// configuration traits of modules we want to use.
#[derive(Clone, Eq, PartialEq, Encode, Decode)]
pub struct Test;
parameter_types! {
pub const BlockHashCount: u64 = 250;
pub const MaximumBlockWeight: Weight = 1024;
pub const MaximumBlockLength: u32 = 2 * 1024;
pub const AvailableBlockRatio: Perbill = Perbill::one();
}
impl frame_system::Trait for Test {
type BaseCallFilter = ();
type Origin = Origin;
type Call = ();
type Index = u64;
type BlockNumber = u64;
type Hash = H256;
type Hashing = BlakeTwo256;
type AccountId = sp_core::sr25519::Public;
type Lookup = IdentityLookup<Self::AccountId>;
type Header = Header;
type Event = ();
type BlockHashCount = BlockHashCount;
type MaximumBlockWeight = MaximumBlockWeight;
type DbWeight = ();
type BlockExecutionWeight = ();
type ExtrinsicBaseWeight = ();
type MaximumExtrinsicWeight = MaximumBlockWeight;
type MaximumBlockLength = MaximumBlockLength;
type AvailableBlockRatio = AvailableBlockRatio;
type Version = ();
type PalletInfo = ();
type AccountData = ();
type OnNewAccount = ();
type OnKilledAccount = ();
type SystemWeightInfo = ();
}
type Extrinsic = TestXt<Call<Test>, ()>;
type AccountId = <<Signature as Verify>::Signer as IdentifyAccount>::AccountId;
impl frame_system::offchain::SigningTypes for Test {
type Public = <Signature as Verify>::Signer;
type Signature = Signature;
}
impl<LocalCall> frame_system::offchain::SendTransactionTypes<LocalCall> for Test where
Call<Test>: From<LocalCall>,
{
type OverarchingCall = Call<Test>;
type Extrinsic = Extrinsic;
}
impl<LocalCall> frame_system::offchain::CreateSignedTransaction<LocalCall> for Test where
Call<Test>: From<LocalCall>,
{
fn create_transaction<C: frame_system::offchain::AppCrypto<Self::Public, Self::Signature>>(
call: Call<Test>,
_public: <Signature as Verify>::Signer,
_account: AccountId,
nonce: u64,
) -> Option<(Call<Test>, <Extrinsic as ExtrinsicT>::SignaturePayload)> {
Some((call, (nonce, ())))
}
}
parameter_types! {
pub const GracePeriod: u64 = 5;
pub const UnsignedInterval: u64 = 128;
pub const UnsignedPriority: u64 = 1 << 20;
}
impl Trait for Test {
type Event = ();
type AuthorityId = crypto::TestAuthId;
type Call = Call<Test>;
type GracePeriod = GracePeriod;
type UnsignedInterval = UnsignedInterval;
type UnsignedPriority = UnsignedPriority;
}
type Example = Module<Test>;
#[test]
fn it_aggregates_the_price() {
sp_io::TestExternalities::default().execute_with(|| {
assert_eq!(Example::average_price(), None);
assert_ok!(Example::submit_price(Origin::signed(Default::default()), 27));
assert_eq!(Example::average_price(), Some(27));
assert_ok!(Example::submit_price(Origin::signed(Default::default()), 43));
assert_eq!(Example::average_price(), Some(35));
});
}
//cargo test --color=always --package pallet-ocw --lib tests::should_make_http_call_and_parse_result -- --exact --nocapture
#[test]
fn should_make_http_call_and_parse_result() {
let (offchain, state) = testing::TestOffchainExt::new();
let mut t = sp_io::TestExternalities::default();
t.register_extension(OffchainExt::new(offchain));
price_oracle_response(&mut state.write());
t.execute_with(|| {
// when
let price = Example::fetch_price().unwrap();
// then
assert_eq!(price, 15523);
});
}
#[test]
fn knows_how_to_mock_several_http_calls() {
let (offchain, state) = testing::TestOffchainExt::new();
let mut t = sp_io::TestExternalities::default();
t.register_extension(OffchainExt::new(offchain));
{
let mut state = state.write();
state.expect_request(testing::PendingRequest {
method: "GET".into(),
uri: "https://min-api.cryptocompare.com/data/price?fsym=BTC&tsyms=USD".into(),
response: Some(br#"{"USD": 1}"#.to_vec()),
sent: true,
..Default::default()
});
state.expect_request(testing::PendingRequest {
method: "GET".into(),
uri: "https://min-api.cryptocompare.com/data/price?fsym=BTC&tsyms=USD".into(),
response: Some(br#"{"USD": 2}"#.to_vec()),
sent: true,
..Default::default()
});
state.expect_request(testing::PendingRequest {
method: "GET".into(),
uri: "https://min-api.cryptocompare.com/data/price?fsym=BTC&tsyms=USD".into(),
response: Some(br#"{"USD": 3}"#.to_vec()),
sent: true,
..Default::default()
});
}
t.execute_with(|| {
let price1 = Example::fetch_price().unwrap();
let price2 = Example::fetch_price().unwrap();
let price3 = Example::fetch_price().unwrap();
assert_eq!(price1, 100);
assert_eq!(price2, 200);
assert_eq!(price3, 300);
})
}
//cargo test --color=always --package pallet-ocw --lib tests::should_submit_signed_transaction_on_chain -- --exact --nocapture
#[test]
fn should_submit_signed_transaction_on_chain() {
const PHRASE: &str = "news slush supreme milk chapter athlete soap sausage put clutch what kitten";
let (offchain, offchain_state) = testing::TestOffchainExt::new();
let (pool, pool_state) = testing::TestTransactionPoolExt::new();
let keystore = KeyStore::new();
keystore.write().sr25519_generate_new(
crate::crypto::Public::ID,
Some(&format!("{}/hunter1", PHRASE))
).unwrap();
let mut t = sp_io::TestExternalities::default();
t.register_extension(OffchainExt::new(offchain));
t.register_extension(TransactionPoolExt::new(pool));
t.register_extension(KeystoreExt(keystore));
price_oracle_response(&mut offchain_state.write());
t.execute_with(|| {
// when
Example::fetch_price_and_send_signed().unwrap();
// then
let tx = pool_state.write().transactions.pop().unwrap();
assert!(pool_state.read().transactions.is_empty());
let tx = Extrinsic::decode(&mut &*tx).unwrap();
assert_eq!(tx.signature.unwrap().0, 0);
assert_eq!(tx.call, Call::submit_price(15523));
});
}
#[test]
fn should_submit_unsigned_transaction_on_chain_for_any_account() {
const PHRASE: &str = "news slush supreme milk chapter athlete soap sausage put clutch what kitten";
let (offchain, offchain_state) = testing::TestOffchainExt::new();
let (pool, pool_state) = testing::TestTransactionPoolExt::new();
let keystore = KeyStore::new();
keystore.write().sr25519_generate_new(
crate::crypto::Public::ID,
Some(&format!("{}/hunter1", PHRASE))
).unwrap();
let mut t = sp_io::TestExternalities::default();
t.register_extension(OffchainExt::new(offchain));
t.register_extension(TransactionPoolExt::new(pool));
t.register_extension(KeystoreExt(keystore.clone()));
price_oracle_response(&mut offchain_state.write());
let public_key = keystore.read()
.sr25519_public_keys(crate::crypto::Public::ID)
.get(0)
.unwrap()
.clone();
let price_payload = PricePayload {
block_number: 1,
price: 15523,
public: <Test as SigningTypes>::Public::from(public_key),
};
// let signature = price_payload.sign::<crypto::TestAuthId>().unwrap();
t.execute_with(|| {
// when
Example::fetch_price_and_send_unsigned_for_any_account(1).unwrap();
// then
let tx = pool_state.write().transactions.pop().unwrap();
let tx = Extrinsic::decode(&mut &*tx).unwrap();
assert_eq!(tx.signature, None);
if let Call::submit_price_unsigned_with_signed_payload(body, signature) = tx.call {
assert_eq!(body, price_payload);
let signature_valid = <PricePayload<
<Test as SigningTypes>::Public,
<Test as frame_system::Trait>::BlockNumber
> as SignedPayload<Test>>::verify::<crypto::TestAuthId>(&price_payload, signature);
assert!(signature_valid);
}
});
}
#[test]
fn should_submit_unsigned_transaction_on_chain_for_all_accounts() {
const PHRASE: &str = "news slush supreme milk chapter athlete soap sausage put clutch what kitten";
let (offchain, offchain_state) = testing::TestOffchainExt::new();
let (pool, pool_state) = testing::TestTransactionPoolExt::new();
let keystore = KeyStore::new();
keystore.write().sr25519_generate_new(
crate::crypto::Public::ID,
Some(&format!("{}/hunter1", PHRASE))
).unwrap();
let mut t = sp_io::TestExternalities::default();
t.register_extension(OffchainExt::new(offchain));
t.register_extension(TransactionPoolExt::new(pool));
t.register_extension(KeystoreExt(keystore.clone()));
price_oracle_response(&mut offchain_state.write());
let public_key = keystore.read()
.sr25519_public_keys(crate::crypto::Public::ID)
.get(0)
.unwrap()
.clone();
let price_payload = PricePayload {
block_number: 1,
price: 15523,
public: <Test as SigningTypes>::Public::from(public_key),
};
// let signature = price_payload.sign::<crypto::TestAuthId>().unwrap();
t.execute_with(|| {
// when
Example::fetch_price_and_send_unsigned_for_all_accounts(1).unwrap();
// then
let tx = pool_state.write().transactions.pop().unwrap();
let tx = Extrinsic::decode(&mut &*tx).unwrap();
assert_eq!(tx.signature, None);
if let Call::submit_price_unsigned_with_signed_payload(body, signature) = tx.call {
assert_eq!(body, price_payload);
let signature_valid = <PricePayload<
<Test as SigningTypes>::Public,
<Test as frame_system::Trait>::BlockNumber
> as SignedPayload<Test>>::verify::<crypto::TestAuthId>(&price_payload, signature);
assert!(signature_valid);
}
});
}
#[test]
fn should_submit_raw_unsigned_transaction_on_chain() {
let (offchain, offchain_state) = testing::TestOffchainExt::new();
let (pool, pool_state) = testing::TestTransactionPoolExt::new();
let keystore = KeyStore::new();
let mut t = sp_io::TestExternalities::default();
t.register_extension(OffchainExt::new(offchain));
t.register_extension(TransactionPoolExt::new(pool));
t.register_extension(KeystoreExt(keystore));
price_oracle_response(&mut offchain_state.write());
t.execute_with(|| {
// when
Example::fetch_price_and_send_raw_unsigned(1).unwrap();
// then
let tx = pool_state.write().transactions.pop().unwrap();
assert!(pool_state.read().transactions.is_empty());
let tx = Extrinsic::decode(&mut &*tx).unwrap();
assert_eq!(tx.signature, None);
assert_eq!(tx.call, Call::submit_price_unsigned(1, 15523));
});
}
fn price_oracle_response(state: &mut testing::OffchainState) |
#[test]
fn parse_price_works() {
let test_data = vec![
("{\"USD\":6536.92}", Some(653692)),
("{\"USD\":65.92}", Some(6592)),
("{\"USD\":6536.924565}", Some(653692)),
("{\"USD\":6536}", Some(653600)),
("{\"USD2\":6536}", None),
("{\"USD\":\"6432\"}", None),
];
for (json, expected) in test_data {
assert_eq!(expected, Example::parse_price(json));
}
}
//cargo test --color=always --package pallet-ocw --lib tests::height_should_make_http_call_and_parse_result -- --exact --nocapture
#[test]
fn height_should_make_http_call_and_parse_result() {
let (offchain, state) = testing::TestOffchainExt::new();
let mut t = sp_io::TestExternalities::default();
t.register_extension(OffchainExt::new(offchain));
price_oracle_response(&mut state.write());
t.execute_with(|| {
// when
let price = Example::fetch_price().unwrap();
// then
assert_eq!(price, 15523);
});
}
#[test]
fn parse_hight_works() {
let test_data = vec![
("{\"Height\":216,\"Cids\":[{\"/\":\"bafy2bzaceb4a7u7y5rfwgdhmxz776r2zhye5b4bvufrgmobxebsmywt2eln4y\"}]}", Some((216,vec![b"bafy2bzaceb4a7u7y5rfwgdhmxz776r2zhye5b4bvufrgmobxebsmywt2eln4y".to_vec()]))),
("{\"Cids\":[{\"/\":\"aaaaaaa\"},{\"/\":\"bbbbbbb\"}],\"Height\":217}", Some((217,vec![b"aaaaaaa".to_vec(),b"bbbbbbb".to_vec()]))),
("{\"Cids\":[{\"/\":\"aa123\"},{\"/\":\"ewfdsc\"}],\"Height\":218}", Some((218,vec![b"aa123".to_vec(),b"ewfdsc".to_vec()]))),
("{\"Cidd\":[{\"/\":\"123\"},{\"/\":\"123\"}],\"Height\":998}", None),
("{\"Cids\":[{\"/\":\"aa123\"},{\"/\":\"ewfdsc\"}],\"FFF\":218}", None),
];
for (json, expected) in test_data {
let data = Example::parse_height(json);
assert_eq!(expected, data);
}
} | {
state.expect_request(testing::PendingRequest {
method: "GET".into(),
uri: "https://min-api.cryptocompare.com/data/price?fsym=BTC&tsyms=USD".into(),
response: Some(br#"{"USD": 155.23}"#.to_vec()),
sent: true,
..Default::default()
});
} |
prince_region2_iv_code7.rs | #[doc = "Register `PRINCE_REGION2_IV_CODE7` reader"]
pub struct R(crate::R<PRINCE_REGION2_IV_CODE7_SPEC>);
impl core::ops::Deref for R {
type Target = crate::R<PRINCE_REGION2_IV_CODE7_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl From<crate::R<PRINCE_REGION2_IV_CODE7_SPEC>> for R {
#[inline(always)]
fn from(reader: crate::R<PRINCE_REGION2_IV_CODE7_SPEC>) -> Self {
R(reader)
}
}
#[doc = "Register `PRINCE_REGION2_IV_CODE7` writer"]
pub struct W(crate::W<PRINCE_REGION2_IV_CODE7_SPEC>);
impl core::ops::Deref for W {
type Target = crate::W<PRINCE_REGION2_IV_CODE7_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl core::ops::DerefMut for W {
#[inline(always)]
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
impl From<crate::W<PRINCE_REGION2_IV_CODE7_SPEC>> for W {
#[inline(always)]
fn from(writer: crate::W<PRINCE_REGION2_IV_CODE7_SPEC>) -> Self {
W(writer)
}
}
#[doc = "Field `FIELD` reader - ."]
pub struct FIELD_R(crate::FieldReader<u32, u32>);
impl FIELD_R {
#[inline(always)]
pub(crate) fn new(bits: u32) -> Self {
FIELD_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for FIELD_R {
type Target = crate::FieldReader<u32, u32>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `FIELD` writer - ."]
pub struct FIELD_W<'a> {
w: &'a mut W,
}
impl<'a> FIELD_W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u32) -> &'a mut W |
}
impl R {
#[doc = "Bits 0:31 - ."]
#[inline(always)]
pub fn field(&self) -> FIELD_R {
FIELD_R::new(self.bits as u32)
}
}
impl W {
#[doc = "Bits 0:31 - ."]
#[inline(always)]
pub fn field(&mut self) -> FIELD_W {
FIELD_W { w: self }
}
#[doc = "Writes raw bits to the register."]
#[inline(always)]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.0.bits(bits);
self
}
}
#[doc = ".\n\nThis register you can [`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [prince_region2_iv_code7](index.html) module"]
pub struct PRINCE_REGION2_IV_CODE7_SPEC;
impl crate::RegisterSpec for PRINCE_REGION2_IV_CODE7_SPEC {
type Ux = u32;
}
#[doc = "`read()` method returns [prince_region2_iv_code7::R](R) reader structure"]
impl crate::Readable for PRINCE_REGION2_IV_CODE7_SPEC {
type Reader = R;
}
#[doc = "`write(|w| ..)` method takes [prince_region2_iv_code7::W](W) writer structure"]
impl crate::Writable for PRINCE_REGION2_IV_CODE7_SPEC {
type Writer = W;
}
#[doc = "`reset()` method sets PRINCE_REGION2_IV_CODE7 to value 0"]
impl crate::Resettable for PRINCE_REGION2_IV_CODE7_SPEC {
#[inline(always)]
fn reset_value() -> Self::Ux {
0
}
}
| {
self.w.bits = value as u32;
self.w
} |
main.go | // +build !windows
package pasori
import (
"errors"
"time"
"github.com/google/gousb"
)
type packet struct {
inep *gousb.InEndpoint
outep *gousb.OutEndpoint
}
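// checksum returns the byte that makes cmd plus the sum of buf equal zero
// modulo 256 (the two's-complement checksum used when framing commands below).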
func (p *packet) checksum(cmd byte, buf []byte) byte {
for _, b := range buf {
cmd += b
}
return ^cmd + 1
}
func (p *packet) send(buf []byte) ([]byte, error) {
_, err := p.outep.Write(buf)
if err != nil {
return nil, err
}
rcv := make([]byte, 255)
_, err = p.inep.Read(rcv)
if err != nil {
return nil, err
}
rbuf := make([]byte, 255)
_, err = p.inep.Read(rbuf)
if err != nil {
return nil, err
}
return rbuf, nil
}
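// write frames buf for the reader: preamble 00 00 ff ff ff, little-endian
// length of the 0xd6 command byte plus payload, a checksum over the length
// bytes, the 0xd6 command byte, the payload, a payload checksum and a
// trailing 0x00, then sends the frame and returns the second response read.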
func (p *packet) write(buf []byte) ([]byte, error) {
n := len(buf)
cmd := []byte{0x00, 0x00, 0xff, 0xff, 0xff}
cmd = append(cmd, byte(n+1))
cmd = append(cmd, byte(((n+1)&0xff00)>>8))
cmd = append(cmd, p.checksum(0x00, cmd[5:7]))
cmd = append(cmd, 0xd6)
cmd = append(cmd, buf...)
cmd = append(cmd, p.checksum(0xd6, buf))
cmd = append(cmd, 0x00)
return p.send(cmd)
}
func (p *packet) init() error {
cmd := []byte{0x00, 0x00, 0xff, 0x00, 0xff, 0x00}
_, err := p.outep.Write(cmd)
if err != nil {
return err
}
return nil
}
func (p *packet) setcommandtype() ([]byte, error) {
cmd := []byte{0x2A, 0x01}
return p.write(cmd)
}
func (p *packet) switch_rf() ([]byte, error) {
cmd := []byte{0x06, 0x00}
return p.write(cmd)
}
func (p *packet) inset_rf(nfc_type byte) ([]byte, error) {
var cmd []byte
switch nfc_type {
case 'F':
cmd = []byte{0x00, 0x01, 0x01, 0x0f, 0x01}
case 'A':
cmd = []byte{0x00, 0x02, 0x03, 0x0f, 0x03}
case 'B':
cmd = []byte{0x00, 0x03, 0x07, 0x0f, 0x07}
}
return p.write(cmd)
}
func (p *packet) inset_protocol_1() ([]byte, error) {
cmd := []byte{0x02, 0x00, 0x18, 0x01, 0x01, 0x02, 0x01, 0x03, 0x00, 0x04, 0x00, 0x05, 0x00, 0x06, 0x00, 0x07, 0x08, 0x08, 0x00, 0x09, 0x00, 0x0a, 0x00, 0x0b, 0x00, 0x0c, 0x00, 0x0e, 0x04, 0x0f, 0x00, 0x10, 0x00, 0x11, 0x00, 0x12, 0x00, 0x13, 0x06}
return p.write(cmd)
}
func (p *packet) inset_protocol_2(nfc_type byte) ([]byte, error) {
var cmd []byte
switch nfc_type {
case 'F':
cmd = []byte{0x02, 0x00, 0x18}
case 'A':
cmd = []byte{0x02, 0x00, 0x06, 0x01, 0x00, 0x02, 0x00, 0x05, 0x01, 0x07, 0x07}
case 'B':
cmd = []byte{0x02, 0x00, 0x14, 0x09, 0x01, 0x0a, 0x01, 0x0b, 0x01, 0x0c, 0x01}
}
return p.write(cmd)
}
func (p *packet) sens_req(nfc_type byte) ([]byte, error) {
var cmd []byte
switch nfc_type {
case 'F':
cmd = []byte{0x04, 0x6e, 0x00, 0x06, 0x00, 0xff, 0xff, 0x01, 0x00}
case 'A':
cmd = []byte{0x04, 0x6e, 0x00, 0x26}
case 'B':
cmd = []byte{0x04, 0x6e, 0x00, 0x05, 0x00, 0x10}
}
return p.write(cmd)
}
func (p *packet) parse(buf []byte) []byte {
return buf[9:len(buf)]
}
func | (ctx *gousb.Context, dev *gousb.Device) (*packet, error) {
intf, done, err := dev.DefaultInterface()
if err != nil {
return nil, err
}
defer done()
var in *gousb.InEndpoint
var out *gousb.OutEndpoint
for _, v := range intf.Setting.Endpoints {
if v.Direction == gousb.EndpointDirectionIn && in == nil {
in, err = intf.InEndpoint(v.Number)
if err != nil {
return nil, err
}
}
if v.Direction == gousb.EndpointDirectionOut && out == nil {
out, err = intf.OutEndpoint(v.Number)
if err != nil {
return nil, err
}
}
if in != nil && out != nil {
break
}
}
return &packet{
inep: in,
outep: out,
}, nil
}
func GetID(vid, pid uint16) ([]byte, error) {
ctx := gousb.NewContext()
defer ctx.Close()
dev, err := ctx.OpenDeviceWithVIDPID(gousb.ID(vid), gousb.ID(pid))
if err != nil {
return nil, err
}
defer dev.Close()
p, err := newPacket(ctx, dev)
if err != nil {
return nil, err
}
err = p.init()
if err != nil {
return nil, err
}
_, err = p.setcommandtype()
if err != nil {
return nil, err
}
_, err = p.switch_rf()
if err != nil {
return nil, err
}
var nfc_type byte
nfc_type = 'F'
_, err = p.inset_rf(nfc_type)
if err != nil {
return nil, err
}
_, err = p.inset_protocol_1()
if err != nil {
return nil, err
}
_, err = p.inset_protocol_2(nfc_type)
if err != nil {
return nil, err
}
isloop := true
for isloop {
rbuf, err := p.sens_req(nfc_type)
if err != nil {
return nil, err
}
if rbuf[9] == 0x05 && rbuf[10] == 0x00 {
rbuf := p.parse(rbuf)
if rbuf[6] == 0x14 && rbuf[7] == 0x01 {
idm := rbuf[8 : 8+8]
// pmm := rbuf[16 : 16+8]
return idm, nil
}
if rbuf[6] == 0x50 {
nfcid := rbuf[7 : 7+4]
// appdata := rbuf[11 : 11+4]
// pinfo := rbuf[15 : 15+4]
// fmt.Printf(" NFCID: %v\n", nfcid)
// fmt.Printf(" Application Data: %v\n", appdata)
// fmt.Printf(" Protocol Info: %v\n", pinfo)
return nfcid, nil
}
isloop = false
}
time.Sleep(1 * time.Millisecond)
}
return nil, errors.New("ID not found")
}
| newPacket |
mazda3_joystick.py | #!/usr/bin/env python3
import uinput
from elm327 import ELM327, PROTOCOLS
from mrcrowbar import models as mrc
import math
import time
from optparse import OptionParser
class OptParser( OptionParser ):
def format_epilog( self, formatter ):
return '\n{}\n'.format( '\n'.join( [formatter._format_text( x ) for x in self.epilog.split( '\n' )] ) )
class Steering( mrc.Block ):
RANGE = 0x00D2
axis_raw = mrc.UInt16_BE( 0x00 )
@property
def axis( self ):
return min( max( (255*(self.axis_raw - 0x8000)//self.RANGE), -255 ), 255 )
class | ( mrc.Block ):
RANGE = 0xC8
axis_raw = mrc.UInt8( 0x06 )
@property
def axis( self ):
return min( max( (255*(self.axis_raw)//self.RANGE), 0 ), 255 )
class Brake( mrc.Block ):
button = mrc.Bits( 0x02, 0b01000000 )
class Cruise( mrc.Block ):
button = mrc.Bits( 0x00, 0b10000000 )
class Controls( mrc.Block ):
driver_door = mrc.Bits( 0x00, 0b10000000 )
high_beams = mrc.Bits( 0x03, 0b01000000 )
class Mazda3:
LATCH_TIME = 0.1
PRESS_THRESHOLD = 32
STEER_THRESHOLD = 64
SHOVE_THRESHOLD = 128
def __init__( self, name, mapping ):
print( 'Creating uinput device "{}"...'.format( name ) )
self.device = uinput.Device( mapping, name )
self.steering = 0
self.accelerator = 0
self.brake = 0
self.high_beams = 0
self.cruise_t = self.driver_door_t = time.time() + self.LATCH_TIME
self.cruise = 0
self.driver_door = 0
self.cruise_prev = 0
self.driver_door_prev = 0
def update( self, msg_id, msg_b ):
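# CAN message IDs handled below: 0x4da steering, 0x201 accelerator,
# 0x205 brake, 0x4ec cruise button, 0x433 door / high-beam controls.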
t = time.time()
self.cruise_prev = self.cruise
self.driver_door_prev = self.driver_door
if msg_id == 0x4da:
self.steering = Steering( msg_b ).axis
elif msg_id == 0x201:
self.accelerator = Accelerator( msg_b ).axis
elif msg_id == 0x205:
self.brake = Brake( msg_b ).button
elif msg_id == 0x4ec:
self.cruise = Cruise( msg_b ).button
elif msg_id == 0x433:
obj = Controls( msg_b )
self.high_beams = obj.high_beams
self.driver_door = obj.driver_door
else:
return
if self.cruise != self.cruise_prev:
self.cruise_t = t
if self.driver_door != self.driver_door_prev:
self.driver_door_t = t
self.set_controls()
return
def set_controls( self ):
pass
class Mazda3Joystick( Mazda3 ):
NAME = 'Mazda 3 Joystick'
DEVICE = [
uinput.ABS_WHEEL + (-255, 255, 0, 0),
uinput.ABS_GAS + (0, 255, 0, 0),
uinput.BTN_0,
uinput.BTN_1,
uinput.BTN_2,
uinput.BTN_3
]
def __init__( self ):
super().__init__( name=self.NAME, mapping=self.DEVICE )
def set_controls( self ):
t = time.time()
self.device.emit( uinput.ABS_WHEEL, self.steering )
self.device.emit( uinput.ABS_GAS, self.accelerator )
self.device.emit( uinput.BTN_0, self.brake )
self.device.emit( uinput.BTN_1, self.high_beams )
self.device.emit( uinput.BTN_2, 1 if t < (self.cruise_t + self.LATCH_TIME) else 0 )
self.device.emit( uinput.BTN_3, 1 if t < (self.driver_door_t + self.LATCH_TIME) else 0 )
return
class Mazda3Doom( Mazda3Joystick ):
NAME = 'Mazda 3 Doom'
DEVICE = [
uinput.ABS_WHEEL + (-255, 255, 0, 0),
uinput.ABS_GAS + (-255, 255, 0, 0),
uinput.BTN_0,
uinput.BTN_1,
uinput.BTN_2,
uinput.BTN_3
]
class Mazda3DOS( Mazda3Joystick ):
NAME = 'Mazda 3 DOS'
DEVICE = [
uinput.ABS_WHEEL + (-255, 255, 0, 0),
uinput.ABS_GAS + (-255, 255, 0, 0),
uinput.BTN_0,
uinput.BTN_1,
uinput.BTN_2,
uinput.BTN_3
]
def set_controls( self ):
t = time.time()
self.device.emit( uinput.ABS_WHEEL, self.steering )
self.device.emit( uinput.ABS_GAS, self.accelerator*2-255 )
self.device.emit( uinput.BTN_0, self.brake )
self.device.emit( uinput.BTN_1, self.high_beams )
self.device.emit( uinput.BTN_2, 1 if t < (self.cruise_t + self.LATCH_TIME) else 0 )
self.device.emit( uinput.BTN_3, 1 if t < (self.driver_door_t + self.LATCH_TIME) else 0 )
return
class Mazda3Descent( Mazda3 ):
NAME = 'Mazda 3 Descent'
DEVICE = [
uinput.ABS_WHEEL + (-255, 255, 0, 0),
uinput.ABS_GAS + (-255, 255, 0, 0),
uinput.BTN_0,
uinput.BTN_1,
uinput.BTN_2,
uinput.BTN_3,
uinput.KEY_UP,
uinput.KEY_DOWN
]
DOUBLE_TAP = 0.5
def __init__( self ):
super().__init__( name=self.NAME, mapping=self.DEVICE )
self.high_beams_prev = 0
self.high_beams_t = time.time()
self.high_beams_key = uinput.KEY_DOWN
def update( self, msg_id, msg_b ):
t = time.time()
self.high_beams_prev = self.high_beams
super().update( msg_id, msg_b )
if self.high_beams != self.high_beams_prev:
if self.high_beams:
self.high_beams_key = uinput.KEY_UP if (t - self.high_beams_t < self.DOUBLE_TAP) else uinput.KEY_DOWN
self.device.emit( self.high_beams_key, 1 )
self.high_beams_t = t
else:
self.device.emit( self.high_beams_key, 0 )
def set_controls( self ):
t = time.time()
self.device.emit( uinput.ABS_WHEEL, self.steering )
self.device.emit( uinput.ABS_GAS, self.accelerator )
self.device.emit( uinput.BTN_0, self.brake )
self.device.emit( uinput.BTN_2, 1 if t < (self.cruise_t + self.LATCH_TIME) else 0 )
self.device.emit( uinput.BTN_3, 1 if t < (self.driver_door_t + self.LATCH_TIME) else 0 )
return
class Mazda3Grim( Mazda3 ):
NAME = 'Mazda 3 Grim Fandango'
DEVICE = [
uinput.KEY_LEFT,
uinput.KEY_UP,
uinput.KEY_RIGHT,
uinput.KEY_U,
uinput.KEY_LEFTSHIFT,
uinput.KEY_E,
uinput.KEY_P,
uinput.KEY_I
]
def __init__( self ):
super().__init__( name=self.NAME, mapping=self.DEVICE )
def set_controls( self ):
t = time.time()
self.device.emit( uinput.KEY_LEFT, 1 if self.steering < -self.STEER_THRESHOLD else 0 )
self.device.emit( uinput.KEY_RIGHT, 1 if self.steering > self.STEER_THRESHOLD else 0 )
self.device.emit( uinput.KEY_UP, 1 if self.accelerator > self.PRESS_THRESHOLD else 0 )
self.device.emit( uinput.KEY_LEFTSHIFT, 1 if self.accelerator > self.SHOVE_THRESHOLD else 0 )
self.device.emit( uinput.KEY_U, self.brake )
self.device.emit( uinput.KEY_E, self.high_beams )
self.device.emit( uinput.KEY_P, 1 if t < self.cruise_t + self.LATCH_TIME else 0 )
self.device.emit( uinput.KEY_I, 1 if t < self.driver_door_t + self.LATCH_TIME else 0 )
return
class Mazda3Sonic( Mazda3 ):
NAME = 'Mazda 3 Sonic'
DEVICE = [
uinput.KEY_LEFT,
uinput.KEY_UP,
uinput.KEY_RIGHT,
uinput.KEY_DOWN,
uinput.KEY_Z,
uinput.KEY_ENTER
]
def __init__( self ):
super().__init__( name=self.NAME, mapping=self.DEVICE )
def set_controls( self ):
t = time.time()
self.device.emit( uinput.KEY_LEFT, 1 if self.steering < -self.STEER_THRESHOLD else 0 )
self.device.emit( uinput.KEY_RIGHT, 1 if self.steering > self.STEER_THRESHOLD else 0 )
self.device.emit( uinput.KEY_Z, 1 if self.accelerator > self.PRESS_THRESHOLD else 0 )
self.device.emit( uinput.KEY_DOWN, self.brake )
self.device.emit( uinput.KEY_UP, self.high_beams )
self.device.emit( uinput.KEY_ENTER, 1 if t < self.cruise_t + self.LATCH_TIME else 0 )
return
CONTROLLERS = {
'joystick': Mazda3Joystick,
'grim': Mazda3Grim,
'descent': Mazda3Descent,
'doom': Mazda3Doom,
'dos': Mazda3DOS,
'sonic': Mazda3Sonic,
}
if __name__ == '__main__':
usage = 'Usage: %prog [options]'
parser = OptParser( epilog='Protocols supported by the ELM327:\n{}'.format( PROTOCOLS ) )
parser.add_option( '-g', '--game', dest='game', help='Game configuration to use (choices: {})'.format( ' '.join( CONTROLLERS.keys() ) ) )
parser.add_option( '-d', '--device', dest='device', help='Path to ELM327 serial device' )
parser.add_option( '-b', '--baudrate', dest='baud_rate', help='Baud rate' )
parser.add_option( '-p', '--protocol', dest='protocol', help='ELM327 message protocol to use' )
(options, argv) = parser.parse_args()
args = {}
controller_type = 'joystick'
if options.game and options.game in CONTROLLERS:
controller_type = options.game
elif len( argv ) >= 1 and argv[0] in CONTROLLERS:
controller_type = argv[0]
controller = CONTROLLERS[controller_type]()
if options.device:
args['device'] = options.device
elif len( argv ) >= 2:
args['device'] = argv[1]
if options.baud_rate:
args['baud_rate'] = options.baud_rate
elif len( argv ) >= 3:
args['baud_rate'] = argv[2]
if options.protocol:
args['protocol'] = options.protocol
elif len( argv ) >= 4:
args['protocol'] = argv[3]
elm = ELM327( **args )
elm.reset()
elm.set_can_whitelist( [0x4da, 0x201, 0x205, 0x4ec, 0x433] )
elm.start_can()
try:
while True:
msg_id, msg_b = elm.recv_can()
if msg_id >= 0:
controller.update( msg_id, msg_b )
else:
print('-- Miss: {}'.format( msg_b ))
except EOFError:
print('-- Hit the end')
except KeyboardInterrupt:
pass
elm.get_prompt()
| Accelerator |
validate.go | // Copyright 2019 The Kubernetes Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package index
import (
"regexp"
"strings"
"github.com/pkg/errors"
"sigs.k8s.io/krew/pkg/constants"
)
var (
safePluginRegexp = regexp.MustCompile(`^[\w-]+$`)
// windowsForbidden is taken from https://docs.microsoft.com/en-us/windows/desktop/FileIO/naming-a-file
windowsForbidden = []string{"CON", "PRN", "AUX", "NUL", "COM1", "COM2",
"COM3", "COM4", "COM5", "COM6", "COM7", "COM8", "COM9", "LPT1", "LPT2",
"LPT3", "LPT4", "LPT5", "LPT6", "LPT7", "LPT8", "LPT9"}
)
// IsSafePluginName checks if the plugin Name is safe to use.
func IsSafePluginName(name string) bool {
if !safePluginRegexp.MatchString(name) {
return false
}
for _, forbidden := range windowsForbidden {
if strings.ToLower(forbidden) == strings.ToLower(name) {
return false
}
}
return true
}
func | (apiVersion string) bool {
return apiVersion == constants.CurrentAPIVersion
}
// Validate TODO(lbb)
func (p Plugin) Validate(name string) error {
if !isSupportedAPIVersion(p.APIVersion) {
return errors.Errorf("plugin manifest has apiVersion=%q, not supported in this version of krew (try updating plugin index or install a newer version of krew)", p.APIVersion)
}
if p.Kind != constants.PluginKind {
return errors.Errorf("plugin manifest has kind=%q, but only %q is supported", p.Kind, constants.PluginKind)
}
if !IsSafePluginName(name) {
return errors.Errorf("the plugin name %q is not allowed, must match %q", name, safePluginRegexp.String())
}
if p.Name != name {
return errors.Errorf("plugin should be named %q, not %q", name, p.Name)
}
if p.Spec.ShortDescription == "" {
return errors.New("should have a short description")
}
if len(p.Spec.Platforms) == 0 {
return errors.New("should have a platform specified")
}
for _, pl := range p.Spec.Platforms {
if err := pl.Validate(); err != nil {
return errors.Wrapf(err, "platform (%+v) is badly constructed", pl)
}
}
return nil
}
// Validate TODO(lbb)
func (p Platform) Validate() error {
if (p.Sha256 != "") != (p.URI != "") {
return errors.New("can't get version URI and sha have both to be set or unset")
}
if p.Head == "" && p.URI == "" {
return errors.New("head or URI have to be set")
}
if p.Bin == "" {
return errors.New("bin has to be set")
}
if len(p.Files) == 0 {
return errors.New("can't have a plugin without specifying file operations")
}
return nil
}
| isSupportedAPIVersion |
aexpr.rs | use crate::logical_plan::Context;
use crate::prelude::*;
use crate::utils::rename_field;
use polars_arrow::prelude::QuantileInterpolOptions;
use polars_core::frame::groupby::{fmt_groupby_column, GroupByMethod};
use polars_core::prelude::*;
use polars_core::utils::get_supertype;
use polars_utils::arena::{Arena, Node};
use std::sync::Arc;
#[derive(Clone, Debug)]
pub enum AAggExpr {
Min(Node),
Max(Node),
Median(Node),
NUnique(Node),
First(Node),
Last(Node),
Mean(Node),
List(Node),
Quantile {
expr: Node,
quantile: f64,
interpol: QuantileInterpolOptions,
},
Sum(Node),
Count(Node),
Std(Node),
Var(Node),
AggGroups(Node),
}
// AExpr representation of Nodes which are allocated in an Arena
#[derive(Clone, Debug)]
pub enum AExpr {
IsUnique(Node),
Duplicated(Node),
Reverse(Node),
Explode(Node),
Alias(Node, Arc<str>),
Column(Arc<str>),
Literal(LiteralValue),
BinaryExpr {
left: Node,
op: Operator,
right: Node,
},
Not(Node),
IsNotNull(Node),
IsNull(Node),
Cast {
expr: Node,
data_type: DataType,
strict: bool,
},
Sort {
expr: Node,
options: SortOptions,
},
Take {
expr: Node,
idx: Node,
},
SortBy {
expr: Node,
by: Vec<Node>,
reverse: Vec<bool>,
},
Filter {
input: Node,
by: Node,
},
Agg(AAggExpr),
Ternary {
predicate: Node,
truthy: Node,
falsy: Node,
},
Function {
input: Vec<Node>,
function: NoEq<Arc<dyn SeriesUdf>>,
output_type: GetOutput,
options: FunctionOptions,
},
Shift {
input: Node,
periods: i64,
},
Window {
function: Node,
partition_by: Vec<Node>,
order_by: Option<Node>,
options: WindowOptions,
},
Wildcard,
Slice {
input: Node,
offset: i64,
length: usize,
},
BinaryFunction {
input_a: Node,
input_b: Node,
function: NoEq<Arc<dyn SeriesBinaryUdf>>,
/// Delays output type evaluation until input schema is known.
output_field: NoEq<Arc<dyn BinaryUdfOutputField>>,
},
}
impl Default for AExpr {
fn default() -> Self {
AExpr::Wildcard
}
}
impl AExpr {
/// This should be a 1 on 1 copy of the get_type method of Expr until Expr is completely phased out.
pub(crate) fn get_type(
&self,
schema: &Schema,
ctxt: Context,
arena: &Arena<AExpr>,
) -> Result<DataType> {
self.to_field(schema, ctxt, arena)
.map(|f| f.data_type().clone())
}
/// Get Field result of the expression. The schema is the input data.
pub(crate) fn to_field(
&self,
schema: &Schema,
ctxt: Context,
arena: &Arena<AExpr>,
) -> Result<Field> {
use AExpr::*;
match self {
Window { function, .. } => {
let e = arena.get(*function);
let field = e.to_field(schema, ctxt, arena);
match e {
Agg(_) => field,
_ => {
let field = field?;
Ok(Field::new(
field.name(),
DataType::List(Box::new(field.data_type().clone())),
))
}
}
}
IsUnique(expr) => {
let field = arena.get(*expr).to_field(schema, ctxt, arena)?;
Ok(Field::new(field.name(), DataType::Boolean))
}
Duplicated(expr) => {
let field = arena.get(*expr).to_field(schema, ctxt, arena)?;
Ok(Field::new(field.name(), DataType::Boolean))
}
Reverse(expr) => arena.get(*expr).to_field(schema, ctxt, arena),
Explode(expr) => {
let field = arena.get(*expr).to_field(schema, ctxt, arena)?;
if let DataType::List(inner) = field.data_type() {
Ok(Field::new(field.name(), *inner.clone()))
} else {
Ok(field)
}
}
Alias(expr, name) => Ok(Field::new(
name,
arena.get(*expr).get_type(schema, ctxt, arena)?,
)),
Column(name) => {
let field = schema.field_with_name(name).map(|f| f.clone())?;
Ok(field)
}
Literal(sv) => Ok(Field::new("literal", sv.get_datatype())),
BinaryExpr { left, right, op } => {
let left_type = arena.get(*left).get_type(schema, ctxt, arena)?;
let right_type = arena.get(*right).get_type(schema, ctxt, arena)?;
let expr_type = match op {
Operator::Lt
| Operator::Gt
| Operator::Eq
| Operator::NotEq
| Operator::And
| Operator::LtEq
| Operator::GtEq
| Operator::Or => DataType::Boolean,
_ => get_supertype(&left_type, &right_type)?,
};
let out_field;
let out_name = {
out_field = arena.get(*left).to_field(schema, ctxt, arena)?;
out_field.name().as_str()
};
Ok(Field::new(out_name, expr_type))
}
Not(_) => Ok(Field::new("not", DataType::Boolean)),
IsNull(_) => Ok(Field::new("is_null", DataType::Boolean)),
IsNotNull(_) => Ok(Field::new("is_not_null", DataType::Boolean)),
Sort { expr, .. } => arena.get(*expr).to_field(schema, ctxt, arena),
Take { expr, .. } => arena.get(*expr).to_field(schema, ctxt, arena),
SortBy { expr, .. } => arena.get(*expr).to_field(schema, ctxt, arena),
Filter { input, .. } => arena.get(*input).to_field(schema, ctxt, arena),
Agg(agg) => {
use AAggExpr::*;
let field = match agg {
Min(expr) => field_by_context(
arena.get(*expr).to_field(schema, ctxt, arena)?,
ctxt,
GroupByMethod::Min,
),
Max(expr) => field_by_context(
arena.get(*expr).to_field(schema, ctxt, arena)?,
ctxt,
GroupByMethod::Max,
),
Median(expr) => {
let mut field = field_by_context(
arena.get(*expr).to_field(schema, ctxt, arena)?,
ctxt,
GroupByMethod::Median,
);
if field.data_type() != &DataType::Utf8 {
field.coerce(DataType::Float64);
}
field
}
Mean(expr) => {
let mut field = field_by_context(
arena.get(*expr).to_field(schema, ctxt, arena)?,
ctxt,
GroupByMethod::Mean,
);
field.coerce(DataType::Float64);
field
}
First(expr) => field_by_context(
arena.get(*expr).to_field(schema, ctxt, arena)?,
ctxt,
GroupByMethod::First,
),
Last(expr) => field_by_context(
arena.get(*expr).to_field(schema, ctxt, arena)?,
ctxt,
GroupByMethod::Last,
),
List(expr) => field_by_context(
arena.get(*expr).to_field(schema, ctxt, arena)?,
ctxt,
GroupByMethod::List,
),
Std(expr) => {
let field = arena.get(*expr).to_field(schema, ctxt, arena)?;
let field = Field::new(field.name(), DataType::Float64);
let mut field = field_by_context(field, ctxt, GroupByMethod::Std);
field.coerce(DataType::Float64);
field
}
Var(expr) => {
let field = arena.get(*expr).to_field(schema, ctxt, arena)?;
let field = Field::new(field.name(), DataType::Float64);
let mut field = field_by_context(field, ctxt, GroupByMethod::Var);
field.coerce(DataType::Float64);
field
}
NUnique(expr) => {
let field = arena.get(*expr).to_field(schema, ctxt, arena)?;
let field = Field::new(field.name(), DataType::UInt32);
match ctxt {
Context::Default => field,
Context::Aggregation => {
let new_name =
fmt_groupby_column(field.name(), GroupByMethod::NUnique);
rename_field(&field, &new_name)
}
}
}
Sum(expr) => field_by_context(
arena.get(*expr).to_field(schema, ctxt, arena)?,
ctxt,
GroupByMethod::Sum,
),
Count(expr) => {
let field = arena.get(*expr).to_field(schema, ctxt, arena)?;
let field = Field::new(field.name(), DataType::UInt32);
match ctxt {
Context::Default => field,
Context::Aggregation => {
let new_name =
fmt_groupby_column(field.name(), GroupByMethod::Count);
rename_field(&field, &new_name)
}
}
}
AggGroups(expr) => {
let field = arena.get(*expr).to_field(schema, ctxt, arena)?;
let new_name = fmt_groupby_column(field.name(), GroupByMethod::Groups);
Field::new(&new_name, DataType::List(DataType::UInt32.into()))
}
Quantile {
expr,
quantile,
interpol,
} => {
let mut field = field_by_context(
arena.get(*expr).to_field(schema, ctxt, arena)?,
ctxt,
GroupByMethod::Quantile(*quantile, *interpol),
);
field.coerce(DataType::Float64);
field
}
};
Ok(field)
}
Cast {
expr, data_type, ..
} => {
let field = arena.get(*expr).to_field(schema, ctxt, arena)?;
Ok(Field::new(field.name(), data_type.clone()))
}
Ternary { truthy, falsy, .. } => {
let mut truthy = arena.get(*truthy).to_field(schema, ctxt, arena)?;
let falsy = arena.get(*falsy).to_field(schema, ctxt, arena)?;
if let DataType::Null = *truthy.data_type() {
truthy.coerce(falsy.data_type().clone());
Ok(truthy)
} else {
let st = get_supertype(truthy.data_type(), falsy.data_type())?;
truthy.coerce(st);
Ok(truthy)
}
}
Function {
output_type, input, ..
} => {
let fields = input
.iter()
.map(|node| arena.get(*node).to_field(schema, ctxt, arena))
.collect::<Result<Vec<_>>>()?;
Ok(output_type.get_field(schema, ctxt, &fields))
}
BinaryFunction {
input_a,
input_b,
output_field,
..
} => {
let field_a = arena.get(*input_a).to_field(schema, ctxt, arena)?;
let field_b = arena.get(*input_b).to_field(schema, ctxt, arena)?;
let out = output_field.get_field(schema, ctxt, &field_a, &field_b);
// TODO: remove Option?
Ok(out.expect("field should be set"))
}
Shift { input, .. } => arena.get(*input).to_field(schema, ctxt, arena),
Slice { input, .. } => arena.get(*input).to_field(schema, ctxt, arena),
Wildcard => panic!("should be no wildcard at this point"),
}
}
}
pub(crate) fn | (
mut field: Field,
ctxt: Context,
groupby_method: GroupByMethod,
) -> Field {
if &DataType::Boolean == field.data_type() {
field = Field::new(field.name(), DataType::UInt32)
}
match ctxt {
Context::Default => field,
Context::Aggregation => {
let new_name = fmt_groupby_column(field.name(), groupby_method);
rename_field(&field, &new_name)
}
}
}
| field_by_context |
CSSModel.js | /*
* Copyright (C) 2010 Google Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following disclaimer
* in the documentation and/or other materials provided with the
* distribution.
* * Neither the name of Google Inc. nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
// @ts-nocheck
// TODO(crbug.com/1011811): Enable TypeScript compiler checks
import * as Common from '../common/common.js';
import * as HostModule from '../host/host.js';
import * as Platform from '../platform/platform.js';
import * as ProtocolClient from '../protocol_client/protocol_client.js';
import {CSSMatchedStyles} from './CSSMatchedStyles.js';
import {CSSMedia} from './CSSMedia.js';
import {CSSStyleRule} from './CSSRule.js';
import {CSSStyleDeclaration, Type} from './CSSStyleDeclaration.js';
import {CSSStyleSheetHeader} from './CSSStyleSheetHeader.js';
import {DOMModel, DOMNode} from './DOMModel.js'; // eslint-disable-line no-unused-vars
import {Events as ResourceTreeModelEvents, ResourceTreeModel} from './ResourceTreeModel.js';
import {Capability, SDKModel, Target} from './SDKModel.js'; // eslint-disable-line no-unused-vars
import {SourceMapManager} from './SourceMapManager.js';
/**
* @unrestricted
*/
export class CSSModel extends SDKModel {
/**
* @param {!Target} target
*/
constructor(target) {
super(target);
this._domModel = /** @type {!DOMModel} */ (target.model(DOMModel));
/** @type {!SourceMapManager<!CSSStyleSheetHeader>} */
this._sourceMapManager = new SourceMapManager(target);
this._agent = target.cssAgent();
this._styleLoader = new ComputedStyleLoader(this);
this._resourceTreeModel = target.model(ResourceTreeModel);
if (this._resourceTreeModel) {
this._resourceTreeModel.addEventListener(
ResourceTreeModelEvents.MainFrameNavigated, this._resetStyleSheets, this);
}
target.registerCSSDispatcher(new CSSDispatcher(this));
if (!target.suspended()) {
this._enable();
}
/** @type {!Map.<string, !CSSStyleSheetHeader>} */
this._styleSheetIdToHeader = new Map();
/** @type {!Map.<string, !Map.<!Protocol.Page.FrameId, !Set.<!Protocol.CSS.StyleSheetId>>>} */
this._styleSheetIdsForURL = new Map();
/** @type {!Map.<!CSSStyleSheetHeader, !Promise<?string>>} */
this._originalStyleSheetText = new Map();
/** @type {boolean} */
this._isRuleUsageTrackingEnabled = false;
this._sourceMapManager.setEnabled(Common.Settings.Settings.instance().moduleSetting('cssSourceMapsEnabled').get());
Common.Settings.Settings.instance()
.moduleSetting('cssSourceMapsEnabled')
.addChangeListener(event => this._sourceMapManager.setEnabled(/** @type {boolean} */ (event.data)));
}
/**
* @param {string} sourceURL
* @return {!Array<!CSSStyleSheetHeader>}
*/
headersForSourceURL(sourceURL) {
const headers = [];
for (const headerId of this.styleSheetIdsForURL(sourceURL)) {
const header = this.styleSheetHeaderForId(headerId);
if (header) {
headers.push(header);
}
}
return headers;
}
/**
* @param {string} sourceURL
* @param {number} lineNumber
* @param {number} columnNumber
* @return {!Array<!CSSLocation>}
*/
createRawLocationsByURL(sourceURL, lineNumber, columnNumber) {
const headers = this.headersForSourceURL(sourceURL);
headers.sort(stylesheetComparator);
const compareToArgLocation = (_, header) => lineNumber - header.startLine || columnNumber - header.startColumn;
const endIndex = headers.upperBound(undefined, compareToArgLocation);
if (!endIndex) {
return [];
}
const locations = [];
const last = headers[endIndex - 1];
for (let index = endIndex - 1;
index >= 0 && headers[index].startLine === last.startLine && headers[index].startColumn === last.startColumn;
--index) {
if (headers[index].containsLocation(lineNumber, columnNumber)) {
locations.push(new CSSLocation(headers[index], lineNumber, columnNumber));
}
}
return locations;
/**
* @param {!CSSStyleSheetHeader} a
* @param {!CSSStyleSheetHeader} b
* @return {number}
*/
function stylesheetComparator(a, b) {
return a.startLine - b.startLine || a.startColumn - b.startColumn || a.id.localeCompare(b.id);
}
}
/**
* @return {!SourceMapManager<!CSSStyleSheetHeader>}
*/
sourceMapManager() {
return this._sourceMapManager;
}
/**
* @param {string} text
* @return {string}
*/
static trimSourceURL(text) {
let sourceURLIndex = text.lastIndexOf('/*# sourceURL=');
if (sourceURLIndex === -1) {
sourceURLIndex = text.lastIndexOf('/*@ sourceURL=');
if (sourceURLIndex === -1) {
return text;
}
}
const sourceURLLineIndex = text.lastIndexOf('\n', sourceURLIndex);
if (sourceURLLineIndex === -1) {
return text;
}
const sourceURLLine = text.substr(sourceURLLineIndex + 1).split('\n', 1)[0];
const sourceURLRegex = /[\040\t]*\/\*[#@] sourceURL=[\040\t]*([^\s]*)[\040\t]*\*\/[\040\t]*$/;
if (sourceURLLine.search(sourceURLRegex) === -1) {
return text;
}
return text.substr(0, sourceURLLineIndex) + text.substr(sourceURLLineIndex + sourceURLLine.length + 1);
}
/**
* @return {!DOMModel}
*/
domModel() {
return this._domModel;
}
/**
* @param {!Protocol.CSS.StyleSheetId} styleSheetId
* @param {!TextUtils.TextRange} range
* @param {string} text
* @param {boolean} majorChange
* @return {!Promise<boolean>}
*/
async setStyleText(styleSheetId, range, text, majorChange) {
try {
await this._ensureOriginalStyleSheetText(styleSheetId);
const stylePayloads =
await this._agent.setStyleTexts([{styleSheetId: styleSheetId, range: range.serializeToObject(), text: text}]);
if (!stylePayloads || stylePayloads.length !== 1) {
return false;
}
this._domModel.markUndoableState(!majorChange);
const edit = new Edit(styleSheetId, range, text, stylePayloads[0]);
this._fireStyleSheetChanged(styleSheetId, edit);
return true;
} catch (e) {
return false;
}
}
/**
* @param {!Protocol.CSS.StyleSheetId} styleSheetId
* @param {!TextUtils.TextRange} range
* @param {string} text
* @return {!Promise<boolean>}
*/
async setSelectorText(styleSheetId, range, text) {
HostModule.userMetrics.actionTaken(Host.UserMetrics.Action.StyleRuleEdited);
try {
await this._ensureOriginalStyleSheetText(styleSheetId);
const selectorPayload = await this._agent.setRuleSelector(styleSheetId, range, text);
if (!selectorPayload) {
return false;
}
this._domModel.markUndoableState();
const edit = new Edit(styleSheetId, range, text, selectorPayload);
this._fireStyleSheetChanged(styleSheetId, edit);
return true;
} catch (e) {
return false;
}
}
/**
* @param {!Protocol.CSS.StyleSheetId} styleSheetId
* @param {!TextUtils.TextRange} range
* @param {string} text
* @return {!Promise<boolean>}
*/
async setKeyframeKey(styleSheetId, range, text) {
HostModule.userMetrics.actionTaken(Host.UserMetrics.Action.StyleRuleEdited);
try {
await this._ensureOriginalStyleSheetText(styleSheetId);
const payload = await this._agent.setKeyframeKey(styleSheetId, range, text);
if (!payload) {
return false;
}
this._domModel.markUndoableState();
const edit = new Edit(styleSheetId, range, text, payload);
this._fireStyleSheetChanged(styleSheetId, edit);
return true;
} catch (e) {
return false;
}
}
startCoverage() {
this._isRuleUsageTrackingEnabled = true;
return this._agent.startRuleUsageTracking();
}
/**
* @return {!Promise<{timestamp: number, coverage:!Array<!Protocol.CSS.RuleUsage>}>}
*/
async takeCoverageDelta() {
const r = await this._agent.invoke_takeCoverageDelta({});
const timestamp = (r && r.timestamp) || 0;
const coverage = (r && r.coverage) || [];
return {timestamp, coverage};
}
/**
* @return {!Promise}
*/
stopCoverage() {
this._isRuleUsageTrackingEnabled = false;
return this._agent.stopRuleUsageTracking();
}
/**
* @return {!Promise<!Array<!CSSMedia>>}
*/
async mediaQueriesPromise() {
const payload = await this._agent.getMediaQueries();
return payload ? CSSMedia.parseMediaArrayPayload(this, payload) : [];
}
/**
* @return {boolean}
*/
isEnabled() {
return this._isEnabled;
}
/**
* @return {!Promise}
*/
async _enable() {
await this._agent.enable();
this._isEnabled = true;
if (this._isRuleUsageTrackingEnabled) {
await this.startCoverage();
}
this.dispatchEventToListeners(Events.ModelWasEnabled);
}
/**
* @param {!Protocol.DOM.NodeId} nodeId
* @return {!Promise<?CSSMatchedStyles>}
*/
async matchedStylesPromise(nodeId) {
const response = await this._agent.invoke_getMatchedStylesForNode({nodeId});
if (response[ProtocolClient.InspectorBackend.ProtocolError]) {
return null;
}
const node = this._domModel.nodeForId(nodeId);
if (!node) {
return null;
}
return new CSSMatchedStyles(
this, /** @type {!DOMNode} */ (node), response.inlineStyle || null, response.attributesStyle || null,
response.matchedCSSRules || [], response.pseudoElements || [], response.inherited || [],
response.cssKeyframesRules || []);
}
/**
* @param {!Protocol.CSS.StyleSheetId} styleSheetId
* @return {!Promise<!Array<string>>}
*/
classNamesPromise(styleSheetId) {
return this._agent.collectClassNames(styleSheetId).then(classNames => classNames || []);
}
/**
* @param {!Protocol.DOM.NodeId} nodeId
* @return {!Promise<?Map<string, string>>}
*/
computedStylePromise(nodeId) {
return this._styleLoader.computedStylePromise(nodeId);
}
/**
* @param {number} nodeId
* @return {!Promise<?ContrastInfo>}
*/
async backgroundColorsPromise(nodeId) {
const response = this._agent.invoke_getBackgroundColors({nodeId});
if (response[ProtocolClient.InspectorBackend.ProtocolError]) {
return null;
}
return response;
}
/**
* @param {number} nodeId
* @return {!Promise<?Array<!Protocol.CSS.PlatformFontUsage>>}
*/
platformFontsPromise(nodeId) {
return this._agent.getPlatformFontsForNode(nodeId);
}
/**
* @return {!Array.<!CSSStyleSheetHeader>}
*/
allStyleSheets() {
const values = [...this._styleSheetIdToHeader.values()];
/**
* @param {!CSSStyleSheetHeader} a
* @param {!CSSStyleSheetHeader} b
* @return {number}
*/
function styleSheetComparator(a, b) {
if (a.sourceURL < b.sourceURL) {
return -1;
}
if (a.sourceURL > b.sourceURL) {
return 1;
}
return a.startLine - b.startLine || a.startColumn - b.startColumn;
}
values.sort(styleSheetComparator);
return values;
}
/**
* @param {!Protocol.DOM.NodeId} nodeId
* @return {!Promise<?InlineStyleResult>}
*/
async inlineStylesPromise(nodeId) {
const response = await this._agent.invoke_getInlineStylesForNode({nodeId});
if (response[ProtocolClient.InspectorBackend.ProtocolError] || !response.inlineStyle) {
return null;
}
const inlineStyle = new CSSStyleDeclaration(this, null, response.inlineStyle, Type.Inline);
const attributesStyle = response.attributesStyle ?
new CSSStyleDeclaration(this, null, response.attributesStyle, Type.Attributes) :
null;
return new InlineStyleResult(inlineStyle, attributesStyle);
}
/**
* @param {!DOMNode} node
* @param {string} pseudoClass
* @param {boolean} enable
* @return {boolean}
*/
forcePseudoState(node, pseudoClass, enable) {
const pseudoClasses = node.marker(PseudoStateMarker) || [];
if (enable) {
if (pseudoClasses.indexOf(pseudoClass) >= 0) {
return false;
}
pseudoClasses.push(pseudoClass);
node.setMarker(PseudoStateMarker, pseudoClasses);
} else {
if (pseudoClasses.indexOf(pseudoClass) < 0) {
return false;
}
Platform.ArrayUtilities.removeElement(pseudoClasses, pseudoClass);
if (pseudoClasses.length) {
node.setMarker(PseudoStateMarker, pseudoClasses);
} else {
node.setMarker(PseudoStateMarker, null);
}
}
this._agent.forcePseudoState(node.id, pseudoClasses);
this.dispatchEventToListeners(Events.PseudoStateForced, {node: node, pseudoClass: pseudoClass, enable: enable});
return true;
}
/**
* @param {!DOMNode} node
* @return {?Array<string>} state
*/
pseudoState(node) {
return node.marker(PseudoStateMarker) || [];
}
/**
* @param {!Protocol.CSS.StyleSheetId} styleSheetId
* @param {!TextUtils.TextRange} range
* @param {string} newMediaText
* @return {!Promise<boolean>}
*/
async setMediaText(styleSheetId, range, newMediaText) {
HostModule.userMetrics.actionTaken(Host.UserMetrics.Action.StyleRuleEdited);
try {
await this._ensureOriginalStyleSheetText(styleSheetId);
const mediaPayload = await this._agent.setMediaText(styleSheetId, range, newMediaText);
if (!mediaPayload) {
return false;
}
this._domModel.markUndoableState();
const edit = new Edit(styleSheetId, range, newMediaText, mediaPayload);
this._fireStyleSheetChanged(styleSheetId, edit);
return true;
} catch (e) {
return false;
}
}
/**
* @param {!Protocol.CSS.StyleSheetId} styleSheetId
* @param {string} ruleText
* @param {!TextUtils.TextRange} ruleLocation
* @return {!Promise<?CSSStyleRule>}
*/
async addRule(styleSheetId, ruleText, ruleLocation) {
try {
await this._ensureOriginalStyleSheetText(styleSheetId);
const rulePayload = await this._agent.addRule(styleSheetId, ruleText, ruleLocation);
if (!rulePayload) {
return null;
}
this._domModel.markUndoableState();
const edit = new Edit(styleSheetId, ruleLocation, ruleText, rulePayload);
this._fireStyleSheetChanged(styleSheetId, edit);
return new CSSStyleRule(this, rulePayload);
} catch (e) {
return null;
}
}
/**
* @param {!DOMNode} node
* @return {!Promise<?CSSStyleSheetHeader>}
*/
async requestViaInspectorStylesheet(node) {
const frameId = node.frameId() || (this._resourceTreeModel ? this._resourceTreeModel.mainFrame.id : '');
const headers = [...this._styleSheetIdToHeader.values()];
const styleSheetHeader = headers.find(header => header.frameId === frameId && header.isViaInspector());
if (styleSheetHeader) {
return styleSheetHeader;
}
try {
const styleSheetId = await this._agent.createStyleSheet(frameId);
return styleSheetId && this._styleSheetIdToHeader.get(styleSheetId) || null;
} catch (e) {
return null;
}
}
mediaQueryResultChanged() {
this.dispatchEventToListeners(Events.MediaQueryResultChanged);
}
fontsUpdated() {
this.dispatchEventToListeners(Events.FontsUpdated);
}
/**
* @param {!Protocol.CSS.StyleSheetId} id
* @return {?CSSStyleSheetHeader}
*/
styleSheetHeaderForId(id) {
return this._styleSheetIdToHeader.get(id) || null;
}
/**
* @return {!Array.<!CSSStyleSheetHeader>}
*/
styleSheetHeaders() {
return [...this._styleSheetIdToHeader.values()];
}
/**
* @param {!Protocol.CSS.StyleSheetId} styleSheetId
* @param {!Edit=} edit
*/
_fireStyleSheetChanged(styleSheetId, edit) {
this.dispatchEventToListeners(Events.StyleSheetChanged, {styleSheetId: styleSheetId, edit: edit});
}
/**
* @param {!Protocol.CSS.StyleSheetId} styleSheetId
* @return {!Promise<?string>}
*/
_ensureOriginalStyleSheetText(styleSheetId) {
const header = this.styleSheetHeaderForId(styleSheetId);
if (!header) {
return Promise.resolve(/** @type {?string} */ (null));
}
let promise = this._originalStyleSheetText.get(header);
if (!promise) {
promise = this.getStyleSheetText(header.id);
this._originalStyleSheetText.set(header, promise);
this._originalContentRequestedForTest(header);
}
return promise;
}
/**
* @param {!CSSStyleSheetHeader} header
*/
_originalContentRequestedForTest(header) {
}
/**
* @param {!CSSStyleSheetHeader} header
* @return {!Promise<?string>}
*/
originalStyleSheetText(header) {
return this._ensureOriginalStyleSheetText(header.id);
}
/**
*
* @return {!Iterable<!CSSStyleSheetHeader>}
*/
getAllStyleSheetHeaders() {
return this._styleSheetIdToHeader.values();
}
/**
* @param {!Protocol.CSS.CSSStyleSheetHeader} header
*/
_styleSheetAdded(header) {
console.assert(!this._styleSheetIdToHeader.get(header.styleSheetId));
const styleSheetHeader = new CSSStyleSheetHeader(this, header);
this._styleSheetIdToHeader.set(header.styleSheetId, styleSheetHeader);
const url = styleSheetHeader.resourceURL();
if (!this._styleSheetIdsForURL.get(url)) {
this._styleSheetIdsForURL.set(url, new Map());
}
const frameIdToStyleSheetIds = this._styleSheetIdsForURL.get(url);
let styleSheetIds = frameIdToStyleSheetIds.get(styleSheetHeader.frameId);
if (!styleSheetIds) {
styleSheetIds = new Set();
frameIdToStyleSheetIds.set(styleSheetHeader.frameId, styleSheetIds);
}
styleSheetIds.add(styleSheetHeader.id);
this._sourceMapManager.attachSourceMap(styleSheetHeader, styleSheetHeader.sourceURL, styleSheetHeader.sourceMapURL);
this.dispatchEventToListeners(Events.StyleSheetAdded, styleSheetHeader);
}
/**
* @param {!Protocol.CSS.StyleSheetId} id
*/
_styleSheetRemoved(id) {
const header = this._styleSheetIdToHeader.get(id);
console.assert(header);
if (!header) {
return;
}
this._styleSheetIdToHeader.delete(id);
const url = header.resourceURL();
const frameIdToStyleSheetIds = this._styleSheetIdsForURL.get(url);
console.assert(frameIdToStyleSheetIds, 'No frameId to styleSheetId map is available for given style sheet URL.');
frameIdToStyleSheetIds.get(header.frameId).delete(id);
if (!frameIdToStyleSheetIds.get(header.frameId).size) {
frameIdToStyleSheetIds.delete(header.frameId);
if (!frameIdToStyleSheetIds.size) {
this._styleSheetIdsForURL.delete(url);
}
}
this._originalStyleSheetText.delete(header);
this._sourceMapManager.detachSourceMap(header);
this.dispatchEventToListeners(Events.StyleSheetRemoved, header);
}
/**
* @param {string} url
* @return {!Array.<!Protocol.CSS.StyleSheetId>}
*/
styleSheetIdsForURL(url) {
const frameIdToStyleSheetIds = this._styleSheetIdsForURL.get(url);
if (!frameIdToStyleSheetIds) {
return [];
}
const result = [];
for (const styleSheetIds of frameIdToStyleSheetIds.values()) {
result.push(...styleSheetIds);
}
return result;
}
/**
* @param {!Protocol.CSS.StyleSheetId} styleSheetId
* @param {string} newText
* @param {boolean} majorChange
* @return {!Promise<?string>}
*/
async setStyleSheetText(styleSheetId, newText, majorChange) {
const header = /** @type {!CSSStyleSheetHeader} */ (this._styleSheetIdToHeader.get(styleSheetId));
console.assert(header);
newText = CSSModel.trimSourceURL(newText);
if (header.hasSourceURL) {
newText += '\n/*# sourceURL=' + header.sourceURL + ' */';
}
await this._ensureOriginalStyleSheetText(styleSheetId);
const response = await this._agent.invoke_setStyleSheetText({styleSheetId: header.id, text: newText});
const sourceMapURL = response.sourceMapURL;
this._sourceMapManager.detachSourceMap(header);
header.setSourceMapURL(sourceMapURL);
this._sourceMapManager.attachSourceMap(header, header.sourceURL, header.sourceMapURL);
if (sourceMapURL === null) {
return 'Error in CSS.setStyleSheetText';
}
this._domModel.markUndoableState(!majorChange);
this._fireStyleSheetChanged(styleSheetId);
return null;
}
/**
* @param {!Protocol.CSS.StyleSheetId} styleSheetId
* @return {!Promise<?string>}
*/
async getStyleSheetText(styleSheetId) {
try {
const text = await this._agent.getStyleSheetText(styleSheetId);
return text && CSSModel.trimSourceURL(text);
} catch (e) {
return null;
}
}
_resetStyleSheets() {
const headers = [...this._styleSheetIdToHeader.values()];
this._styleSheetIdsForURL.clear();
this._styleSheetIdToHeader.clear();
for (const header of headers) {
this._sourceMapManager.detachSourceMap(header);
this.dispatchEventToListeners(Events.StyleSheetRemoved, header);
}
}
/**
* @override
* @return {!Promise}
*/
suspendModel() {
this._isEnabled = false;
return this._agent.disable().then(this._resetStyleSheets.bind(this));
}
/**
* @override
* @return {!Promise}
*/
async resumeModel() {
return this._enable();
}
/**
* @param {number} nodeId
* @param {string} name
* @param {string} value
*/
setEffectivePropertyValueForNode(nodeId, name, value) {
this._agent.setEffectivePropertyValueForNode(nodeId, name, value);
}
/**
* @param {!DOMNode} node
* @return {!Promise.<?CSSMatchedStyles>}
*/
cachedMatchedCascadeForNode(node) {
if (this._cachedMatchedCascadeNode !== node) {
this.discardCachedMatchedCascade();
}
this._cachedMatchedCascadeNode = node;
if (!this._cachedMatchedCascadePromise) {
this._cachedMatchedCascadePromise = this.matchedStylesPromise(node.id);
}
return this._cachedMatchedCascadePromise;
}
discardCachedMatchedCascade() {
delete this._cachedMatchedCascadeNode;
delete this._cachedMatchedCascadePromise;
}
/**
* @override
*/
dispose() {
super.dispose();
this._sourceMapManager.dispose();
}
}
/** @enum {symbol} */
export const Events = {
FontsUpdated: Symbol('FontsUpdated'),
MediaQueryResultChanged: Symbol('MediaQueryResultChanged'),
ModelWasEnabled: Symbol('ModelWasEnabled'),
PseudoStateForced: Symbol('PseudoStateForced'),
StyleSheetAdded: Symbol('StyleSheetAdded'),
StyleSheetChanged: Symbol('StyleSheetChanged'),
StyleSheetRemoved: Symbol('StyleSheetRemoved')
};
const PseudoStateMarker = 'pseudo-state-marker';
/**
* @unrestricted
*/
export class Edit {
/**
* @param {!Protocol.CSS.StyleSheetId} styleSheetId
* @param {!TextUtils.TextRange} oldRange
* @param {string} newText
* @param {?Object} payload
*/
constructor(styleSheetId, oldRange, newText, payload) {
this.styleSheetId = styleSheetId;
this.oldRange = oldRange;
this.newRange = TextUtils.TextRange.fromEdit(oldRange, newText);
this.newText = newText;
this.payload = payload;
}
}
export class CSSLocation {
/**
* @param {!CSSStyleSheetHeader} header
* @param {number} lineNumber
* @param {number=} columnNumber
*/
constructor(header, lineNumber, columnNumber) {
this._cssModel = header.cssModel();
this.styleSheetId = header.id;
this.url = header.resourceURL();
this.lineNumber = lineNumber;
this.columnNumber = columnNumber || 0;
}
/**
* @return {!CSSModel}
*/
cssModel() {
return this._cssModel;
}
/**
* @return {?CSSStyleSheetHeader}
*/
header() {
return this._cssModel.styleSheetHeaderForId(this.styleSheetId);
}
}
/**
* @implements {Protocol.CSSDispatcher}
* @unrestricted
*/
class CSSDispatcher {
/**
* @param {!CSSModel} cssModel
*/
constructor(cssModel) {
this._cssModel = cssModel;
}
/**
* @override
*/
mediaQueryResultChanged() {
this._cssModel.mediaQueryResultChanged();
}
/**
* @override
*/
fontsUpdated() {
this._cssModel.fontsUpdated();
}
/**
* @override
* @param {!Protocol.CSS.StyleSheetId} styleSheetId
*/
styleSheetChanged(styleSheetId) {
this._cssModel._fireStyleSheetChanged(styleSheetId);
}
/**
* @override
* @param {!Protocol.CSS.CSSStyleSheetHeader} header
*/
styleSheetAdded(header) {
this._cssModel._styleSheetAdded(header);
}
/**
* @override
* @param {!Protocol.CSS.StyleSheetId} id
*/
styleSheetRemoved(id) {
this._cssModel._styleSheetRemoved(id);
}
}
/**
* @unrestricted
*/
class | {
/**
* @param {!CSSModel} cssModel
*/
constructor(cssModel) {
this._cssModel = cssModel;
/** @type {!Map<!Protocol.DOM.NodeId, !Promise<?Map<string, string>>>} */
this._nodeIdToPromise = new Map();
}
/**
* @param {!Protocol.DOM.NodeId} nodeId
* @return {!Promise<?Map<string, string>>}
*/
computedStylePromise(nodeId) {
let promise = this._nodeIdToPromise.get(nodeId);
if (promise) {
return promise;
}
promise = this._cssModel._agent.getComputedStyleForNode(nodeId).then(parsePayload.bind(this));
this._nodeIdToPromise.set(nodeId, promise);
return promise;
/**
* @param {?Array<!Protocol.CSS.CSSComputedStyleProperty>} computedPayload
* @return {?Map<string, string>}
* @this {ComputedStyleLoader}
*/
function parsePayload(computedPayload) {
this._nodeIdToPromise.delete(nodeId);
if (!computedPayload || !computedPayload.length) {
return null;
}
const result = new Map();
for (const property of computedPayload) {
result.set(property.name, property.value);
}
return result;
}
}
}
/**
* @unrestricted
*/
export class InlineStyleResult {
/**
* @param {?CSSStyleDeclaration} inlineStyle
* @param {?CSSStyleDeclaration} attributesStyle
*/
constructor(inlineStyle, attributesStyle) {
this.inlineStyle = inlineStyle;
this.attributesStyle = attributesStyle;
}
}
SDKModel.register(CSSModel, Capability.DOM, true);
/** @typedef {{backgroundColors: ?Array<string>, computedFontSize: string, computedFontWeight: string}} */
export let ContrastInfo;
| ComputedStyleLoader |