content
stringlengths 0
894k
| type
stringclasses 2
values |
---|---|
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
from azure_provisioning_e2e.service_helper import Helper, connection_string_to_hostname
from azure.iot.device import ProvisioningDeviceClient
from azure.iot.device.common import X509
from provisioningserviceclient import (
ProvisioningServiceClient,
IndividualEnrollment,
EnrollmentGroup,
)
from provisioningserviceclient.protocol.models import AttestationMechanism, ReprovisionPolicy
import pytest
import logging
import os
import uuid
from scripts.create_x509_chain_crypto import (
before_cert_creation_from_pipeline,
call_intermediate_cert_and_device_cert_creation_from_pipeline,
delete_directories_certs_created_from_pipeline,
)
# Verbose logging for the whole e2e run (DPS/IoTHub SDK traffic included).
logging.basicConfig(level=logging.DEBUG)

# Common names/passwords used when generating the X509 certificate chain.
intermediate_common_name = "e2edpswingardium"
intermediate_password = "leviosa"
# Unique per-run device CN so reruns cannot collide on registration ids.
device_common_name = "e2edpsexpecto" + str(uuid.uuid4())
device_password = "patronum"

# Service-side clients built from pipeline-provided connection strings.
service_client = ProvisioningServiceClient.create_from_connection_string(
    os.getenv("PROVISIONING_SERVICE_CONNECTION_STRING")
)
device_registry_helper = Helper(os.getenv("IOTHUB_CONNECTION_STRING"))
linked_iot_hub = connection_string_to_hostname(os.getenv("IOTHUB_CONNECTION_STRING"))

PROVISIONING_HOST = os.getenv("PROVISIONING_DEVICE_ENDPOINT")
ID_SCOPE = os.getenv("PROVISIONING_DEVICE_IDSCOPE")

# 8 device certs are generated by the fixture below; these indices carve
# them up so each test uses its own non-overlapping certs.
certificate_count = 8
type_to_device_indices = {
    "individual_with_device_id": [1],
    "individual_no_device_id": [2],
    "group_intermediate": [3, 4, 5],
    "group_ca": [6, 7, 8],
}
@pytest.fixture(scope="module", autouse=True)
def before_all_tests(request):
    """Module-scoped fixture: generate the root/intermediate/device X509
    certificate chain before any test runs, and delete the generated cert
    directories after the module finishes."""
    logging.info("set up certificates before cert related tests")
    before_cert_creation_from_pipeline()
    call_intermediate_cert_and_device_cert_creation_from_pipeline(
        intermediate_common_name=intermediate_common_name,
        device_common_name=device_common_name,
        ca_password=os.getenv("PROVISIONING_ROOT_PASSWORD"),
        intermediate_password=intermediate_password,
        device_password=device_password,
        device_count=8,  # NOTE(review): presumably should track certificate_count — confirm
    )

    def after_module():
        logging.info("tear down certificates after cert related tests")
        delete_directories_certs_created_from_pipeline()

    request.addfinalizer(after_module)
@pytest.mark.it(
    "A device gets provisioned to the linked IoTHub with the user supplied device_id different from the registration_id of the individual enrollment that has been created with a selfsigned X509 authentication"
)
@pytest.mark.parametrize("protocol", ["mqtt", "mqttws"])
def test_device_register_with_device_id_for_a_x509_individual_enrollment(protocol):
    """Provision a device whose enrollment carries an explicit device_id and
    verify it lands in IoTHub under that id rather than the registration id."""
    device_id = "e2edpsflyingfeather"
    device_index = type_to_device_indices.get("individual_with_device_id")[0]
    # BUG FIX: registration_id was only bound inside the try block, so a
    # failure during enrollment creation raised NameError in the finally
    # clause and masked the original exception.
    registration_id = None
    try:
        individual_enrollment_record = create_individual_enrollment_with_x509_client_certs(
            device_index=device_index, device_id=device_id
        )
        registration_id = individual_enrollment_record.registration_id
        device_cert_file = "demoCA/newcerts/device_cert" + str(device_index) + ".pem"
        device_key_file = "demoCA/private/device_key" + str(device_index) + ".pem"
        registration_result = result_from_register(
            registration_id, device_cert_file, device_key_file, protocol
        )
        # The user-supplied device id must differ from (and win over) the
        # registration id.
        assert device_id != registration_id
        assert_device_provisioned(device_id=device_id, registration_result=registration_result)
        device_registry_helper.try_delete_device(device_id)
    finally:
        if registration_id is not None:
            service_client.delete_individual_enrollment_by_param(registration_id)
@pytest.mark.it(
    "A device gets provisioned to the linked IoTHub with device_id equal to the registration_id of the individual enrollment that has been created with a selfsigned X509 authentication"
)
@pytest.mark.parametrize("protocol", ["mqtt", "mqttws"])
def test_device_register_with_no_device_id_for_a_x509_individual_enrollment(protocol):
    """Provision a device whose enrollment has no explicit device_id and
    verify IoTHub uses the registration id as the device id."""
    device_index = type_to_device_indices.get("individual_no_device_id")[0]
    # BUG FIX: registration_id was only bound inside the try block, so a
    # failure during enrollment creation raised NameError in the finally
    # clause and masked the original exception.
    registration_id = None
    try:
        individual_enrollment_record = create_individual_enrollment_with_x509_client_certs(
            device_index=device_index
        )
        registration_id = individual_enrollment_record.registration_id
        device_cert_file = "demoCA/newcerts/device_cert" + str(device_index) + ".pem"
        device_key_file = "demoCA/private/device_key" + str(device_index) + ".pem"
        registration_result = result_from_register(
            registration_id, device_cert_file, device_key_file, protocol
        )
        # With no device_id on the enrollment, the registration id is used.
        assert_device_provisioned(
            device_id=registration_id, registration_result=registration_result
        )
        device_registry_helper.try_delete_device(registration_id)
    finally:
        if registration_id is not None:
            service_client.delete_individual_enrollment_by_param(registration_id)
@pytest.mark.it(
    "A group of devices get provisioned to the linked IoTHub with device_ids equal to the individual registration_ids inside a group enrollment that has been created with intermediate X509 authentication"
)
@pytest.mark.parametrize("protocol", ["mqtt", "mqttws"])
def test_group_of_devices_register_with_no_device_id_for_a_x509_intermediate_authentication_group_enrollment(
    protocol
):
    """Create a group enrollment attested by the intermediate certificate,
    register each device with its cert chain (device cert + intermediate),
    and verify every device in the group is provisioned to the linked hub."""
    group_id = "e2e-intermediate-hogwarts" + str(uuid.uuid4())
    common_device_id = device_common_name
    devices_indices = type_to_device_indices.get("group_intermediate")
    device_count_in_group = len(devices_indices)
    reprovision_policy = ReprovisionPolicy(migrate_device_data=True)
    try:
        intermediate_cert_filename = "demoCA/newcerts/intermediate_cert.pem"
        with open(intermediate_cert_filename, "r") as intermediate_pem:
            intermediate_cert_content = intermediate_pem.read()

        attestation_mechanism = AttestationMechanism.create_with_x509_signing_certs(
            intermediate_cert_content
        )
        enrollment_group_provisioning_model = EnrollmentGroup.create(
            group_id, attestation=attestation_mechanism, reprovision_policy=reprovision_policy
        )
        service_client.create_or_update(enrollment_group_provisioning_model)

        count = 0
        common_device_key_input_file = "demoCA/private/device_key"
        common_device_cert_input_file = "demoCA/newcerts/device_cert"
        common_device_inter_cert_chain_file = "demoCA/newcerts/out_inter_device_chain_cert"
        for index in devices_indices:
            count = count + 1
            device_id = common_device_id + str(index)
            device_key_input_file = common_device_key_input_file + str(index) + ".pem"
            device_cert_input_file = common_device_cert_input_file + str(index) + ".pem"
            device_inter_cert_chain_file = common_device_inter_cert_chain_file + str(index) + ".pem"
            # Concatenate device cert + intermediate cert into the chain file
            # that is presented to DPS during registration.
            filenames = [device_cert_input_file, intermediate_cert_filename]
            with open(device_inter_cert_chain_file, "w") as outfile:
                for fname in filenames:
                    with open(fname) as infile:
                        outfile.write(infile.read())

            registration_result = result_from_register(
                registration_id=device_id,
                device_cert_file=device_inter_cert_chain_file,
                device_key_file=device_key_input_file,
                protocol=protocol,
            )
            assert_device_provisioned(device_id=device_id, registration_result=registration_result)
            device_registry_helper.try_delete_device(device_id)

        # Make sure space is okay. The following line must be outside for loop.
        assert count == device_count_in_group
    finally:
        service_client.delete_enrollment_group_by_param(group_id)
@pytest.mark.skip(
    reason="The enrollment is never properly created on the pipeline and it is always created without any CA reference and eventually the registration fails"
)
@pytest.mark.it(
    "A group of devices get provisioned to the linked IoTHub with device_ids equal to the individual registration_ids inside a group enrollment that has been created with an already uploaded ca cert X509 authentication"
)
@pytest.mark.parametrize("protocol", ["mqtt", "mqttws"])
def test_group_of_devices_register_with_no_device_id_for_a_x509_ca_authentication_group_enrollment(
    protocol
):
    """Like the intermediate-cert group test, but the group enrollment is
    attested by a CA certificate reference already uploaded to DPS."""
    group_id = "e2e-ca-beauxbatons" + str(uuid.uuid4())
    common_device_id = device_common_name
    devices_indices = type_to_device_indices.get("group_ca")
    device_count_in_group = len(devices_indices)
    reprovision_policy = ReprovisionPolicy(migrate_device_data=True)
    try:
        DPS_GROUP_CA_CERT = os.getenv("PROVISIONING_ROOT_CERT")
        attestation_mechanism = AttestationMechanism.create_with_x509_ca_refs(
            ref1=DPS_GROUP_CA_CERT
        )
        enrollment_group_provisioning_model = EnrollmentGroup.create(
            group_id, attestation=attestation_mechanism, reprovision_policy=reprovision_policy
        )
        service_client.create_or_update(enrollment_group_provisioning_model)

        count = 0
        intermediate_cert_filename = "demoCA/newcerts/intermediate_cert.pem"
        common_device_key_input_file = "demoCA/private/device_key"
        common_device_cert_input_file = "demoCA/newcerts/device_cert"
        common_device_inter_cert_chain_file = "demoCA/newcerts/out_inter_device_chain_cert"
        for index in devices_indices:
            count = count + 1
            device_id = common_device_id + str(index)
            device_key_input_file = common_device_key_input_file + str(index) + ".pem"
            device_cert_input_file = common_device_cert_input_file + str(index) + ".pem"
            device_inter_cert_chain_file = common_device_inter_cert_chain_file + str(index) + ".pem"
            # Build the cert chain (device + intermediate) presented to DPS;
            # the chain contents are logged for pipeline debugging.
            filenames = [device_cert_input_file, intermediate_cert_filename]
            with open(device_inter_cert_chain_file, "w") as outfile:
                for fname in filenames:
                    with open(fname) as infile:
                        logging.debug("Filename is {}".format(fname))
                        content = infile.read()
                        logging.debug(content)
                        outfile.write(content)

            registration_result = result_from_register(
                registration_id=device_id,
                device_cert_file=device_inter_cert_chain_file,
                device_key_file=device_key_input_file,
                protocol=protocol,
            )
            assert_device_provisioned(device_id=device_id, registration_result=registration_result)
            device_registry_helper.try_delete_device(device_id)

        # Make sure space is okay. The following line must be outside for loop.
        assert count == device_count_in_group
    finally:
        service_client.delete_enrollment_group_by_param(group_id)
def assert_device_provisioned(device_id, registration_result):
    """
    Assert that the device has been provisioned correctly to iothub from the registration result as well as from the device registry
    :param device_id: The device id
    :param registration_result: The registration result
    """
    # First check what DPS reported back.
    state = registration_result.registration_state
    assert registration_result.status == "assigned"
    assert state.device_id == device_id
    assert state.assigned_hub == linked_iot_hub

    # Then confirm the hub's device registry agrees.
    device = device_registry_helper.get_device(device_id)
    assert device is not None
    assert device.authentication.type == "selfSigned"
    assert device.device_id == device_id
def create_individual_enrollment_with_x509_client_certs(device_index, device_id=None):
    """Create an individual enrollment attested by the indexed device's
    self-signed client certificate.

    :param device_index: which generated device cert/key pair to use
    :param device_id: optional explicit device id to set on the enrollment
    :return: the IndividualEnrollment returned by the provisioning service
    """
    registration_id = device_common_name + str(device_index)
    reprovision_policy = ReprovisionPolicy(migrate_device_data=True)
    device_cert_input_file = "demoCA/newcerts/device_cert" + str(device_index) + ".pem"
    with open(device_cert_input_file, "r") as in_device_cert:
        device_cert_content = in_device_cert.read()

    attestation_mechanism = AttestationMechanism.create_with_x509_client_certs(device_cert_content)
    individual_provisioning_model = IndividualEnrollment.create(
        attestation=attestation_mechanism,
        registration_id=registration_id,
        reprovision_policy=reprovision_policy,
        device_id=device_id,
    )
    return service_client.create_or_update(individual_provisioning_model)
def result_from_register(registration_id, device_cert_file, device_key_file, protocol):
    """Register a device against DPS using its X509 cert/key pair.

    :param registration_id: the DPS registration id to register under
    :param device_cert_file: path to the device cert (or cert chain) PEM
    :param device_key_file: path to the device private key PEM
    :param protocol: "mqtt" (plain MQTT) or "mqttws" (MQTT over websockets)
    :return: the registration result returned by the device client
    """
    x509 = X509(cert_file=device_cert_file, key_file=device_key_file, pass_phrase=device_password)
    # The protocol string only controls the websockets transport flag.
    protocol_boolean_mapping = {"mqtt": False, "mqttws": True}
    provisioning_device_client = ProvisioningDeviceClient.create_from_x509_certificate(
        provisioning_host=PROVISIONING_HOST,
        registration_id=registration_id,
        id_scope=ID_SCOPE,
        x509=x509,
        websockets=protocol_boolean_mapping[protocol],
    )
    return provisioning_device_client.register()
|
python
|
import myhdl
from myhdl import intbv
from rhea import Constants
from rhea.system import Bit, Byte
from rhea.system import ControlStatusBase
from ..misc import assign
class ControlStatus(ControlStatusBase):
    def __init__(self):
        """ The control-status object for the SPI controller
        Attributes:
            enable: enable the SPI controller
            freeze: freeze the current state
            bypass_fifo: the write_data and read_data sink and source
                the FIFO instead of the FIFOBus
            clock_polarity:
            clock_phase:
            manual_slave_select:
            clock_divisor:
            rx_empty:
            rx_full:
            tx_empty:
            tx_full:
            tx_byte:
            rx_byte:
            tx_fifo_count:
            rx_fifo_count:
            slave_select:
            slave_select_fault:
        The following cso attributes use the pre-defined hardware types,
        these are used to give "hints" to the automated register-file
        construction. Mark the status (read-only) signals as driven,
        then the tools know these are read-only signals.
        """
        # Control (writable) fields; Bit(1)/Bit(0) set the reset value,
        # config=True marks register-file configuration fields.
        self.enable = Bit(1)
        self.freeze = Bit(0)
        self.bypass_fifo = Bit(config=True)
        self.loopback = Bit(config=True)
        self.clock_polarity = Bit(config=True)
        self.clock_phase = Bit(config=True)
        self.manual_slave_select = Bit(0)
        self.clock_divisor = Byte(config=True)
        self.slave_select = Byte(config=True)
        # Status (read-only) fields; driven=True marks a signal driven by
        # the hardware rather than the register file.
        self.slave_select_fault = Bit(driven=True)
        self.tx_empty = Bit()
        self.tx_full = Bit()
        self.tx_byte = Byte()
        self.tx_write = Bit()    # WriteStrobe(self.tx_byte)
        self.tx_fifo_count = Byte()
        self.rx_empty = Bit()
        self.rx_full = Bit()
        self.rx_byte = Byte()
        self.rx_read = Bit()    # ReadStrobe(self.rx_byte)
        self.rx_byte_valid = Bit()
        self.rx_fifo_count = Byte()
        super(ControlStatus, self).__init__()

    @myhdl.block
    def default_assign(self):
        """Drive the configuration signals with constant values for the
        static-configuration (no register file) case."""
        cfgbits = self.get_config_bits()
        cfg = Constants(**cfgbits)
        insts = []

        # In the static configuration case only one value makes sense
        # for certain configuration signals, those are set here
        insts += [assign(self.enable, True)]
        insts += [assign(self.freeze, False)]

        for ii, k in enumerate(cfgbits):
            configsig = getattr(self, k)
            configval = getattr(cfg, k)
            assert isinstance(configval, (bool, int, intbv))
            # Coerce to bool when the signal's value type is bool so the
            # assignment type-matches the signal.
            if isinstance(configsig.val, bool):
                configval = bool(configval)
            insts += [assign(configsig, configval)]

        # NOTE(review): returns myhdl.instances() (collected from this
        # function's locals) rather than the insts list built above —
        # presumably equivalent; confirm against myhdl's collection rules.
        return myhdl.instances()

    @myhdl.block
    def instances(self):
        """Return the static assignments when the configuration is static,
        otherwise nothing (a register file drives the signals instead)."""
        if self.isstatic:
            inst = self.default_assign()
        else:
            inst = []
        return inst
|
python
|
from django.forms.widgets import Widget, Media
from django.utils.safestring import mark_safe
import django.utils.copycompat as copy
class MultiWidgetLayout(Widget):
    """
    Django's built-in MultiWidget is a widget that is composed of multiple widgets.
    MutliWidtetLayout implements the same concept but the rendering of the composed
    output can be controlled using a layout.
    When subclassing it, you need to call parent constructor passing:
    * layout: A list that contains the layout you want to be rendered. i.e:
        layout = [
            "<label for='%(id)s'>Street:</label>", TextInput(),
            "<label for='%(id)s'>Number:</label>", TextInput(),
            "<label for='%(id)s'>Zip Code:</label>", TextInput()
        ]
    The constructor builds a list of widgets named self.widgets iterating over
    the layout.
    Its render() method is different than other widgets', because it has to
    figure out how to split a single value for display in multiple widgets.
    The ``value`` argument can be one of two things:
    * A list.
    * A normal value (e.g., a string) that has been "compressed" from
      a list of values.
    The render() method calls:
    * render_setup(): This marks self.widgets as localized if necessary,
      if the value is NOT a list it calls decompress to turn it into a list.
      MultiWidgetLayout subclasses must implement decompress(), which takes a
      single "compressed" value and returns a list.
      render_setup() returns a tuple used for rendering the layout in the next step.
    * render_layout(): It iterates over self.layout. If the field is a widget,
      it renders the widget with its corresponding value. Otherwise, it adds the
      string formatted using final_attrs as its context.
    You'll probably want to use this class with MultiValueField.
    """
    def __init__(self, layout, attrs=None):
        self.layout = layout
        self.widgets = []
        # Strings in the layout are HTML fragments; anything else is a widget.
        for field in self.layout:
            if not isinstance(field, basestring):
                self.widgets.append(field)
        super(MultiWidgetLayout, self).__init__(attrs)

    def render_setup(self, name, value, attrs=None):
        # Propagate localization down to the sub-widgets before rendering.
        if self.is_localized:
            for widget in self.widgets:
                widget.is_localized = self.is_localized
        # value should be a list of values mapping to self.widgets
        if not isinstance(value, list):
            value = self.decompress(value)
        final_attrs = self.build_attrs(attrs)
        id_ = final_attrs.get('id', None)
        return (value, final_attrs, id_)

    def render_layout(self, name, value, final_attrs=None, id_=None):
        # i counts only widget entries of the layout; HTML fragment strings
        # are interpolated with final_attrs as their %-format context.
        html = ""
        i = 0
        for field in self.layout:
            # Give each sub-widget a unique id of the form "<id>_<i>".
            if id_ and final_attrs.get('id', '') != "%s_%s" % (id_, i):
                final_attrs = dict(final_attrs, id='%s_%s' % (id_, i))
            if not isinstance(field, basestring):
                try:
                    widget_value = value[i]
                except IndexError:
                    # Fewer values than widgets: render the rest empty.
                    widget_value = None
                html += self.widgets[i].render(name + '_%s' % i, widget_value, final_attrs)
                i += 1
            else:
                html += field % final_attrs
        return html

    def render(self, name, value, attrs=None):
        value, final_attrs, id_ = self.render_setup(name, value, attrs)
        return mark_safe(self.render_layout(name, value, final_attrs, id_))

    def id_for_label(self, id_):
        # See the comment for RadioSelect.id_for_label()
        if id_:
            id_ += '_0'
        return id_
    id_for_label = classmethod(id_for_label)

    def value_from_datadict(self, data, files, name):
        # Collect one value per sub-widget using the "<name>_<i>" convention.
        return [widget.value_from_datadict(data, files, name + '_%s' % i) for i, widget in enumerate(self.widgets)]

    def _has_changed(self, initial, data):
        if initial is None:
            initial = [u'' for x in range(0, len(data))]
        else:
            if not isinstance(initial, list):
                initial = self.decompress(initial)
        # NOTE(review): the loop variables shadow 'initial' and 'data';
        # harmless here because neither is used after the loop.
        for widget, initial, data in zip(self.widgets, initial, data):
            if widget._has_changed(initial, data):
                return True
        return False

    def decompress(self, value):
        """
        Returns a list of decompressed values for the given compressed value.
        The given value can be assumed to be valid, but not necessarily
        non-empty.
        """
        raise NotImplementedError('Subclasses must implement this method.')

    def _get_media(self):
        "Media for a multiwidget is the combination of all media of the subwidgets"
        media = Media()
        for w in self.widgets:
            media = media + w.media
        return media
    media = property(_get_media)

    def __deepcopy__(self, memo):
        # Deep-copy both the widget list and the layout so copies do not
        # share mutable widget state.
        obj = super(MultiWidgetLayout, self).__deepcopy__(memo)
        obj.widgets = copy.deepcopy(self.widgets)
        obj.layout = copy.deepcopy(self.layout)
        return obj
|
python
|
#!/usr/bin/env python2
# coding: utf-8
# Parses decompiled Java source and re-generates a Thrift interface file.
import re
import sys
import os
from os import path
# Thrift wire-type id -> IDL type name, indexed by the TType byte found in
# the decompiled metadata; ids with no IDL spelling are None.
thrift_type_names = [
    None,
    "void",
    "bool",
    "byte",
    "double",
    None,
    "i16",
    None,
    "i32",
    None,
    "i64",
    "string",
    "struct",
    "map",
    "set",
    "list",
    "enum",
]

# When True, emitted declarations carry the obfuscated (original) Java class
# name as a trailing comment.
show_oname = False
class TwoWayMap:
    """A simple bidirectional map: add(a, b) lets you look up b from a
    (map) and a from b (map_back)."""
    def __init__(self, *args):
        self._map = {}
        self._map_back = {}
        for a, b in args:
            self.add(a, b)

    def add(self, a, b):
        """Record the pair (a, b) in both directions."""
        self._map[a] = b
        self._map_back[b] = a

    def map(self, a):
        """Forward lookup: the b paired with *a* (KeyError if absent)."""
        return self._map[a]

    def map_back(self, b):
        """Reverse lookup: the a paired with *b* (KeyError if absent)."""
        return self._map_back[b]

    def map_back_fallback(self, b, default):
        """Reverse lookup returning *default* when *b* is unknown."""
        return self._map_back[b] if b in self._map_back else default

    def items(self):
        return self._map.items()

    def keys(self):
        return self._map.keys()

    def back_keys(self):
        # BUG FIX: previously read self._back_map, which does not exist
        # (the attribute is _map_back), so this always raised AttributeError.
        return self._map_back.keys()
class ThriftField:
    """One numbered field of a struct or method argument list."""
    def __init__(self, name, index):
        self.name = name
        self.index = index
        self.type = None  # filled in later from the parsed field metadata

    def serialize(self):
        """Render as thrift IDL: '<index>: <type> <name>'."""
        parts = (self.index, self.type.tname(), self.name)
        return "{0}: {1} {2}".format(*parts)
class ThriftPrimitive:
    """A primitive thrift type, optionally renamed through a typedef."""
    def __init__(self, type_id, binary=False):
        self.primitive_name = thrift_type_names[type_id]
        self.binary = binary
        self.typedef_name = None

    def tname(self):
        """The typedef name when set, otherwise the primitive name."""
        if self.typedef_name:
            return self.typedef_name
        return self.primitive_name
class ThriftMap:
    """A thrift map container type with key and element types."""
    def __init__(self, ktype, etype):
        self.ktype = ktype
        self.etype = etype

    def tname(self):
        """Render as 'map<K, V>'."""
        key_name = self.ktype.tname()
        val_name = self.etype.tname()
        return "map<{0}, {1}>".format(key_name, val_name)
class ThriftSet:
    """A thrift set container type with a single element type."""
    def __init__(self, etype):
        self.etype = etype

    def tname(self):
        """Render as 'set<E>'."""
        return "set<{0}>".format(self.etype.tname())
class ThriftList:
    """A thrift list container type with a single element type."""
    def __init__(self, etype):
        self.etype = etype

    def tname(self):
        """Render as 'list<E>'."""
        return "list<{0}>".format(self.etype.tname())
class ThriftStruct:
    """A thrift struct (or exception) reconstructed from decompiled code."""
    def __init__(self, oname, name, virtual, exception):
        self.oname = oname          # obfuscated Java class name
        self.name = name            # real thrift struct name
        self.virtual = virtual      # _args/_result wrappers: not emitted
        self.exception = exception  # render as 'exception' instead of 'struct'
        self.fields = {}            # field name -> ThriftField
        self.deps = set()           # structs referenced by our fields

    def tname(self):
        return self.name

    def serialize(self):
        """Render the full struct/exception declaration as IDL text."""
        keyword = "exception" if self.exception else "struct"
        header = "{0} {1} {{".format(keyword, self.tname())
        if show_oname:
            header += " // " + self.oname
        body = ""
        for f in sorted(self.fields.values(), key=lambda f: f.index):
            body += " " + f.serialize() + ";\n"
        return header + "\n" + body + "}"
class ThriftEnum:
    """A thrift enum; its real name may be guessed from usage sites."""
    def __init__(self, oname, name):
        self.oname = oname
        self.name = name
        self.values = []            # list of (constant name, int value)
        self.name_candidates = {}   # (struct, field) key -> vote count

    def tname(self):
        return self.name

    def best_name_candidate(self):
        """The (struct, field) pair with the highest vote count."""
        ranked = sorted(self.name_candidates.items(),
                        key=lambda i: i[1], reverse=True)
        return ranked[0][0]

    def serialize(self):
        """Render the enum declaration as IDL text."""
        header = "enum {0} {{".format(self.tname())
        if show_oname:
            header += " // " + self.oname
        body = ""
        for name, value in self.values:
            body += " {0} = {1};\n".format(name, value)
        return header + "\n" + body + "}"
class ThriftMethod:
    """A service method rebuilt from its _args and _result structs."""
    def __init__(self, name, args, return_, exceptions):
        self.name = name
        self.args = args            # ThriftFields, sorted by index
        self.return_ = return_      # return type ('void' when none)
        self.exceptions = exceptions  # ThriftFields of the throws clause

    def serialize(self):
        """Render the method signature (with throws clause) as IDL text."""
        pieces = [" " + self.return_.tname() + " " + self.name + "(\n"]
        for f in self.args:
            pieces.append(" " + f.serialize() + ",\n")
        r = "".join(pieces).rstrip(",\n")
        r += ")"
        if self.exceptions:
            r += " throws ("
            for f in self.exceptions:
                r += f.serialize()
            r = r.rstrip(",")
            r += ")"
        return r + ";"
class ThriftService:
    """Reconstructs a .thrift interface from decompiled (.jad) Java sources.

    fileroot  -- directory containing the decompiled .jad files
    indexfile -- class whose source references every service method name
    name      -- name to give the emitted 'service' block
    """
    def __init__(self, fileroot, indexfile, name):
        self.fileroot = fileroot
        self.indexfile = indexfile
        self.name = name
        self.methods = {}
        self.structs = {}
        self.enums = {}
        self._enum_name_hints = {}
        self._code = {}   # memoized decompiled sources, keyed by class name
        # Readable runtime-class name -> obfuscated class name.
        # Based on version "?"
        self.names = TwoWayMap(
            ("TStruct", "j"),
            ("TField", "b"),
            ("EnumMetaData", "dtd"),
            ("FieldMetaData", "dte"),
            ("FieldValueMetaData", "dtf"),
            ("ListMetaData", "dtg"),
            ("MapMetaData", "dth"),
            ("SetMetaData", "dti"),
            ("StructMetaData", "dtj"),
        )
        self._parse()

    def serialize(self):
        """Render the whole reconstructed interface as thrift IDL text."""
        r = "// This file was generated with rethrift.py\n"
        r += "// enum names were guessed, but the rest of the names should be accurate.\n\n"
        for e in sorted(self.enums.values(), key=lambda e: e.tname()):
            r += e.serialize() + "\n\n"
        structs = sorted((s for s in self.structs.values() if not s.virtual), key=lambda s: s.tname())
        for s in self._struct_dep_sort(structs):
            r += s.serialize() + "\n\n"
        r += "service " + self.name + " {"
        for m in sorted(self.methods.values(), key=lambda m: m.name):
            r += "\n" + m.serialize() + "\n"
        r += "}\n"
        return r

    def obfuscate(self, name):
        """Map a readable runtime-class name to its obfuscated name."""
        # BUG FIX: previously iterated self.name_map, which does not exist
        # (NameError at call time); the mapping lives in self.names.
        for u, o in self.names.items():
            if u == name:
                return o
        return name

    def unobfuscate(self, name):
        """Map an obfuscated class name back to its readable name."""
        # BUG FIX: same nonexistent self.name_map attribute as obfuscate().
        for u, o in self.names.items():
            if o == name:
                return u
        return name

    def _get_code(self, fn):
        """Load (and memoize) the decompiled source for class *fn*."""
        if fn in self._code:
            return self._code[fn]
        code = open(path.join(self.fileroot, fn + ".jad"), "r").read()
        self._code[fn] = code
        return code

    def _get_enum(self, fn):
        """Parse (once) the enum constants declared in class *fn*."""
        if fn in self.enums:
            return self.enums[fn]
        e = ThriftEnum(fn, self.names.map_back_fallback(fn, None))
        self.enums[fn] = e
        for m in re.finditer("new " + fn + "\\(\"([^\"]+)\", \\d+, (\\d+)\\);", self._get_code(fn)):
            e.values.append((m.group(1), int(m.group(2))))
        return e

    def _parse_meta(self, meta, context):
        """Parse one field-metadata expression; returns (type, remaining)."""
        # TODO: typedefs
        m = re.match("new " + self.names.map("EnumMetaData") + "\\(([a-z]+)\)", meta)
        if m:
            enum = self._get_enum(m.group(1))
            context["enums"].add(enum)
            return enum, meta[len(m.group(0)):]
        m = re.match("new " + self.names.map("FieldValueMetaData") + "\\(\\(byte\\)(\\d+)(?:, \"([^\"]+)\")?\\)", meta)
        if m:
            # NOTE(review): group(2) captures a typedef name but is unused,
            # while True is passed as the 'binary' flag — this looks swapped
            # with the ", true" case below; confirm before changing.
            return ThriftPrimitive(int(m.group(1)), True), meta[len(m.group(0)):]
        # binary strings?
        m = re.match("new " + self.names.map("FieldValueMetaData") + "\\(\\(byte\\)(\\d+), true\\)", meta)
        if m:
            return ThriftPrimitive(int(m.group(1))), meta[len(m.group(0)):]
        m = re.match("new " + self.names.map("ListMetaData") + "\\(", meta)
        if m:
            etype, meta = self._parse_meta(meta[len(m.group(0)):], context)
            return ThriftList(etype), meta[1:]
        m = re.match("new " + self.names.map("MapMetaData") + "\\(", meta)
        if m:
            ktype, meta = self._parse_meta(meta[len(m.group(0)):], context)
            meta = meta[2:]   # skip ", " between key and value metadata
            etype, meta = self._parse_meta(meta, context)
            return ThriftMap(ktype, etype), meta[1:]
        m = re.match("new " + self.names.map("SetMetaData") + "\\(", meta)
        if m:
            etype, meta = self._parse_meta(meta[len(m.group(0)):], context)
            # BUG FIX: previously returned ThriftList here, so set<> fields
            # were emitted as list<> in the generated IDL.
            return ThriftSet(etype), meta[1:]
        m = re.match("new " + self.names.map("StructMetaData") + "\\(([a-z]+)\)", meta)
        if m:
            struct, meta = self._get_struct(m.group(1)), meta[len(m.group(0)):]
            context["deps"].add(struct)
            return struct, meta
        raise Exception("Can't parse metadata: " + meta)

    def _get_struct(self, fn, virtual=False, result_struct=False, exception=False):
        """Parse (once) the struct declared in class *fn* and its fields."""
        if fn in self.structs:
            return self.structs[fn]
        code = self._get_code(fn)
        m = re.search("new " + self.names.map("TStruct") + "\\(\"(.+?)\"\\);", code)
        s = ThriftStruct(fn, m.group(1), virtual, exception)
        self.structs[fn] = s
        # Field names and indices come from the TField constructor calls.
        for m in re.finditer("new " + self.names.map("TField") + "\\(\"(.+?)\", \\(byte\\)\\d+, \\(short\\)(\\d+)\\);", code):
            s.fields[m.group(1)] = ThriftField(m.group(1), int(m.group(2)))
        # Field types come from the FieldMetaData map entries.
        for m in re.finditer("\\.put\\(\\w+\\.(\\w+), new " + self.names.map("FieldMetaData") + "\\(\"(.+?)\", ([^;]+)\\);", code):
            ofname = m.group(1)
            f = s.fields[m.group(2)]
            context = { "deps": set(), "enums": set() }
            f.type, remaining = self._parse_meta(m.group(3), context)
            if remaining != ")":
                raise Exception("Couldn't fully parse: " + m.group(3))
            s.deps.update(context["deps"])
            # Record (struct, field)-derived naming votes for any enums used
            # by this field; _guess_enum_names resolves them later.
            for e in context["enums"]:
                sname = s.name[0].upper() + s.name[1:].replace("_args", "")
                if s.name.endswith("_result"):
                    key = (sname.replace("_result", ""), "Result")
                else:
                    key = (sname, f.name[0].upper() + f.name[1:])
                nc = e.name_candidates
                if key in nc:
                    nc[key] += 1
                else:
                    nc[key] = 1
            # Generic "struct" metadata: recover the concrete type from the
            # public field declaration in the decompiled source.
            if isinstance(f.type, ThriftPrimitive) and f.type.tname() == "struct":
                m = re.search("\n public (\\w+) " + ofname + ";", code)
                f.type = self._get_struct(m.group(1), exception=result_struct)
        return s

    def _guess_enum_names(self):
        """Name each unnamed enum from its most-voted (struct, field) use."""
        counts = {}
        for s in self.structs.values():
            counts[s.tname()] = 1
        # Count how many types would claim each bare field-derived name.
        for e in self.enums.values():
            sname, fname = e.best_name_candidate()
            if fname in counts:
                counts[fname] += 1
            else:
                counts[fname] = 1
        for e in self.enums.values():
            sname, fname = e.best_name_candidate()
            if not e.name:
                # Qualify with the struct name when the bare name is taken.
                if counts[fname] == 1:
                    e.name = fname
                else:
                    e.name = sname + fname

    # http://stackoverflow.com/questions/4106862/how-to-sort-depended-objects-by-dependency
    def _struct_dep_sort(self, structs):
        """Topologically sort structs so dependencies are declared first."""
        sorted_structs = []
        visited = set()
        for s in structs:
            self._visit_struct(s, visited, sorted_structs)
        return sorted_structs

    def _visit_struct(self, s, visited, sorted_structs):
        """Depth-first visit appending *s* after all of its dependencies."""
        if s in visited:
            return
        visited.add(s)
        for dep in s.deps:
            self._visit_struct(dep, visited, sorted_structs)
        sorted_structs.append(s)

    def _parse(self):
        """Discover each <method>_args/<method>_result pair and build the
        service's methods, structs and enums from them."""
        index = self._get_code(self.indexfile)
        snames = TwoWayMap()
        # Map thrift struct names to the .jad files that declare them.
        for fn in (
            path.splitext(fn)[0]
            for fn
            in os.listdir(self.fileroot)
            if fn.endswith(".jad")
        ):
            m = re.search("new " + self.names.map("TStruct") + "\\(\"(.+?)\"\\);",
                self._get_code(fn))
            if m:
                snames.add(m.group(1), fn)
        for aname, afn in (i for i in snames.items() if i[0].endswith("_args")):
            name = aname.replace("_args", "")
            # Only methods referenced by the index file belong to the service.
            if not "\"" + name + "\"" in index:
                continue
            args = self._get_struct(afn, True)
            result = self._get_struct(snames.map(name + "_result"), True, True)
            rfields = sorted(result.fields.values(), key=lambda f: f.index)
            return_ = ThriftPrimitive(1)   # defaults to void
            if len(rfields) and rfields[0].name == "success":
                return_ = rfields[0].type
                exceptions = rfields[1:]
            else:
                exceptions = rfields
            self.methods[name] = ThriftMethod(
                name,
                sorted(args.fields.values(), key=lambda f: f.index),
                return_,
                exceptions)
        self._guess_enum_names()
# Script entry point (Python 2): rethrift.py <fileroot> <indexfile> <name>
print ThriftService(*sys.argv[1:]).serialize()
|
python
|
from collections import namedtuple
class FilePath(
        namedtuple('FilePath', [
            'subject', 'filetype', 'load_path', 'save_path', 'bad_channel_path'
        ])):
    """Immutable record of the paths associated with one subject's data file.

    save_path and bad_channel_path are optional and default to None.
    """
    __slots__ = ()

    def __new__(cls, subject, filetype, load_path,
                save_path=None, bad_channel_path=None):
        values = (subject, filetype, load_path, save_path, bad_channel_path)
        return super(FilePath, cls).__new__(cls, *values)
|
python
|
"""
"A Simple Domain Shifting Network for Generating Low Quality Images" implementation
Step 2: Training simple convolutional regressor to mimic Cozmo camera.
"""
import torch
from torchvision import datasets
import numpy as np
import torchvision.transforms as transforms
from PIL import Image
import os
from torch.utils.data import Dataset, DataLoader
import glob
import torch.nn as nn
import torch.nn.functional as F
# Windows path separator used to build every dataset/model path below.
# (The misspelled name is kept: it is referenced throughout this script.)
file_seperator = "\\"
class CustomDataset(Dataset):
    """Pairs each image in input_folder with its '<stem>_copy.jpg'
    counterpart in output_folder; items are (input_path, output_path)."""
    def __init__(self, input_folder, output_folder):
        self.samples = []
        for file_path in glob.glob(input_folder + file_seperator + '*'):
            stem = file_path.split(file_seperator)[-1].split('.')[0]
            target = output_folder + file_seperator + stem + '_copy.jpg'
            self.samples.append((file_path, target))

    def __len__(self):
        return len(self.samples)

    def __getitem__(self, idx):
        return self.samples[idx]
# Simple convolution regression network
class ConvReg(nn.Module):
    """Maps an RGB image to an RGB image of the same spatial size: two
    stride-2 convolutions down, two stride-2 transposed convolutions up,
    with a final sigmoid squashing outputs into [0, 1]."""
    def __init__(self):
        super().__init__()
        self.conv1 = nn.Conv2d(3, 64, 3, stride=2, padding=1)
        self.conv2 = nn.Conv2d(64, 128, 3, stride=2, padding=1)
        self.relu = nn.ReLU()
        self.sigmoid = nn.Sigmoid()
        self.t_conv1 = nn.ConvTranspose2d(128, 64, 2, stride=2)
        self.t_conv2 = nn.ConvTranspose2d(64, 3, 2, stride=2)

    def forward(self, x):
        h = self.relu(self.conv1(x))
        h = self.relu(self.conv2(h))
        h = self.relu(self.t_conv1(h))
        return self.sigmoid(self.t_conv2(h))
def run():
    """Train ConvReg to map original images to their Cozmo-captured copies,
    saving a checkpoint after every epoch."""
    transform = transforms.ToTensor()  # NOTE(review): unused — images are loaded manually below
    dataset = CustomDataset(file_seperator.join(['..','Dataset','original_pascal_voc_images_15_classes']), file_seperator.join(['..','Dataset','cozmo_captured_pascal_voc_images_15_classes']))
    dataloader = DataLoader(dataset, batch_size=32, shuffle=True, num_workers=0)
    convreg = torch.nn.DataParallel(ConvReg())

    #MSE loss function for final image reconstruction
    loss_function_MSE = nn.MSELoss()
    lr=0.01
    optimise = torch.optim.Adam(convreg.parameters(),lr=lr)

    n_epochs = 100
    loss_value = 0.0
    for epoch in range(1,n_epochs+1):
        loss_value = 0.0
        for data in dataloader:
            image_in = []
            image_out = []
            # data[0] contains a list of high resolution images
            for i in data[0]:
                # NOTE(review): reshape(3,224,224) reinterprets the HWC
                # buffer rather than moving the channel axis (transpose) —
                # confirm this is intended.
                image_in.append(np.asarray(Image.open(i).resize((224,224))).reshape(3,224,224)/255.)
            image_in = torch.Tensor(np.array(image_in))
            image_in_var = torch.autograd.Variable(image_in)
            # data[1] contains a list of corresponding low resolution images
            for j in data[1]:
                image_out.append(np.asarray(Image.open(j).resize((224,224))).reshape(3,224,224)/255.)
            image_out = torch.Tensor(np.array(image_out))
            image_out_var = torch.autograd.Variable(image_out)
            optimise.zero_grad()
            final = convreg(image_in_var)
            loss_AE = loss_function_MSE(final,image_out_var)
            loss_AE.backward()
            optimise.step()
            loss_value += loss_AE.item()
        # Step-decay the learning rate every 30 epochs.
        # NOTE(review): rebuilding Adam discards its moment estimates each
        # decay — confirm intended (vs. updating param_group lr in place).
        if (epoch+1)%30==0:
            lr=lr*0.8
            optimise = torch.optim.Adam(convreg.parameters(),lr=lr)
        print('Epoch: {} Loss value:{:.6f}'.format(epoch,loss_value/len(dataloader)))
        torch.save(convreg.state_dict(), file_seperator.join(['..','Models','convreg_'+str(epoch)+'.pth']))
if __name__ == '__main__':
run()
|
python
|
def digitize(num: int) -> list:
    """Return the decimal digits of *num*, most significant first.

    Generalized to accept negative input (digits of the absolute value):
    the original loop never terminated for num < 0, because the update
    (num - num % 10) // 10 converges to -1 instead of 0.

    >>> digitize(123)
    [1, 2, 3]
    """
    if num == 0:
        return [0]
    num = abs(num)
    digits = []
    while num:
        digits.append(num % 10)
        num //= 10
    return digits[::-1]
|
python
|
#================================Params.py=====================================#
# Created by Ciaran O'Hare 2019
# Description:
# This file just sets up some of the parameters that are used throughout the
# project. and some classes that link things together.
#==============================================================================#
from __future__ import print_function
from numpy import array, sqrt, pi, exp, interp, loadtxt, zeros, shape, ones
from numpy import logspace, linspace, log10
from scipy.special import erf, erfi
# Constants (energies in keV unless noted otherwise)
m_p = 0.9315*1e6 # atomic mass unit (931.5 MeV) in keV
m_p_keV = 0.9315*1e6 # same value, explicit keV alias
m_e = 511.0 # keV
c_m = 2.99792458e8 # speed of light in m/s
c_cm = c_m*100.0 # speed of light in cm/s
c_km = c_m/1000.0 # speed of light in km/s
GeV_2_kg = 1.0e6*1.783e-33 # convert GeV to kg
alph = 1.0/137.0 # fine structure constant
m_p_kg = 1.660538782e-27 # amu in kg
a0 = 0.268173 # Bohr radius keV^-1
N_A = 6.02214e23 # Avogadro's constant
sinTheta_Wsq = 0.2387e0 # sin^2(Theta_W) weinberg angle
G_F_GeV = 1.16637e-5 # GeV**-2 ! Fermi constant in GeV
Jan1 = 2458849.5 # Julian date of January 1st 2020
seconds2year = 365.25*3600*24 # seconds per Julian year
#==============================================================================#
# Set Nucleus params
class Atom:
    """Container for the nuclear/atomic target parameters used throughout
    the project (isotopic fraction, spins, ionisation data, band structure).
    """

    def __init__(self, xi, N, Z, J, Sp, Sn, fion, E_B_vals, E_gap, eps, Vfactor):
        self.IsotopicFraction = xi
        self.NumberOfNeutrons = N
        self.NumberOfProtons = Z
        self.MassNumber = N+Z
        self.NuclearSpin = J
        self.ExpProtonSpin = Sp
        # BUG FIX: was `= Sp` (copy-paste error); the neutron spin is Sn.
        self.ExpNeutronSpin = Sn
        if J > 0.0:
            # Spin-dependent enhancement factor; only defined for J > 0.
            self.SDEnhancement = (4.0/3.0)*((J+1.0)/J)*(Sp-Sn)**2.0
        self.IonisationFormFactor = fion
        self.BindingEnergies = E_B_vals
        self.BandGapEnergy = E_gap
        self.ElectronHoleMeanEnergy = eps
        self.VCellFactor = Vfactor
#==============================================================================#
#==============================================================================#
# Set parameters of halo models and streams
class Halo:
    """Dark-matter halo model: local density plus an escape-speed-truncated
    Maxwellian velocity distribution, optionally with a 'Sausage'
    (anisotropic) component when eta > 0.

    Units: presumably GeV/cm^3 for rho_0 and km/s for the speeds, matching
    the conventional values used for SHM below — confirm.
    """

    def __init__(self, rho_0, v_LSR, sig_v, v_esc, v_pec, beta, eta):
        self.LocalDensity = rho_0        # local DM density
        self.RotationSpeed = v_LSR       # local-standard-of-rest speed
        self.Dispersion = sig_v          # isotropic velocity dispersion
        self.EscapeSpeed = v_esc
        self.PeculiarVelocity = v_pec
        # Normalisation of the truncated Maxwellian.
        self.Normalisation = erf(v_esc/(sqrt(2.0)*sig_v))-\
            sqrt(2.0/pi)*(v_esc/sig_v)*\
            exp(-v_esc**2.0/(2.0*sig_v**2.0))
        self.SausageEta = eta            # Sausage component fraction
        if eta > 0.0:
            self.SausageBeta = beta      # velocity anisotropy parameter
            # Anisotropic dispersions (r, phi, z) derived from beta and v_LSR.
            sigr = sqrt(3*v_LSR**2.0/(2.0*(3-2.0*beta)))
            sigphi = sqrt(3*v_LSR**2.0*(1-beta)/(2.0*(3-2.0*beta)))
            sigz = sqrt(3*v_LSR**2.0*(1-beta)/(2.0*(3-2.0*beta)))
            self.SausageDispersionTensor = array([sigr, sigphi, sigz])
            # Normalisation is overwritten for the anisotropic distribution.
            self.Normalisation = erf(v_esc/(sqrt(2.0)*sigr)) \
                - sqrt((1.0-beta)/beta)\
                *exp(-v_esc**2.0/(2.0*sigphi**2.0))\
                *erfi(v_esc/(sqrt(2)*sigr)*sqrt(beta/(1-beta)))
# Standard Halo Model (old parameters)
SHM = Halo(0.3,                     # rho_0
           220.0,                   # v_LSR
           156.0,                   # sig_v
           544.0,                   # v_esc
           array([11.1, 12.2, 7.3]),  # v_pec
           0.0,                     # beta (no Sausage)
           0.0)                     # eta  (no Sausage)
# Standard Halo Model++
SHMpp = Halo(0.55,                  # rho_0
             233.0,                 # v_LSR
             164.8,                 # sig_v
             528.0,                 # v_esc
             array([11.1, 12.2, 7.3]),  # v_pec
             0.9,                   # beta
             0.2)                   # eta
####
class Stream:
    """A velocity stream: mean velocity vector and per-axis dispersions."""

    def __init__(self, v1, v2, v3, sig1, sig2, sig3):
        # BUG FIX: the third component previously duplicated v2 (`[v1,v2,v2]`),
        # silently dropping v3 from every stream's velocity vector.
        self.Velocity = array([v1, v2, v3])
        self.Dispersion = array([sig1, sig2, sig3])
# Known stellar streams: (v1, v2, v3, sig1, sig2, sig3).
S1stream = Stream(-29.6,-297.4,-72.8,82.6, 26.9, 58.5)
S2stream = Stream(6.0, 166.7, -242.8,48.6, 13.5, 26.0)
# Alternative S2 parameters, kept for reference:
#S2stream_b = Stream(-70.9, 153.3, 161.5, 83.9, 29.6, 71.5)
#==============================================================================#
#==============================================================================#
# Current number of neutrino sources:
n_nu_tot = 15
# Neutrino spectrum file naming: nufile_dir + nuname[i] + nufile_root.
nufile_root = ".txt"
nufile_dir = "../data/neutrinos/"
nuname = ["pp", "pep", "hep", "7Be1", "7Be2", "8B", "13N", "15O", "17F",
          "DSNB", "Atm", "GeoU", "GeoTh", "GeoK", "Reactor"]
n_Enu_vals = 1000
# Mark which neutrinos are monochromatic (pep and the two 7Be lines).
mono = zeros(n_nu_tot, dtype=bool)
mono[[1, 3, 4]] = True
# Mark which neutrinos are Solar.
# NOTE(review): 17F (index 8) also looks Solar — confirm 0:8 is intended.
whichsolar = zeros(n_nu_tot, dtype=bool)
whichsolar[0:8] = True
# Neutrino max energies (MeV):
NuMaxEnergy = array([0.42341, 1.44, 18.765, 0.3843, 0.8613, 16.34, 1.193,
                     1.7285, 1.7365, 91.201, 981.75, 4.54, 2.33, 1.3572,
                     1.1418e1])
# Neutrino fluxes (cm-2 s-1 MeV-1) and uncertainties (%):
# (from Vinyoles et al (2017) Barcelona GS98 SSM)
NuFlux = array([5.98e10, 1.44e8, 7.98e3, 4.93e8, 4.50e9, 5.16e6,
                2.78e8, 2.05e8, 5.29e6, 85.7, 10.54,
                3808776.91874, 3352686.94783, 21639789.2056,
                208537.673299])
NuUnc = array([0.006, 0.01, 0.3, 0.06, 0.06, 0.02, 0.15,
               0.17, 0.2, 0.5, 0.25,
               0.2, 0.257, 0.168,
               0.08])
# Collect neutrino parameters:
class Neutrinos:
    """Bundle of neutrino-background parameters for a set of sources."""

    def __init__(self, n_nu, solar_label, energies, fluxes,
                 normlisations, uncertainties):
        # NOTE(review): 'normlisations' is a typo for 'normalisations' but is
        # kept as-is — renaming would break any keyword-argument callers.
        self.Flux = fluxes
        self.Energy = energies
        # Fractional uncertainties scaled by the normalisations.
        self.Uncertainties = uncertainties*normlisations
        self.Normalisations = normlisations
        self.NumberOfNeutrinos = n_nu
        self.SolarLabel = solar_label

    def RecoilDistribution(self, RD):
        # Attach a precomputed recoil distribution to this instance.
        self.RD = RD
#==============================================================================#
#==============================================================================#
# Location class only has latitude and longitude at the moment
class Location:
    """A geographic site, given as latitude/longitude in decimal degrees."""

    def __init__(self, lat, lon):
        self.Longitude = lon
        self.Latitude = lat
# Candidate (and less serious) detector sites.
Boulby = Location(54.5591,0.8310)
GranSasso = Location(42.4691, 13.5654)
Kamioka = Location(36.2381, 137.1863)
SNOlab = Location(46.4719, -81.1868)
Stawell = Location(-37.0576, 142.7754)
Oahu = Location(21.4389, -158.0001)
GuantanamoBay = Location(20.0117, -75.1216)
Pyongyang = Location(39.0392, 125.7625)
#------------------------------------------------------------------------------#
|
python
|
# Plain integer constants.
num1 = 111
num2 = 222
num3 = 3333
# FIX: removed the dead `num4 = 444444444444` assignment that was immediately
# overwritten on the next line; the effective value is unchanged.
num4 = 44444
num5 = 5555
|
python
|
"""empty message
Revision ID: 5c63a89ee7b7
Revises: 9afbd55082a0
Create Date: 2021-09-29 10:24:20.413807
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '5c63a89ee7b7'
down_revision = '9afbd55082a0'
branch_labels = None
depends_on = None
def upgrade():
    """Drop user profile columns (now sourced from ldap_model via the new
    uid foreign key) and add the workflow_spec_category.admin flag."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_foreign_key(None, 'user', 'ldap_model', ['uid'], ['uid'])
    op.drop_column('user', 'affiliation')
    op.drop_column('user', 'email_address')
    op.drop_column('user', 'eppn')
    op.drop_column('user', 'title')
    op.drop_column('user', 'first_name')
    op.drop_column('user', 'last_name')
    op.drop_column('user', 'display_name')
    op.add_column('workflow_spec_category', sa.Column('admin', sa.Boolean(), nullable=True))
    # ### end Alembic commands ###
def downgrade():
    """Reverse upgrade(): restore the user profile columns (data is lost)
    and drop the workflow_spec_category.admin flag."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('workflow_spec_category', 'admin')
    op.add_column('user', sa.Column('display_name', sa.VARCHAR(), autoincrement=False, nullable=True))
    op.add_column('user', sa.Column('last_name', sa.VARCHAR(), autoincrement=False, nullable=True))
    op.add_column('user', sa.Column('first_name', sa.VARCHAR(), autoincrement=False, nullable=True))
    op.add_column('user', sa.Column('title', sa.VARCHAR(), autoincrement=False, nullable=True))
    op.add_column('user', sa.Column('eppn', sa.VARCHAR(), autoincrement=False, nullable=True))
    op.add_column('user', sa.Column('email_address', sa.VARCHAR(), autoincrement=False, nullable=True))
    op.add_column('user', sa.Column('affiliation', sa.VARCHAR(), autoincrement=False, nullable=True))
    # ### end Alembic commands ###
|
python
|
import unittest
from model.shsamodel import *
class SHSAModelTestCase(unittest.TestCase):
    """Tests SHSA model."""

    def setUp(self):
        # Adjacency-list fixture (name-mangled to
        # _SHSAModelTestCase__graph_dict because of the double underscore).
        self.__graph_dict = {
            'a': ['d'],
            'b': ['c'],
            'c': ['b', 'c', 'd'],
            'd': ['a', 'c'],
            # unconnected nodes are not allowed, no edge can be created for the
            # networkx graph structure
            # 'e' : [],
        }
        # Per-node properties: node type plus 'need'/'provided' flags.
        self.__properties = {
            'type': {'a': SHSANodeType.V, 'b': SHSANodeType.R,
                     'c': SHSANodeType.V, 'd': SHSANodeType.R},
            'need': {'a': True, 'c': False},
            'provided': {'a': True, 'c': True},
        }

    def tearDown(self):
        self.__graph_dict = None
        self.__properties = None

    def test_setup_model(self):
        """Model built from a graph dict has the expected nodes/edges/props."""
        m = SHSAModel(self.__graph_dict, self.__properties)
        self.assertEqual(len(m.nodes()), 4,
                         "incorrect number of nodes")
        self.assertEqual(len(m.edges()), 7,
                         "incorrect number of edges")
        # properties check
        self.assertTrue(m.property_value_of('a', 'need'),
                        "wrong initialized property")
        self.assertEqual(m.property_value_of('a', 'type'), SHSANodeType.V,
                         "wrong initialized property")
        # nodes check
        self.assertEqual(set(m.variables), set(['a', 'c']))

    def test_setup_model_from_file(self):
        """Model can be loaded from YAML, as a graph dict or as relations."""
        # load with classical graph dict given in the yaml
        m = SHSAModel(configfile="test/model1.yaml")
        self.assertEqual(len(m.nodes()), 14,
                         "incorrect number of nodes")
        self.assertEqual(len(m.edges()), 13,
                         "incorrect number of edges")
        # load with relations instead of graph structure
        m = SHSAModel(configfile="test/model2.yaml")
        self.assertEqual(len(m.nodes()), 15,
                         "incorrect number of nodes")
        self.assertEqual(len(m.edges()), 18,
                         "incorrect number of edges")

    def test_set_property(self):
        """Properties can be updated, except 'provided' on constants."""
        m = SHSAModel(self.__graph_dict, self.__properties)
        self.assertTrue(m.property_value_of('a', 'need'),
                        "wrong initialized property")
        m.set_property_to('a', 'need', False)
        self.assertFalse(m.property_value_of('a', 'need'),
                         "wrong initialized property")
        # the property 'provided' of constants cannot be set to false
        m = SHSAModel(configfile="test/model_p6.yaml")
        # provision is fine
        m.set_property_to('a', 'provided', False)
        # constant 'provided' status is not allowed to be set
        with self.assertRaises(RuntimeError):
            m.set_property_to('c', 'provided', False)

    def test_has_property(self):
        """has_property distinguishes configured from missing properties."""
        m = SHSAModel(self.__graph_dict, self.__properties)
        self.assertTrue(m.has_property('a', 'need'),
                        "property missing")
        self.assertFalse(m.has_property('a', 'dummy'),
                         "property available although not in config")

    def test_provided(self):
        """'provision'/'constant' drive provided()/unprovided() checks."""
        m = SHSAModel(configfile="test/model_p6.yaml")
        # provision vs. provided
        self.assertTrue(m.has_property('a', 'provision'),
                        "property 'provision' missing")
        self.assertFalse(m.has_property('a', 'provided'),
                         "property 'provided' available")
        # constants and multiple provisions
        self.assertTrue(m.has_property('c', 'constant'),
                        "property 'constant' missing")
        self.assertTrue(m.provided(['c', 'd']),
                        "provided check failed")
        # filter unprovided nodes
        self.assertEqual(m.unprovided(['a', 'b', 'c']), ['a', 'b'],
                         "unprovided check failed")

    def test_variable_to_itoms_map(self):
        """itoms()/variable() map between variables and their itoms."""
        m = SHSAModel(configfile="test/model_p6.yaml")
        # map variable to provisions
        itoms = m.itoms('a')
        self.assertEqual(itoms, [])
        itoms = m.itoms('d')
        self.assertEqual(itoms, ['/d1', '/d2'])
        # map variable to constant
        itoms = m.itoms('c')
        self.assertEqual(itoms, 0.2, "map from variable to constant failed")
        # map itoms to variables
        variable = m.variable('/d1')
        self.assertEqual(variable, 'd')
if __name__ == '__main__':
unittest.main()
|
python
|
# Copyright 2017 Starbot Discord Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Manage message log size.
from api import database
from api.database.table import Table, TableTypes
def message_count_get(server_id):
    '''Get the current message count.'''
    database.init()
    # Per-server message table; the table name embeds the server id.
    table_message_count = Table('user_messages_{}'.format(server_id), TableTypes.pGlobal)
    # The latest row ID doubles as the running message count.
    return table_message_count.getLatestID()
# Log messages to database.
def message_log(msg, server_id):
    '''Log a message into the database.'''
    database.init()
    table_log = Table('user_messages_{}'.format(server_id), TableTypes.pGlobal)
    # Persist author, content and server info as one row.
    Table.insert(table_log, dict(userid=msg.author.id, username=msg.author.name,
                                 message=msg.content, serverid=msg.server.id,
                                 servername=msg.server.name))
|
python
|
from minesweeper.logic import FLAG, OPEN, Square, MINE
def test_not_open():
    """A flagged-but-not-open square reports neither open nor a value."""
    # NOTE(review): this test uses FLAG while test_flag uses OPEN — the two
    # names look swapped; confirm against minesweeper.logic semantics.
    value = FLAG | 1
    s = Square.from_triple(0, 0, value)
    assert not s.open
    assert not s.value
def test_flag():
    """An open square exposes its numeric value."""
    # NOTE(review): uses OPEN despite the name 'test_flag' — see test_not_open.
    value = OPEN | 1
    s = Square.from_triple(0, 0, value)
    assert s.open
    assert s.value == 1
def test_open_flag():
    """OPEN|FLAG together still reads as an open square with its value."""
    value = OPEN | FLAG | 1
    s = Square.from_triple(0, 0, value)
    assert s.open
    assert s.value == 1
def test_open_mine():
    """An opened mine is open and a mine, but carries no numeric value."""
    value = MINE | OPEN
    s = Square.from_triple(0, 0, value)
    assert s.open
    assert s.mine
    assert not s.value
def test_status():
    """Status.ONGOING compares equal to its string value and to itself."""
    from minesweeper.game import Status
    assert Status.ONGOING == 'ongoing'
    assert Status.ONGOING is getattr(Status, 'ONGOING')
    assert Status.ONGOING == Status.ONGOING
|
python
|
# model settings
# Recognizer2D config with a Central_Model backbone (two MTB4 sub-backbones,
# one per task) and a TSN classification head for 400 classes.
model = dict(
    type='Recognizer2D',
    backbone=dict(
        type='Central_Model',
        pretrained='checkpoints/up-g/mnb4-cls-bn.pth',
        backbone_name='MTB4',
        task_names=('gv_patch', 'gv_global'),
        main_task_name='gv_global',
        trans_type='crossconvhrnetlayer',
        # frozen_stages=4: presumably freezes the first 4 stages — confirm
        # against the Central_Model implementation.
        frozen_stages=4,
        # Identical per-task backbone hyper-parameters for both tasks.
        task_name_to_backbone={
            'gv_global': {
                'repeats': [2, 3, 6, 6, 6, 12],
                'frozen_stages': 4,
                'expansion': [1, 4, 6, 3, 2, 5],
                'channels': [32, 64, 128, 192, 192, 384],
                'final_drop': 0.0,
                'mtb_type': 4
            },
            'gv_patch': {
                'repeats': [2, 3, 6, 6, 6, 12],
                'frozen_stages': 4,
                'expansion': [1, 4, 6, 3, 2, 5],
                'channels': [32, 64, 128, 192, 192, 384],
                'final_drop': 0.0,
                'mtb_type': 4
            }
        },
        layer2channel={
            'layer1': 64,
            'layer2': 128,
            'layer3': 192
        },
        # Each layer aggregates the auxiliary outputs of itself and all
        # earlier layers.
        layer2auxlayers={
            'layer1': [
                'layer1',
            ],
            'layer2': [
                'layer1',
                'layer2',
            ],
            'layer3': ['layer1', 'layer2', 'layer3'],
        },
        trans_layers=['layer1', 'layer2', 'layer3'],
        channels=[64, 128, 192],
    ),
    cls_head=dict(type='TSNHead',
                  num_classes=400,
                  in_channels=1280,
                  spatial_type='avg',
                  consensus=dict(type='AvgConsensus', dim=1),
                  dropout_ratio=0.4,
                  init_std=0.01),
    # model training and testing settings
    train_cfg=None,
    test_cfg=dict(average_clips=None))
# Custom modules that must import successfully for this config to load.
custom_imports = dict(imports=[
    'modelzoo.metanet',
    'modelzoo.central_model',
],
                      allow_failed_imports=False)
|
python
|
from pathlib import Path
import argparse
import pandas as pd
import os
HERE = os.path.abspath(os.path.dirname(__file__))
DEFAULT_CONFIG_FILE = os.path.join(HERE, "config_templates",
"default_configs.txt")
DATASET_STATS = os.path.join(HERE, "dataset_stats", "dataset_stats.tsv")
def output_config(config_dict, output_dir):
    """Write *config_dict* to '<dataset>_<device>.ini' in *output_dir*.

    The [general] section is written explicitly; all remaining sections are
    populated from keys of the form '<section>.<key>'.
    """
    device = config_dict.get("device")
    if config_dict.get("dataset") is None:
        ds_name = "custom"
    else:
        ds_name = config_dict.get("dataset")
    file = Path(output_dir) / Path(str(ds_name) + "_" +
                                   device.lower() + ".ini")
    sections = ["model", "storage", "training", "training_pipeline",
                "evaluation", "evaluation_pipeline", "path", "reporting"]
    opts = list(config_dict.keys())
    with open(file, "w+") as f:
        f.write("[general]\n")
        f.write("device=" + config_dict.get("general.device") + "\n")
        # gpu_ids / random_seed are optional and only written when present.
        if config_dict.get("general.gpu_ids") is not None:
            f.write("gpu_ids=" + config_dict.get("general.gpu_ids") + "\n")
        if config_dict.get("general.random_seed") is not None:
            f.write("random_seed=" + config_dict.get("general.random_seed")
                    + "\n")
        f.write("num_train=" + str(config_dict.get("num_train")) + "\n")
        f.write("num_nodes=" + str(config_dict.get("num_nodes")) + "\n")
        f.write("num_relations=" + str(config_dict.get("num_relations"))
                + "\n")
        f.write("num_valid=" + str(config_dict.get("num_valid")) + "\n")
        f.write("num_test=" + str(config_dict.get("num_test")) + "\n")
        f.write("experiment_name=" +
                config_dict.get("general.experiment_name") + "\n")
        # Remaining sections: every 'section.key' option lands in [section].
        for sec in sections:
            f.write("\n[" + sec + "]\n")
            for key in opts:
                if key.split(".")[0] == sec:
                    f.write(key.split(".")[1] +
                            "=" + config_dict.get(key) + "\n")
def read_template(file):
    """Parse a ``key=value[*choice1*choice2...]`` template file.

    Returns ``(config_dict, valid_dict)`` where *config_dict* maps each
    option key to its default value and *valid_dict* maps keys to the list
    of allowed choices (only for keys that declare any).
    """
    with open(file, "r") as f:
        lines = f.readlines()
    keys = []
    values = []
    valid_dict = {}
    for line in lines:
        line = line.split("=")
        line[1] = line[1].rstrip()
        keys.append(line[0])
        # Everything after the first '*' enumerates the valid choices.
        sub_line = line[1].split("*")
        values.append(sub_line[0])
        if len(sub_line) > 1:
            # BUG FIX: choices were wrapped in an extra list
            # (`[sub_line[1:]]`), which made argparse `choices=` a
            # one-element list-of-lists that could never match user input.
            valid_dict.update({line[0]: sub_line[1:]})
    config_dict = dict(zip(keys, values))
    return config_dict, valid_dict
def set_up_files(output_directory):
    """Create *output_directory* (non-recursively) if it does not exist.

    Reports, rather than raises, when the directory already exists or its
    parent is missing.
    """
    out = Path(output_directory)
    try:
        if not out.exists():
            out.mkdir(parents=False, exist_ok=False)
    except FileExistsError:
        print("Directory already exists.")
    except FileNotFoundError:
        print("Incorrect parent path given for output directory.")
def update_dataset_stats(dataset, config_dict):
    """Fill *config_dict* with the statistics of a known *dataset*.

    Looks the dataset up in the bundled dataset_stats.tsv and raises
    RuntimeError if it is not listed there.
    """
    datasets_stats = pd.read_csv(DATASET_STATS, sep='\t')
    stats_row = datasets_stats[datasets_stats['dataset'] == dataset]
    if not stats_row.empty:
        stats_list = stats_row.iloc[0][['num_nodes', 'num_relations',
                                        'num_train', 'num_valid',
                                        'num_test']].tolist()
        config_dict = update_stats(stats_list, config_dict)
    else:
        raise RuntimeError("Unrecognized dataset")
    return config_dict
def update_stats(stats, config_dict):
    """Copy dataset statistics into *config_dict* as strings.

    *stats* order: [num_nodes, num_relations, num_train, num_valid, num_test].
    The insertion order (num_train first) matches the original code so the
    generated .ini sections keep their key order.
    """
    ordering = (("num_train", 2), ("num_nodes", 0), ("num_relations", 1),
                ("num_valid", 3), ("num_test", 4))
    for name, idx in ordering:
        config_dict[name] = str(int(stats[idx]))
    return config_dict
def update_data_path(dir, config_dict):
    """Point every ``path.*`` option at the preprocessed files inside *dir*."""
    base = dir.strip("/")
    # (config key, file name) pairs, in the original insertion order.
    targets = (
        ("path.train_edges", "/train_edges.pt"),
        ("path.train_edges_partitions", "/train_edges_partitions.txt"),
        ("path.validation_edges", "/valid_edges.pt"),
        ("path.test_edges", "/test_edges.pt"),
        ("path.node_labels", "/node_mapping.txt"),
        ("path.relation_labels", "/rel_mapping.txt"),
        ("path.node_ids", "/node_mapping.bin"),
        ("path.relations_ids", "/rel_mapping.bin"),
    )
    for key, suffix in targets:
        config_dict[key] = base + suffix
    return config_dict
def set_args():
    """Build the argparse parser from the default config template.

    Returns (parser, config_dict) where config_dict holds the template's
    default values. Every template option becomes a hidden --section.key
    argument, with choices enforced when the template declares them.
    """
    parser = argparse.ArgumentParser(
        description='Generate configs', prog='config_generator',
        formatter_class=argparse.RawTextHelpFormatter,
        epilog=('Specify certain config (optional): ' +
                '[--<section>.<key>=<value>]'))
    # --dataset and --stats are mutually exclusive ways to supply statistics.
    mode = parser.add_mutually_exclusive_group()
    parser.add_argument('output_directory', metavar='output_directory',
                        type=str, help='Directory to put configs \nAlso ' +
                        'assumed to be the default directory of preprocessed' +
                        ' data if --data_directory is not specified')
    parser.add_argument('--data_directory', metavar='data_directory',
                        type=str, help='Directory of the preprocessed data')
    mode.add_argument('--dataset', '-d', metavar='dataset', type=str,
                      help='Dataset to preprocess')
    mode.add_argument('--stats', '-s',
                      metavar=('num_nodes', 'num_relations', 'num_train',
                               'num_valid', 'num_test'),
                      nargs=5, help='Dataset statistics\n' +
                      'Enter in order of num_nodes, num_relations, num_train' +
                      ' num_valid, num_test')
    parser.add_argument('--device', '-dev', metavar='generate_config',
                        choices=["GPU", "CPU", "multi-GPU"],
                        nargs='?', default='GPU',
                        help=('Generates configs for a single-GPU/multi-CPU' +
                              '/multi-GPU training configuration file by ' +
                              'default. \nValid options (default to GPU): ' +
                              '[GPU, CPU, multi-GPU]'))
    # Template options become hidden CLI flags defaulting to template values.
    config_dict, valid_dict = read_template(DEFAULT_CONFIG_FILE)
    for key in list(config_dict.keys())[1:]:
        if valid_dict.get(key) is not None:
            parser.add_argument(str("--" + key), metavar=key, type=str,
                                choices=valid_dict.get(key),
                                default=config_dict.get(key),
                                help=argparse.SUPPRESS)
        else:
            parser.add_argument(str("--" + key), metavar=key, type=str,
                                default=config_dict.get(key),
                                help=argparse.SUPPRESS)
    return parser, config_dict
def parse_args(args):
    """Convert the parsed argparse Namespace into the final config dict.

    Raises RuntimeError when neither --dataset nor --stats was supplied.
    """
    arg_dict = vars(args)
    set_up_files(args.output_directory)
    arg_dict.update({"general.device": arg_dict.get("device")})
    if arg_dict.get("device") == "multi-GPU":
        # multi-GPU is expressed as device=GPU plus explicit gpu_ids.
        arg_dict.update({"device": "multi_GPU"})
        arg_dict.update({"general.device": "GPU"})
        arg_dict.update({"general.gpu_ids": "0 1"})
    else:
        arg_dict.update({"device": arg_dict.get("device")})
    if arg_dict.get("general.random_seed") == "#":
        # '#' is the template placeholder meaning "no seed given".
        arg_dict.pop("general.random_seed")
    if arg_dict.get("dataset") is not None:
        arg_dict.update({"dataset": arg_dict.get("dataset")})
        arg_dict = update_dataset_stats(arg_dict.get("dataset"), arg_dict)
    elif arg_dict.get("stats") is not None:
        arg_dict = update_stats(arg_dict.get("stats"), arg_dict)
    else:
        raise RuntimeError("Must specify either dataset or dataset stats.")
    return arg_dict
def main():
    """Entry point: parse CLI arguments and write the generated config."""
    parser, config_dict = set_args()
    args = parser.parse_args()
    config_dict = parse_args(args)
    dir = args.output_directory
    # Data paths default to the output directory unless --data_directory given.
    if args.data_directory is None:
        config_dict = update_data_path(dir, config_dict)
    else:
        config_dict = update_data_path(args.data_directory, config_dict)
    output_config(config_dict, dir)

if __name__ == "__main__":
    main()
|
python
|
from abc import ABCMeta, abstractclassmethod, abstractmethod
class PocInterface(metaclass=ABCMeta):
    '''
    Interface that every POC (proof-of-concept) implementation must follow.
    '''
    # FIX: @abstractclassmethod is deprecated since Python 3.3 and also turned
    # these into classmethods even though implementations take `self`;
    # @abstractmethod declares plain abstract instance methods. Existing
    # subclasses that define validate/exploit are unaffected.
    @abstractmethod
    def validate(self, *args, **kwargs):
        '''
        Vulnerability verification hook.
        :param args: implementation-defined positional arguments
        :param kwargs: implementation-defined keyword arguments
        :return: implementation-defined; recommended True when the target is
                 vulnerable, False otherwise
        '''
        pass

    @abstractmethod
    def exploit(self, *args, **kwargs):
        '''
        Vulnerability exploitation hook.
        :param args: implementation-defined positional arguments
        :param kwargs: implementation-defined keyword arguments
        :return: implementation-defined
        '''
        pass
|
python
|
from collections import OrderedDict
import requests
from civis import APIClient
from civis.base import EmptyResultError
def file_to_civis(buf, name, api_key=None, **kwargs):
    """Upload a file to Civis.

    Parameters
    ----------
    buf : file-like object
        The file or other buffer that you wish to upload.
    name : str
        The name you wish to give the file.
    api_key : str, optional
        Your Civis API key. If not given, the :envvar:`CIVIS_API_KEY`
        environment variable will be used.
    **kwargs : kwargs
        Extra keyword arguments will be passed to the file creation
        endpoint. See :func:`~civis.resources._resources.Files.post`.

    Returns
    -------
    file_id : int
        The new Civis file ID.

    Raises
    ------
    requests.HTTPError
        If the upload POST does not succeed.

    Notes
    -----
    If you are opening a binary file (e.g., a compressed archive) to
    pass to this function, do so using the ``'rb'`` (read binary)
    mode (e.g., ``open('myfile.zip', 'rb')``).
    """
    client = APIClient(api_key=api_key)
    # Register the file with Civis; the response carries S3-style upload
    # fields and a presigned upload URL.
    file_response = client.files.post(name, **kwargs)
    form = file_response.upload_fields
    # order matters here! key must be first
    form_key = OrderedDict(key=form.pop('key'))
    form_key.update(form)
    form_key['file'] = buf
    url = file_response.upload_url
    response = requests.post(url, files=form_key)
    # Fail fast on any HTTP error from the upload endpoint.
    response.raise_for_status()
    return file_response.id
def civis_to_file(file_id, buf, api_key=None):
    """Download a file from Civis.

    Parameters
    ----------
    file_id : int
        The Civis file ID.
    buf : file-like object
        The file or other buffer to write the contents of the Civis file
        into.
    api_key : str, optional
        Your Civis API key. If not given, the :envvar:`CIVIS_API_KEY`
        environment variable will be used.

    Returns
    -------
    None

    Raises
    ------
    EmptyResultError
        If no download URL exists for *file_id* (e.g. the file expired).

    Examples
    --------
    >>> file_id = 100
    >>> with open("my_file.txt", "w") as f:
    ...     civis_to_file(file_id, f)
    """
    url = _get_url_from_file_id(file_id, api_key=api_key)
    if not url:
        raise EmptyResultError('Unable to locate file {}. If it previously '
                               'existed, it may have '
                               'expired.'.format(file_id))
    # Stream the body in 32 KiB chunks to avoid loading the whole file
    # into memory.
    response = requests.get(url, stream=True)
    response.raise_for_status()
    chunk_size = 32 * 1024
    chunked = response.iter_content(chunk_size)
    for lines in chunked:
        buf.write(lines)
def _get_url_from_file_id(file_id, api_key=None):
    """Look up the download URL for a Civis file ID."""
    api = APIClient(api_key=api_key)
    return api.files.get(file_id).file_url
|
python
|
# imports
import datetime as dt
import psycopg2

# db global variables
HOST = 'localhost'
USER = 'postgres'
# SECURITY NOTE(review): credentials are hard-coded in source; consider
# loading them from environment variables or a config file instead.
PASSWORD = 'Master/99'
DATABASE = 'job_apps'

print('Initiating database connections.')
# db connection — opened once at import time and shared by the script below.
conn = psycopg2.connect(host=HOST, database=DATABASE,
                        user=USER, password=PASSWORD)
print('Database connected.')
class UpdateRecord():
    """Updates a job entry to add to the database"""

    def __init__(self, last_contact=False):
        # When only the contact date is updated, no column/value is prompted.
        if last_contact:
            self.company = self.get_company()
        else:
            self.company = self.get_company()
            self.column = self.get_column()
            self.value = self.get_value()

    def get_company(self):
        # Interactive prompt; returns the raw string the user types.
        return input('enter the company name to update:\n> ')

    def get_column(self):
        return input('enter to column name to update:\n> ')

    def get_value(self):
        return input(f'input the new value for {self.column}:\n> ')
if __name__ == '__main__':
    # SECURITY FIX: the previous version interpolated user input directly
    # into the SQL text with f-strings (SQL injection). Values are now bound
    # as parameters; the user-supplied column name is composed with
    # psycopg2.sql.Identifier, which safely quotes identifiers.
    from psycopg2 import sql

    print('Please provide inputs.\n')
    # get inputs
    contact_date = input('Update last contact date? (y/n)\n> ')
    if contact_date.lower() == 'y':
        record = UpdateRecord(last_contact=True)
        query = '''UPDATE jobs
                SET last_contact = %s
                WHERE company = %s
                '''
        params = (dt.date.today(), record.company)
        print(f'{record.company} record updated: last_contact {dt.date.today()}.')
    else:
        record = UpdateRecord()
        query = sql.SQL('''UPDATE jobs
                SET {column} = %s, last_contact = %s
                WHERE company = %s
                ''').format(column=sql.Identifier(record.column))
        params = (record.value, dt.date.today(), record.company)
        print(f'{record.company} record updated: {record.column} == {record.value}.')
    # push data to the database
    with conn.cursor() as c:
        c.execute(query, params)
        conn.commit()
|
python
|
from . import blockcipher
from . import pkc
from . import keyexchange
from . import signature
|
python
|
#!/usr/bin/env python
# -*- coding:utf-8 -*-
from datastruct.list.node import SNode
def reverse_order_output(head, res=None):
    """Append the values of a singly-linked list to *res* in reverse order.

    Returns [] immediately for an empty list; otherwise returns *res* (a new
    list when none was supplied) with the node values appended tail-first.
    """
    if head is None:
        return []
    out = [] if res is None else res
    values = []
    node = head
    while node is not None:
        values.append(node.value)
        node = node.next
    out.extend(reversed(values))
    return out
def find_intersection(list1, list2):
    """Return the first node shared by two linked lists, or None.

    Both arguments are list objects exposing ``len()`` and a ``.head`` node;
    classic two-pointer technique: advance the head of the longer list by
    the length difference, then walk both in lockstep until the pointers
    meet (identity comparison, not value equality).
    """
    diff = len(list1) - len(list2)
    head1 = list1.head
    head2 = list2.head
    if diff < 0:
        # len1 < len2: skip the extra leading nodes of list2.
        for _ in range(diff*-1):
            head2 = head2.next
    elif diff > 0:
        # len1 > len2: skip the extra leading nodes of list1.
        for _ in range(diff):
            head1 = head1.next
    else:
        pass
    while head1 is not None and head2 is not None and head1 is not head2:
        head1 = head1.next
        head2 = head2.next
    # Either both pointers landed on the shared node, or we ran off the end.
    return head1 if head1 is head2 else None
def merge_lists(l1_head, l2_head):
    """Merge two sorted singly-linked lists; returns the merged head.

    Stable: on ties the node from the first list comes first. Nodes are
    relinked in place, not copied.
    """
    dummy = SNode(0)  # sentinel; the real head is dummy.next
    tail = dummy
    while l1_head and l2_head:
        if l1_head.value <= l2_head.value:
            tail.next = l1_head
            l1_head = l1_head.next
        else:
            tail.next = l2_head
            l2_head = l2_head.next
        tail = tail.next
    # At most one list still has nodes; splice the remainder on.
    tail.next = l1_head or l2_head
    return dummy.next
def remove_nth_from_end(head, n):
    """Remove the n-th node from the end of a singly-linked list.

    Returns the (possibly new) head. BUG FIX: the previous version could not
    remove the head itself — when n equalled the list length it silently
    removed the wrong node instead.
    """
    # Advance `fast` n steps ahead of `slow`.
    fast = head
    for _ in range(n):
        fast = fast.next
    if fast is None:
        # The head itself is the n-th node from the end.
        return head.next
    # Walk both pointers until `fast` reaches the last node; `slow` then
    # sits just before the node to delete.
    slow = head
    while fast.next is not None:
        fast = fast.next
        slow = slow.next
    slow.next = slow.next.next
    return head
def find_middle(head):
    """Return the middle node (the first of the two for even-length lists).

    Classic slow/fast pointer walk; assumes *head* is not None.
    """
    slow = fast = head
    while fast.next is not None and fast.next.next is not None:
        slow = slow.next
        fast = fast.next.next
    return slow
|
python
|
from core import analyzer, element
from ui import run
# Registry mapping element names (as they appear in the input data) to the
# constructor classes in core.element.
element_type = {
    "Resistance": element.Resistance,
    "CurrentSource": element.CurrentSource,
    "VoltageSource": element.VoltageSource,
    "CCCS": element.CCCS,
    "VCCS": element.VCCS,
    "CCVS": element.CCVS,
    "VCVS": element.VCVS
}
def solve(data):
    """Run nodal analysis on *data*; return the result data, or None on
    any failure.

    *data* must be a mapping with "node_count" and "elements", where each
    element spec has "name" (a key of element_type), "args" and "kwargs".
    """
    try:
        node_count = data["node_count"]
        elements = []
        for spec in data["elements"]:
            new_elem = element_type[spec["name"]](*spec["args"], **spec["kwargs"])
            elements.append(new_elem)
        an = analyzer.Analyzer(node_count, elements)
        result = an.run()
        return analyzer.Analyzer.get_data(result)
    except Exception:
        # FIX: was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt; the None-on-failure contract is preserved.
        return None
if __name__ == "__main__":
    # Launch an 800x600 UI titled "Nodal analysis"; `solve` is the callback.
    run.Run(800, 600, "Nodal analysis", solve).run()
|
python
|
# Public optimizer API re-exported by this package.
__all__ = [
    "ConjugateGradient",
    "NelderMead",
    "ParticleSwarm",
    "SteepestDescent",
    "TrustRegions",
]
from .conjugate_gradient import ConjugateGradient
from .nelder_mead import NelderMead
from .particle_swarm import ParticleSwarm
from .steepest_descent import SteepestDescent
from .trust_regions import TrustRegions
|
python
|
# Generated by Django 3.0.1 on 2019-12-21 20:02
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated field alterations for the tacos app (cart/order)."""

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('tacos', '0004_auto_20191221_1313'),
    ]

    operations = [
        # Cart total becomes optional with a 0 default.
        migrations.AlterField(
            model_name='cart',
            name='total',
            field=models.DecimalField(blank=True, decimal_places=2, default=0, max_digits=7, verbose_name='Total'),
        ),
        # One cart per user; deleting the user deletes the cart.
        migrations.AlterField(
            model_name='cart',
            name='user',
            field=models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
        ),
        migrations.AlterField(
            model_name='order',
            name='subtotal',
            field=models.DecimalField(blank=True, decimal_places=2, max_digits=7, verbose_name='Subtotal'),
        ),
    ]
|
python
|
import ipaddress
import logging
import time
import uuid
import redis
from haipproxy.settings import (
REDIS_HOST,
REDIS_PORT,
REDIS_DB,
REDIS_PIPE_BATCH_SIZE,
LOCKER_PREFIX,
)
logger = logging.getLogger(__name__)
REDIS_POOL = None
#### redis ####
def get_redis_conn():
    """Return a StrictRedis client backed by a lazily created, shared pool."""
    global REDIS_POOL
    # FIX: identity comparison (`is None`) instead of `== None` (PEP 8);
    # the pool is created once and reused by every client.
    if REDIS_POOL is None:
        REDIS_POOL = redis.ConnectionPool(host=REDIS_HOST, port=REDIS_PORT, db=REDIS_DB)
    return redis.StrictRedis(connection_pool=REDIS_POOL)
def acquire_lock(conn, lock_name, acquire_timeout=10, lock_timeout=10):
    """inspired by the book 'redis in action'

    Try for up to *acquire_timeout* seconds to take the lock; the lock key
    expires after *lock_timeout* seconds. Returns the unique identifier on
    success (pass it to release_lock) or False on timeout.
    """
    identifier = str(uuid.uuid4())
    lock_name = LOCKER_PREFIX + lock_name
    end = time.time() + acquire_timeout
    while time.time() < end:
        # SET ... EX lock_timeout NX: take the lock only if nobody holds it.
        if conn.set(lock_name, identifier, lock_timeout, nx=True):
            return identifier
        elif not conn.ttl(lock_name) or conn.ttl(lock_name) == -1:
            # Repair a lock left without an expiry so it cannot deadlock.
            conn.expire(lock_name, lock_timeout)
        time.sleep(0.1)
    return False
def release_lock(conn, lock_name, identifier):
    """Release the lock only if *identifier* still owns it.

    Uses WATCH/MULTI so a lock that expired and was re-acquired by someone
    else between the read and the delete is not stolen. Returns True when
    the lock was deleted, False otherwise.

    NOTE(review): if the key expired entirely, ``pipe.get`` returns None and
    ``.decode()`` raises AttributeError — confirm callers tolerate this.
    """
    pipe = conn.pipeline(True)
    lock_name = LOCKER_PREFIX + lock_name
    while True:
        try:
            pipe.watch(lock_name)
            identifier_origin = pipe.get(lock_name).decode()
            if identifier_origin == identifier:
                pipe.multi()
                pipe.delete(lock_name)
                pipe.execute()
                return True
            # Someone else holds the lock now; leave it alone.
            pipe.unwatch()
            break
        except redis.exceptions.WatchError:
            # The key changed while watched; retry the whole sequence.
            pass
    return False
class RedisOps(object):
    """Thin wrapper over a Redis connection that batches writes through a
    pipeline of up to REDIS_PIPE_BATCH_SIZE commands."""

    def __init__(self):
        self.redis_conn = get_redis_conn()
        self.rpipe = self.redis_conn.pipeline()
        # Number of commands currently queued in the pipeline.
        self.rpipe_size = 0

    def _batch_exe(self, last=False):
        # Count the queued command and flush the pipeline once it is full;
        # `last=True` forces a flush regardless of size.
        if not last:
            self.rpipe_size += 1
        if self.rpipe_size >= REDIS_PIPE_BATCH_SIZE or last:
            self.rpipe.execute()
            logger.info(f"{self.rpipe_size} redis commands executed")
            self.rpipe_size = 0

    def flush(self):
        """Execute any commands still queued in the pipeline."""
        self._batch_exe(last=True)

    def set_proxy(self, proxy):
        """Queue the creation of a fresh stats hash for *proxy*.

        Skipped for falsy/invalid proxies and for proxies already stored.
        """
        if (
            not proxy
            or not is_valid_proxy(proxy=proxy)
            or self.redis_conn.exists(proxy)
        ):
            return
        # NOTE(review): hmset is deprecated in redis-py in favour of hset
        # with mapping= — confirm the installed client version.
        self.rpipe.hmset(
            proxy,
            {
                "used_count": 0,
                "success_count": 0,
                "total_seconds": 0,
                "last_fail": "",
                "timestamp": 0,
                "score": 0,
            },
        )
        self._batch_exe()

    def inc_stat(self, item):
        """Update a proxy's usage statistics; executes immediately."""
        self.rpipe.hincrby(item["proxy"], "used_count")
        self.rpipe.hincrby(item["proxy"], "success_count", item["success"])
        self.rpipe.hincrby(item["proxy"], "total_seconds", item["seconds"])
        self.rpipe.hset(item["proxy"], "last_fail", item["fail"])
        if item["success"] != 0:
            # Record when the proxy last succeeded.
            self.rpipe.hset(item["proxy"], "timestamp", int(time.time()))
        self.rpipe.execute()

    def map_all(self, op, need_op, match="*", **kwargs):
        # apply operation to each item
        # *op* is the name of a pipeline method (e.g. "delete"); *need_op*
        # receives each key's hash and decides whether to apply it.
        total = 0
        nop = 0
        ope = getattr(self.rpipe, op)
        if not ope:
            logger.warning(f"Invalid operation: {op}")
            return
        for pkey in self.redis_conn.scan_iter(match=match):
            total += 1
            # if self.redis_conn.hget(pkey, 'fail') == b'badcontent':
            #     print(pkey)
            # else:
            #     continue
            row = self.redis_conn.hgetall(pkey)
            if need_op(row):
                ope(pkey, **kwargs)
                nop += 1
        self.rpipe.execute()
        logger.info(f"{op} operations to {nop} proxies, total {total} scanned")
####
def is_valid_proxy(ip=None, port=None, protocol=None, proxy=None):
    """Validate a proxy, given either as parts or as 'protocol://ip:port'.

    Returns True only for a parseable IP, a port in [0, 65535] and a
    protocol among http/https/sock4/sock5 (or None).
    """
    if proxy:
        # Split 'protocol://ip:port' on colons; the ip keeps a leading '//'.
        try:
            protocol, ip, port = proxy.split(":")
            ip = ip.lstrip("//")
        except ValueError as e:
            logger.warning(f"{proxy}: {e}")
            return False
    try:
        ipaddress.ip_address(ip)
        port = int(port)
    except ValueError as e:
        logger.warning(f"{ip}:{port} {e}")
        return False
    return 0 <= port <= 65535 and protocol in ("http", "https", "sock4", "sock5", None)
|
python
|
# Write a 100x100 latency matrix: 0 on the diagonal, 100 everywhere else,
# space-separated values with one row per line.
with open("latency", "w") as out:
    for row in range(100):
        cells = ["0" if row == col else "100" for col in range(100)]
        out.write(" ".join(cells) + "\n")
|
python
|
from config.config import IMDB_BASE_URL
class MovieResponse:
    """Response DTO for a single movie, serializable via as_json()."""

    # Resolves the old TODO: named parameters replace `*vargs` indexing.
    # The positional order is identical to the old vargs[0..10] layout, so
    # existing positional callers are unaffected.
    def __init__(self, movie_id, title, directors, actors, duration,
                 description, score, poster_url, year, production_company,
                 genre_name):
        self.movie_id = movie_id
        self.imdb_url = IMDB_BASE_URL + self.movie_id + "/"
        self.title = title
        self.directors = directors
        self.actors = actors
        self.duration = duration
        self.description = description
        self.score = score
        self.poster_url = poster_url
        self.year = year
        self.production_company = production_company
        self.genre_name = genre_name

    def as_json(self):
        """Return the instance attributes as a plain dict."""
        return self.__dict__ if self else {}
|
python
|
# -*- coding: utf-8 -*-
"""
Created on Fri Dec 6 10:11:05 2019
@author: alheritier
"""
from pykds import KDSForest
import numpy as np
from sklearn.datasets import load_breast_cancer
# Sequentially evaluate a KDSForest on the breast-cancer dataset and report
# the normalized log2 loss of the one-step-ahead label predictions.
data = load_breast_cancer()
dim = data.data.shape[1]
alpha_label = len(np.unique(data.target))
m = KDSForest(ntrees=1, seed=123, dim=dim, alpha_label=alpha_label, ctw=False, theta0=[])
nll = 0
for point, label in zip(data.data, data.target):
    log_probs = m.predict_log2_proba(point=point)
    # FIX: removed the unused `obs_prob` local; the probability is computed
    # inline in the print below.
    nll += - log_probs[label]
    print("prob assigned to observed symbol ", label, " : ", 2 ** log_probs[label] )
    # Update the model with the observed (point, label) pair after predicting.
    m.update(point=point, label=label)
nll /= data.data.shape[0]
print("Normalized Log Loss: ", nll)
|
python
|
import luigi
import json
import time
import re
import datetime
import subprocess
import base64
from urllib import urlopen
import uuid
from uuid import uuid4
from uuid import uuid5
from elasticsearch import Elasticsearch
#for hack to get around non self signed certificates
import ssl
import sys
# TODO
# * I think we want to use S3 for our touch files (aka lock files) since that will be better than local files that could be lost/deleted
# * I have the consonance call turned off here until I figure out why bamstats on rnaseq produces an empty report
class ConsonanceTaskV2(luigi.Task):
    """Luigi task that prepares (and, when the commented block is
    re-enabled, submits) one bamstats/QC job to Consonance for a single
    file stored in a Redwood bundle.

    The task's output target is the generated job-settings JSON file;
    its existence is what marks the task as done.

    NOTE(review): this is Python 2 code (print statements, reload(sys)).
    """
    # Connection/config parameters; defaults match the UCSC CGL deployment.
    redwood_host = luigi.Parameter("storage.ucsc-cgl.org")
    redwood_token = luigi.Parameter("must_be_defined")
    dockstore_tool_running_dockstore_tool = luigi.Parameter(default="quay.io/ucsc_cgl/dockstore-tool-runner:1.0.7")
    target_tool = luigi.Parameter(default="quay.io/briandoconnor/dockstore-tool-bamstats:1.25-11")
    target_tool_url = luigi.Parameter(default="https://dockstore.org/containers/quay.io/briandoconnor/dockstore-tool-bamstats")
    workflow_type = luigi.Parameter(default="alignment_qc_report")
    image_descriptor = luigi.Parameter(default="must be defined")
    # Identity of the input file within Redwood.
    filename = luigi.Parameter(default="filename")
    file_uuid = luigi.Parameter(default="uuid")
    bundle_uuid = luigi.Parameter(default="bundle_uuid")
    parent_uuids = luigi.ListParameter(default=["parent_uuid"])
    tmp_dir = luigi.Parameter(default='/tmp')

    def run(self):
        """Build the per-job settings JSON under tmp_dir and print the
        Consonance command line (actual submission is commented out)."""
        print "** EXECUTING IN CONSONANCE **"
        print "** MAKE TEMP DIR **"
        # create a unique temp dir
        cmd = '''mkdir -p %s/consonance-jobs/AlignmentQCCoordinator/%s/''' % (self.tmp_dir, self.get_task_uuid())
        print cmd
        # shell=True is required here because cmd is a single shell string.
        result = subprocess.call(cmd, shell=True)
        if result != 0:
            print "PROBLEMS MAKING DIR!!"
        print "** MAKE JSON FOR WORKER **"
        # create a json for FastQC which will be executed by the dockstore-tool-running-dockstore-tool and passed as base64encoded
        # will need to encode the JSON above in this: https://docs.python.org/2/library/base64.html
        # see http://luigi.readthedocs.io/en/stable/api/luigi.parameter.html?highlight=luigi.parameter
        json_str = '''{
            "bam_input":
            {
                "class": "File",
                "path": "redwood://%s/%s/%s/%s"
            },
        ''' % (self.redwood_host, self.bundle_uuid, self.file_uuid, self.filename)
        json_str = json_str + '''"bamstats_report" :
            {
                "class": "File",
                "path": "./tmp/bamstats_report.zip"
            }
        }
        '''
        print "THE JSON: "+json_str
        # now make base64 encoded version
        base64_json_str = base64.urlsafe_b64encode(json_str)
        print "** MAKE JSON FOR DOCKSTORE TOOL WRAPPER **"
        # create a json for dockstoreRunningDockstoreTool, embed the FastQC JSON as a param
        p = self.output().open('w')
        print >>p, '''{
            "json_encoded": "%s",
            "docker_uri": "%s",
            "dockstore_url": "%s",
            "redwood_token": "%s",
            "redwood_host": "%s",
            "parent_uuids": "%s",
            "workflow_type": "%s",
            "tmpdir": "/datastore",
            "vm_instance_type": "c4.8xlarge",
            "vm_region": "us-west-2",
            "vm_location": "aws",
            "vm_instance_cores": 36,
            "vm_instance_mem_gb": 60,
            "output_metadata_json": "/tmp/final_metadata.json"
        }''' % (base64_json_str, self.target_tool, self.target_tool_url, self.redwood_token, self.redwood_host, ','.join(map("{0}".format, self.parent_uuids)), self.workflow_type)
        p.close()
        # execute consonance run, parse the job UUID
        print "** SUBMITTING TO CONSONANCE **"
        cmd = ["consonance", "run", "--image-descriptor", self.image_descriptor, "--flavour", "c4.8xlarge", "--run-descriptor", p.path]
        print "executing:"+ ' '.join(cmd)
        # Submission intentionally disabled — see the TODO at the top of
        # this file about empty rnaseq bamstats reports.
        # try:
        #     result = subprocess.call(cmd)
        # except Exception as e:
        #     print "Error in Consonance call!!!:" + e.message
        #
        # if result == 0:
        #     print "Consonance job return success code!"
        # else:
        #     print "ERROR: Consonance job failed!!!"

    def output(self):
        """Target for the settings JSON; doubles as the task's done-marker."""
        return luigi.LocalTarget('%s/consonance-jobs/AlignmentQCCoordinator/%s/settings.json' % (self.tmp_dir, self.get_task_uuid()))

    def get_task_uuid(self):
        #get a unique id for this task based on the some inputs
        #this id will not change if the inputs are the same
        #This helps make the task idempotent; it that it
        #always has the same task id for the same inputs
        # NOTE(review): reload(sys)/setdefaultencoding is a Python-2-only
        # hack to tolerate non-ASCII parameter values.
        reload(sys)
        sys.setdefaultencoding('utf8')
        print "FILENAME: "+self.filename+" FILE UUID: "+ self.file_uuid +" TARGET TOOL: "+ self.target_tool +" Target TOOL URL "+ self.target_tool_url +" REDWOOD TOKEN: "+ self.redwood_token +" REDWOOD HOST "+ self.redwood_host
        # uuid5 is deterministic for identical inputs, which is what makes
        # the task idempotent across scheduler runs.
        task_uuid = uuid5(uuid.NAMESPACE_DNS, (self.filename + self.file_uuid + self.target_tool + self.target_tool_url + self.redwood_token + self.redwood_host).encode('utf-8'))
        return task_uuid
class AlignmentQCCoordinatorV2(luigi.Task):
    """Coordinator task: queries the Redwood metadata service and an
    Elasticsearch analysis index for alignments missing a QC report, and
    schedules up to max_jobs ConsonanceTaskV2 jobs as requirements.

    NOTE(review): Python 2 code (print statements, urllib.urlopen).
    """
    es_index_host = luigi.Parameter(default='localhost')
    es_index_port = luigi.Parameter(default='9200')
    redwood_token = luigi.Parameter("must_be_defined")
    redwood_client_path = luigi.Parameter(default='../ucsc-storage-client')
    redwood_host = luigi.Parameter(default='storage.ucsc-cgl.org')
    image_descriptor = luigi.Parameter(default="must be defined")
    dockstore_tool_running_dockstore_tool = luigi.Parameter(default="quay.io/ucsc_cgl/dockstore-tool-runner:1.0.7")
    tmp_dir = luigi.Parameter(default='/tmp')
    data_dir = luigi.Parameter(default='/tmp/data_dir')
    max_jobs = luigi.Parameter(default='1')
    # Mapping "<bundle_uuid>_<file_name>" -> file_uuid, filled by requires().
    # NOTE(review): class-level mutable — shared across instances in one process.
    bundle_uuid_filename_to_file_uuid = {}

    def requires(self):
        """Build the list of ConsonanceTaskV2 jobs this batch depends on."""
        print "** COORDINATOR **"
        # now query the metadata service so I have the mapping of bundle_uuid & file names -> file_uuid
        print str("https://"+self.redwood_host+":8444/entities?page=0")
        #hack to get around none self signed certificates
        # Certificate verification is deliberately disabled (self-signed
        # server cert) — insecure, flagged by the module-level TODO.
        ctx = ssl.create_default_context()
        ctx.check_hostname = False
        ctx.verify_mode = ssl.CERT_NONE
        json_str = urlopen(str("https://"+self.redwood_host+":8444/entities?page=0"), context=ctx).read()
        metadata_struct = json.loads(json_str)
        print "** METADATA TOTAL PAGES: "+str(metadata_struct["totalPages"])
        # Walk every page of the paginated entities endpoint.
        for i in range(0, metadata_struct["totalPages"]):
            print "** CURRENT METADATA TOTAL PAGES: "+str(i)
            json_str = urlopen(str("https://"+self.redwood_host+":8444/entities?page="+str(i)), context=ctx).read()
            metadata_struct = json.loads(json_str)
            for file_hash in metadata_struct["content"]:
                self.bundle_uuid_filename_to_file_uuid[file_hash["gnosId"]+"_"+file_hash["fileName"]] = file_hash["id"]
        # now query elasticsearch
        es = Elasticsearch([{'host': self.es_index_host, 'port': self.es_index_port}])
        # see jqueryflag_alignment_qc
        # curl -XPOST http://localhost:9200/analysis_index/_search?pretty -d @jqueryflag_alignment_qc
        # Find donors where either the normal or tumor alignment QC report
        # flag is still false.
        res = es.search(index="analysis_index", body={"query" : {"bool" : {"should" : [{"term" : { "flags.normal_alignment_qc_report" : "false"}},{"term" : {"flags.tumor_alignment_qc_report" : "false" }}],"minimum_should_match" : 1 }}}, size=5000)
        listOfJobs = []
        print("Got %d Hits:" % res['hits']['total'])
        for hit in res['hits']['hits']:
            print("\n\n\n%(donor_uuid)s %(submitter_donor_id)s %(center_name)s %(project)s" % hit["_source"])
            for specimen in hit["_source"]["specimen"]:
                for sample in specimen["samples"]:
                    for analysis in sample["analysis"]:
                        # Schedule a QC job when this sample is listed as
                        # missing its (normal or tumor) alignment QC report
                        # and the specimen type matches the flag being checked.
                        if (analysis["analysis_type"] == "alignment" or analysis["analysis_type"] == "rna_seq_quantification") and \
                                ((hit["_source"]["flags"]["normal_alignment_qc_report"] == False and \
                                    re.match("^Normal - ", specimen["submitter_specimen_type"]) and \
                                    sample["sample_uuid"] in hit["_source"]["missing_items"]["normal_alignment_qc_report"]) or \
                                (hit["_source"]["flags"]["tumor_alignment_qc_report"] == False and \
                                    re.match("^Primary tumour - |^Recurrent tumour - |^Metastatic tumour - |^Xenograft - |^Cell line - ", specimen["submitter_specimen_type"]) and \
                                    sample["sample_uuid"] in hit["_source"]["missing_items"]["tumor_alignment_qc_report"])):
                            print "HIT!!!! "+analysis["analysis_type"]+" "+str(hit["_source"]["flags"]["normal_alignment_qc_report"])+" "+specimen["submitter_specimen_type"]
                            parent_uuids = []
                            parent_uuids.append(sample["sample_uuid"])
                            # Only the BAM outputs of the analysis get a report.
                            for file in analysis["workflow_outputs"]:
                                if file["file_type"] == "bam":
                                    print "  + will run report for %s file" % (file["file_path"])
                                    # max_jobs caps how many tasks one batch schedules.
                                    if len(listOfJobs) < int(self.max_jobs):
                                        listOfJobs.append(ConsonanceTaskV2(redwood_host=self.redwood_host, redwood_token=self.redwood_token, dockstore_tool_running_dockstore_tool=self.dockstore_tool_running_dockstore_tool, filename=file["file_path"], file_uuid = self.fileToUUID(file["file_path"], analysis["bundle_uuid"]), bundle_uuid = analysis["bundle_uuid"], parent_uuids = parent_uuids, tmp_dir=self.tmp_dir, image_descriptor=self.image_descriptor))
        # these jobs are yielded to
        return listOfJobs

    def run(self):
        # now make a final report
        f = self.output().open('w')
        # TODO: could print report on what was successful and what failed? Also, provide enough details like donor ID etc
        print >>f, "batch is complete"
        f.close()

    def output(self):
        # the final report
        # Timestamped so every coordinator run produces a fresh target.
        ts = time.time()
        ts_str = datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d_%H:%M:%S')
        return luigi.LocalTarget('%s/consonance-jobs/AlignmentQCCoordinator/AlignmentQCTask-%s.txt' % (self.tmp_dir, ts_str))

    def fileToUUID(self, input, bundle_uuid):
        """Look up a file's Redwood UUID from bundle id + file path."""
        return self.bundle_uuid_filename_to_file_uuid[bundle_uuid+"_"+input]
# Entry point: hand control to luigi's CLI runner.
if __name__ == '__main__':
    luigi.run()
|
python
|
# Lint as: python3
# Copyright 2019, The TensorFlow Federated Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for ExecutorFactory."""
from unittest import mock
from tensorflow_federated.python.common_libs import test as common_test
from tensorflow_federated.python.core.impl import eager_executor
from tensorflow_federated.python.core.impl import executor_factory
from tensorflow_federated.python.core.impl.compiler import placement_literals
from tensorflow_federated.python.core.impl.executors import executor_base
class ExecutorFactoryImplTest(common_test.TestCase):
  """Tests for the ExecutorFactory ABC and its concrete implementation."""

  def test_subclass_base_fails_no_create_method(self):
    # Missing create_executor leaves the subclass abstract.
    class NotCallable(executor_factory.ExecutorFactory):

      def clean_up_executors(self):
        pass

    with self.assertRaisesRegex(TypeError, 'instantiate abstract class'):
      NotCallable()

  def test_subclass_base_fails_no_cleanup(self):
    # Missing clean_up_executors leaves the subclass abstract.
    class NoCleanup(executor_factory.ExecutorFactory):

      def create_executor(self, x):
        pass

    with self.assertRaisesRegex(TypeError, 'instantiate abstract class'):
      NoCleanup()

  def test_instantiation_succeeds_both_methods_specified(self):
    # Implementing both abstract methods makes the class concrete.
    class Fine(executor_factory.ExecutorFactory):

      def create_executor(self, x):
        pass

      def clean_up_executors(self):
        pass

    Fine()

  def test_concrete_class_instantiates_stack_fn(self):

    def _make_executor(cardinalities):
      del cardinalities  # Unused
      return eager_executor.EagerExecutor()

    factory = executor_factory.ExecutorFactoryImpl(_make_executor)
    self.assertIsInstance(factory, executor_factory.ExecutorFactoryImpl)

  def test_call_constructs_executor(self):

    def _make_executor(cardinalities):
      del cardinalities  # Unused
      return eager_executor.EagerExecutor()

    factory = executor_factory.ExecutorFactoryImpl(_make_executor)
    ex = factory.create_executor({})
    self.assertIsInstance(ex, executor_base.Executor)

  def test_cleanup_succeeds_without_init(self):

    def _make_executor(cardinalities):
      del cardinalities  # Unused
      return eager_executor.EagerExecutor()

    factory = executor_factory.ExecutorFactoryImpl(_make_executor)
    factory.clean_up_executors()

  def test_cleanup_calls_close(self):
    ex = eager_executor.EagerExecutor()
    ex.close = mock.MagicMock()

    def _make_executor(cardinalities):
      del cardinalities  # Unused
      return ex

    factory = executor_factory.ExecutorFactoryImpl(_make_executor)
    factory.create_executor({})
    factory.clean_up_executors()
    ex.close.assert_called_once()

  def test_construction_with_multiple_cardinalities_reuses_existing_stacks(
      self):
    ex = eager_executor.EagerExecutor()
    ex.close = mock.MagicMock()
    num_times_invoked = 0

    def _make_executor(cardinalities):
      del cardinalities  # Unused
      nonlocal num_times_invoked
      num_times_invoked += 1
      return ex

    factory = executor_factory.ExecutorFactoryImpl(_make_executor)
    # Two distinct cardinality keys -> exactly two stack constructions,
    # even though create_executor is called four times in total.
    for _ in range(2):
      factory.create_executor({})
      factory.create_executor({placement_literals.SERVER: 1})
    self.assertEqual(num_times_invoked, 2)
# Standard test entry point (delegates to TFF's common test runner).
if __name__ == '__main__':
  common_test.main()
|
python
|
import weakref
import numpy as np
import qmhub.helpmelib as pme
from .dobject import cache_update
class DependPME(pme.PMEInstanceD):
    """Particle-mesh-Ewald engine whose setup is recomputed lazily from its
    cell-basis dependency (invalidation is driven by the project's
    `cache_update` decorator / `update_cache`)."""

    def __init__(self, cell_basis, alpha, order, nfft):
        super().__init__()
        self._name = "PME"
        # Static PME parameters; cell_basis is the (only) dynamic dependency.
        self._kwargs = {"alpha": alpha, "order": order, "nfft": nfft}
        self._dependencies = [cell_basis]
        self._dependants = []
        self._cache_valid = False

    def _func(self, cell_basis, alpha, order, nfft):
        """(Re)configure the underlying PME engine for the current cell."""
        super().setup(
            1,
            # Fix: np.asscalar() was removed in NumPy 1.23;
            # np.asarray(...).item() is the documented replacement and
            # also accepts plain Python scalars.
            np.asarray(alpha).item(),
            order,
            *nfft.tolist(),
            1.,
            1,
        )
        # Only the diagonal of cell_basis is used and all angles are fixed
        # at 90 degrees — assumes an orthorhombic cell; TODO confirm.
        super().set_lattice_vectors(
            *np.diag(cell_basis).tolist(),
            *[90., 90., 90.],
            self.LatticeType.XAligned,
        )

    @cache_update
    def compute_recip_esp(self, positions, grid_positions, grid_charges):
        """Reciprocal-space potential at `positions` from `grid_charges`
        placed at `grid_positions`.

        Input arrays are column-major (3 x N) and transposed to the
        contiguous row-major layout helpmelib expects; the result is a
        (4, N) array — presumably potential plus three field components,
        confirm against helpmelib's compute_P_rec documentation.
        """
        recip_esp = np.zeros((len(positions.T), 4))
        charges = np.ascontiguousarray(grid_charges)[:, np.newaxis]
        coord1 = np.ascontiguousarray(grid_positions.T)
        coord2 = np.ascontiguousarray(positions.T)
        mat = pme.MatrixD
        super().compute_P_rec(
            0,
            mat(charges),
            mat(coord1),
            mat(coord2),
            1,
            mat(recip_esp),
        )
        return np.ascontiguousarray(recip_esp.T)

    def add_dependant(self, dependant):
        # Weak references avoid cycles that would keep dependants alive.
        self._dependants.append(weakref.ref(dependant))

    def update_cache(self):
        """Re-run setup only when a dependency has invalidated the cache."""
        if not self._cache_valid:
            self._func(*self._dependencies, **self._kwargs)
            self._cache_valid = True
|
python
|
import torch
import torch.nn as nn
class NeuralColumn(nn.Module):
    """A small convolutional column ending in global spatial max pooling."""

    def __init__(self, channels: int, output_dim: int) -> None:
        """channels is the number of output convolution channels for
        each convolution layer of the network, except the last one.
        """
        super(NeuralColumn, self).__init__()
        layers = [
            nn.Conv2d(3, channels, kernel_size=5),
            nn.LeakyReLU(0.2),
            nn.MaxPool2d(kernel_size=3, stride=2),
            nn.Conv2d(channels, channels, kernel_size=3),
            nn.LeakyReLU(0.2),
            nn.MaxPool2d(kernel_size=3, stride=2),
            nn.Conv2d(channels, output_dim, kernel_size=3, bias=False),
        ]
        self._conv_net = nn.Sequential(*layers)

    def output_dim(self) -> int:
        # Out-channels of the final conv layer.
        return self._conv_net[-1].weight.size(0)

    def channels(self) -> int:
        # Out-channels of the first conv layer.
        return self._conv_net[0].weight.size(0)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        features = self._conv_net(x)
        # Global max pooling over the two spatial axes (width, then height).
        features = features.max(dim=3)[0]
        features = features.max(dim=2)[0]
        return features.view(x.size(0), self.output_dim())
class Ensemble(nn.Module):
    """Concatenates the outputs of several independent NeuralColumns."""

    def __init__(self, n_columns: int, column_dim: int, channels: int) -> None:
        super(Ensemble, self).__init__()
        members = [NeuralColumn(channels, column_dim) for _ in range(n_columns)]
        self.columns = nn.ModuleList(members)

    def num_columns(self) -> int:
        return len(self.columns)

    def channels(self) -> int:
        # All columns share the same configuration; query the first one.
        return self.columns[0].channels()

    def column_dim(self) -> int:
        return self.columns[0].output_dim()

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        # Output feature dimension is num_columns * column_dim.
        outputs = [column(x) for column in self.columns]
        return torch.cat(outputs, dim=1)
def load_model(path: str) -> nn.Module:
    """Rebuild an Ensemble from a checkpoint whose filename encodes its
    hyperparameters as ``..._<n_columns>_<channels>_<column_dim>`` and
    load its weights, frozen for inference.
    """
    print("loading model")
    columns, channels, col_dim = (int(part) for part in path.split("_")[-3:])
    net = Ensemble(n_columns=columns, column_dim=col_dim, channels=channels)
    net.load_state_dict(torch.load(path))
    net.eval()
    # Freeze every weight: the loaded model is used for inference only.
    for param in net.parameters():
        param.requires_grad_(False)
    print("model loaded")
    return net
|
python
|
"""D-Bus interface objects."""
from .systemd import Systemd
from .hostname import Hostname
from .rauc import Rauc
from ..coresys import CoreSysAttributes
class DBusManager(CoreSysAttributes):
    """A DBus Interface handler."""

    def __init__(self, coresys):
        """Initialize D-Bus interface."""
        self.coresys = coresys
        # Concrete interface wrappers; not connected until load() runs.
        self._systemd = Systemd()
        self._hostname = Hostname()
        self._rauc = Rauc()

    @property
    def systemd(self):
        """Return the systemd interface."""
        return self._systemd

    @property
    def hostname(self):
        """Return the hostname interface."""
        return self._hostname

    @property
    def rauc(self):
        """Return the rauc interface."""
        return self._rauc

    async def load(self):
        """Connect interfaces to D-Bus."""
        # Connects run sequentially; each awaits before the next starts.
        await self.systemd.connect()
        await self.hostname.connect()
        await self.rauc.connect()
|
python
|
from enforce_typing import enforce_types
from engine import AgentBase
from agents.PublisherAgent import PublisherAgent
from agents.SpeculatorAgent import StakerspeculatorAgent
from agents.DataconsumerAgent import DataconsumerAgent
@enforce_types
class DataecosystemAgent(AgentBase.AgentBaseNoEvm):
    """Will operate as a high-fidelity replacement for MarketplacesAgents,
    when it's ready."""

    def takeStep(self, state):
        # Each sub-agent type is created at most once per simulation
        # (see the _doCreate* predicates below).
        if self._doCreatePublisherAgent(state):
            self._createPublisherAgent(state)

        if self._doCreateStakerspeculatorAgent(state):
            self._createStakerspeculatorAgent(state)

        if self._doCreateDataconsumerAgent(state):
            self._createDataconsumerAgent(state)

    @staticmethod
    def _doCreatePublisherAgent(state) -> bool:
        # magic number: rule - only create if no agents so far
        return not state.publisherAgents()

    def _createPublisherAgent(self, state) -> None:  # pylint: disable=no-self-use
        name = "foo_publisher"
        USD = 0.0  # magic number
        OCEAN = 1000.0  # magic number
        new_agent = PublisherAgent(name=name, USD=USD, OCEAN=OCEAN)
        state.addAgent(new_agent)

    @staticmethod
    def _doCreateStakerspeculatorAgent(state) -> bool:
        # magic number: rule - only create if no agents so far
        return not state.stakerspeculatorAgents()

    def _createStakerspeculatorAgent(  # pylint: disable=no-self-use
        self, state
    ) -> None:
        name = "foo_stakerspeculator"
        USD = 0.0  # magic number
        OCEAN = 1000.0  # magic number
        new_agent = StakerspeculatorAgent(name=name, USD=USD, OCEAN=OCEAN)
        state.addAgent(new_agent)

    @staticmethod
    def _doCreateDataconsumerAgent(state) -> bool:
        # magic number: rule - only create if no agents so far
        # NOTE(review): "dataconumerAgents" looks like a typo of
        # "dataconsumerAgents" — confirm against the state API before
        # renaming (the state object may carry the same typo).
        return not state.dataconumerAgents()

    def _createDataconsumerAgent(self, state) -> None:  # pylint: disable=no-self-use
        name = "foo_dataconsumer"
        USD = 0.0  # magic number
        OCEAN = 1000.0  # magic number
        new_agent = DataconsumerAgent(name=name, USD=USD, OCEAN=OCEAN)
        state.addAgent(new_agent)
|
python
|
from django.core.management.commands.test import Command as TestCommand
from jsdir.core import JSDir
class Command(TestCommand):
    """Django `test` management command that enables JSDir's
    staticfiles-finders mode before delegating to the stock command."""

    def __init__(self):
        JSDir.set_use_finders(True)  # sets the value only for this thread
        super(Command, self).__init__()
|
python
|
# Edge detection demo (TensorFlow 1.x input-queue API): reads one JPEG,
# convolves it with a 3x3 Laplacian-style kernel applied per RGB channel,
# and saves the activation map as a PNG.
import matplotlib as mil
import tensorflow as tf
from matplotlib import pyplot

fig = pyplot.gcf()
fig.set_size_inches(4, 4)

sess = tf.InteractiveSession()
image_filename = "/home/ubuntu/Downloads/n02107142_16917.jpg"
filename_queue = tf.train.string_input_producer([image_filename])  # list of files to read
# Fix: the original created an extra `reader = tf.WholeFileReader()` here
# that was never used (image_reader below is the one that reads) — removed.
try:
    image_reader = tf.WholeFileReader()
    _, image_file = image_reader.read(filename_queue)
    image = tf.image.decode_jpeg(image_file)
    print(image)
except Exception as e:
    # NOTE(review): if graph construction fails, `image` stays undefined
    # and the convert_image_dtype call below will raise NameError.
    print(e)
# NOTE(review): initialize_all_variables() is the deprecated TF1 alias of
# global_variables_initializer() — confirm before upgrading TF versions.
sess.run(tf.initialize_all_variables())
coord = tf.train.Coordinator()
threads = tf.train.start_queue_runners(coord=coord)
# Add a batch dimension and scale pixel values to float32.
image_batch = tf.image.convert_image_dtype(tf.expand_dims(image, 0), tf.float32, saturate=False)

# In[8]:
# 3x3 kernel: -1 neighbors, +8 center, applied independently to each of
# the three input channels (the diagonal [c][c] structure).
kernel = tf.constant([
    [
        [[ -1., 0., 0.], [ 0., -1., 0.], [ 0., 0., -1.]],
        [[ -1., 0., 0.], [ 0., -1., 0.], [ 0., 0., -1.]],
        [[ -1., 0., 0.], [ 0., -1., 0.], [ 0., 0., -1.]]
    ],
    [
        [[ -1., 0., 0.], [ 0., -1., 0.], [ 0., 0., -1.]],
        [[ 8., 0., 0.], [ 0., 8., 0.], [ 0., 0., 8.]],
        [[ -1., 0., 0.], [ 0., -1., 0.], [ 0., 0., -1.]]
    ],
    [
        [[ -1., 0., 0.], [ 0., -1., 0.], [ 0., 0., -1.]],
        [[ -1., 0., 0.], [ 0., -1., 0.], [ 0., 0., -1.]],
        [[ -1., 0., 0.], [ 0., -1., 0.], [ 0., 0., -1.]]
    ]
])

conv2d = tf.nn.conv2d(image_batch, kernel, [1, 1, 1, 1], padding="SAME")
# ReLU then clip at 255 so the result is displayable as an image.
activation_map = sess.run(tf.minimum(tf.nn.relu(conv2d), 255))

fig = pyplot.gcf()
pyplot.imshow(activation_map[0], interpolation='nearest')
fig.set_size_inches(4, 4)
fig.savefig("./example-edge-detection.png")
#pyplot.show()
|
python
|
#
# Copyright (C) 2014-2015 UAVCAN Development Team <uavcan.org>
#
# This software is distributed under the terms of the MIT License.
#
# Author: Ben Dyer <[email protected]>
# Pavel Kirienko <[email protected]>
#
from __future__ import division, absolute_import, print_function, unicode_literals
import sys
import time
import math
import copy
import struct
import functools
# --- Python 2/3 compatibility shims ---------------------------------------
try:
    import collections.abc      # Python 3
    MutableSequence = collections.abc.MutableSequence
except ImportError:
    import collections          # Python 2
    MutableSequence = collections.MutableSequence

import uavcan
import uavcan.dsdl as dsdl
import uavcan.dsdl.common as common

try:
    long        # Python 2
except NameError:
    long = int  # Python 3

if sys.version_info[0] < 3:
    bchr = chr
else:
    # On Python 3, chr() returns str; bytes([x]) is the single-byte
    # bytes equivalent.
    def bchr(x):
        return bytes([x])
def get_uavcan_data_type(obj):
    """Return the DSDL type descriptor attached to a value object."""
    # noinspection PyProtectedMember
    return obj._type


def is_union(obj):
    """True if the CompoundValue is a DSDL union; raises for non-compounds."""
    if not isinstance(obj, CompoundValue):
        raise ValueError('Only CompoundValue can be union')
    # noinspection PyProtectedMember
    return obj._is_union


def get_active_union_field(obj):
    """Name of the union field currently selected, or None if unset."""
    if not is_union(obj):
        raise ValueError('Object is not a union')
    # noinspection PyProtectedMember
    return obj._union_field


def switch_union_field(obj, value):
    """Select which union field is active (by name)."""
    if not is_union(obj):
        raise ValueError('Object is not a union')
    # noinspection PyProtectedMember
    obj._union_field = value


def get_fields(obj):
    """Ordered mapping of field name -> value object of a CompoundValue."""
    if not isinstance(obj, CompoundValue):
        raise ValueError('Only CompoundValue can have fields')
    # noinspection PyProtectedMember
    return obj._fields


def get_constants(obj):
    """Mapping of constant name -> value of a CompoundValue's type."""
    if not isinstance(obj, CompoundValue):
        raise ValueError('Only CompoundValue can have constants')
    # noinspection PyProtectedMember
    return obj._constants


def is_request(obj):
    """True if a service CompoundValue represents the request side."""
    # noinspection PyProtectedMember
    return obj._mode == 'request'


def is_response(obj):
    """True if a service CompoundValue represents the response side."""
    # noinspection PyProtectedMember
    return obj._mode == 'response'
def bits_from_bytes(s):
    """Render a byte sequence as a '0'/'1' string, MSB first per byte."""
    chunks = []
    for byte in s:
        chunks.append(format(byte, "08b"))
    return "".join(chunks)
def bytes_from_bits(s):
    """Convert a '0'/'1' string to a bytearray, zero-padding the tail
    to a full byte boundary."""
    remainder = len(s) % 8
    if remainder:
        s += "0" * (8 - remainder)
    out = bytearray()
    for i in range(0, len(s), 8):
        out.append(int(s[i:i + 8], 2))
    return out
def be_from_le_bits(s, bitlen):
    """Reorder a little-endian bit string (LSB byte first) into
    big-endian byte order, keeping only the first bitlen bits."""
    if len(s) < bitlen:
        raise ValueError("Not enough bits; need {0} but got {1}".format(bitlen, len(s)))
    s = s[:bitlen]  # drop trailing bits beyond bitlen (no-op if equal)
    byte_chunks = [s[i:i + 8] for i in range(0, len(s), 8)]
    byte_chunks.reverse()
    return "".join(byte_chunks)
def le_from_be_bits(s, bitlen):
    """Reorder a big-endian bit string into little-endian byte order
    (LSB byte first), keeping only the last bitlen bits."""
    if len(s) < bitlen:
        raise ValueError("Not enough bits; need {0} but got {1}".format(bitlen, len(s)))
    s = s[len(s) - bitlen:]  # keep the least-significant bitlen bits
    chunks = []
    for i in range(len(s), 0, -8):
        chunks.append(s[max(0, i - 8):i])
    return "".join(chunks)
def format_bits(s):
    """Insert a space between successive 8-bit groups for readability."""
    groups = (s[i:i + 8] for i in range(0, len(s), 8))
    return " ".join(groups)
def union_tag_bits_from_num_elements(num_elements):
    """Number of bits needed for a union tag addressing num_elements fields.

    Equivalent to ceil(log2(num_elements)), but computed with exact integer
    arithmetic: the original math.ceil(math.log(n, 2)) is subject to
    floating-point rounding and can be off by one for large inputs.
    """
    return (num_elements - 1).bit_length()
def array_len_bits_from_max_size(max_size):
    """Bits needed to encode a dynamic-array length in [0, max_size].

    Equivalent to ceil(log2(max_size + 1)), computed with exact integer
    arithmetic to avoid floating-point rounding in math.log.
    """
    return max_size.bit_length()
def enum_mark_last(iterable, start=0):
    """
    Returns a generator over iterable that tells whether the current item is the last one.
    Yields (index, is_last, item) triples; index counts from `start`.
    Usage:
    >>> iterable = range(10)
    >>> for index, is_last, item in enum_mark_last(iterable):
    >>>     print(index, item, end='\n' if is_last else ', ')
    """
    iterator = iter(iterable)
    index = start
    try:
        pending = next(iterator)
    except StopIteration:
        # Empty input: yield nothing at all.
        return
    for current in iterator:
        # `pending` has a successor, so it cannot be the last item.
        yield index, False, pending
        pending = current
        index += 1
    yield index, True, pending
class Float32IntegerUnion(object):
    """Reinterprets the same four bytes as either a uint32 or a float32,
    mimicking the C/C++ union:

        union FloatIntegerUnion
        {
            std::uint32_t u;
            float f;
        };

    This is madness.
    """
    def __init__(self, integer=None, floating_point=None):
        # Native-endian 4-byte backing store, zero-initialized.
        self._bytes = struct.pack("=L", 0)
        if integer is not None:
            assert floating_point is None
            self.u = int(integer)
        if floating_point is not None:
            self.f = float(floating_point)

    @property
    def f(self):
        """The backing bytes read as a native float32."""
        (value,) = struct.unpack("=f", self._bytes)
        return value

    @f.setter
    def f(self, value):
        assert isinstance(value, float)
        self._bytes = struct.pack("=f", value)

    @property
    def u(self):
        """The backing bytes read as a native uint32."""
        (value,) = struct.unpack("=I", self._bytes)
        return value

    @u.setter
    def u(self, value):
        assert isinstance(value, (int, long))
        self._bytes = struct.pack("=I", value)
def f16_from_f32(float32):
    """Convert a Python float to an IEEE-754 binary16 bit pattern (int).

    Uses float/int bit punning via Float32IntegerUnion; the statement
    order mirrors the C++ original exactly and is semantically load-bearing.
    """
    # Directly translated from libuavcan's implementation in C++
    f32infty = Float32IntegerUnion(integer=255 << 23)
    f16infty = Float32IntegerUnion(integer=31 << 23)
    magic = Float32IntegerUnion(integer=15 << 23)
    inval = Float32IntegerUnion(floating_point=float32)
    sign_mask = 0x80000000
    round_mask = ~0xFFF
    # Strip the sign so the magnitude can be processed alone.
    sign = inval.u & sign_mask
    inval.u ^= sign
    if inval.u >= f32infty.u:  # Inf or NaN (all exponent bits set)
        out = 0x7FFF if inval.u > f32infty.u else 0x7C00
    else:
        # Round mantissa, rescale the exponent via float multiply, then
        # clamp to half-precision infinity on overflow.
        inval.u &= round_mask
        inval.f *= magic.f
        inval.u -= round_mask
        if inval.u > f16infty.u:
            inval.u = f16infty.u  # Clamp to signed infinity if overflowed
        out = (inval.u >> 13) & 0xFFFF  # Take the bits!
    # Reattach the sign in the half-precision position.
    return out | (sign >> 16) & 0xFFFF
def f32_from_f16(float16):
    """Convert an IEEE-754 binary16 bit pattern (int) to a Python float.

    Inverse of f16_from_f32; relies on the same float/int bit punning and
    preserves the C++ original's statement order.
    """
    # Directly translated from libuavcan's implementation in C++
    magic = Float32IntegerUnion(integer=(254 - 15) << 23)
    was_inf_nan = Float32IntegerUnion(integer=(127 + 16) << 23)
    out = Float32IntegerUnion(integer=(float16 & 0x7FFF) << 13)  # exponent/mantissa bits
    out.f *= magic.f  # exponent adjust
    if out.f >= was_inf_nan.f:  # make sure Inf/NaN survive
        out.u |= 255 << 23
    out.u |= (float16 & 0x8000) << 16  # sign bit
    return out.f
def cast(value, dtype):
    """Coerce value into the representable range of the DSDL primitive dtype.

    Saturated mode clamps to the type's value range; truncated floats
    overflow to +/-inf; truncated integers are masked to the type's bit
    width. NaN comparisons are always False, so NaN passes through the
    saturated branch unchanged — this behavior is intentional.
    """
    if dtype.cast_mode == dsdl.PrimitiveType.CAST_MODE_SATURATED:
        if value > dtype.value_range[1]:
            value = dtype.value_range[1]
        elif value < dtype.value_range[0]:
            value = dtype.value_range[0]
        return value
    elif dtype.cast_mode == dsdl.PrimitiveType.CAST_MODE_TRUNCATED and dtype.kind == dsdl.PrimitiveType.KIND_FLOAT:
        # Out-of-range non-NaN floats become infinities of the right sign.
        if not math.isnan(value) and value > dtype.value_range[1]:
            value = float("+inf")
        elif not math.isnan(value) and value < dtype.value_range[0]:
            value = float("-inf")
        return value
    elif dtype.cast_mode == dsdl.PrimitiveType.CAST_MODE_TRUNCATED:
        # Integer truncation: keep only the low bitlen bits.
        return value & ((1 << dtype.bitlen) - 1)
    else:
        raise ValueError("Invalid cast_mode: " + repr(dtype))
class BaseValue(object):
    """Common machinery for DSDL value objects: holds the DSDL type
    descriptor and the value's payload as a big-endian '0'/'1' string."""
    # noinspection PyUnusedLocal
    def __init__(self, _uavcan_type, *_args, **_kwargs):
        self._type = _uavcan_type
        self._bits = None  # big-endian bit string, or None while unset

    def _unpack(self, stream, tao):
        """Consume this value's bits from a little-endian bit stream and
        return the remaining stream; tao (tail array optimization) is
        unused at this level."""
        if self._type.bitlen:
            self._bits = be_from_le_bits(stream, self._type.bitlen)
            return stream[self._type.bitlen:]
        else:
            return stream

    def _pack(self, tao):
        """Serialize to a little-endian bit string; an unset value packs
        as all-zero bits."""
        if self._bits:
            return le_from_be_bits(self._bits, self._type.bitlen)
        else:
            return "0" * self._type.bitlen
class VoidValue(BaseValue):
    """Padding field: occupies bitlen bits on the wire, stores nothing."""

    def _unpack(self, stream, tao):
        # Skip over the void bits; nothing is retained.
        bitlen = self._type.bitlen
        return stream[bitlen:]

    def _pack(self, tao):
        # Void fields always serialize as zero bits.
        return self._type.bitlen * "0"
class PrimitiveValue(BaseValue):
    """DSDL primitive (bool / signed / unsigned int / float) whose payload
    is stored as a big-endian bit string in self._bits."""
    def __init__(self, _uavcan_type, *args, **kwargs):
        super(PrimitiveValue, self).__init__(_uavcan_type, *args, **kwargs)
        # Default initialization
        self.value = 0

    def __repr__(self):
        return repr(self.value)

    @property
    def value(self):
        """Decode the stored bits according to the DSDL kind; returns
        None if the value was never set."""
        if not self._bits:
            return None
        int_value = int(self._bits, 2)
        if self._type.kind == dsdl.PrimitiveType.KIND_BOOLEAN:
            return bool(int_value)
        elif self._type.kind == dsdl.PrimitiveType.KIND_UNSIGNED_INT:
            return int_value
        elif self._type.kind == dsdl.PrimitiveType.KIND_SIGNED_INT:
            # Two's complement sign extension.
            if int_value >= (1 << (self._type.bitlen - 1)):
                int_value = -((1 << self._type.bitlen) - int_value)
            return int_value
        elif self._type.kind == dsdl.PrimitiveType.KIND_FLOAT:
            # Reinterpret the raw bits as IEEE-754; 16-bit values go
            # through the local half-float converter.
            if self._type.bitlen == 16:
                return f32_from_f16(int_value)
            elif self._type.bitlen == 32:
                return struct.unpack("<f", struct.pack("<L", int_value))[0]
            elif self._type.bitlen == 64:
                return struct.unpack("<d", struct.pack("<Q", int_value))[0]
            else:
                raise ValueError('Bad float')

    @value.setter
    def value(self, new_value):
        """Encode new_value into bits, applying the type's cast mode
        (saturation/truncation) first."""
        if new_value is None:
            raise ValueError("Can't serialize a None value")
        elif self._type.kind == dsdl.PrimitiveType.KIND_BOOLEAN:
            self._bits = "1" if new_value else "0"
        elif self._type.kind == dsdl.PrimitiveType.KIND_UNSIGNED_INT:
            new_value = cast(new_value, self._type)
            self._bits = format(new_value, "0" + str(self._type.bitlen) + "b")
        elif self._type.kind == dsdl.PrimitiveType.KIND_SIGNED_INT:
            new_value = cast(new_value, self._type)
            if new_value < 0:  # Computing two's complement for negatives
                new_value += 2 ** self._type.bitlen
            self._bits = format(new_value, "0" + str(self._type.bitlen) + "b")
        elif self._type.kind == dsdl.PrimitiveType.KIND_FLOAT:
            new_value = cast(new_value, self._type)
            # Convert to the raw IEEE-754 integer representation.
            if self._type.bitlen == 16:
                int_value = f16_from_f32(new_value)
            elif self._type.bitlen == 32:
                int_value = struct.unpack("<L", struct.pack("<f", new_value))[0]
            elif self._type.bitlen == 64:
                int_value = struct.unpack("<Q", struct.pack("<d", new_value))[0]
            else:
                raise ValueError('Bad float, no donut')
            self._bits = format(int_value, "0" + str(self._type.bitlen) + "b")
# noinspection PyProtectedMember
class ArrayValue(BaseValue, MutableSequence):
    """DSDL array value (static or dynamic) behaving as a mutable sequence.

    Dynamic arrays serialize a length prefix unless the tail array
    optimization (tao) applies; string-like arrays additionally support
    encode()/decode().
    """
    def __init__(self, _uavcan_type, *args, **kwargs):
        super(ArrayValue, self).__init__(_uavcan_type, *args, **kwargs)

        # Pick the constructor for elements based on the element type.
        if isinstance(self._type.value_type, dsdl.PrimitiveType):
            self.__item_ctor = functools.partial(PrimitiveValue, self._type.value_type)
        elif isinstance(self._type.value_type, dsdl.ArrayType):
            self.__item_ctor = functools.partial(ArrayValue, self._type.value_type)
        elif isinstance(self._type.value_type, dsdl.CompoundType):
            self.__item_ctor = functools.partial(CompoundValue, self._type.value_type)

        # Static arrays are created at full length; dynamic ones grow.
        if self._type.mode == dsdl.ArrayType.MODE_STATIC:
            self.__items = list(self.__item_ctor() for _ in range(self._type.max_size))
        else:
            self.__items = []

    def __repr__(self):
        return "ArrayValue(type={0!r}, items={1!r})".format(self._type, self.__items)

    def __str__(self):
        if self._type.is_string_like:
            # noinspection PyBroadException
            try:
                return self.decode()
            except Exception:
                # Fall back to repr if the bytes are not valid UTF-8.
                pass
        return self.__repr__()

    def __getitem__(self, idx):
        # Primitives are unwrapped to their Python value; unset ones read as 0.
        if isinstance(self.__items[idx], PrimitiveValue):
            return self.__items[idx].value if self.__items[idx]._bits else 0
        else:
            return self.__items[idx]

    def __setitem__(self, idx, value):
        if idx >= self._type.max_size:
            raise IndexError("Index {0} too large (max size {1})".format(idx, self._type.max_size))
        if isinstance(self._type.value_type, dsdl.PrimitiveType):
            self.__items[idx].value = value
        else:
            self.__items[idx] = value

    def __delitem__(self, idx):
        del self.__items[idx]

    def __len__(self):
        return len(self.__items)

    def __eq__(self, other):
        # NOTE(review): defining __eq__ without __hash__ makes instances
        # unhashable on Python 3 — presumably intended for value objects.
        if isinstance(other, str):
            # Fast path to compare with string
            return self.decode() == other
        else:
            return list(self) == other

    def clear(self):
        # Drain via pop() so MutableSequence bookkeeping stays consistent.
        try:
            while True:
                self.pop()
        except IndexError:
            pass

    def new_item(self):
        """Construct a fresh, unattached element of this array's type."""
        return self.__item_ctor()

    def insert(self, idx, value):
        if idx >= self._type.max_size:
            raise IndexError("Index {0} too large (max size {1})".format(idx, self._type.max_size))
        elif len(self) == self._type.max_size:
            raise IndexError("Array already full (max size {0})".format(self._type.max_size))
        if isinstance(self._type.value_type, dsdl.PrimitiveType):
            # Wrap raw Python values in a PrimitiveValue element.
            new_item = self.__item_ctor()
            new_item.value = value
            self.__items.insert(idx, new_item)
        else:
            self.__items.insert(idx, value)

    def _unpack(self, stream, tao):
        """Deserialize from a little-endian bit stream. With tao (tail
        array optimization) and byte-aligned elements, items are read
        until the stream runs dry; otherwise a length prefix is read."""
        if self._type.mode == dsdl.ArrayType.MODE_STATIC:
            for _, last, i in enum_mark_last(range(self._type.max_size)):
                stream = self.__items[i]._unpack(stream, tao and last)
        elif tao and self._type.value_type.get_min_bitlen() >= 8:
            del self[:]
            while len(stream) >= 8:
                new_item = self.__item_ctor()
                stream = new_item._unpack(stream, False)
                self.__items.append(new_item)
            stream = ''
        else:
            del self[:]
            count_width = array_len_bits_from_max_size(self._type.max_size)
            count = int(be_from_le_bits(stream[0:count_width], count_width), 2)
            stream = stream[count_width:]
            for _, last, i in enum_mark_last(range(count)):
                new_item = self.__item_ctor()
                stream = new_item._unpack(stream, tao and last)
                self.__items.append(new_item)
        return stream

    def _pack(self, tao):
        """Serialize to a little-endian bit string, mirroring _unpack."""
        self.__items = self.__items[:self._type.max_size]        # Constrain max len
        if self._type.mode == dsdl.ArrayType.MODE_STATIC:
            while len(self) < self._type.max_size:               # Constrain min len
                self.__items.append(self.new_item())
            return ''.join(i._pack(tao and last) for _, last, i in enum_mark_last(self.__items))
        elif tao and self._type.value_type.get_min_bitlen() >= 8:
            # Tail array optimization: no length prefix is emitted.
            return ''.join(i._pack(False) for i in self.__items)
        else:
            count_width = array_len_bits_from_max_size(self._type.max_size)
            count = le_from_be_bits(format(len(self), '0{0:1d}b'.format(count_width)), count_width)
            return count + ''.join(i._pack(tao and last) for _, last, i in enum_mark_last(self.__items))

    def from_bytes(self, value):
        """Replace the contents with the bytes of `value` (one per item)."""
        del self[:]
        for byte in bytearray(value):
            self.append(byte)

    def to_bytes(self):
        """Return the set items as a bytes object (unset items skipped)."""
        return bytes(bytearray(item.value for item in self.__items if item._bits))

    def encode(self, value, errors='strict'):
        """UTF-8-encode a text value into this string-like array."""
        if not self._type.is_string_like:
            raise ValueError('encode() can be used only with string-like arrays')
        del self[:]
        value = bytearray(value, encoding="utf-8", errors=errors)
        for byte in value:
            self.append(byte)

    def decode(self, encoding="utf-8"):
        """Decode this string-like array's bytes back into text."""
        if not self._type.is_string_like:
            raise ValueError('decode() can be used only with string-like arrays')
        return bytearray(item.value for item in self.__items if item._bits).decode(encoding)
# noinspection PyProtectedMember
class CompoundValue(BaseValue):
    """Value object for DSDL compound types (messages and service frames).

    Fields are exposed as attributes; DSDL constants are read-only
    attributes. For union types only one field (the "active" one) may be
    read or written at a time; touching a field selects it as active.
    """
    def __init__(self, _uavcan_type, _mode=None, *args, **kwargs):
        """Build field/constant tables for the given DSDL type.

        _mode -- 'request' or 'response' for service types; must be None
        for message types. Keyword arguments initialize fields by name.
        """
        # Write through __dict__ directly to bypass our own __setattr__
        # while the field/constant tables are still being built.
        self.__dict__["_fields"] = collections.OrderedDict()
        self.__dict__["_constants"] = {}
        super(CompoundValue, self).__init__(_uavcan_type, *args, **kwargs)
        if self._type.kind == dsdl.CompoundType.KIND_SERVICE:
            if _mode == "request":
                source_fields = self._type.request_fields
                source_constants = self._type.request_constants
                self._is_union = self._type.request_union
            elif _mode == "response":
                source_fields = self._type.response_fields
                source_constants = self._type.response_constants
                self._is_union = self._type.response_union
            else:
                raise ValueError("mode must be either 'request' or 'response' for service types")
        else:
            if _mode is not None:
                raise ValueError("mode is not applicable for message types")
            source_fields = self._type.fields
            source_constants = self._type.constants
            self._is_union = self._type.union
        self._mode = _mode
        # For unions: name of the currently selected field, or None if unset.
        self._union_field = None
        for constant in source_constants:
            self._constants[constant.name] = constant.value
        # Instantiate a value wrapper per field; void fields get synthetic names.
        for idx, field in enumerate(source_fields):
            if isinstance(field.type, dsdl.VoidType):
                self._fields["_void_{0}".format(idx)] = VoidValue(field.type)
            elif isinstance(field.type, dsdl.PrimitiveType):
                self._fields[field.name] = PrimitiveValue(field.type)
            elif isinstance(field.type, dsdl.ArrayType):
                self._fields[field.name] = ArrayValue(field.type)
            elif isinstance(field.type, dsdl.CompoundType):
                self._fields[field.name] = CompoundValue(field.type)
        # Apply field initializers passed as keyword arguments.
        for name, value in kwargs.items():
            if name.startswith('_'):
                raise NameError('%r is not a valid field name' % name)
            setattr(self, name, value)
    def __repr__(self):
        """Render as 'full.type.name(field=value, ...)'; void fields are hidden."""
        if self._is_union:
            # Show only the active union field (or the first field if unset).
            field = self._union_field or list(self._fields.keys())[0]
            fields = "{0}={1!r}".format(field, self._fields[field])
        else:
            fields = ", ".join("{0}={1!r}".format(f, v) for f, v in self._fields.items() if not f.startswith("_void_"))
        return "{0}({1})".format(self._type.full_name, fields)
    def __copy__(self):
        # Shallow copy: field objects are shared with the original.
        # http://stackoverflow.com/a/15774013/1007777
        cls = self.__class__
        result = cls.__new__(cls)
        result.__dict__.update(self.__dict__)
        return result
    def __deepcopy__(self, memo):
        # http://stackoverflow.com/a/15774013/1007777
        cls = self.__class__
        result = cls.__new__(cls)
        memo[id(self)] = result
        for k, v in self.__dict__.items():
            # noinspection PyArgumentList
            result.__dict__[k] = copy.deepcopy(v, memo)
        return result
    def __getattr__(self, attr):
        """Resolve constants and fields; primitive fields unwrap to raw values."""
        if attr in self._constants:
            return self._constants[attr]
        elif attr in self._fields:
            if self._is_union:
                if self._union_field and self._union_field != attr:
                    raise AttributeError(attr)
                else:
                    # Reading a union field selects it as the active member.
                    self._union_field = attr
            if isinstance(self._fields[attr], PrimitiveValue):
                return self._fields[attr].value
            else:
                return self._fields[attr]
        else:
            raise AttributeError(attr)
    def __setattr__(self, attr, value):
        """Assign to a field by name; constants are read-only.

        Compound field values are copied; array fields accept strings
        (encoded as UTF-8) or iterables of items.
        """
        if attr in self._constants:
            raise AttributeError(attr + " is read-only")
        elif attr in self._fields:
            if self._is_union:
                if self._union_field and self._union_field != attr:
                    raise AttributeError(attr)
                else:
                    # Writing a union field selects it as the active member.
                    self._union_field = attr
            # noinspection PyProtectedMember
            attr_type = self._fields[attr]._type
            if isinstance(attr_type, dsdl.PrimitiveType):
                self._fields[attr].value = value
            elif isinstance(attr_type, dsdl.CompoundType):
                if not isinstance(value, CompoundValue):
                    raise AttributeError('Invalid type of the value, expected CompoundValue, got %r' % type(value))
                if attr_type.full_name != get_uavcan_data_type(value).full_name:
                    raise AttributeError('Incompatible type of the value, expected %r, got %r' %
                                         (attr_type.full_name, get_uavcan_data_type(value).full_name))
                self._fields[attr] = copy.copy(value)
            elif isinstance(attr_type, dsdl.ArrayType):
                self._fields[attr].clear()
                try:
                    if isinstance(value, str):
                        self._fields[attr].encode(value)
                    else:
                        for item in value:
                            self._fields[attr].append(item)
                except Exception as ex:
                    # We should be using 'raise from' here, but unfortunately we have to be compatible with 2.7
                    raise AttributeError('Array field could not be constructed from the provided value', ex)
            else:
                raise AttributeError(attr + " cannot be set directly")
        else:
            super(CompoundValue, self).__setattr__(attr, value)
    def _unpack(self, stream, tao=True):
        """Deserialize fields from the bit string; return the remaining bits."""
        if self._is_union:
            # A union is encoded as a tag selecting the field, then the field itself.
            tag_len = union_tag_bits_from_num_elements(len(self._fields))
            self._union_field = list(self._fields.keys())[int(stream[0:tag_len], 2)]
            stream = self._fields[self._union_field]._unpack(stream[tag_len:], tao)
        else:
            # TAO may only apply to the last field of the compound.
            for _, last, field in enum_mark_last(self._fields.values()):
                stream = field._unpack(stream, tao and last)
        return stream
    def _pack(self, tao=True):
        """Serialize all fields (or the active union field) into a bit string."""
        if self._is_union:
            keys = list(self._fields.keys())
            field = self._union_field or keys[0]
            tag = keys.index(field)
            tag_len = union_tag_bits_from_num_elements(len(self._fields))
            return format(tag, '0' + str(tag_len) + 'b') + self._fields[field]._pack(tao)
        else:
            return ''.join(field._pack(tao and last) for _, last, field in enum_mark_last(self._fields.values()))
class Frame(object):
    """A single raw CAN frame plus optional receive timestamps.

    The tail byte (last payload byte) carries the transfer bookkeeping
    flags: start-of-transfer, end-of-transfer, toggle, and the 5-bit
    Transfer ID.
    """
    def __init__(self, message_id, data, ts_monotonic=None, ts_real=None):  # @ReservedAssignment
        self.message_id = message_id
        self.bytes = bytearray(data)
        self.ts_monotonic = ts_monotonic
        self.ts_real = ts_real
    @property
    def transfer_key(self):
        # The transfer is uniquely identified by the message ID and the 5-bit
        # Transfer ID contained in the last byte of the frame payload.
        transfer_id = (self.bytes[-1] & 0x1F) if self.bytes else None
        return self.message_id, transfer_id
    @property
    def toggle(self):
        """Toggle bit from the tail byte (False for an empty frame)."""
        if not self.bytes:
            return False
        return bool(self.bytes[-1] & 0x20)
    @property
    def end_of_transfer(self):
        """End-of-transfer flag from the tail byte (False for an empty frame)."""
        if not self.bytes:
            return False
        return bool(self.bytes[-1] & 0x40)
    @property
    def start_of_transfer(self):
        """Start-of-transfer flag from the tail byte (False for an empty frame)."""
        if not self.bytes:
            return False
        return bool(self.bytes[-1] & 0x80)
class TransferError(uavcan.UAVCANException):
    """Raised when a transfer fails validation or decoding (bad tail-byte
    flags, unknown data type ID, or CRC mismatch)."""
    pass
class Transfer(object):
    """One UAVCAN transfer: addressing/priority metadata plus an optional payload.

    Handles conversion between the logical payload and the on-wire sequence
    of CAN frames, including tail-byte bookkeeping and the multi-frame
    transfer CRC.
    """
    # Lowest priority value used when the caller does not specify one.
    DEFAULT_TRANSFER_PRIORITY = 31
    def __init__(self,
                 transfer_id=0,
                 source_node_id=0,
                 dest_node_id=None,
                 payload=None,
                 transfer_priority=None,
                 request_not_response=False,
                 service_not_message=False,
                 discriminator=None):
        """Initialize the transfer; if payload is given, serialize it now.

        payload -- a CompoundValue to serialize, or None for an empty transfer.
        discriminator -- random value used by anonymous (node ID 0) frames.
        """
        self.transfer_priority = transfer_priority if transfer_priority is not None else self.DEFAULT_TRANSFER_PRIORITY
        self.transfer_id = transfer_id
        self.source_node_id = source_node_id
        self.dest_node_id = dest_node_id
        self.data_type_signature = 0
        self.request_not_response = request_not_response
        self.service_not_message = service_not_message
        self.discriminator = discriminator
        self.ts_monotonic = None
        self.ts_real = None
        if payload:
            # noinspection PyProtectedMember
            payload_bits = payload._pack()
            # Pad the serialized bit string up to a whole byte boundary.
            if len(payload_bits) & 7:
                payload_bits += "0" * (8 - (len(payload_bits) & 7))
            self.payload = bytes_from_bits(payload_bits)
            self.data_type_id = get_uavcan_data_type(payload).default_dtid
            self.data_type_signature = get_uavcan_data_type(payload).get_data_type_signature()
            self.data_type_crc = get_uavcan_data_type(payload).base_crc
        else:
            self.payload = None
            self.data_type_id = None
            self.data_type_signature = None
            self.data_type_crc = None
        self.is_complete = True if self.payload else False
    def __repr__(self):
        return "Transfer(id={0}, source_node_id={1}, dest_node_id={2}, transfer_priority={3}, payload={4!r})"\
            .format(self.transfer_id, self.source_node_id, self.dest_node_id, self.transfer_priority, self.payload)
    @property
    def message_id(self):
        """Encode the addressing fields into the 29-bit extended CAN message ID."""
        # Common fields
        id_ = (((self.transfer_priority & 0x1F) << 24) |
               (int(self.service_not_message) << 7) |
               (self.source_node_id or 0))
        if self.service_not_message:
            assert 0 <= self.data_type_id <= 0xFF
            assert 1 <= self.dest_node_id <= 0x7F
            # Service frame format
            id_ |= self.data_type_id << 16
            id_ |= int(self.request_not_response) << 15
            id_ |= self.dest_node_id << 8
        elif self.source_node_id == 0:
            assert self.dest_node_id is None
            assert self.discriminator is not None
            # Anonymous message frame format
            id_ |= self.discriminator << 10
            id_ |= (self.data_type_id & 0x3) << 8
        else:
            assert 0 <= self.data_type_id <= 0xFFFF
            # Message frame format
            id_ |= self.data_type_id << 8
        return id_
    @message_id.setter
    def message_id(self, value):
        """Decode the addressing fields from a raw 29-bit CAN message ID."""
        self.transfer_priority = (value >> 24) & 0x1F
        self.service_not_message = bool(value & 0x80)
        self.source_node_id = value & 0x7F
        if self.service_not_message:
            # Service frame format
            self.data_type_id = (value >> 16) & 0xFF
            self.request_not_response = bool(value & 0x8000)
            self.dest_node_id = (value >> 8) & 0x7F
        elif self.source_node_id == 0:
            # Anonymous message frame format
            self.discriminator = (value >> 10) & 0x3FFF
            self.data_type_id = (value >> 8) & 0x3
        else:
            # Message frame format
            self.data_type_id = (value >> 8) & 0xFFFF
    def to_frames(self):
        """Split the serialized payload into a list of Frame objects."""
        out_frames = []
        remaining_payload = self.payload
        # Prepend the transfer CRC to the payload if the transfer requires
        # multiple frames
        if len(remaining_payload) > 7:
            crc = common.crc16_from_bytes(self.payload,
                                          initial=self.data_type_crc)
            remaining_payload = bytearray([crc & 0xFF, crc >> 8]) + remaining_payload
        # Generate the frame sequence
        tail = 0x20  # set toggle bit high so the first frame is emitted with it cleared
        while True:
            # Tail byte contains start-of-transfer, end-of-transfer, toggle, and Transfer ID
            tail = ((0x80 if len(out_frames) == 0 else 0) |
                    (0x40 if len(remaining_payload) <= 7 else 0) |
                    ((tail ^ 0x20) & 0x20) |
                    (self.transfer_id & 0x1F))
            out_frames.append(Frame(message_id=self.message_id, data=remaining_payload[0:7] + bchr(tail)))
            remaining_payload = remaining_payload[7:]
            if not remaining_payload:
                break
        return out_frames
    def from_frames(self, frames):
        """Reassemble and decode this transfer from an ordered frame list.

        Validates tail-byte flags, the Transfer ID, the toggle sequence and
        (for multi-frame transfers) the transfer CRC.
        Raises TransferError on any validation failure.
        """
        # Initialize transfer timestamps from the first frame
        self.ts_monotonic = frames[0].ts_monotonic
        self.ts_real = frames[0].ts_real
        # Validate the flags in the tail byte
        expected_toggle = 0
        expected_transfer_id = frames[0].bytes[-1] & 0x1F
        for idx, f in enumerate(frames):
            tail = f.bytes[-1]
            if (tail & 0x1F) != expected_transfer_id:
                raise TransferError("Transfer ID {0} incorrect, expected {1}".format(tail & 0x1F, expected_transfer_id))
            elif idx == 0 and not (tail & 0x80):
                raise TransferError("Start of transmission not set on frame 0")
            elif idx > 0 and tail & 0x80:
                raise TransferError("Start of transmission set unexpectedly on frame {0}".format(idx))
            elif idx == len(frames) - 1 and not (tail & 0x40):
                raise TransferError("End of transmission not set on last frame")
            elif idx < len(frames) - 1 and (tail & 0x40):
                raise TransferError("End of transmission set unexpectedly on frame {0}".format(idx))
            elif (tail & 0x20) != expected_toggle:
                raise TransferError("Toggle bit value {0} incorrect on frame {1}".format(tail & 0x20, idx))
            expected_toggle ^= 0x20
        self.transfer_id = expected_transfer_id
        self.message_id = frames[0].message_id
        # Concatenate payloads, dropping the per-frame tail byte.
        payload_bytes = bytearray(b''.join(bytes(f.bytes[0:-1]) for f in frames))
        # Find the data type
        if self.service_not_message:
            kind = dsdl.CompoundType.KIND_SERVICE
        else:
            kind = dsdl.CompoundType.KIND_MESSAGE
        datatype = uavcan.DATATYPES.get((self.data_type_id, kind))
        if not datatype:
            raise TransferError("Unrecognised {0} type ID {1}"
                                .format("service" if self.service_not_message else "message", self.data_type_id))
        # For a multi-frame transfer, validate the CRC and frame indexes
        if len(frames) > 1:
            # First two payload bytes are the little-endian transfer CRC.
            transfer_crc = payload_bytes[0] + (payload_bytes[1] << 8)
            payload_bytes = payload_bytes[2:]
            crc = common.crc16_from_bytes(payload_bytes, initial=datatype.base_crc)
            if crc != transfer_crc:
                raise TransferError("CRC mismatch: expected {0:x}, got {1:x} for payload {2!r} (DTID {3:d})"
                                    .format(crc, transfer_crc, payload_bytes, self.data_type_id))
        self.data_type_id = datatype.default_dtid
        self.data_type_signature = datatype.get_data_type_signature()
        self.data_type_crc = datatype.base_crc
        if self.service_not_message:
            self.payload = datatype(_mode="request" if self.request_not_response else "response")
        else:
            self.payload = datatype()
        # noinspection PyProtectedMember
        self.payload._unpack(bits_from_bytes(payload_bytes))
    @property
    def key(self):
        # Uniquely identifies this transfer among concurrently active ones.
        return self.message_id, self.transfer_id
    def is_response_to(self, transfer):
        """Return True if this transfer is the service response matching
        the given request transfer."""
        if (transfer.service_not_message and self.service_not_message and
                transfer.request_not_response and
                not self.request_not_response and
                transfer.dest_node_id == self.source_node_id and
                transfer.source_node_id == self.dest_node_id and
                transfer.data_type_id == self.data_type_id and
                transfer.transfer_id == self.transfer_id):
            return True
        else:
            return False
class TransferManager(object):
    """Reassembles multi-frame transfers from individually received frames."""
    def __init__(self):
        # transfer_key -> list of frames received so far for that transfer
        self.active_transfers = {}
        # transfer_key -> monotonic timestamp of the most recent frame
        self.active_transfer_timestamps = {}

    def receive_frame(self, frame):
        """Register a received frame.

        Returns the complete, ordered frame list when this frame finishes a
        transfer, otherwise None. Frames that neither start a transfer nor
        belong to an active one are ignored.
        """
        result = None
        key = frame.transfer_key
        if key in self.active_transfers or frame.start_of_transfer:
            # If the first frame was received, restart this transfer from scratch
            if frame.start_of_transfer:
                self.active_transfers[key] = []
            self.active_transfers[key].append(frame)
            self.active_transfer_timestamps[key] = time.monotonic()
            # If the last frame of a transfer was received, return its frames
            if frame.end_of_transfer:
                result = self.active_transfers[key]
                del self.active_transfers[key]
                del self.active_transfer_timestamps[key]
        return result

    def remove_inactive_transfers(self, timeout=1.0):
        """Drop partially-received transfers older than `timeout` seconds.

        Bug fix: the original iterated the live ``dict.keys()`` view while
        deleting entries, which raises RuntimeError in Python 3; iterate a
        snapshot of the keys instead.
        """
        now = time.monotonic()
        for key in list(self.active_transfers.keys()):
            if now - self.active_transfer_timestamps[key] > timeout:
                del self.active_transfers[key]
                del self.active_transfer_timestamps[key]
|
python
|
from .binarytree import BinaryTree
class LinkedBinaryTree(BinaryTree):
""" Linked reperesentation of a binary tree structure. """
    class _Node: # Lightweight non-public class for storing a node
        # Fixed slots keep per-node memory small when trees grow large.
        __slots__ = '_element', '_parent', '_left', '_right'
        def __init__(self, element, parent=None, left=None, right=None):
            """Store the element and optional parent/left/right node links."""
            self._element = element
            self._parent = parent
            self._left = left
            self._right = right
class Position(BinaryTree.Position):
""" A concerete class representing the location of a single element. """
def __init__(self, container, node):
""" Constructor should not be invoked by user. """
self._container = container
self._node = node
def element(self):
""" Return the element stored at this position. """
return self._node._element
def __eq__(self, other):
""" Return True if other is a Position representing the same location."""
return type(other) is type(self) and other._nodde is self._node
def _validate(self, p):
"""" Returns associated node if Position p is valid. """
if not isinstance(p, self.Position):
raise TypeError('p must be a proper Position type')
if p._container is not self:
raise ValueError("p does not belong to this container")
if p._node._parent is p._node: # convention for deprecated nodes
raise ValueError("p is no longer valid")
return p._node
def make_postion(self, node):
""" Return Position instance for given Node (or None if no node). """
return self.Position(self, node) if node is not None else None
# --------------------- binary tree constructor ---------------------------------
    def __init__(self):
        """ Create an initial empty binary tree. """
        self._root = None  # topmost _Node, or None while the tree is empty
        self._size = 0     # number of elements currently stored
# ------------------------- public accessors --------------------------------------
    def __len__(self):
        """ Return the number of elements in the tree."""
        # _size is maintained by the mutators, so this is O(1).
        return self._size
def root(self):
""" Return the root Position of the tree (or None if tree is empty). """
return self._root
def parent(self, p):
""" Return the Position p's parent. """
node = self._validate(p)
return self.make_postion(node)
def left(self, p):
""" Return the Position p's left child (or None if no left child). """
node = self._validate(p)
return self.make_postion(node._left)
def right(self, p):
""" Return the Position p's right child (or None if no right child). """
node = self._validate(p)
return self.make_postion(node._right)
def num_children(self, p):
"""" Return the number of children of Position p."""
node = self._validate(p)
count = 0
if node._left is not None: # left child exists:
count += 1
if node._right is not None: # right child exists:
count += 1
return count
def add_root(self, e):
""" Place element e at the root of an empty tree and return new Position.
Raise ValueError if tree is nonEmpty.
"""
|
python
|
import logging
from helper import is_welsh
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
class CoursesBySubject:
    """Groups a flat list of courses into accordion sections keyed by subject.

    Courses with one subject go into 'single' accordions keyed by CAH code;
    courses with several subjects go into 'multiple' accordions keyed by the
    space-joined codes. Rare groups (<=1% of courses) are folded into
    catch-all sections, and codes are replaced by localized labels.
    """

    def __init__(self, mapper, language):
        self.mapper = mapper        # translates CAH subject codes to labels
        self.language = language    # requested display language

    def group(self, courses, limit, offset):
        """Build the full grouped-accordion response for the given courses."""
        logging.debug('group')
        single_course_accordions = {}
        multiple_course_accordions = {}
        institutions = []
        self.add_courses_to_accordions(
            courses,
            single_course_accordions,
            multiple_course_accordions,
            institutions,
        )
        # single courses
        single_course_accordions = self.sort_by_count(single_course_accordions)
        most_common_subject_code = get_first_accordion_subject_code(single_course_accordions)
        most_common_subject_label = self.mapper.get_label(most_common_subject_code)
        combine_most_common_subjects(self.mapper, most_common_subject_code, most_common_subject_label, single_course_accordions)
        self.group_single_courses_that_are_less_than_one_percent(
            courses,
            single_course_accordions,
        )
        self.replace_codes_with_labels(most_common_subject_label, single_course_accordions)
        single_course_accordions = self.sort_by_count(single_course_accordions)
        # multiple courses
        self.group_multiple_courses_that_are_less_than_one_percent(
            courses,
            multiple_course_accordions,
            most_common_subject_code,
        )
        self.replace_codes_with_labels(most_common_subject_label, multiple_course_accordions)
        multiple_course_accordions = sort_alphabetically(multiple_course_accordions)
        self.sort_other_combinations(most_common_subject_label, multiple_course_accordions)
        self.sort_contents(single_course_accordions)
        self.sort_contents(multiple_course_accordions)
        add_number_of_courses(single_course_accordions)
        add_number_of_courses(multiple_course_accordions)
        log_accordions(single_course_accordions, courses)
        log_accordions(multiple_course_accordions, courses)
        return {
            "items": {
                "single_subject_courses": single_course_accordions,
                "multiple_subject_courses": multiple_course_accordions,
            },
            "limit": limit,
            "number_of_items": len(single_course_accordions) + len(multiple_course_accordions),
            "offset": offset,
            "total_number_of_courses": len(courses),
            "total_results": len(institutions),
        }

    def add_courses_to_accordions(self, courses, single_course_accordions, multiple_course_accordions, institutions):
        """Split raw course records into single/multiple accordions and
        collect the distinct institutions seen along the way."""
        logging.debug('add_courses_to_accordions')
        single_courses = {}
        multiple_courses = {}
        for c in courses:
            institution = c[key_course][key_institution]
            # Skip records without a usable institution name.
            if institution[key_pub_ukprn_name] == "not available":
                continue
            add_institution_to_list(institution, institutions)
            course = build_course(c[key_course], institution, self.language)
            sort_results_into_groups(
                course,
                single_courses,
                multiple_courses,
            )
        add_single_courses_to_accordions(
            single_courses,
            single_course_accordions,
        )
        add_multiple_courses_to_accordions(
            multiple_courses,
            multiple_course_accordions,
        )

    def sort_contents(self, accordion):
        """Order each accordion's courses by subject title, then institution."""
        logging.debug('sort_contents')
        self.sort_contents_alphabetically_by_subject(accordion)
        self.sort_contents_alphabetically_by_institution(accordion)

    def sort_contents_alphabetically_by_subject(self, accordion):
        logging.debug('sort_contents_alphabetically_by_subject')
        for key in list(accordion.keys()):
            accordion[key][key_courses] = sorted(accordion[key][key_courses], key=lambda k: self.get_translation(k[key_title]))

    def sort_contents_alphabetically_by_institution(self, accordion):
        logging.debug('sort_contents_alphabetically_by_institution')
        for key in list(accordion.keys()):
            courses = {}
            for course in accordion[key][key_courses]:
                title = self.get_translation(course[key_title])
                group_courses(key, course, title, courses)
            accordion[key][key_courses] = []
            for k, v in courses.items():
                for k2, v2 in v.items():
                    v2 = sorted(v2, key=lambda k3: k3[key_institution][key_pub_ukprn_name])
                    accordion[key][key_courses].extend(v2)

    def get_translation(self, json):
        """Return the title text in the requested language, falling back to
        English (with a warning) when the translation is missing."""
        logging.debug(f'get_translation({self.language})')
        language_name = 'welsh' if is_welsh(self.language) else 'english'
        if not json[language_name]:
            logging.warning(f'missing translation: {json}')
            return json['english']
        return json[language_name]

    def replace_codes_with_labels(self, most_common_subject_label, accordions):
        """Rewrite accordion keys from CAH codes to display labels."""
        logging.debug('replace_codes_with_labels')
        key_other_combinations_with = get_key_other_combinations_with(self.language)
        for codes in list(accordions):
            if codes.startswith(key_other_combinations_with):
                accordions[f'{key_other_combinations_with} {most_common_subject_label}'] = accordions.pop(codes)
                continue
            labels = []
            for code in codes.split():
                if code.startswith('CAH'):
                    labels.append(self.mapper.get_label(code))
            label = wrap_with_course(labels, self.language)
            if codes.startswith('CAH'):
                accordions[label] = accordions.pop(codes)

    def group_multiple_courses_that_are_less_than_one_percent(self, courses, accordions, most_common_subject_code):
        """Fold rare (<=1%) multi-subject groups into catch-all sections."""
        logging.debug('group_multiple_courses_that_are_less_than_one_percent')
        key_other_combinations_with = get_key_other_combinations_with(self.language)
        for key in list(accordions.keys()):
            if most_common_subject_code == key:
                continue
            percentage = len(accordions[key][key_courses]) / len(courses) * 100
            if percentage <= 1:
                if most_common_subject_code in key.split():
                    label = f'{key_other_combinations_with} {most_common_subject_code}'
                    move_course(accordions, key, label)
                else:
                    label = get_key_other_combinations(self.language)
                    move_course(accordions, key, label)

    def sort_other_combinations(self, most_common_subject_label, accordions):
        """Re-insert the catch-all sections so they appear last (3.7+ dicts
        preserve insertion order)."""
        logging.debug('sort_other_combinations')
        key_other_combinations_with = get_key_other_combinations_with(self.language)
        key = f'{key_other_combinations_with} {most_common_subject_label}'
        if accordions.get(key):
            other_combinations_with = accordions[key]
            accordions.pop(key)
            accordions[key] = other_combinations_with
        key_other_combinations = get_key_other_combinations(self.language)
        if accordions.get(key_other_combinations):
            other_combinations = accordions[key_other_combinations]
            accordions.pop(key_other_combinations)
            accordions[key_other_combinations] = other_combinations

    def sort_by_count(self, accordion):
        """Return the accordion re-ordered by descending course count, with
        the 'other subjects' catch-all moved to the end.

        Bug fix: removed a dead list comprehension whose result was discarded.
        """
        logging.debug('sort_by_count')
        keys = accordion.keys()
        sorted_keys = sorted(keys, key=lambda key: len(accordion[key][key_courses]), reverse=True)
        sorted_accordion = {}
        for key in sorted_keys:
            sorted_accordion[key] = accordion[key]
        key_courses_in_other_subjects = get_key_courses_in_other_subjects(self.language)
        if key_courses_in_other_subjects in sorted_accordion:
            # Popping and re-inserting moves the catch-all section to the end.
            sorted_accordion[key_courses_in_other_subjects] = sorted_accordion.pop(key_courses_in_other_subjects)
        return sorted_accordion

    def group_single_courses_that_are_less_than_one_percent(self, courses, accordions):
        """Fold rare (<=1%) single-subject groups into the catch-all section."""
        logging.debug('group_single_courses_that_are_less_than_one_percent')
        key_courses_in_other_subjects = get_key_courses_in_other_subjects(self.language)
        for key in list(accordions.keys()):
            label = key_courses_in_other_subjects
            if label == key:
                continue
            percentage = len(accordions[key][key_courses]) / len(courses) * 100
            if percentage <= 1:
                move_course(accordions, key, label)
def get_key_courses_in_other_subjects(language):
    """Return the localized 'Courses in other subjects' heading."""
    name = get_language_name(language)
    return key_courses_in_other_subjects[name]
def get_key_other_combinations(language):
    """Return the localized 'Other combinations' heading."""
    name = get_language_name(language)
    return key_other_combinations[name]
def get_key_other_combinations_with(language):
    """Return the localized 'Other combinations with' heading prefix."""
    name = get_language_name(language)
    return key_other_combinations_with[name]
def get_language_name(language):
    """Map a language value to the dict key used by the label tables."""
    if is_welsh(language):
        return 'welsh'
    return 'english'
def wrap_with_course(labels, language):
    """Join subject labels and add the localized 'courses' wording."""
    joined = " & ".join(labels)
    if is_welsh(language):
        return f'Cyrsiau {joined}'
    return f'{joined} courses'
def build_course(course, institution, language):
    """Construct the trimmed course payload used by the accordion views."""
    logging.debug('build_course')
    welsh = is_welsh(language)
    institution_body = {
        key_pub_ukprn_name: institution[key_pub_ukprn_welsh_name] if welsh else institution[key_pub_ukprn_name],
        key_pub_ukprn: institution[key_pub_ukprn],
    }
    locations = [location[key_name] for location in course[key_locations]]
    return {
        "country": course["country"]["label"],
        "distance_learning": course["distance_learning"]["label"],
        "foundation_year": course["foundation_year_availability"]["label"],
        "honours_award": course["honours_award_provision"],
        "kis_course_id": course[key_kis_course_id],
        "length_of_course": course["length_of_course"]["label"],
        "mode": course["mode"]["label"],
        "qualification": course["qualification"]["label"],
        "sandwich_year": course["sandwich_year"]["label"],
        "subjects": course[key_subjects],
        "title": course["title"],
        "year_abroad": course["year_abroad"]["label"],
        key_locations: locations,
        key_institution: institution_body,
    }
def add_institution_to_list(institution, institutions):
    """Record the institution's pub_ukprn once in the running list."""
    logging.debug('add_institution_to_list')
    ukprn = institution[key_pub_ukprn]
    if ukprn not in institutions:
        institutions.append(ukprn)
def sort_results_into_groups(course, single_courses, multiple_courses):
    """File the course into the single- or multi-subject map by kis_course_id."""
    logging.debug('sort_results_into_groups')
    subject_count = len(course[key_subjects])
    if subject_count == 1:
        single_courses[course[key_kis_course_id]] = course
    elif subject_count > 1:
        multiple_courses[course[key_kis_course_id]] = course
def add_single_courses_to_accordions(courses, accordions):
    """File each single-subject course under its sole subject code."""
    logging.debug('add_single_courses_to_accordions')
    for course in courses.values():
        code = course[key_subjects][0][key_code]
        add_course_to_accordions(course, code, accordions)
def add_course_to_accordions(course, label, accordions):
    """Append course to the label's bucket, creating it if absent; no duplicates."""
    if label not in accordions:
        accordions[label] = {key_courses: []}
    bucket = accordions[label][key_courses]
    if course not in bucket:
        bucket.append(course)
def add_multiple_courses_to_accordions(courses, accordions):
    """File each multi-subject course under its space-joined code string."""
    logging.debug('add_multiple_courses_to_accordions')
    for course in courses.values():
        codes = [subject[key_code] for subject in course[key_subjects]]
        add_course_to_accordions(course, " ".join(codes), accordions)
def move_course(accordions, key, label):
    """Move every course under `key` into `label` (deduplicated), then drop `key`."""
    if label not in accordions:
        accordions[label] = {key_courses: []}
    destination = accordions[label][key_courses]
    for course in accordions[key][key_courses]:
        if course not in destination:
            destination.append(course)
    accordions.pop(key)
def group_courses(key, course, title, accordions):
    """Bucket the course under accordions[key][title], creating levels as needed."""
    logging.debug('group_courses')
    inner = accordions.get(key)
    if not inner:
        inner = {}
        accordions[key] = inner
    titles = inner.get(title)
    if not titles:
        titles = []
        inner[title] = titles
    titles.append(course)
def sort_alphabetically(accordions):
    """Return a new dict whose keys are in ascending order."""
    logging.debug('sort_alphabetically')
    ordered = sorted(accordions.items(), key=lambda kv: kv[0])
    return dict(ordered)
def get_first_accordion_subject_code(accordions):
    """Return the subject code of the first course in the first accordion."""
    logging.debug('get_first_accordion_subject_code')
    first_key = next(iter(accordions))
    first_course = accordions.get(first_key)[key_courses][0]
    return first_course[key_subjects][0][key_code]
def combine_most_common_subjects(mapper, most_common_subject_code, most_common_subject_label, accordions):
    """Merge an accordion whose code maps to the same label as the most
    common subject into the most common subject's accordion.

    NOTE(review): the `break` stops after the first matching duplicate is
    merged -- presumably at most one other code can share the label;
    confirm against the mapper's data.
    """
    for key in list(accordions.keys()):
        if key != most_common_subject_code:
            if mapper.get_label(key) == most_common_subject_label:
                accordions[most_common_subject_code][key_courses].extend(accordions[key][key_courses])
                accordions.pop(key)
                break
def add_number_of_courses(accordions):
    """Annotate each accordion section with the count of its courses."""
    logging.debug('add_number_of_courses')
    for label, body in accordions.items():
        body[key_number_of_courses] = len(body[key_courses])
def log_accordions(accordions, courses):
    """Log each accordion's size and its percentage share of all courses."""
    logging.info('---------------------------------------')
    total = len(courses)
    for label in accordions:
        count = len(accordions[label][key_courses])
        share = count / total * 100
        logging.info(f'{label}: {count} ({round(share,1)}%)')
# Shared dictionary keys and localized section headings used across this module.
key_code = 'code'
key_course = 'course'
key_courses = 'courses'
key_courses_in_other_subjects = {'english': 'Courses in other subjects', 'welsh': 'Cyrsiau mewn pynciau eraill'}
key_institution = 'institution'
key_institutions = 'institutions'
key_kis_course_id = 'kis_course_id'
key_locations = 'locations'
key_name = 'name'
key_number_of_courses = 'number_of_courses'
key_other_combinations = {'english': 'Other combinations', 'welsh': 'Cyfuniadau arall'}
key_other_combinations_with = {'english': 'Other combinations with', 'welsh': 'Cyfuniadau eraill gyda'}
key_pub_ukprn = 'pub_ukprn'
key_pub_ukprn_name = 'pub_ukprn_name'
key_pub_ukprn_welsh_name = 'pub_ukprn_welsh_name'
key_subjects = 'subjects'
key_title = 'title'
|
python
|
import copy
import numpy
def convert_to_binary(list_of_digits):
    """Interpret a sequence of 0/1 digits as a base-2 number and return the int."""
    bit_string = "".join(str(digit) for digit in list_of_digits)
    return int(bit_string, 2)
with open("data/day3.txt") as f:
    # The entire input forms a matrix of integers, parse it as such.
    matrix = [[int(num) for num in line.rstrip()] for line in f.readlines()]

matrix_transpose = numpy.transpose(matrix)

# `bincount` gives the occurrences of each element in the array.
# `argmax` returns the element having the maximum occurrences.
# `transpose` transposes a given matrix.
max_occurrence = [numpy.bincount(row).argmax() for row in matrix_transpose]
# Complement each bit: 0 -> 1, 1 -> 0.
max_occurrence_complement = [1 - digit for digit in max_occurrence]

gamma = convert_to_binary(max_occurrence)
epsilon = convert_to_binary(max_occurrence_complement)

print("Day 3 - a")
print(gamma * epsilon)

# =======================

# Oxygen generator rating: keep rows whose bit matches the most common bit
# in each successive column (ties resolve to 1) until one row remains.
oxy_matrix = copy.deepcopy(matrix)
for column_index in range(len(matrix_transpose)):
    column = [row[column_index] for row in oxy_matrix]
    bincount = numpy.bincount(column)
    max_element = 1 if len(bincount) > 1 and bincount[1] >= bincount[0] else 0
    # Bug fix: `filter` returns a lazy iterator in Python 3, so the original
    # len() call raised TypeError and the iterator would be exhausted on the
    # next pass -- materialize the result with list().
    oxy_matrix = list(filter(lambda x: x[column_index] == max_element, oxy_matrix))
    if len(oxy_matrix) == 1:
        break
oxy = convert_to_binary(oxy_matrix[0])

# CO2 scrubber rating: keep rows matching the least common bit (ties -> 0).
co2_matrix = copy.deepcopy(matrix)
for column_index in range(len(matrix_transpose)):
    column = [row[column_index] for row in co2_matrix]
    bincount = numpy.bincount(column)
    min_element = 0 if len(bincount) <= 1 or bincount[0] <= bincount[1] else 1
    co2_matrix = list(filter(lambda x: x[column_index] == min_element, co2_matrix))
    if len(co2_matrix) == 1:
        break
co2 = convert_to_binary(co2_matrix[0])

print("Day 3 - b")
print(oxy * co2)
|
python
|
# Copyright 2018 The ops Developers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Sequence
from cirq import ops, circuits, optimizers
from cirq.contrib.paulistring.pauli_string_optimize import (
pauli_string_optimized_circuit)
from cirq.contrib.paulistring.clifford_optimize import (
clifford_optimized_circuit)
def optimized_circuit(circuit: circuits.Circuit,
                      atol: float = 1e-8,
                      repeat: int = 10,
                      merge_interactions: bool = True
                      ) -> circuits.Circuit:
    """Repeatedly apply merge/pauli-string/clifford optimization passes.

    Stops early when a full pass changes neither the circuit length nor
    the CZ count, or after `repeat` iterations.
    """
    circuit = circuits.Circuit(circuit)  # Work on a copy
    for _ in range(repeat):
        length_before = len(circuit)
        cz_before = _cz_count(circuit)
        if merge_interactions:
            optimizers.MergeInteractions(allow_partial_czs=False,
                                         post_clean_up=_optimized_ops,
                                         ).optimize_circuit(circuit)
        pauli_pass = pauli_string_optimized_circuit(
            circuit,
            move_cliffords=False,
            atol=atol)
        candidate = clifford_optimized_circuit(
            pauli_pass,
            atol=atol)
        made_progress = (len(candidate) != length_before
                         or _cz_count(candidate) != cz_before)
        if not made_progress:
            return candidate
        circuit = candidate
    return circuit
def _optimized_ops(operations: Sequence[ops.Operation],
                   atol: float = 1e-8,
                   repeat: int = 10) -> ops.OP_TREE:
    """Post-clean-up callback for MergeInteractions.

    Fix: the parameter was named `ops`, shadowing the `cirq.ops` module
    inside the function body; renamed to `operations` (private helper, only
    invoked positionally as a callback).
    """
    c = circuits.Circuit.from_ops(operations)
    c_opt = optimized_circuit(c, atol, repeat, merge_interactions=False)
    return c_opt.all_operations()
def _cz_count(circuit):
    """Return the number of CZ-family gates in the circuit.

    Bug fix: the original iterated the circuit directly (which yields
    Moments, not operations) and tested `isinstance(op, ops.CZPowGate)`
    (an Operation is never a Gate), so it always returned 0 and the
    CZ-count convergence check in optimized_circuit() could never fire.
    """
    return sum(isinstance(op, ops.GateOperation)
               and isinstance(op.gate, ops.CZPowGate)
               for op in circuit.all_operations())
|
python
|
# -*- coding: utf-8 -*-
"""
KJ-65X9500G の輝度制限と階調特性の関係調査
=========================================
Description.
"""
# import standard libraries
import os
# import third-party libraries
import numpy as np
from colour import write_image
from PIL import Image
from PIL import ImageFont
from PIL import ImageDraw
# import my libraries
import test_pattern_generator2 as tpg
import transfer_functions as tf
# information
__author__ = 'Toru Yoshihara'
__copyright__ = 'Copyright (C) 2019 - Toru Yoshihara'
__license__ = 'New BSD License - https://opensource.org/licenses/BSD-3-Clause'
__maintainer__ = 'Toru Yoshihara'
__email__ = 'toru.ver.11 at-sign gmail.com'
__all__ = []
FONT_PATH = "/usr/share/fonts/opentype/noto/NotoSansMonoCJKjp-Regular.otf"
def convert_from_pillow_to_numpy(img):
    """Convert an 8-bit Pillow image to a 10-bit uint16 ndarray.

    Each code value is multiplied by 4 (a 2-bit left shift) to map the
    8-bit range onto the 10-bit range.
    """
    shift_factor = 2 ** (10 - 8)  # 8 bit -> 10 bit scale
    return np.uint16(np.asarray(img)) * shift_factor
def merge_text(img, txt_img, pos):
    """Overlay the non-black pixels of `txt_img` onto `img` in place.

    `pos` is the text position as (st_pos_h, st_pos_v).

    Implementation note (translated from Japanese): proper alpha-channel
    compositing was abandoned because PIL is 8-bit while the rest of the
    pipeline is 10-16 bit, which would shift BG_COLOR.
    """
    h0, v0 = pos
    v1 = v0 + txt_img.shape[0]
    h1 = h0 + txt_img.shape[1]
    # Somewhat crude approach: find the indices of non-0x00 pixels and
    # overwrite only those pixels in the destination region.
    mask = txt_img > 0
    region = img[v0:v1, h0:h1]
    region[mask] = txt_img[mask]
    img[v0:v1, h0:h1] = region
def merge_each_spec_text(img, pos, font_size, text_img_size, text):
    """Render a description label and composite it onto `img`.

    `pos` is the text position as (st_pos_h, st_pos_v) and
    `text_img_size` is (size_h, size_v).

    Implementation note (translated from Japanese): proper alpha-channel
    compositing was abandoned because PIL is 8-bit while the rest of the
    pipeline is 10-16 bit, which would shift BG_COLOR.
    """
    # Build the text image with Pillow, then hand off to merge_text.
    canvas_w, canvas_h = text_img_size
    text_color = (0x00, 0x60, 0x60)
    back_color = (0x00, 0x00, 0x00)
    canvas = Image.new("RGB", (canvas_w, canvas_h), back_color)
    pen = ImageDraw.Draw(canvas)
    typeface = ImageFont.truetype(FONT_PATH, font_size)
    pen.text((0, 0), text, font=typeface, fill=text_color)
    merge_text(img, convert_from_pillow_to_numpy(canvas), pos)
def research_recognizable_peak_luminance(target_luminance=1000):
    """Create a patch for probing the highest luminance the display can
    still distinguish (translated from Japanese).

    A tile pattern alternating between the ST2084 code value for
    `target_luminance` and full code value 1023 is placed on a black
    1920x1080 background, captioned, and written as a 16-bit TIFF.
    """
    bg_width = 1920
    bg_height = 1080
    # Foreground covers ~10% of the background area (sqrt(0.1) per axis).
    fg_width = int(bg_width * (0.1 ** 0.5) + 0.5)
    fg_height = int(bg_height * (0.1 ** 0.5) + 0.5)
    img = np.zeros((bg_height, bg_width, 3), dtype=np.uint16)
    # Encode the target luminance as a 10-bit ST2084 code value.
    low_level = tf.oetf_from_luminance(target_luminance, tf.ST2084)
    low_level = np.uint16(np.round(low_level * 1023))
    print(low_level)
    low_level = (low_level, low_level, low_level)
    fg_img = tpg.make_tile_pattern(
        width=fg_width, height=fg_height, h_tile_num=16, v_tile_num=9,
        low_level=low_level, high_level=(1023, 1023, 1023))
    tpg.merge(img, fg_img, pos=(0, 0))
    merge_each_spec_text(
        img, pos=(630, 5), font_size=30, text_img_size=(960, 100),
        text="target luminance = {:d} cd/m2".format(target_luminance))
    # NOTE(review): "lumiance" typo kept intentionally — it is part of the
    # output filename and renaming would break downstream consumers.
    fname = "./img/{:05d}_peak_lumiance.tiff".format(target_luminance)
    # Normalize the 10-bit data to [0, 1] before writing.
    write_image(img / 0x3FF, fname, bit_depth='uint16')
def research_st2084_with_bg_luminance_change(
        target_luminance=1600, bg_luminance=1000):
    """Create a tile patch on a variable-luminance background (translated
    from Japanese).

    Like `research_recognizable_peak_luminance`, but the background is a
    flat field at `bg_luminance` (ST2084) instead of black, to see how the
    display's tone mapping reacts to overall picture level.
    """
    bg_width = 1920
    bg_height = 1080
    # Foreground covers ~10% of the background area (sqrt(0.1) per axis).
    fg_width = int(bg_width * (0.1 ** 0.5) + 0.5)
    fg_height = int(bg_height * (0.1 ** 0.5) + 0.5)
    # Flat background field at the requested luminance.
    bg_level = tf.oetf_from_luminance(bg_luminance, tf.ST2084)
    bg_level = np.uint16(np.round(bg_level * 1023))
    img = np.ones((bg_height, bg_width, 3), dtype=np.uint16) * bg_level
    low_level = tf.oetf_from_luminance(target_luminance, tf.ST2084)
    low_level = np.uint16(np.round(low_level * 1023))
    low_level = (low_level, low_level, low_level)
    fg_img = tpg.make_tile_pattern(
        width=fg_width, height=fg_height, h_tile_num=16, v_tile_num=9,
        low_level=low_level, high_level=(1023, 1023, 1023))
    tpg.merge(img, fg_img, pos=(0, 0))
    text_base = "target_luminance = {:d} cd/m2, bg_luminance = {:d} cd/m2"
    merge_each_spec_text(
        img, pos=(630, 5), font_size=30, text_img_size=(960, 100),
        text=text_base.format(target_luminance, bg_luminance))
    # NOTE(review): "lumiance" typo kept — it is part of the output filename.
    fname_base = "./img/target_{:05d}_bg_{:05d}_lumiance.tiff"
    fname = fname_base.format(target_luminance, bg_luminance)
    write_image(img / 0x3FF, fname, bit_depth='uint16')
def main_func():
    """Run the full set of KJ-65X9500G luminance experiments."""
    # Probe the display's peak-luminance limit (1000-1900 cd/m2).
    for step in range(10):
        research_recognizable_peak_luminance(1000 + 100 * step)
    # Check behaviour when the background luminance is varied.
    bg_levels = [0, 100, 300, 500, 800, 1000, 2000, 4000, 10000]
    for target in [500, 750, 1000, 1500, 2000]:
        for bg in bg_levels:
            research_st2084_with_bg_luminance_change(
                target_luminance=target, bg_luminance=bg)


if __name__ == '__main__':
    # Run from the script's own directory so relative ./img paths resolve.
    os.chdir(os.path.dirname(os.path.abspath(__file__)))
    main_func()
|
python
|
import pytest
from openpecha.utils import download_pecha
@pytest.mark.skip("Downloading github repo")
def test_download_pecha():
    # Skipped by default: requires network access to clone the pecha repo.
    pecha_path = download_pecha("collections")
|
python
|
# Generated by Django 2.2 on 2020-08-11 11:55
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated initial migration: creates the `Iniciativa` table with
    # its identification, scheduling, and classification fields. Do not edit
    # field definitions by hand; generate a follow-up migration instead.

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Iniciativa',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('nro_correlativo', models.IntegerField()),
                ('eje_estrategico', models.CharField(max_length=100)),
                ('requerimiento', models.CharField(max_length=100)),
                ('fecha_solicitud', models.DateField()),
                ('fecha_solped', models.DateField()),
                ('nombre', models.CharField(max_length=75)),
                ('descripcion', models.TextField(max_length=240)),
                ('objetivo', models.TextField(max_length=300)),
                ('beneficio', models.TextField(max_length=500)),
                ('impacto', models.CharField(max_length=100)),
                ('inicio', models.DateField()),
                ('fin', models.DateField()),
                ('sponsor', models.CharField(max_length=100)),
                ('solicitante', models.CharField(max_length=100)),
                ('capa_red', models.CharField(max_length=50)),
                ('ubicacion', models.CharField(max_length=100)),
                ('tipo', models.CharField(max_length=50)),
            ],
        ),
    ]
|
python
|
from dsame.trees.BinaryTreeNode import BinaryTreeNode
def inOrder(root: BinaryTreeNode):
    """Print node values in in-order (left subtree, root, right subtree).

    Returns None; output goes to stdout.
    """
    if not root:
        return
    inOrder(root.left)
    print(root.data)
    inOrder(root.right)
# Demo: build the tree (1 (2) (3)) and traverse it in-order -> 2, 1, 3.
a = BinaryTreeNode(2)
b = BinaryTreeNode(3)
c = BinaryTreeNode(1, a, b)
# inOrder returns None, so after the traversal this also prints "None".
print(inOrder(c))
|
python
|
import os
from ast import literal_eval
from pathlib import Path
import numpy as np
def read_info(filename: os.PathLike) -> dict:
    """Read volume metadata from a ``key = value`` style info file.

    Blank lines and lines starting with ``!`` (comments) are skipped.
    Values are parsed with :func:`ast.literal_eval` where possible;
    otherwise they are kept as plain strings.

    Parameters
    ----------
    filename : PathLike
        Path to the file.

    Returns
    -------
    dct : dict
        Dictionary with the metadata.
    """
    dct = {}
    with open(filename, 'r') as f:
        for line in f:
            line = line.strip()
            if not line:
                continue
            if line.startswith('!'):
                continue
            # Bug fix: split on the first '=' only, so values containing
            # '=' no longer raise "too many values to unpack".
            key, val = line.split('=', 1)
            key = key.strip()
            val = val.strip()
            try:
                val = literal_eval(val)
            except (ValueError, SyntaxError):
                # Bug fix: literal_eval raises SyntaxError (not ValueError)
                # on bare words like `hello world`; keep such values as str.
                pass
            dct[key] = val
    return dct
def load_vol(filename: os.PathLike,
             dtype=np.float32,
             mmap_mode: str = None,
             shape: tuple = None) -> np.ndarray:
    """Load data from `.vol` file.

    The image shape is deduced from the `.vol.info` file. If this file is
    not present, the shape can be specified using the `shape` keyword.

    Parameters
    ----------
    filename : os.PathLike
        Path to the file.
    dtype : dtype, optional
        Numpy dtype of the data.
    mmap_mode : None, optional
        If not None, open the file using memory mapping. For more info on
        the modes, see: :func:`numpy.memmap`
    shape : tuple, optional
        Tuple of three ints specifying the shape of the data (order: z, y, x).

    Returns
    -------
    result : numpy.ndarray
        Data stored in the file.

    Raises
    ------
    IOError
        If `filename` does not exist.
    ValueError
        If no `shape` is given and the `.vol.info` file is missing.
    """
    filename = Path(filename)
    if not filename.exists():
        # Bug fix: the original message had no placeholder and omitted the
        # offending path, making the error useless for debugging.
        raise IOError(f'No such file: {filename}')
    try:
        filename_info = filename.with_suffix(filename.suffix + '.info')
        if not shape:
            info = read_info(filename_info)
            shape = info['NUM_Z'], info['NUM_Y'], info['NUM_X']
    except FileNotFoundError:
        raise ValueError(
            f'Info file not found: {filename_info.name}, specify '
            'the volume shape using the `shape` parameter.') from None
    result: np.ndarray
    if mmap_mode:
        # Memory-map instead of reading eagerly; data stays on disk.
        result = np.memmap(filename, dtype=dtype, shape=shape,
                           mode=mmap_mode)  # type: ignore
    else:
        result = np.fromfile(filename, dtype=dtype)
        result = result.reshape(shape)
    return result
|
python
|
# NOTE: this script is Python 2 (uses `print` statements below).
import time
from options.train_options import TrainOptions
opt = TrainOptions().parse()  # set CUDA_VISIBLE_DEVICES before import torch
import pickle
from data.custom_transforms import ToLabelTensor
# with open("opt.obj",'wb') as f:
#     pickle.dump(opt,f)
from data.segmentation import SegmentationDataset
from models.models import create_model
from data.unaligned_data_loader import UnalignedDataLoader
import torch.utils.data
import torchvision.transforms as transforms
#from models.models import create_model
from util.visualizer import Visualizer
from pdb import set_trace as st
import numpy as np
import gc
import evaluation.metrics
labels = __import__('data.labels')
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
from data.custom_transforms import DownSizeLabelTensor
# Label-map downsampler; the factor is doubled for the training phase.
ds1= DownSizeLabelTensor(2*opt.factor)
# NOTE(review): under Python 2 `/` is integer division for int operands —
# confirm findDecreasedResolution returns an int as assumed here.
size= ds1.findDecreasedResolution(opt.fineSize)/2
# Image transform shared by both domains: crop, rescale, to tensor.
transform = transforms.Compose([
transforms.CenterCrop(opt.fineSize),
transforms.Scale(size),
transforms.ToTensor(),
])
# Label transforms: crop, then map raw label ids via the labels table.
target_transform = transforms.Compose([
transforms.CenterCrop(opt.fineSize),transforms.ToTensor(),ToLabelTensor(labels.labels.labels)
])
target_transform2 = transforms.Compose([
transforms.CenterCrop(opt.fineSize),transforms.ToTensor(),ToLabelTensor(labels.labels.labels)
])
opt.continue_train=True
# Per-domain segmentation datasets (A and B) and their loaders.
domainAdata= SegmentationDataset(root=opt.dataroot + '/' + opt.domain_A , split_ratio=opt.split_ratio_A,
transform=transform, target_transform=target_transform, return_paths=True)
domainBdata= SegmentationDataset(root=opt.dataroot + '/' + opt.domain_B , split_ratio=opt.split_ratio_B,
transform=transform, target_transform=target_transform2, return_paths=True)
domainAdataloader = torch.utils.data.DataLoader(
    domainAdata,
    batch_size=opt.batchSize,
    shuffle=not opt.serial_batches,
    num_workers=int(opt.nThreads))
domainBdataloader = torch.utils.data.DataLoader(
    domainBdata,
    batch_size=opt.batchSize,
    shuffle=not opt.serial_batches,
    num_workers=int(opt.nThreads))
# Unpaired A/B loader used for the adversarial training phase.
cycle_data_loader=UnalignedDataLoader()
cycle_data_loader.initialize(opt,transform,transform)
dataset = cycle_data_loader.load_data()
num_train = len(cycle_data_loader)
print('#training images = %d' % num_train)
print ('Finetune:'+str(opt.finetune))
print ('Split Ratio A:'+str(opt.split_ratio_A))
print ('Split Ratio B:'+str(opt.split_ratio_B))
print ('Split Ratio AB:'+str(opt.split_ratio_AB))
print ('Experiment Name:'+opt.name)
print ('Iterations'+str(opt.niter))
print ('Iterations Decay'+str(opt.niter_decay))
opt.switch=0
model = create_model(opt)
visualizer = Visualizer(opt)
print 'Pretraining Done!!'
print 'Starting Combined Training'
avgtimetaken=[]
total_steps=0
# for epoch in range(1,opt.niter + opt.niter_decay + 1): #
# epoch_start_time = time.time()
# domainBdata_iter = domainBdataloader.__iter__()
# iter=0
# print epoch
# for i in range(0,len(domainBdataloader)):
# s=time.time()
# batch_n= next(domainBdata_iter)
# data={}
# data['B_image'] = batch_n[0][0]
# data['B_label'] = ds1.downsize(ds1.downsize(batch_n[1][0]).data).data
# print i
# iter_start_time = time.time()
# total_steps += opt.batchSize
# epoch_iter = total_steps % num_train
# model.set_input(data,'BC')
# model.optimize_parameters()
# e=time.time()
# avgtimetaken.append(e-s)
# if total_steps % opt.display_freq == 0:
# visualizer.display_current_results(model.get_current_visuals(), epoch)
# if total_steps % opt.print_freq == 0:
# errors = model.get_current_errors()
# visualizer.print_current_errors(epoch, total_steps, errors, iter_start_time)
# if opt.display_id > 0:
# visualizer.plot_current_errors(epoch, total_steps, opt, errors)
# if total_steps % opt.save_latest_freq == 0:
# print('saving the latest model (epoch %d, total_steps %d)' %
# (epoch, total_steps))
# model.save('latest')
# if epoch % opt.save_epoch_freq == 0:
# print('saving the model at the end of epoch %d, iters %d' %
# (epoch, total_steps))
# model.save('latest')
# model.save(epoch)
# print('End of epoch %d / %d \t Time Taken: %d sec' %
# (epoch, opt.niter + opt.niter_decay, time.time() - epoch_start_time))
# if epoch > opt.niter + opt.niter_decay*0.75:
# model.update_learning_rate()
# print 'Done'
# Adversarial phase: align target domain to source domain using the
# unpaired A/B loader ('AB' input mode).
print 'Training Target Domain to Source Domain Adversarially'
for epoch in range(1,opt.niter + opt.niter_decay + 1): #
    epoch_start_time = time.time()
    domainABdata_iter = dataset.__iter__()
    iter=0
    for i in range(0,num_train,opt.batchSize):
        s=time.time()
        batch_n= next(domainABdata_iter)
        data={}
        data['AB_image_1'] = batch_n['A']
        data['AB_image_2'] = batch_n['B']
        iter_start_time = time.time()
        total_steps += opt.batchSize
        epoch_iter = total_steps % num_train
        model.set_input(data,'AB')
        model.optimize_parameters()
        e=time.time()
        avgtimetaken.append(e-s)
        # Periodic logging / visualization / checkpointing.
        if total_steps % opt.print_freq == 0:
            errors = model.get_current_errors()
            visualizer.print_current_errors(epoch, total_steps, errors, iter_start_time)
        if total_steps % opt.display_freq == 0:
            visualizer.display_current_results(model.get_current_visuals(), epoch)
        if total_steps % opt.save_latest_freq == 0:
            print('saving the latest model (epoch %d, total_steps %d)' %
                  (epoch, total_steps))
            model.save('latest')
    # End-of-epoch checkpoint.
    if epoch % opt.save_epoch_freq == 0:
        print('saving the model at the end of epoch %d, iters %d' %
              (epoch, total_steps))
        model.save('latest')
        model.save(epoch)
    print('End of epoch %d / %d \t Time Taken: %d sec' %
          (epoch, opt.niter + opt.niter_decay, time.time() - epoch_start_time))
# Optional fine-tuning phase on labeled domain-A data ('AC' input mode).
if opt.finetune>1:
    print 'FineTuning'
    for epoch in range(1,opt.niter + opt.niter_decay + 1): #
        epoch_start_time = time.time()
        domainAdata_iter = domainAdataloader.__iter__()
        iter=0
        for i in range(0,len(domainAdataloader),opt.batchSize):
            s=time.time()
            batch_n= next(domainAdata_iter)
            data={}
            data['A_image'] = batch_n[0][0]
            # Labels are downsized twice to match the network output resolution.
            data['A_label'] = ds1.downsize(ds1.downsize(batch_n[1][0]).data).data
            iter_start_time = time.time()
            total_steps += opt.batchSize
            epoch_iter = total_steps % num_train
            model.set_input(data,'AC')
            model.optimize_parameters()
            e=time.time()
            avgtimetaken.append(e-s)
            # Periodic logging / visualization / checkpointing.
            if total_steps % opt.display_freq == 0:
                visualizer.display_current_results(model.get_current_visuals(), epoch)
            if total_steps % opt.print_freq == 0:
                errors = model.get_current_errors()
                visualizer.print_current_errors(epoch, total_steps, errors, iter_start_time)
                if opt.display_id > 0:
                    visualizer.plot_current_errors(epoch, total_steps, opt, errors)
            if total_steps % opt.save_latest_freq == 0:
                print('saving the latest model (epoch %d, total_steps %d)' %
                      (epoch, total_steps))
                model.save('latest')
        if epoch % opt.save_epoch_freq == 0:
            print('saving the model at the end of epoch %d, iters %d' %
                  (epoch, total_steps))
            model.save('latest')
            model.save(epoch)
        print('End of epoch %d / %d \t Time Taken: %d sec' %
              (epoch, opt.niter + opt.niter_decay, time.time() - epoch_start_time))
        # Decay the learning rate over the last quarter of training.
        if epoch > opt.niter + opt.niter_decay*0.75:
            model.update_learning_rate()
#----------------Begin Testing Now!!---------
print 'Testing Now'
# Re-parse options and rebuild the whole pipeline for evaluation; both
# split ratios are forced to 1 so the full datasets are used for testing.
import time
from options.train_options import TrainOptions
opt = TrainOptions().parse()
#opt.dataroot='/home/sloke/repos/nips2017/left8bit/gtacityscapes/test'
opt.split_ratio_A=1
opt.split_ratio_B=1
# set CUDA_VISIBLE_DEVICES before import torch
import pickle
from data.custom_transforms import ToLabelTensor
# with open("opt.obj",'wb') as f:
#     pickle.dump(opt,f)
from data.segmentation import SegmentationDataset
from models.models import create_model
from data.unaligned_data_loader import UnalignedDataLoader
import torch.utils.data
import torchvision.transforms as transforms
#from models.models import create_model
from util.visualizer import Visualizer
from pdb import set_trace as st
import numpy as np
import gc
import evaluation.metrics
labels = __import__('data.labels')
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
opt.continue_train=True
from data.custom_transforms import DownSizeLabelTensor
# NOTE(review): the factor is NOT doubled here, unlike the training setup
# above (2*opt.factor) — confirm whether this asymmetry is intentional.
ds1= DownSizeLabelTensor(opt.factor)
size= ds1.findDecreasedResolution(opt.fineSize)/2
transform = transforms.Compose([
transforms.CenterCrop(opt.fineSize),
transforms.Scale(size),
transforms.ToTensor(),
])
target_transform = transforms.Compose([
transforms.CenterCrop(opt.fineSize),transforms.ToTensor(),ToLabelTensor(labels.labels.labels)
])
target_transform2 = transforms.Compose([
transforms.CenterCrop(opt.fineSize),transforms.ToTensor(),ToLabelTensor(labels.labels.labels)
])
#mean_pixel_acc_test_epoch, mean_class_acc_test_epoch, mean_class_iou_test_epoch, per_class_acc_test_epoch, per_class_iou_test_epoch=[],[],[],[],[]
test_epoch_results=[]
# Metric accumulators: three scalars plus two per-class vectors.
mean_pixel_acc, mean_class_acc, mean_class_iou, per_class_acc, per_class_iou=0,0,0,np.zeros((opt.num_classes)),np.zeros((opt.num_classes))
avgcountAC=0
avgcountBC=0
total_steps=0
avgtimetaken=[]
model = create_model(opt)
visualizer = Visualizer(opt)
domainAdata_test= SegmentationDataset(root=opt.dataroot + '/' + opt.domain_A , split_ratio=opt.split_ratio_A,
transform=transform, target_transform=target_transform, return_paths=True)
domainBdata_test= SegmentationDataset(root=opt.dataroot + '/' + opt.domain_B , split_ratio=opt.split_ratio_B,
transform=transform, target_transform=target_transform2, return_paths=True)
print 'Dataset A Size:'+str(len(domainAdata_test))
print 'Dataset B Size:'+str(len(domainBdata_test))
domainAdataloader_test = torch.utils.data.DataLoader(
    domainAdata_test,
    batch_size=opt.batchSize,
    shuffle=not opt.serial_batches,
    num_workers=int(opt.nThreads))
domainBdataloader_test = torch.utils.data.DataLoader(
    domainBdata_test,
    batch_size=opt.batchSize,
    shuffle=not opt.serial_batches,
    num_workers=int(opt.nThreads))
domainAdata_iter_test = domainAdataloader_test.__iter__()
domainBdata_iter_test = domainBdataloader_test.__iter__()
mean_pixel_acc_test_A, mean_class_acc_test_A, mean_class_iou_test_A, per_class_acc_test_A, per_class_iou_test_A=0,0,0,np.zeros((opt.num_classes)),np.zeros((opt.num_classes))
# Evaluate on domain A: accumulate the five metrics returned by model.test().
for i in range(0,len(domainAdata_test)):
    batch_n= next(domainAdata_iter_test)
    data={}
    data['A_image'] = batch_n[0][0]
    data['A_label'] = ds1.downsize(ds1.downsize(batch_n[1][0]).data).data
    model.set_input(data,'AC')
    a,b,c,d,e=model.test()
    mean_pixel_acc_test_A +=a
    mean_class_acc_test_A +=b
    mean_class_iou_test_A +=c
    per_class_acc_test_A +=d
    per_class_iou_test_A +=e
    print 'Mean Pixel Accuracy (Domain A):'+str(a)
    print 'Mean Class Accuracy (Domain A):'+str(b)
    print 'Mean Class IoU (Domain A):'+str(c)
    print 'Per Class Accuracy (Domain A):'+str(d)
    print 'Per Class IoU (Domain A):'+str(e)
    print 'Iteration:'+str(i)
    print 'Model:'+opt.name
    if total_steps % opt.display_freq == 0:
        visualizer.display_current_results(model.get_current_visuals(), i)
# NOTE(review): only the pixel-accuracy accumulator is normalised here; the
# other four are left as running sums — confirm downstream expectations.
mean_pixel_acc_test_A /= len(domainAdata_test)
cycle_data_loader=UnalignedDataLoader()
cycle_data_loader.initialize(opt,transform,transform)
python
|
from ...attribute import models as attribute_models
from ...discount import models as discount_models
from ...product import models as product_models
from ...shipping import models as shipping_models
def resolve_translation(instance, _info, language_code):
    """Get translation object from instance based on language code."""
    return instance.translations.filter(language_code=language_code).first()


def resolve_shipping_methods(info):
    # NOTE(review): `info` is unused; sibling resolvers name it `_info`.
    # Renaming would change the keyword interface, so it is left as-is.
    return shipping_models.ShippingMethod.objects.all()


def resolve_attribute_values(info):
    # NOTE(review): `info` is unused; see note on resolve_shipping_methods.
    return attribute_models.AttributeValue.objects.all()


def resolve_products(_info):
    return product_models.Product.objects.all()


def resolve_product_variants(_info):
    return product_models.ProductVariant.objects.all()


def resolve_sales(_info):
    return discount_models.Sale.objects.all()


def resolve_vouchers(_info):
    return discount_models.Voucher.objects.all()


def resolve_collections(_info):
    return product_models.Collection.objects.all()
|
python
|
# Generated by Django 3.0.13 on 2021-04-01 08:17
from django.db import migrations, models
import ensembl.production.djcore.models
class Migration(migrations.Migration):
    # Auto-generated migration: adjusts verbose names / field options on
    # `requestjob` and renames the `group` table to `host_group`. Do not
    # edit field definitions by hand; generate a follow-up migration.

    dependencies = [
        ('ensembl_dbcopy', '0005_targethostgroup'),
    ]

    operations = [
        migrations.AlterField(
            model_name='requestjob',
            name='convert_innodb',
            field=models.BooleanField(default=False, verbose_name='Convert Innodb=>MyISAM'),
        ),
        migrations.AlterField(
            model_name='requestjob',
            name='dry_run',
            field=models.BooleanField(default=False, verbose_name='Dry Run'),
        ),
        migrations.AlterField(
            model_name='requestjob',
            name='email_list',
            field=models.TextField(blank=True, max_length=2048, null=True, verbose_name='Notify Email(s)'),
        ),
        migrations.AlterField(
            model_name='requestjob',
            name='end_date',
            field=models.DateTimeField(blank=True, editable=False, null=True, verbose_name='Ended on'),
        ),
        migrations.AlterField(
            model_name='requestjob',
            name='request_date',
            field=models.DateTimeField(auto_now_add=True, verbose_name='Submitted on'),
        ),
        migrations.AlterField(
            model_name='requestjob',
            name='skip_optimize',
            field=models.BooleanField(default=False, verbose_name='Optimize on target'),
        ),
        migrations.AlterField(
            model_name='requestjob',
            name='src_host',
            field=models.TextField(max_length=2048, verbose_name='Source Host'),
        ),
        migrations.AlterField(
            model_name='requestjob',
            name='src_incl_db',
            field=ensembl.production.djcore.models.NullTextField(blank=True, max_length=2048, null=True,
                                                                 verbose_name='Included Db(s)'),
        ),
        migrations.AlterField(
            model_name='requestjob',
            name='src_incl_tables',
            field=ensembl.production.djcore.models.NullTextField(blank=True, max_length=2048, null=True,
                                                                 verbose_name='Included Table(s)'),
        ),
        migrations.AlterField(
            model_name='requestjob',
            name='src_skip_db',
            field=ensembl.production.djcore.models.NullTextField(blank=True, max_length=2048, null=True,
                                                                 verbose_name='Skipped Db(s)'),
        ),
        migrations.AlterField(
            model_name='requestjob',
            name='src_skip_tables',
            field=ensembl.production.djcore.models.NullTextField(blank=True, max_length=2048, null=True,
                                                                 verbose_name='Skipped Table(s)'),
        ),
        migrations.AlterField(
            model_name='requestjob',
            name='start_date',
            field=models.DateTimeField(blank=True, editable=False, null=True, verbose_name='Started on'),
        ),
        migrations.AlterField(
            model_name='requestjob',
            name='status',
            field=models.CharField(blank=True, editable=False, max_length=20, null=True, verbose_name='Status'),
        ),
        migrations.AlterField(
            model_name='requestjob',
            name='tgt_db_name',
            field=ensembl.production.djcore.models.NullTextField(blank=True, max_length=2048, null=True,
                                                                 verbose_name='Target DbName(s)'),
        ),
        migrations.AlterField(
            model_name='requestjob',
            name='tgt_host',
            field=models.TextField(max_length=2048, verbose_name='Target Host(s)'),
        ),
        migrations.AlterField(
            model_name='requestjob',
            name='user',
            field=models.CharField(blank=True, max_length=64, null=True, verbose_name='Submitter'),
        ),
        migrations.AlterField(
            model_name='requestjob',
            name='wipe_target',
            field=models.BooleanField(default=False, verbose_name='Wipe target'),
        ),
        migrations.AlterModelTable(
            name='group',
            table='host_group',
        ),
    ]
|
python
|
from __future__ import absolute_import
import logging
# App must be initialized before models or ADDONS_AVAILABLE are available
from website.app import init_app
init_app()
from osf.models import OSFUser, AbstractNode
from framework.database import paginated
from scripts.analytics.base import SnapshotAnalytics
from website.settings import ADDONS_AVAILABLE
logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO)
# Modified from scripts/analytics/benchmarks.py
def get_enabled_authorized_linked(user_settings_list, has_external_account, short_name):
    """ Gather the number of users who have at least one node in each of the stages for an addon

    :param user_settings_list: list of user_settings for a particular addon
    :param has_external_account: whether the addon is backed by an external account;
        determines method to load node settings
    :param short_name: short name of addon to get correct node_settings
    :return: dict with number of users that have at least one project at each stage
    """
    # Imported locally to avoid import-time side effects at module load.
    from addons.forward.models import NodeSettings as ForwardNodeSettings

    num_enabled = 0  # of users w/ 1+ addon account connected
    num_authorized = 0  # of users w/ 1+ addon account connected to 1+ node
    num_linked = 0  # of users w/ 1+ addon account connected to 1+ node and configured
    # osfstorage and wiki don't have user_settings, so always assume they're enabled, authorized, linked
    if short_name == 'osfstorage' or short_name == 'wiki':
        # Count every registered, confirmed, non-merged, non-disabled user.
        num_enabled = num_authorized = num_linked = OSFUser.objects.filter(
            is_registered=True,
            password__isnull=False,
            merged_by__isnull=True,
            date_disabled__isnull=True,
            date_confirmed__isnull=False
        ).count()
    elif short_name == 'forward':
        # Forward has node settings only; "linked" means a URL is configured.
        num_enabled = num_authorized = ForwardNodeSettings.objects.count()
        num_linked = ForwardNodeSettings.objects.filter(url__isnull=False).count()
    else:
        for user_settings in paginated(user_settings_list):
            node_settings_list = []
            if has_external_account:
                if user_settings.has_auth:
                    num_enabled += 1
                    node_settings_list = [AbstractNode.load(guid).get_addon(short_name) for guid in user_settings.oauth_grants.keys()]
            else:
                num_enabled += 1
                node_settings_list = [AbstractNode.load(guid).get_addon(short_name) for guid in user_settings.nodes_authorized]
            # "authorized" requires at least one node with auth; "linked"
            # additionally requires a complete and configured node.
            if any([ns.has_auth for ns in node_settings_list if ns]):
                num_authorized += 1
                if any([(ns.complete and ns.configured) for ns in node_settings_list if ns]):
                    num_linked += 1
    return {
        'enabled': num_enabled,
        'authorized': num_authorized,
        'linked': num_linked
    }
class AddonSnapshot(SnapshotAnalytics):
    """Analytics snapshot of add-on usage: per-addon user stage counts and
    node connection counts."""

    @property
    def collection_name(self):
        # Analytics collection this snapshot is written to.
        return 'addon_snapshot'

    def get_events(self, date=None):
        super(AddonSnapshot, self).get_events(date)
        counts = []
        addons_available = {k: v for k, v in [(addon.short_name, addon) for addon in ADDONS_AVAILABLE]}
        for short_name, addon in addons_available.items():
            has_external_account = hasattr(addon.models.get('nodesettings'), 'external_account')
            connected_count = 0
            deleted_count = 0
            disconnected_count = 0
            node_settings_model = addon.models.get('nodesettings')
            if node_settings_model:
                # "connected": node settings with an owner node that is not
                # tagged as an old node collection.
                for node_settings in paginated(node_settings_model):
                    if node_settings.owner and not node_settings.owner.all_tags.filter(name='old_node_collection', system=True).exists():
                        connected_count += 1
            deleted_count = addon.models['nodesettings'].objects.filter(deleted=True).count() if addon.models.get('nodesettings') else 0
            if has_external_account:
                # "disconnected": settings whose external account was removed.
                disconnected_count = addon.models['nodesettings'].objects.filter(external_account__isnull=True, is_deleted=False).count() if addon.models.get('nodesettings') else 0
            else:
                if addon.models.get('nodesettings'):
                    for nsm in addon.models['nodesettings'].objects.filter(deleted=False):
                        if nsm.configured and not nsm.complete:
                            disconnected_count += 1
            total = connected_count + deleted_count + disconnected_count
            usage_counts = get_enabled_authorized_linked(addon.models.get('usersettings'), has_external_account, addon.short_name)
            counts.append({
                'provider': {
                    'name': short_name
                },
                'users': usage_counts,
                'nodes': {
                    'total': total,
                    'connected': connected_count,
                    'deleted': deleted_count,
                    'disconnected': disconnected_count
                }
            })
            logger.info(
                '{} counted. Users with a linked node: {}, Total connected nodes: {}.'.format(
                    addon.short_name,
                    usage_counts['linked'],
                    total
                )
            )
        return counts
def get_class():
    """Entry point used by the analytics runner to locate this script's class."""
    return AddonSnapshot


if __name__ == '__main__':
    # Collect a one-off snapshot and push it to the analytics backend.
    addon_snapshot = AddonSnapshot()
    events = addon_snapshot.get_events()
    addon_snapshot.send_events(events)
|
python
|
#!/usr/bin/env python3
#class dedicated to archival functions for dnmt data
import re
import sys
import subprocess,platform,os,time,datetime,zipfile
import difflib
import pickle
import collections
# import pysvn
# import git
#3rd party imports
#local subroutine import
from DNMT.procedure.subroutines import SubRoutines
class Archivist:
    """Archival helpers for DNMT data.

    Zips the legacy switch-status raw files into timestamped backups,
    optionally emails/removes them, and prunes old backup archives.
    """

    def __init__(self, cmdargs, config):
        # initialize values
        self.log_array = []
        self.cmdargs = cmdargs
        self.config = config
        self.subs = SubRoutines(cmdargs, config)
        # Per-day log directory: <logpath>/logs/UpgradeCheck/YYYYMMDD
        self.config.logpath = os.path.join(os.path.expanduser(self.config.logpath), "logs", "UpgradeCheck",
                                           datetime.date.today().strftime('%Y%m%d'))

    def basic_maintenance(self,maxfiles):
        """Prune the oldest '-SwitchStatus.Backup.zip' backups, keeping at
        most `maxfiles` files. With --check set, only reports what would
        be removed."""
        #
        self.subs.verbose_printer("##### Cleaning up backup files #####")
        #Remove oldest files (listed first on windows
        filelist = os.listdir(os.path.join(self.subs.log_path, "activitycheck", "backups"))
        if len(filelist) > 0 and len(filelist) > maxfiles:
            # self.subs.verbose_printer("##### unsorted list:{} #####".format(filelist))
            # Lexicographic sort works because filenames start with YYYYMMDDHHMM.
            sortedfilelist = sorted(filelist)
            # self.subs.verbose_printer("##### sorted list:{} #####".format(testlist))
            filestoremove = sortedfilelist[0:(len(filelist)-maxfiles)]
            self.subs.custom_printer("verbose", "total files:{}\nremoving files:{}".format(len(filelist),len(filestoremove)))
            for file in filestoremove:
                if file.endswith("-SwitchStatus.Backup.zip"):
                    # process
                    try:
                        self.subs.verbose_printer("##### File to remove:{} #####".format(file))
                        if 'check' in self.cmdargs and self.cmdargs.check is True :
                            self.subs.custom_printer("debug", "## DBG - testing, would have removed {} ##".format(file))
                        else:
                            self.subs.custom_printer("debug", "## Removing file {} ##".format(file))
                            os.remove(os.path.join(self.subs.log_path, "activitycheck", "backups", file))
                    except Exception as err:  # currently a catch all to stop linux from having a conniption when reloading
                        print("FILE ERROR {}:{}".format(file, err.args[0]))
        else:
            self.subs.verbose_printer("total files:{} are less than max value:{}".format(len(filelist), maxfiles))

    def basic_archival(self):
        """Zip the legacy raw files into a timestamped backup archive, then
        optionally email it, delete it, and run retention maintenance,
        depending on the parsed command-line arguments."""
        try:
            working_folder = os.path.join(self.subs.log_path, "activitycheck", "rawfiles", "legacy")
            zipfile_name = os.path.join(self.subs.log_path, "activitycheck", "backups",
                                        "{}-SwitchStatus.Backup.zip".format(
                                            datetime.datetime.now().strftime("%Y%m%d%H%M")))
            files = os.listdir(working_folder)
            files_py = files
            # zipfile_name = "SwitchStatus Backup {}.zip".format(datetime.datetime.now().strftime("%Y%m%d%H%M"))
            #check for existance of the directory (if a first run)
            if not os.path.exists(os.path.join(self.subs.log_path, "activitycheck", "backups")):
                self.subs.custom_printer("debug", "## DBG - Creating activitycheck/backups directory ##")
                os.makedirs(os.path.join(self.subs.log_path, "activitycheck", "backups"))
            ZipFile = zipfile.ZipFile(zipfile_name, "a")
            self.subs.custom_printer("debug", "## DBG - adding files to backup zipfile:{} ##".format(zipfile_name))
            for a in files_py:
                full_file_path = os.path.join(working_folder,a)
                # ZipFile.write(full_file_path, compress_type=zipfile.ZIP_DEFLATED)
                # Second argument stores the bare filename (no directory tree).
                ZipFile.write(full_file_path,a, compress_type=zipfile.ZIP_DEFLATED)
            ZipFile.close()
            self.subs.custom_printer("debug", "## DBG - zipfile backup created ##")
            if 'email' in self.cmdargs and self.cmdargs.email is not None:
                msg_subject = "SwitchStatus Backup {}".format(datetime.date.today().strftime('%Y-%m-%d'))
                body = "Attached is the Legacy Backup files"
                self.subs.custom_printer("debug", "## DBG - sending email ##")
                self.subs.email_with_attachment(msg_subject, self.cmdargs.email, body, zipfile_name)
            if 'remove' in self.cmdargs and self.cmdargs.remove:
                if os.path.exists("{}".format(zipfile_name)):
                    os.remove("{}".format(zipfile_name))
                    self.subs.custom_printer("debug", "## DBG - zipfile {} removed ##".format(zipfile_name))
                else:
                    print("The file does not exist")
            if 'maintenance' in self.cmdargs and self.cmdargs.maintenance is not None:
                try:
                    self.basic_maintenance(int(self.cmdargs.maintenance))
                except ValueError:
                    self.subs.custom_printer("debug", "## DBG - maintenance({}) is not a number. maintenance not performed ##".format(self.cmdargs.maintenance))
        except Exception as err:
            print(err)

    def test(self):
        """Placeholder for future VCS-based archival experiments."""
        try:
            # write a file foo.txt
            pass
            # repo = Repo(self.rorepo.working_tree_dir)
            # assert not repo.bare
        except Exception as err:
            print(err)
|
python
|
# Django
from django.db import models
class UserAbstractModel(models.Model):
    """Basic abstract model.

    UserAbstractModel is an abstract class inherited by all the
    User models of the API. This class provides the following
    attributes:
        + created (DateTime): stores the creation date
        + modified (DateTime): stores the last-modification date
        + is_active (Boolean): whether the row is active or not
    """
    created = models.DateTimeField(
        "created at",
        auto_now_add=True,
        help_text="Date Time de la creacion del objeto"
    )
    modified = models.DateTimeField(
        "modified at",
        auto_now=True,
        help_text="Date Time de la ultima modificacion del objeto"
    )
    is_active = models.BooleanField(
        "is active",
        default=True,
        blank=True,
        help_text="La fila esta activa o no"
    )

    class Meta:
        """Meta options."""
        abstract = True
        get_latest_by = "created"
        ordering = ["-created", "modified"]
class FunctionAbstractModel(models.Model):
    """Abstract base model for the Management API.

    Every Management model in the API inherits from this class, which adds:
      + created (DateTime): creation timestamp
      + modified (DateTime): last-modification timestamp
      + is_active (Boolean): whether the row is active
      + user (ForeignKey): the user owning the row
    """

    # Set once, when the row is first saved.
    created = models.DateTimeField(
        verbose_name="created at",
        auto_now_add=True,
        help_text="Date Time de la creacion del objeto",
    )
    # Refreshed automatically on every save().
    modified = models.DateTimeField(
        verbose_name="modified at",
        auto_now=True,
        help_text="Date Time de la ultima modificacion del objeto",
    )
    # Soft-delete / activation flag.
    is_active = models.BooleanField(
        verbose_name="is active",
        default=True,
        blank=True,
        help_text="La fila esta activa o no",
    )
    # Owning user; rows disappear with their owner.
    user = models.ForeignKey("users.User", on_delete=models.CASCADE)

    class Meta:
        """Model meta options shared by all subclasses."""

        abstract = True
        get_latest_by = "created"
        ordering = ["-created", "modified"]
|
python
|
from time import perf_counter
def timer(func):
    """Decorator: report how long each call to ``func`` takes.

    Prints the elapsed wall-clock time (seconds and minutes) after every
    invocation and returns the wrapped function's result unchanged.
    """
    from functools import wraps  # local import keeps this block self-contained

    # BUG FIX: without functools.wraps the wrapper clobbered the wrapped
    # function's __name__/__doc__, which this decorator itself relies on
    # (it prints func.__name__) and which breaks introspection/stacking.
    @wraps(func)
    def wrapper(*args, **kwargs):
        start_t = perf_counter()
        r_val = func(*args, **kwargs)
        end_t = perf_counter()
        elapsed = end_t - start_t
        print(f"{func.__name__} took time: {elapsed} seconds, {elapsed/60} minutes")
        return r_val
    return wrapper
|
python
|
from django.shortcuts import render, get_object_or_404
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from .models import MetodPage
def metod(request):
    """Render the MetodPage list, ordered by title, 10 entries per page."""
    # Renamed locals: the original reused ``metod`` (the view's own name)
    # for the page object, shadowing the function.
    all_metods = MetodPage.objects.all().order_by('title')
    paginator = Paginator(all_metods, 10)
    requested = request.GET.get('page')
    try:
        current = paginator.page(requested)
    except PageNotAnInteger:
        # Missing or non-numeric ?page= parameter: show the first page.
        current = paginator.page(1)
    except EmptyPage:
        # Page number past the end: clamp to the last page.
        current = paginator.page(paginator.num_pages)
    return render(request, 'metodics/metod.html', {'page': requested, 'metods': current})
|
python
|
import itertools
def subpaths_for_path_range(path_range, hardening_chars="'pH"):
    """
    Yield every concrete BIP32-style subpath described by ``path_range``.

    Components are separated by "/"; each component may be a comma-separated
    list of values or low-high ranges, optionally hardened by a trailing
    character from ``hardening_chars``. Hardened entries are normalized to
    the LAST character of ``hardening_chars`` (``H`` by default).

    Examples (default hardening_chars):
      ''           => ['']
      0/1H/0-4     => ['0/1H/0', '0/1H/1', '0/1H/2', '0/1H/3', '0/1H/4']
      0/2,5,9-11   => ['0/2', '0/5', '0/9', '0/10', '0/11']
      5-6/7-8p,15/1-2 => ['5/7H/1', '5/7H/2', '5/8H/1', '5/8H/2',
        '5/15/1', '5/15/2', '6/7H/1', '6/7H/2', '6/8H/1', '6/8H/2',
        '6/15/1', '6/15/2']
    """
    if path_range == '':
        yield ''
        return

    def expand_component(component):
        # Expand one "/"-separated component into its concrete values.
        for piece in component.split(","):
            hardened = piece[-1] in hardening_chars
            suffix = hardening_chars[-1] if hardened else ''
            if hardened:
                piece = piece[:-1]
            if '-' in piece:
                lo, hi = (int(v) for v in piece.split("-", 1))
                for n in range(lo, hi + 1):
                    yield "%d%s" % (n, suffix)
            else:
                # Non-range values keep their original spelling.
                yield "%s%s" % (piece, suffix)

    expanded = [expand_component(c) for c in path_range.split("/")]
    for combo in itertools.product(*expanded):
        yield '/'.join(combo)
"""
The MIT License (MIT)
Copyright (c) 2013 by Richard Kiss
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
|
python
|
#!/usr/bin/env python3
"""
Play Nerdle (https://nerdlegame.com)
"""
import json
import logging
from itertools import product
from os import mkdir
from os.path import dirname, join, realpath
from typing import Any, Iterator, List, Optional, Tuple
from .exceptions import CorrectAnswer, OutOfEquations, OutOfGuesses
class NerdleSolver:
"""
Encapsulates the logic of Nerdle (https://nerdlegame.com)
"""
# I like using short variable names in my methods. And I really
# don't care if I burn a few hundred microseconds interpolating a
# string I'm not going to emit.
# pylint: disable="invalid-name"
# pylint: disable="logging-not-lazy"
# pylint: disable="logging-fstring-interpolation"
# pylint: disable="too-many-instance-attributes"
def __init__(
self,
answer: str = "",
debug: bool = False,
expression_length: int = 8,
guesses: int = 6,
initial_guess: str = "",
top: int = 5,
expr_file: str = "",
): # pylint: disable = "too-many-arguments"
# pylint: disable = "too-many-statements"
self.debug = debug
self.log = logging.getLogger(__name__)
level = logging.INFO
if self.debug:
level = logging.DEBUG
logging.basicConfig(level=level)
self.log.setLevel(level)
self.valid_guess: str = ""
self.guess: str = initial_guess
self.answer: str = answer
self.expr_value: int = -1
self.expression_length: int = expression_length
self.current_pattern: str = ""
self.guesses: int = 1
self.max_guesses: int = guesses
self.top: int = top
self.known_character_positions: str = "X" * self.expression_length
self.position_could_be: List(set[str]) = []
self.legal_chars: str = "0123456789+-*/="
# pylint: disable="consider-using-set-comprehension"
# pylint: disable="unnecessary-comprehension"
for _ in range(self.expression_length):
self.position_could_be.append(set([c for c in self.legal_chars]))
self.not_in_expr: set[str] = set()
self.in_expr: set[str] = set()
self.guesses_tried: list[str] = []
# in self._expr_by_str, a value of None means the expression does not
# parse. This lets us cache failed parses as well.
self._expr_by_str: dict[str, Optional[int]] = {}
self._expr_by_val: dict[int, List[str]] = {}
# We only actually use them, though, if we have to build our own
# list of valid equations
self._valid_equations: dict[str, int] = {}
expr_loaded = False
if not expr_file:
datadir = realpath(join(dirname(__file__), "static"))
expr_file = f"{datadir}/equations-{self.expression_length}.json"
try:
with open(expr_file, "r", encoding="utf-8") as f:
_exprs = json.load(f)
self._valid_equations = _exprs
expr_loaded = True
except Exception as exc: # pylint: disable="broad-except"
self.log.debug(f"Failed to read expr file {expr_file}: {exc}")
self.log.debug("Calculating legal expressions")
self.generate_legal_expressions()
if not expr_loaded:
try:
self.log.debug("Writing expression cache")
try:
mkdir(datadir)
except FileExistsError:
pass
with open(expr_file, "w", encoding="utf-8") as f:
json.dump(self._valid_equations, f)
except Exception as exc: # pylint: disable="broad-except"
self.log.debug(f"Failed to write expr file {expr_file}: {exc}")
self.remaining_possibilities: list[str] = list(
self._valid_equations.keys()
)
self.remaining_possibilities.sort() # Initial lexicographic sort
self._rp_hash = {}
for rp in self.remaining_possibilities:
# looking up a hash entry is constant-time. Looking up in a
# list is linear. Validating our equations relies on that
# lookup. This actually makes an obvious difference when iterating
# through all equations.
self._rp_hash[rp] = True
self.sort_remainder()
if not self.guess:
self.log.debug(
"Best initial guesses:"
+ f"{self.remaining_possibilities[:self.top]}"
)
def play(self) -> None:
"""
Main loop of the game. If we get the right answer, it will raise
CorrectAnswer; if we run out of guesses, OutOfGuesses. We catch
the first, report, and return. We let the second propagate.
"""
while True:
self.log.info(f"{self.guesses}/{self.max_guesses}")
try:
self.loop_once()
except CorrectAnswer as exc:
self.log.info(
f"Correct answer: '{exc}' in {self.guesses} guesses"
)
return
def loop_once(self) -> None:
"""
Single pass through the loop
"""
self.choose_or_show_next_guess()
self.get_current_guess()
self.update_pattern()
self.guess = ""
self.valid_guess = ""
self.restrict_possibilities()
self.sort_remainder()
def get_current_guess(self) -> None:
"""
Get and check a guess
"""
while not self.valid_guess:
self.solicit_current_guess()
self.check_current_guess()
def solicit_current_guess(self) -> None:
"""
Interactively get a guess
"""
if not self.guess: # Initial guess will do, if we have one.
self.guess = input("Guess expression > ")
def check_current_guess(self) -> None:
"""
Is the supplied guess valid?
"""
try:
self.validate_guess(self.guess)
self.valid_guess = self.guess
except ValueError as exc:
self.log.warning(f"'{self.guess}' failed: {exc}")
self.guess = ""
self.expr_value = -1
    def parse_expr(self, expr: str) -> int:
        """
        This is the central feature. Take a string consisting of digits and
        operators (excluding '='), and try to reduce it to an integer.
        If it fails, it stores None in the self._expr_by_str cache.
        NOTE(review): despite the original wording, *successful* results are
        cached by the caller (generate_legal_expressions -> store_expr), not
        here; only failures are stored in this method.
        Raises ValueError for anything that does not reduce to a
        non-negative integer.
        """
        # Yes, it's a little complicated.
        #
        # pylint: disable = "too-many-statements"
        # pylint: disable = "too-many-branches"
        # Fast path: consult the cache (None means "known not to parse").
        if expr in self._expr_by_str:
            if self._expr_by_str[expr] is None:
                raise ValueError(f"'{expr}' is known not to parse")
            return self._expr_by_str[expr]
        # Tokenize: ttok ends up as alternating numbers and operator strings.
        ttok = []
        curstr = ""
        for c in expr:
            if c.isdigit():
                curstr += c
            else:
                try:
                    # This will catch both leading zeroes and repeated
                    # operators
                    ttok.append(self.check_is_valid_number(curstr))
                except ValueError:
                    # Mark as invalid
                    self.store_expr(expr, None)
                    raise
                curstr = ""
                ttok.append(c)
        if curstr:
            # The last token was a number
            ttok.append(self.check_is_valid_number(curstr))
        # Now ttok contains alternating ints and strings representing
        # operations
        while True:
            if len(ttok) == 1:
                # Fully reduced: enforce the Nerdle result rules.
                if ttok[0] < 0:
                    raise ValueError("Only non-negative results allowed")
                if ttok[0] != int(ttok[0]):
                    raise ValueError("Only integer expressions allowed")
                ttok = [int(ttok[0])]
                break
            for idx, tok in enumerate(ttok):
                if isinstance(tok, int) or isinstance(tok, float):
                    continue
                if tok in ("*", "/"):  # high-priority operator
                    if tok == "/":
                        # Division by zero can't happen without constant
                        # zero terms, because subtraction is lower-priority.
                        #
                        # However: we CAN have fractional terms, as long
                        # as they become integers by the time we have finished
                        # computing the expression.
                        result = ttok[idx - 1] / ttok[idx + 1]
                    else:
                        result = ttok[idx - 1] * ttok[idx + 1]
                else:
                    if "*" in ttok or "/" in ttok:
                        # We can't parse low-priority operators until
                        # we have exhausted the high-priority operators.
                        continue
                    if tok == "+":
                        result = ttok[idx - 1] + ttok[idx + 1]
                    else:
                        result = ttok[idx - 1] - ttok[idx + 1]
                # Replace the numbers on either side of the operator,
                # and the operator itself, with the result. Restart
                # parsing ttok.
                first = []
                last = []
                if idx > 2:
                    first = ttok[: idx - 1]
                if len(ttok) > idx + 1:
                    last = ttok[idx + 2 :]
                ttok = first + [result] + last
                break  # From the inner for, not the 'while True'
        lhs = ttok[0]
        return lhs
def validate_guess(self, guess) -> None:
"""
Only returns if guess is plausible; raises ValueError otherwise
"""
if guess not in self._rp_hash:
raise ValueError(f"'{guess}' is not in remaining_possibilities.")
chars_in_guess = set(guess)
if chars_in_guess < self.in_expr:
raise ValueError(
f"{self.in_expr} are all in the expression, but "
+ f"{self.guess} only has {chars_in_guess}"
)
for idx, c in enumerate(guess):
if c in self.not_in_expr:
raise ValueError(f"'{c}' is known to not be in expression")
if c not in self.position_could_be[idx]:
raise ValueError(
f"'{c}' cannot be in position {idx}: "
+ f"not one of {self.position_could_be[idx]}"
)
# Well, it *could* be right.
def update_pattern(self) -> None:
"""
If we know the answer, figure out the pattern; if not, request it
from the user (who's presumably getting it from the game)
"""
if self.answer:
self.calculate_pattern()
else:
self.solicit_pattern()
if self.current_pattern == "!" * self.expression_length:
raise CorrectAnswer(self.guess)
self.guesses += 1
if self.guesses > self.max_guesses:
raise OutOfGuesses()
self.update_positions()
    def calculate_pattern(self) -> None:
        """
        If we know the answer, generate the response pattern.
        Pattern characters: '!' right char in the right place, '.' char not
        usable here, '?' char present but position unknown. 'D' is a
        first-pass placeholder resolved in the second pass.
        """
        pattern = ""
        assert self.answer, "Cannot calculate pattern without the answer"
        # First pass: classify each guess character as exact ('!'),
        # absent ('.'), or provisionally displaced ('D').
        for idx, c in enumerate(self.valid_guess):
            self.log.debug(f"considering '{c}' in position {idx}")
            p = self.answer[idx]
            if c == p:
                self.log.debug(f"'{c}' is in position {idx}")
                pattern += "!"
            elif c not in self.answer:
                self.log.debug(f"'{c}' does not appear in expression")
                pattern += "."
            else:
                self.log.debug(
                    f"'{c}' appears in expression, but not in position {idx}"
                )
                pattern += "D"
        # Just like update_positions, we do a second pass to catch multiples
        # where we already have them all
        for idx, c in enumerate(self.valid_guess):
            if pattern[idx] != "D":
                continue
            actual_count = self.answer.count(c)
            # How many do we have that we know where they are?
            # There's gotta be a better way to do this, but let's get it
            # working first.
            pattern_count = 0
            for a_idx, a_c in enumerate(self.answer):
                if a_c == c:
                    if pattern[a_idx] == "!":
                        pattern_count += 1
            assert pattern_count <= actual_count, f"Overcount of '{c}'"
            # This might not be stable.
            pattern_char = "?"  # Default: we don't know where they all are
            if pattern_count == actual_count:
                self.log.debug(f"Already found all occurrences of '{c}'")
                pattern_char = "."
            else:
                self.log.debug(f"'{c}' appears but position unknown")
            # This should just replace this "D" with a resolved "?" or "."
            pattern = pattern[:idx] + pattern_char + pattern[idx + 1 :]
        self.current_pattern = pattern
def solicit_pattern(self) -> None:
"""
Since we don't know the answer, ask about the pattern
"""
while True:
response = input("Response pattern > ")
if len(response) != self.expression_length:
continue
rchars = set(response)
if not rchars <= set("!?."):
self.log.debug(f"rchars {rchars}; {set('!?.')}")
continue
self.current_pattern = response
break
    def update_positions(self) -> None:
        """
        For each position in the expression, update the set of possible
        characters. '!' pins a position to one character; '?' removes the
        guessed character from that position but records it as present;
        '.' (second pass) removes it from the position and, when never seen
        elsewhere, from the whole expression.
        """
        self.guesses_tried.append(self.valid_guess)
        for idx, c in enumerate(self.current_pattern):
            g = self.valid_guess[idx]
            setc = set(g)
            if c == "!":
                self.position_could_be[idx] = setc  # Fixed in place
                self.in_expr |= setc
                self.log.debug(f"position {idx}: '{g}'")
                continue
            if c == "?":
                # NOTE(review): ^= (symmetric difference) acts as removal
                # only because validate_guess guarantees g is still in the
                # set; if that invariant ever broke, ^= would re-add g.
                self.position_could_be[idx] ^= setc
                self.in_expr |= setc
                self.log.debug(f"position {idx}: not '{g}'")
                self.log.debug(f"'{g}' in expression")
        # Now we start over. This catches the case of "not in word" that
        # really means "it's a multiple, and you have too many, and it's
        # not here" because by the time we do this, if we have any
        # occurrences, they will be in self.in_expr
        for idx, c in enumerate(self.current_pattern):
            if c == ".":
                g = self.valid_guess[idx]
                setc = set(g)
                self.position_could_be[idx] ^= setc
                self.log.debug(f"position {idx}: not '{g}'")
                if g not in self.in_expr:
                    self.log.debug(f"'{g}' not in expression")
                    self.not_in_expr |= setc
def generate_legal_expressions(self):
"""
If we did not have an expression file to load, generate legal
equations. This takes a while to run.
"""
eqn: dict[str, bool] = {}
e_l = self.expression_length
equals_position = [e_l - 3, e_l - 2] # Two-digit answers, then one.
if e_l > 6:
for i in range(e_l - 3, 3, -1): # Then longer answers
equals_position.append(i)
# '=' cannot be farther to the left than the fourth character, because
# the first three (at least) must be a OPR b . Since the string length
# is even, both sides cannot just be numbers, and the right hand side
# has to be a non-negative integer without a leading zero (unless it
# is just zero), so the equal sign can't be at the end.
#
# This is dumb, but what we are going to do is brute-force the solution
# space, with the equals sign in the above place in each place in the
# sequence based on my intuition that that the given sequence
# represents the sequence of most likely places for it.
#
for eqp in equals_position:
for exp_tuple in self.generate_expressions(eqp):
q = "".join(exp_tuple)
try:
_ = int(q)
continue
# It's an integer constant. It evaluates to itself,
# and it is not worth storing.
except ValueError:
pass
try:
lhs = self.parse_expr(q)
eqn = f"{q}={lhs}"
self.store_expr(q, lhs)
except ValueError as exc:
self.log.debug(f"{q} does not parse: {exc}")
self.store_expr(q, None)
continue
# Mark the equation as true
self.store_expr(eqn, lhs)
# Well, it's true, buuuuut...not valid by our rules.
# So we don't store it as a valid equation.
#
# The LHS *is* permitted to be a lone zero.
#
if len(eqn) == self.expression_length:
self._valid_equations[eqn] = lhs
# I thought about storing all the equations that evaluated
# to invalid answers, but it takes a lot of memory for
# not much gain.
def store_expr(self, expr: str, val: Optional[int]):
"""
Determining whether an expression has a legal evaluation is
expensive, so we build a cache so we only evaluate each expression
once.
"""
if expr in self._expr_by_str:
oldval = self._expr_by_str[expr]
if oldval == val:
return
raise ValueError(f"Does '{expr}' evaluate to {oldval} or {val}?")
try:
# There's no point in storing integer constants: testing equality
# is faster than looking up the map and then testing equality.
_ = int(expr)
return
except ValueError:
pass
self._expr_by_str[expr] = val
if val is None:
return
self.log.debug(f"Stored '{expr}' -> {val}")
if val not in self._expr_by_val:
self._expr_by_val[val] = []
if expr not in self._expr_by_val[val]:
self._expr_by_val[val].append(expr)
def generate_expressions(self, e_l: int) -> Iterator[Tuple[Any, ...]]:
"""
Generate all expressions of length e_l. Returns an iterator so we
can chew through, and cache, all the ones that parse to an integer
value.
"""
legal_rhs_chars = set("=") ^ set(self.legal_chars)
digits = set("+-*/") ^ set(legal_rhs_chars)
assert e_l > 2, "expression length must be at least 3"
assert (
e_l < self.expression_length - 1
), f"expression length must be at most {self.expression_length - 2}"
# We know the first and last character are digits
exp_args = [digits]
for _ in range(e_l - 2):
exp_args.append(legal_rhs_chars)
exp_args.append(digits)
expr = product(*exp_args) # itertools is awesome
return expr
def check_is_valid_number(self, n: str) -> int:
"""
Check whether a string is a valid-by-Nerdle-rules number: return
the corresponding int if so.
"""
# It's prettier this way.
# pylint: disable="no-self-use"
if not n:
raise ValueError("The empty string is not a number")
for c in n:
if not c.isdigit():
raise ValueError("numbers are made of digits")
if len(n) > 1:
if n[0] == "0":
raise ValueError(
"Leading zeroes on multi-digit numbers are not allowed"
)
if n == "0":
raise ValueError("Lone zeroes are not allowed")
i_n = int(n)
return i_n
def restrict_possibilities(self) -> None:
"""
Iterate through our remaining valid equations, eliminating the ones
that don't fit the observed facts.
"""
remainder = []
rl = len(self.remaining_possibilities)
for s in self.remaining_possibilities:
try:
self.validate_guess(s)
remainder.append(s)
except ValueError as exc:
self.log.debug(f"'{s}' is eliminated: '{exc}'")
rr = len(remainder)
if rr == 0:
raise OutOfEquations("No possible valid equations remain")
self.log.debug(f"{rl - rr} equations eliminated: {rr} remain")
remainder.sort() # Having a stable order makes testing easier
self.remaining_possibilities = remainder
self._rp_hash = {}
for rp in self.remaining_possibilities:
self._rp_hash[rp] = True
def sort_remainder(self) -> None:
"""
Return the "best" remaining possibilities, for some metric of "best"
"""
# No idea what the best strategy here is. Let's pick the ones with
# the most unconfirmed characters? (Eliminated characters were
# eliminated from remaining_possibilities already)
#
# So, in order: most unconfirmed characters, most characters,
# mixing and matching from operator precedence, and finally we
# inherit from the lexigraphic sort.
self.remaining_possibilities.sort(
key=lambda e: (
len(set(e) - self.in_expr),
len(set(e)),
),
reverse=True,
)
def choose_or_show_next_guess(self) -> None:
"""
We have a sorted list of remaining guesses. If we know the answer,
pick the top one. If we don't, display some to the user to prompt
the next guess.
"""
if self.answer:
if self.guesses == 1:
if self.guess:
self.log.debug(f"Using initial guess '{self.guess}'")
if self.guess in self.remaining_possibilities:
self.valid_guess = self.guess
return
self.log.debug(f"Guess '{self.guess}' is invalid")
self.log.debug("Choosing best guess")
self.guess = self.remaining_possibilities[0]
self.valid_guess = self.guess
return
if not self.guess:
best = self.remaining_possibilities[: self.top]
print(f"Best remaining possibilities: {', '.join(best)}")
|
python
|
from dbt.contracts.graph.manifest import Manifest
import os
from test.integration.base import DBTIntegrationTest, use_profile
def get_manifest():
    """Load the dbt Manifest from ./target/partial_parse.msgpack.

    Returns None when no partial-parse file exists yet.
    """
    path = './target/partial_parse.msgpack'
    if not os.path.exists(path):
        return None
    with open(path, 'rb') as fp:
        manifest_mp = fp.read()
    manifest: Manifest = Manifest.from_msgpack(manifest_mp)
    return manifest
class TestAllExperimentalParser(DBTIntegrationTest):
    """Integration test: the experimental (static) dbt parser must extract
    refs, sources and configs without rendering Jinja."""

    @property
    def schema(self):
        # Schema suffix used by the integration-test harness.
        return "072_experimental_parser"

    @property
    def models(self):
        # Directory (relative to the test) holding the model files.
        return "models"

    @use_profile('postgres')
    def test_postgres_experimental_parser(self):
        # Parse-only run with the experimental parser enabled.
        results = self.run_dbt(['--use-experimental-parser', 'parse'])
        manifest = get_manifest()
        node = manifest.nodes['model.test.model_a']
        # Statically-extracted metadata must match what full rendering finds.
        self.assertEqual(node.refs, [['model_a']])
        self.assertEqual(node.sources, [['my_src', 'my_tbl']])
        self.assertEqual(node.config._extra, {'x': True})
        self.assertEqual(node.config.tags, ['hello', 'world'])
|
python
|
import time
from base.common.skeleton_base import SkeletonBase
from base.constants import DEFAULT_BEFORE_EXPIRES
from base.exceptions import ChannelTemplateNotFound
from base.helpers import validate_channel
from base.utils import format_response
from typing import Dict
from .router import *
from .webhook import *
from .token_refresher import TokenRefresherManager
class SkeletonDevice(SkeletonBase):
    def __init__(self, mqtt=None):
        """Initialize the device skeleton.

        mqtt: optional MQTT client, forwarded to SkeletonBase.
        """
        super(SkeletonDevice, self).__init__(mqtt)
        # self.DEFAULT_BEFORE_EXPIRES = DEFAULT_BEFORE_EXPIRES
        # Safety margin (seconds): tokens are treated as expired this long
        # before their real expiry.
        self.before_expires = settings.config_refresh.get('before_expires_seconds', DEFAULT_BEFORE_EXPIRES)
@property
def _swap_url(self) -> str:
server = settings.api_server
version = settings.api_version
client_id = settings.client_id
url = '{}/{}/managers/{}/swap-credentials'.format(server, version, client_id)
return url
@staticmethod
def _credentials_dict(credentials, sender):
credentials_dict = {
'key': sender['key'],
'value': credentials
}
return credentials_dict
    def swap_credentials(self, credentials, sender, token_key='access_token') -> Dict:
        """Exchange user credentials for manager-scoped ones via the platform.

        credentials: raw credentials (run through auth_response first).
        sender: dict with 'owner_id' (and optionally 'client_id').
        token_key: name of the token field forwarded in the payload.
        Returns the platform's JSON response dict, or {} on any failure.
        """
        url = self._swap_url
        credentials = self.auth_response(credentials) or {}
        if credentials:
            payload = {
                "client_id": sender.get('client_id', credentials.get('client_id', '')),
                "owner_id": sender.get('owner_id', ''),
                "credentials": {
                    token_key: credentials.get(token_key, '')
                }
            }
            response = requests.request('POST', url, headers=self.header, json=payload)
        else:
            logger.warning("[swap_credentials] Credentials not sent")
            return {}
        if response and response.status_code == 200:
            return response.json()
        else:
            # Drop the secret before logging the failed payload.
            payload.pop('credentials', None)
            self.log(f'Error on request swap credentials. Status code: {response.status_code}; URL: {url}; '
                     f'Payload: {payload}', 3)
            return {}
    def check_manager_client_id(self, owner_id, channel_id, main_credentials, second_credentials=None):
        """
        Check if credentials has manager_client_id. Update credentials calling swap credentials if not

        owner_id: owner of the channel.
        channel_id: channel the credentials belong to.
        main_credentials: primary credentials to inspect and augment.
        second_credentials: optional fallback used if the first swap fails.
        Returns a (credentials, has_error) tuple.
        """
        second_credentials = second_credentials or {}
        credentials = self.auth_response(main_credentials)
        has_error = False
        if 'client_man_id' not in credentials:
            sender = {
                'client_id': credentials.get('client_id'),
                'owner_id': owner_id,
                'key': f"credential-owners/{owner_id}/channels/{channel_id}"
            }
            logger.debug(f"[check_manager_client_id] Will try to swap credentials for sender: {sender}")
            swap_credentials = self.swap_credentials(credentials, sender)
            if swap_credentials:
                credentials['client_man_id'] = swap_credentials.get('client_id')
            else:
                logger.warning("[check_manager_client_id] Invalid swap credentials return with main credentials")
                # Retry once with the fallback credentials.
                second_credentials = self.auth_response(second_credentials)
                swap_credentials = self.swap_credentials(second_credentials, sender)
                if swap_credentials:
                    credentials['client_man_id'] = swap_credentials.get('client_id')
                else:
                    logger.warning("[check_manager_client_id] Invalid swap credentials return with secondary credentials")
                    has_error = True
        return credentials, has_error
    def auth_requests(self, sender):
        """
        *** MANDATORY ***
        Receives,
            sender - A dictionary with keys 'channel_template_id', 'owner_id' and 'client_id'.
        Returns a list of dictionaries with the structure,
        [
            {
                "method" : "<get/post>"
                "url" : "<manufacturer's authorize API uri and parameters>"
                "headers" : {}
            },
            ...
        ]
        If the value of headers is {} for empty header, otherwise it follows the structure as of the
        sample given below.
        "headers" : {
            "Accept": "application/json",
            "Authorization": "Bearer {client_secret}"
        }
        Each dictionary in list represent an individual request to be made to manufacturer's API and
        its position denotes the order of request.

        NOTE(review): returns the NotImplemented sentinel (not a raised
        NotImplementedError); implementers must override this method.
        """
        return NotImplemented
    def get_devices(self, sender, credentials):
        """
        *** MANDATORY ***
        Receives,
            credentials - All persisted user credentials.
            sender - A dictionary with keys 'channel_template_id', 'owner_id' and 'client_id'.
        Returns a list of dictionaries with the following structure ,
        [
            {
                "content" : "<device name>",
                "id" : "<manufacturer's device ID>",
                "photoUrl" : "<url to device's image in cdn.muzzley.com>"
            },
            ...
        ]
        Each dictionary in list denotes a device of user.

        NOTE(review): returns the NotImplemented sentinel; implementers must
        override this method.
        """
        return NotImplemented
    def update_channel_template(self, device_id):
        """
        This method is used to return a channel_template other than the one sent in request on select_devices
        :param device_id: Dict of device characteristcs
        :return: new_channel_id or None

        Default implementation keeps the requested channel template.
        """
        return None
    def did_pair_devices(self, credentials, sender, paired_devices, channels):
        """
        *** MANDATORY ***
        Invoked after successful device pairing.
        Receives,
            credentials - All persisted user credentials.
            sender - A dictionary:
                {'channel_template_id': xxxx-xxxxx-xxxxx-xxxx,
                 'owner_id': xxxx-xxxxx-xxxxx-xxxx,
                 'client_id': xxxx-xxxxx-xxxxx-xxxx}
            paired_devices - A list of dictionaries with selected device's data
            channels - A list of channels_id from paired_devices

        NOTE(review): returns the NotImplemented sentinel; implementers must
        override this method.
        """
        return NotImplemented
    def access_check(self, mode, case, credentials, sender):
        """
        *** MANDATORY ***
        Checks for access to manufacture for a component, replace if requires a different process
        Receives,
            mode - 'r' or 'w'
                r - read from manufacturer's API
                w - write to manufacturer's API
            case - A dictionary with keys 'device_id','channel_id','component' and 'property'.
            credentials - credentials of user from database
            sender - A dictionary with keys 'owner_id' and
                'client_id'.
        Returns updated valid credentials or current one or None if no access
        """
        try:
            now = int(time.time())
            expiration_date = credentials['expiration_date']
            if 'key' in sender:
                if now >= expiration_date:  # we should refresh the token
                    self.log('[access_check] token is expired trying to refresh {}'.format(sender['key']), 7)
                    credentials_dict = self._credentials_dict(credentials, sender)
                    # refresh_token returns None on failure, which callers
                    # treat as "no access".
                    credentials = self.refresh_token(credentials_dict)
                return credentials
        except KeyError as e:
            self.log('Error: missing {} key'.format(e), 4)
        except Exception:
            self.log('Unexpected error {}'.format(traceback.format_exc(limit=5)), 3)
        # Reached when sender has no 'key' or an error occurred above.
        self.log(f'Missing info in access_check: \nsender: {sender} \ncase:{case}', 9)
        return None
    def polling(self, data):
        """
        Invoked by the manager itself when performing a polling request to manufacturer's API
        Receives,
            data - A dictionary with keys 'channel_id', 'credentials' and 'response' where response is a json object
        This function is in charge of processing the polling response;
        implementers must override it when polling is enabled.
        """
        raise NotImplementedError('No polling handler implemented')
def get_channel_template(self, channel_id):
"""
Input :
channel_id - channel_id of the device.
Returns channel_template_id
"""
channel = validate_channel(channel_id)
return channel['channeltemplate_id'] if (channel and 'channeltemplate_id' in channel) else ''
    def get_channels_by_channeltemplate(self, channeltemplate_id):
        """
        Input :
            channeltemplate_id - channeltemplate_id of the device.
        Returns list of channels_id, or '' on invalid input / any error.
        """
        try:
            if not channeltemplate_id:
                logger.warning(f"[get_channels_by_channeltemplate] Invalid channeltemplate_id")
                return ''
            url = f"{settings.api_server_full}/managers/{settings.client_id}/channels?" \
                  f"page_size=9999&channel.channeltemplate_id={channeltemplate_id}&fields=channel.id"
            resp = requests.get(url, headers=self.header)
            logger.verbose("[get_channels_by_channeltemplate] Received response code[{}]".format(resp.status_code))
            if int(resp.status_code) == 200:
                # Flatten the platform envelope down to the channel ids.
                return [client_channel.get('channel', {}).get("id") for client_channel in
                        resp.json().get("elements", [])]
            else:
                raise ChannelTemplateNotFound("Failed to retrieve channel_ids for {}".format(channeltemplate_id))
        except (OSError, ChannelTemplateNotFound) as e:
            logger.warning('[get_channels_by_channeltemplate] Error while making request to platform: {}'.format(e))
        except Exception:
            logger.alert("[get_channels_by_channeltemplate] Unexpected error: {}".format(traceback.format_exc(limit=5)))
        return ''
    def get_channel_by_owner(self, owner_id, channel_id):
        """
        Input :
            owner_id
            channel_id
        Returns channeltemplate_id, False when the platform has no content
        for this owner/channel (HTTP 204), or '' on error.
        """
        url = "{}/users/{}/channels?channel_id={}".format(settings.api_server_full, owner_id, channel_id)
        try:
            resp = requests.get(url, headers=self.header)
            if int(resp.status_code) == 200:
                return resp.json()['elements'][0]['channel']["channeltemplate_id"]
            elif int(resp.status_code) == 204:  # No content
                logger.verbose("[get_channel_by_owner] Received response code[{}]".format(resp.status_code))
                return False
            else:
                logger.verbose("[get_channel_by_owner] Received response code[{}]".format(resp.status_code))
                raise ChannelTemplateNotFound(f"[get_channel_by_owner] Failed to retrieve channel_template_id "
                                              f"for {channel_id}")
        except (OSError, ChannelTemplateNotFound) as e:
            logger.warning('[get_channel_by_owner] Error while making request to platform: {}'.format(e))
        except Exception:
            logger.alert("[get_channel_by_owner] Unexpected error: {}".format(traceback.format_exc(limit=5)))
        return ''
    def get_device_id(self, channel_id):
        """
        To retrieve device_id using channel_id from the local database.
        """
        return self.db.get_device_id(channel_id)
    def get_channel_id(self, device_id):
        """
        To retrieve channel_id using device_id from the local database.
        """
        return self.db.get_channel_id(device_id)
    def get_polling_conf(self):
        """
        Required configuration if polling is enabled
        Returns a dictionary or a list of dictionaries:
            {
                url (required): polling manufacturer url
                method (required): HTTP method to use: GET / POST
                params: URL parameters to append to the URL (used by requests)
                data: the body to attach to the request (used by requests)
            }
        Implementers must override this when polling is enabled.
        """
        raise NotImplementedError('polling ENABLED but conf NOT DEFINED')
# -------------
# TOKEN REFRESH
# -------------
    def get_refresh_token_conf(self):
        """
        Required configuration if token refresher is enabled
        Returns a dictionary
            url - token refresh manufacturer url
            headers - if required a dict with necessary headers
        Implementers must override this when token refreshing is enabled.
        """
        raise NotImplementedError('token refresher ENABLED but conf NOT DEFINED')
def refresh_token(self, credentials_dict):
refresh_token = credentials_dict.get('value', {}).get('refresh_token', '')
refresher = TokenRefresherManager(implementer=self)
conf = self.get_refresh_token_conf()
response = refresher.send_request(refresh_token, credentials_dict, conf)
self.log('refresh_token response {}'.format(response), 7)
if type(response) is dict and 'credentials' in response:
self.after_refresh(response)
return response['credentials']
return None
def after_refresh(self, data):
    """
    Hook invoked by the manager after a token has been successfully refreshed.

    Receives:
        data - A dictionary with keys 'channel_id' and 'new_credentials'

    Overriding this is optional; the default implementation does nothing.
    """
    pass
def update_expiration_date(self, credentials):
now = int(time.time())
expires_in = int(credentials['expires_in']) - self.before_expires
expiration_date = now + expires_in
credentials['expiration_date'] = expiration_date
return credentials
def store_credentials(self, owner_id, client_app_id, channeltemplate_id, credentials):
    """
    Push refreshed credentials to the platform's store-credentials endpoint.

    :param owner_id: owner the credentials belong to
    :param client_app_id: client application id
    :param channeltemplate_id: channel template the credentials apply to
    :param credentials: credentials payload to persist
    :return: True when the platform reports at least one updated record,
        False on invalid input, no match, HTTP error, or any exception.
    """
    try:
        url = f"{settings.api_server_full}/managers/{settings.client_id}/store-credentials"
        payload = {
            'client_id': client_app_id,
            'owner_id': owner_id,
            'channeltemplate_id': channeltemplate_id,
            'credentials': credentials
        }
        # All four fields are mandatory; bail out before hitting the API.
        if not (client_app_id and owner_id and channeltemplate_id and credentials):
            logger.warning(f'[store_credentials] Invalid payload request client_id: {client_app_id}; '
                           f'owner_id: {owner_id}; channeltemplate_id: {channeltemplate_id}')
            return False
        logger.verbose(f"[store_credentials] Try to update credentials for channeltemplate_id {channeltemplate_id}")
        resp = requests.post(url, headers=self.header, json=payload)
        logger.verbose(f"[store_credentials] Received response code[{resp.status_code}]")
        if int(resp.status_code) == 200 and resp.json().get('n_updated'):
            return True
        elif int(resp.status_code) == 200 and resp.json().get('n_updated', 0) == 0:
            # 200 but nothing matched; drop the secret credentials before logging.
            payload.pop('credentials', None)
            logger.warning(f'[store_credentials] credentials not found to patch with requested data: '
                           f'{payload}')
            return False
        else:
            logger.warning(f'[store_credentials] Error while making request to platform: {format_response(resp)}')
            return False
    except Exception:
        logger.alert(f"[store_credentials] Unexpected error store_credentials: {traceback.format_exc(limit=5)}")
        return False
SkeletonBase.register(SkeletonDevice)
|
python
|
from django.forms.widgets import CheckboxSelectMultiple, RadioSelect
class RadioSelectBootstrap(RadioSelect):
    """RadioSelect rendered with leprikon's Bootstrap-friendly input templates."""
    template_name = "leprikon/widgets/multiple_input.html"
    option_template_name = "leprikon/widgets/input_option.html"
class CheckboxSelectMultipleBootstrap(CheckboxSelectMultiple):
    """CheckboxSelectMultiple rendered with leprikon's Bootstrap-friendly input templates."""
    template_name = "leprikon/widgets/multiple_input.html"
    option_template_name = "leprikon/widgets/input_option.html"
|
python
|
import pandas as pd
class LeakageInspector:
    """Detects patient-level data leakage between two dataset splits."""

    def __init__(self, df1, df2, patient_col):
        """
        Args:
            df1 (dataframe): dataframe describing first dataset
            df2 (dataframe): dataframe describing second dataset
            patient_col (str): string name of column with patient IDs
        """
        self.df1 = df1
        self.df2 = df2
        self.patient_col = patient_col

    def check_for_leakage(self):
        """
        Checks for leakage in patient data if same patient crosses
        into training and validation sets.

        Returns:
            leakage (bool): True if there is leakage, otherwise False
        """
        # set() over the column de-duplicates directly; the previous
        # .unique().tolist() round-trip was redundant.
        df1_patients = set(self.df1[self.patient_col])
        df2_patients = set(self.df2[self.patient_col])
        # A non-empty intersection means the same patient appears in both splits.
        return bool(df1_patients & df2_patients)
|
python
|
# Setuptools packaging script for the Pairtree library.
from ez_setup import use_setuptools
use_setuptools()  # bootstrap setuptools if it is not already installed
from setuptools import setup, find_packages

setup(name="Pairtree",
      version="0.7.5",
      description="Pairtree FS implementation.",
      long_description="""\
From http://www.cdlib.org/inside/diglib/pairtree/pairtreespec.html : Pairtree, a filesystem hierarchy for holding objects that are located by mapping identifier strings to object directory (or folder) paths two characters at a time. If an object directory (folder) holds all the files, and nothing but the files, that comprise the object, a "pairtree" can be imported by a system that knows nothing about the nature or structure of the objects but can still deliver any object's files by requested identifier. The mapping is reversible, so the importing system can also walk the pairtree and reliably enumerate all the contained object identifiers. To the extent that object dependencies are stored inside the pairtree (e.g., fast indexes stored outside contain only derivative data), simple or complex collections built on top of pairtrees can recover from index failures and reconstruct a collection view simply by walking the trees. Pairtrees have the advantage that many object operations, including backup and restore, can be performed with native operating system tools.
""",
      author="Ben O'Steen",
      author_email="[email protected]",
      url="http://packages.python.org/Pairtree/",
      scripts = ['bin/ppath'],
      license="http://www.apache.org/licenses/LICENSE-2.0",
      packages=find_packages(),
      test_suite = "tests.test.TestPairtree",
      )
|
python
|
# vim: ai ts=4 sts=4 et sw=4 encoding=utf-8
from pages.createprojectpage.questionnaire_creation_options_page import QuestionnaireCreationOptionsPage
from pages.dashboardpage.dashboard_locator import *
from pages.page import Page
class DashboardPage(Page):
    """Page object for the dashboard screen (Selenium page-object pattern)."""

    def __init__(self, driver):
        Page.__init__(self, driver)

    def navigate_to_create_project_page(self):
        """Click the create-project link and return the next page object."""
        self.driver.find(CREATE_PROJECT_LINK).click()
        return QuestionnaireCreationOptionsPage(self.driver)
|
python
|
'''Use this for development'''
from .base import *

# Development-only overrides of the base Django settings.
ALLOWED_HOSTS += ['127.0.0.1']
DEBUG = True
WSGI_APPLICATION = 'home.wsgi.dev.application'
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql_psycopg2',
        'NAME': 'sarytask',
        'USER': 'postgres',
        # NOTE(review): real password committed to source control -- move it to
        # an environment variable / secrets store.
        'PASSWORD': 'S3d66221@',
        'HOST': 'localhost',
        'PORT': '8081',  # NOTE(review): non-default Postgres port -- confirm.
    }
}
CORS_ORIGIN_WHITELIST = (
    'http://localhost:3000',
)
# add third party apis keys here masked
|
python
|
import re
from jinja2.filters import contextfilter
@contextfilter
def to_dockbarx_items(context, pin_to_launcher_favorites):
    '''
    returns the DockbarX launcher items
    '''
    favourites = list(pin_to_launcher_favorites)
    omit = context.resolve('omit')
    launcher_items = []
    for favourite in favourites:
        application = favourite.get('application')
        # Skip empty / omitted applications and entries bound to other desktops.
        if application in ('', None, omit):
            continue
        if favourite.get('when_desktop') not in ('dockbarx', None):
            continue
        application_id = favourite.get('application_id')
        if application_id is None:
            # Fall back to the .desktop filename without its extension.
            application_id = re.sub(
                '(.*)\\.desktop$', '\\1', application)
        launcher_items.append(
            application_id +
            ';/usr/share/applications/' +
            application)
    return launcher_items
@contextfilter
def to_gnome_items(context, pin_to_launcher_favorites):
    '''
    returns the Gnome launcher items
    '''
    favourites = list(pin_to_launcher_favorites)
    omit = context.resolve('omit')
    launcher_items = []
    for favourite in favourites:
        application = favourite.get('application')
        # Skip empty / omitted applications and entries bound to other desktops.
        if application in ('', None, omit):
            continue
        if favourite.get('when_desktop') not in ('gnome', None):
            continue
        launcher_items.append("'" + application + "'")
    return launcher_items
@contextfilter
def to_unity_items(context, pin_to_launcher_favorites):
    '''
    returns the Unity launcher items
    '''
    favourites = list(pin_to_launcher_favorites)
    omit = context.resolve('omit')
    launcher_items = []
    for favourite in favourites:
        application = favourite.get('application')
        # Skip empty / omitted applications and entries bound to other desktops.
        if application in ('', None, omit):
            continue
        if favourite.get('when_desktop') not in ('unity', None):
            continue
        launcher_items.append("'application://" + application + "'")
        unity = favourite.get('unity')
        # Optional extra unity:// entry attached to the same favourite.
        if unity not in ('', None, omit):
            launcher_items.append("'unity://" + unity + "'")
    return launcher_items
class FilterModule(object):
    ''' Launcher filter '''

    def filters(self):
        """Expose the launcher-item filters to the templating engine by name."""
        return dict(
            to_dockbarx_items=to_dockbarx_items,
            to_gnome_items=to_gnome_items,
            to_unity_items=to_unity_items,
        )
|
python
|
import collections
import itertools
import typing
from checkmerge import analysis, report
class AnalysisResultMaxSeverityMetric(report.Metric):
    """
    Metric reporting the highest analysis result severity within a type.
    """
    name = 'Max. severity'
    low = .5
    high = 1.5

    def __init__(self, items: typing.List[analysis.AnalysisResult]):
        """
        :param items: The analysis results to aggregate over.
        """
        highest = max(item.severity for item in items)
        super().__init__(highest)
class AnalysisResultAvgSeverityMetric(report.Metric):
    """
    Metric reporting the mean analysis result severity within a type.
    """
    name = 'Avg. severity'
    low = .5
    high = 1.5

    def __init__(self, items: typing.List[analysis.AnalysisResult]):
        """
        :param items: The analysis results to aggregate over.
        """
        total = sum(item.severity for item in items)
        super().__init__(total / float(len(items)))
class AnalysisResultMetric(report.Metric):
    """
    Per-result-type metric: its value is the result count, with the max and
    average severities attached as child metrics.
    """
    low = 1
    high = 5

    def __init__(self, cls: typing.Type[analysis.AnalysisResult], items: typing.List[analysis.AnalysisResult]):
        self.name = cls.name
        items = list(items)
        children = [
            AnalysisResultMaxSeverityMetric(items),
            AnalysisResultAvgSeverityMetric(items),
        ]
        super().__init__(len(items), children=children)
class AnalysisReport(report.Report):
    """
    Report aggregating analysis results, grouped by their concrete type.
    """
    has_metrics = True
    has_conflicts = True

    def __init__(self, results: typing.Iterable[analysis.AnalysisResult]):
        self.results_by_type = collections.defaultdict(list)
        for result in results:
            self.results_by_type[type(result)].append(result)

    def get_metrics(self) -> typing.Iterable[report.Metric]:
        """Yield one AnalysisResultMetric per result type, ordered by type name."""
        by_name = sorted(self.results_by_type.items(), key=lambda pair: pair[0].name)
        for cls, items in by_name:
            yield AnalysisResultMetric(cls, items)

    def get_conflicts(self) -> typing.Iterable[analysis.AnalysisResult]:
        """Return every result, most severe first."""
        everything = itertools.chain.from_iterable(self.results_by_type.values())
        return sorted(everything, key=lambda result: -result.severity)
|
python
|
"""
parser.py
products.json의 반찬 상세 페이지 url에 요청을 보내 크롤링하는 메소드 정의
메소들을 crawl.py 파일에서 사용함
"""
import requests
from bs4 import BeautifulSoup
import re
def get_soup(url):
    """Fetch *url* and return its content parsed as a BeautifulSoup document."""
    html = requests.get(url).text
    return BeautifulSoup(html, 'lxml')
def parse_name(name):
    """
    Split a raw product title into (supplier_name, food_name, weight).

    Titles look like '[Supplier] Food (150g)'. Expressions like '(140g*2개)'
    are NOT treated as a weight (they stay part of the food name). The
    returned weight is an int in grams; 0 when the title carries no weight.
    """
    # Does the title contain a weight? (kg amounts keep a trailing 'k' below)
    has_weight = re.compile('.+?\(?(\d*,?\.?\d+)k?g(?!\*)\)?').match(name)
    if has_weight:
        # Capture supplier, food name, numeric weight, and any trailing text.
        groups = re.findall('\[(.*)\]\s?(.+?)\s?\(?(\d*,?\.?\d+k?)g\)?(\s.+)?', name)
    else:
        # No weight: capture supplier and food name only.
        groups = re.findall('\[(.*)\]\s?(.+)', name)
    if not groups:
        # No '[supplier]' prefix at all: the whole title is the food name.
        supplier_name, food_name, weight_text = '', name, '0'
    else:
        first = groups[0]
        supplier_name = first[0]
        # Join the name with any trailing text captured after the weight.
        food_name = ''.join(first[1::2])
        weight_text = first[2] if len(first) >= 4 else '0'
    # Normalize to integer grams ('k' suffix marks kilograms).
    if 'k' in weight_text:
        weight = int(float(weight_text[:-1]) * 1000)
    else:
        weight = int(weight_text.replace(',', ''))
    return (supplier_name, food_name, weight)
def parse_product(soup):
    """
    Crawl a product detail page for the fields needed to build a Product.

    :param soup: BeautifulSoup instance of the detail page
    :return result: dict with the crawled product information
    """
    result = dict()
    product_name = soup.select_one('h1.desc_product_name').text
    result['raw_name'] = product_name
    result['supplier'], result['name'], result['weight'] = parse_name(product_name)
    result['description'] = soup.select_one('p.desc_bt_txt').text if soup.select_one('p.desc_bt_txt') else ''
    result['thumbnail_url1'] = soup.select_one('div.image_top > img').get('src')
    # Additional thumbnails are numbered from 2 onwards.
    for index, img in enumerate(soup.select('a.top_thumb > img'), start=2):
        result[f'thumbnail_url{index}'] = img.get('src')
    origin_price = soup.select_one('del.origin-price')
    # Prices render as e.g. '12,900원'; strip the currency char and commas.
    sale_price = int(soup.select_one('strong.sale-price').text[:-1].replace(',', ''))
    result['sale_price'] = sale_price
    if origin_price is not None:
        # Reuse the node already selected instead of querying the DOM again.
        price = int(origin_price.text[:-1].replace(',', ''))
        result['price'] = price
        result['discount_rate'] = round((1 - sale_price / price) * 100)
    else:
        result['price'] = 0
        result['discount_rate'] = 0
    # Detail table rows: food type / ingredients / allergy warnings.
    details = soup.select('table.table_detail_info > tbody > tr')
    for detail in details:
        if detail.select_one('th').text == '식품의 유형':
            result['type'] = detail.select_one('td').text
        elif detail.select_one('th').text == '원재료명 및 함량':
            result['materials'] = detail.select_one('td').text
        elif detail.select_one('th').text == '알레르기 유발물질':
            result['alert_allergy'] = detail.select_one('td').text
    result['stock'] = 10
    result['available'] = True
    # Description list: reward points / delivery type / delivery days.
    details = soup.select('dl.desc_info > dt')
    for i, detail in enumerate(details):
        if detail.text == '적립금':
            result['point_amount'] = soup.select_one(f'dl.desc_info > dd:nth-of-type({i+1})').text[:-1].replace(',', '')
        elif detail.text == '배송타입':
            result['delivery_type'] = soup.select_one(f'dl.desc_info > dd:nth-of-type({i+1})').text.strip()
        elif detail.text == '수령요일':
            result['delivery_days'] = soup.select_one('dl.desc_info > dd > strong').text
    # Removed leftover debug `print(result)`.
    return result
def parse_category(soup):
    """Extract the product category (third breadcrumb entry) from the page."""
    category = soup.select_one('ul.breadcrumb > li:nth-of-type(3) > a').text
    return {'category': category}
def parse_product_image(soup):
    """Collect the src attribute of every detail-section image on the page."""
    return [img.get('src') for img in soup.select('div.product_detail_img_box > img')]
|
python
|
import pandas as pd
import os
import sys
import subprocess
import numpy as np
import streamlit as st
import time
import random
from random import randint
from streamlit_player import st_player
from streamlit_autorefresh import st_autorefresh
import altair as alt
import back as fl
import usessss as fh
#from pytransform import _load_library
#m = _load_library(path='kryp/dist/pytransform')
#m
#
#[browser]
#serverAddress = "Nikolai"
#from dist.tost import pyarmor_runtime
#import dist.tost as fl
#import dist.tost as fl
#fl.pyarmor_runtime()
# NOTE(review): the st.set_page_config( call is commented out, but its
# argument lines below are NOT -- they execute as stray one-element tuple
# assignments (page_title, page_icon, layout, initial_sidebar_state).
# Presumably the whole call should be commented out or restored -- confirm.
#st.set_page_config(
page_title="Really cool app",
page_icon="random",
#page_icon="🧊",
layout="centered",
initial_sidebar_state="collapsed",
#)
# CSS snippet intended to hide Streamlit's default menu/footer
# (apparently never passed to st.markdown -- confirm).
hide_streamlit_style = """
<style>
#MainMenu {visibility: hidden;}
footer {visibility: hidden;}
</style>
footer {
footer:after {
content:'goodbye';
visibility: visible;
display: block;
position: relative;
#background-color: red;
padding: 5px;
top: 2px;
}
"""
padding = 0
# Remove the default page padding around the main block container.
st.markdown(f""" <style>
.reportview-container .main .block-container{{
padding-top: {padding}rem;
padding-right: {padding}rem;
padding-left: {padding}rem;
padding-bottom: {padding}rem;
}} </style> """, unsafe_allow_html=True)
#st.button(f"Click Me {st.session_state.emoji}", on_click=random_emoji)
# UI state flags toggled by the sidebar checkboxes / buttons further below.
option1, option2, option3, option4, option5, usertext1 = False, False, False, False, False, "default_text"
st.title('Welcome!')
emojis = ["🐶", "🐱", "🐭", "🐹", "🐰", "🦊", "🐻", "🐼"]
#st.session_state.emoji = random.choice(emojis)
# initialize emoji as a Session State variable
#if "emoji" not in st.session_state:
#st.session_state.emoji = "👈"
#font_size = st.sidebar.number_input(
#"emoji", min_value=0.5, max_value=4.0, value=2.0, step=0.1
#)
# Run the autorefresh about every 2000 milliseconds (2 seconds) and stop
# after it's been refreshed 100 times.
def hello():
    """Auto-refresh demo: rerun roughly every 2 s (at most twice) and render
    the FizzBuzz word for the refresh counter."""
    count = st_autorefresh(interval=2000, limit=2, key="fizzbuzzcounter")
    # The returned counter lets us vary the output between refreshes.
    if count == 0:
        st.write("Count is zero")
    elif count % 15 == 0:  # divisible by both 3 and 5
        st.write("FizzBuzz")
    elif count % 3 == 0:
        st.write("Fizz")
    elif count % 5 == 0:
        st.write("Buzz")
    else:
        st.write(f"Count: {count}")
#https://discuss.streamlit.io/t/regarding-layout-of-streamlit-web-app/9602/2
#st.write(f"\n|Vil du Søge i databasen så skriv 1.|\n|Vil du se hele databasen skriv 2. |\n|Vil du tilføje til databasen tast 3.|\n|Vil du slette fra databasen tast 4: |\n|Vil du ændre på værdier i databasen tast 6.|\n| Vil du clear cmd tast 5:|\n|")
st.text('Made by Nikolai Berthelsen')
#os.system('python dist/usessss.py')
#input = ""
#st.text_area("Input text")
b = True
# Three-column header row with contact / social-media / home buttons.
c1, c2, c3 = st.columns(3)
with c1:
    # NOTE(review): st.write(...) is passed as the button's second positional
    # argument (its key); the write runs eagerly on every rerun -- confirm intent.
    if st.button("Contact me", st.write(random.choice(emojis))):
        option4 = True
        code ='''[email protected]'''
        st.code(code, language='python')
        #st.write("https://discuss.codecademy.com/")
        #st.write("https://www.w3schools.com/")
        #st.write("Vil du have flere kode øvelser? prøv:","https://www.codingame.com/home")
        b = False
with c2:
    #l = c2.button("Et eller andet")
    if st.button("Social media", st.write(random.choice(emojis))):
        option3 = True
        b = False
        st.write("Here")
with c3:
    o=st.button("Tilbage til forsiden", st.write(random.choice(emojis)))
#st.header("Tighten up left buttons with empty right columns!")
cont1, cont2, _, _, _, _ = st.columns(6)
#cont1.button("Tight")
#with cont2:
#st.button("Tighter")
#st.header("You can even control relative size of columns")
#tc1, tc2, _= st.columns([1,1,9])
#tc1.button("Tighty")
#with tc2:
#st.button("Tighterer")
key = (np.arange(9) * 2)
# Scratch state used by the quiz / database sections below.
x3 = ('')
x2 = ('')
x1 = ('')
x5 = ()
#https://www.delftstack.com/howto/python/python-clear-console/
def clearConsole():
    """Clear the terminal window ('cls' on Windows, 'clear' elsewhere)."""
    # os.name is 'nt' on Windows and 'posix' on Unix; the original also tested
    # the meaningless value 'cls', which os.name can never equal.
    command = 'cls' if os.name == 'nt' else 'clear'
    os.system(command)
# Load the persisted results table (a CSV archived inside out.zip).
df = pd.read_csv('out.zip')
total_rows2= len(df.index)+1
total_rows= len(df.index)-1
# Sample dataframe (appears unused beyond this definition -- confirm).
df4 = pd.DataFrame({'Produkt': 'Cheeseburger nuggets chilicheesetops milkshake bigmac apple cola water bigTastyBacon'.split(),
                    'Butik': 'Macdonalds Macdonalds Macdonalds Macdonalds Macdonalds Macdonalds Macdonalds Macdonalds Macdonalds '.split(),
                    'Pris': 'Macdonalds Macdonalds Macdonalds Macdonalds Macdonalds Macdonalds Macdonalds Macdonalds 20'.split(),
                    'iD': (total_rows)})
#st.write(df)
u = ()
#https://stackoverflow.com/questions/15943769/how-do-i-get-the-row-count-of-a-pandas-dataframe
# Placeholders for the form submit flags used further below.
submit1 = ()
submit = ()
submit2 = ()
submit3 = ()
so = ()
#https://discuss.streamlit.io/t/the-button-inside-a-button-seems-to-reset-the-whole-app-why/1051/6
c1, c2, c3 = st.columns([50,60,70])
#if p:
# Project 1: slide show plus an embedded recursion explainer.
if st.sidebar.checkbox(f"Projekt 1 - Database"):
    fh.hej()
    input = ""
    #os.system('streamlit run dist/usessss.py')
    #input=""
    #from dist.pytransform import pyarmor_runtime
    #pyarmor_runtime()
    #__pyarmor__(__name__, __file__, b'\x50\x59\x41\x52\x4d\x4f\x52\x00\x00\x03\x09\x00\x61\x0d\x0d\x0a\x08\x2d\xa0\x01\x00\x00\x00\x00\x01\x00\x00\x00\x40\x00\x00\x00\xef\x00\x00\x00\x00\x00\x00\x00\xd4\x0c\x74\xc0\x06\x34\x69\x5a\x4e\xd5\x0f\x20\xb3\x45\xfb\xbd\x00\x00\x00\x00\x00\x00\x00\x00\x71\x82\x82\x64\x72\x3a\xdf\x24\xb1\xd6\x39\x33\xdf\x8c\xe8\xc5\x04\xa3\x86\xa3\x78\x15\xdf\x21\x0a\x1e\x71\xd9\xf4\x3e\xd2\x7b\xa3\x84\xa1\x45\xdf\xaf\x18\x5f\x5d\x9e\x64\x56\xed\xa6\xbc\x71\x53\x02\x1e\xe7\x41\xbb\x13\xb8\xda\x00\x2e\xae\x8e\x4a\x93\x5c\xf4\xd7\xe7\x33\x35\xe8\x94\xde\xac\x52\xeb\x20\xce\xa9\x07\xe9\xee\x43\x6e\x96\x91\x74\x17\xcc\x38\xe5\x65\xed\x8c\xd5\xf6\xaa\x4d\x30\xc3\x3c\xc3\x97\xf4\x5f\x26\x4d\x82\xb2\xcb\xe0\x0f\xed\x18\x0d\x2a\x80\x81\x38\x3a\x7e\x9f\x8f\x5f\x96\x1e\x65\x96\x63\xef\x09\x32\x45\x77\xaa\x07\x9f\xd0\xf1\x40\x1a\xdc\x4d\xbb\xd6\xcb\x61\x7e\x02\x0e\x12\x62\xf8\x3f\xea\xeb\x03\xc9\x03\xf3\x9c\xe5\xb4\x52\x56\xcb\x4e\xf4\x96\x92\x31\x1a\xe3\x81\x50\x02\x1a\x3d\x91\x36\x70\x21\x1a\xa8\xed\x96\xdb\x35\xc6\xd7\xb9\x32\x57\xac\x24\x72\xec\xad\x1c\xe6\x8d\x56\x58\x16\x11\xe1\xe6\x13\xd4\xdc\x9c\x9c\x3b\xc4\x1f\x9b\x8d\x5e\xce\x4c\xba\x68\x8c\xf4\xaf\x50\x05\xd6\x6a\x59\x52\xf2\xd5\xd3\xaa\xd0\x5c\x08\xb5\x88', 2)
    option2 = True
    st.write(fl.g())
    # Slider-driven slide show: one numbered PNG per slider value.
    slider_ph = st.empty()
    info_ph = st.empty()
    b = False
    value = slider_ph.slider("slider", 1, 10, 1, 1)
    info_ph.info(value)
    st.image(str(value) + ".png",)
    # Auto-advance through all slides, 4 seconds per slide.
    if st.button('Vis alle slides'):
        b = False
        for x in range(10):
            value = int(value)
            value = slider_ph.slider("slider", 0, 10, value + 1, 1)
            info_ph.info(value)
            time.sleep(4)
            value = str(value)
            st.image(str(value) + ".png",)
    st.title("What is Recursion?")
    st.write(f"Recursion Defined What is recursion? Sometimes a problem is too difficult or too complex to solve because it is too big. If the problem can be broken down into smaller versions of itself, we may be able to find a way to solve one of these smaller versions and then be able to build up to a solution to the entire problem. This is the idea behind recursion; recursive algorithms break down a problem into smaller pieces which you either already know the answer to, or can solve by applying the same algorithm to each piece, and then combining the results. Stated more concisely, a recursive definition is defined in terms of itself. Recursion is a computer programming technique involving the use of a procedure, subroutine, function, or algorithm that calls itself in a step having a termination condition so that successive repetitions are processed up to the critical step where the condition is met at which time the rest of each repetition is processed from the last one called to the first. \n Don't worry about the details of that definition. The main point of it is that it is defined in terms of itself: Recursion: ... for more information, see Recursion. \n")
    st.write("https://www.sparknotes.com/cs/recursion/whatisrecursion/section1/")
# Project 2: two JavaScript for-loop quiz questions; answers feed x1/x2
# which are appended to the results dataframe via a form below.
if st.sidebar.checkbox("Project 2 - programming quiz"):
    option1 = True
    b = False
    st.write("Hvad skal der i opg.1 for at køre loopet 10 gange?")
    with c1:
        st.header("Opg. 1 \n for (let i = 0; i < ")
        #st.write("for (let i = 0; i < ")
        st.write("text += cars[i]; }")
        new_title = '<p style="font-family:sans-serif; color:Green; font-size: 42px;">New image</p>'
    with c2:
        st.header("")
        so = (st.text_input((""), key = '91' ))
        st.write({so})
        if so != ("10"):
            st.write("False")
            x1 = int(1)
        #form6 = st.form(key='my-form6')
        #x45 = form6.text_input('')
        #submit4 = form6.form_submit_button('Submit')
        #st.write(x45)
        #so = x45
        #st.write(so)
    with c3:
        st.header(".\n ;i++) {")
    if so == ("10"):
        original_title = '<p style="font-family:Courier; color:Blue; font-size: 20px;">True</p>'
        st.markdown(original_title, unsafe_allow_html=True)
        x2 = int(2)
    c4, c5, c6 = st.columns([50,60,70])
    #if p:
    st.write("Hvad skal der i opg.2 for at køre loopet 20 gange?")
    with c4:
        st.header("Opg. 2 \n for (let i = 0; i < ")
        #st.write("for (let i = 0; i < ")
        st.write("text += cars[i]; }")
        new_title = '<p style="font-family:sans-serif; color:Green; font-size: 42px;">New image</p>'
    with c5:
        st.header("")
        su = (st.text_input((""), key = '910' ))
        st.write({su})
        if su != ("20"):
            st.write("False")
            x1 = int(1)
        #form6 = st.form(key='my-form6')
        #x45 = form6.text_input('')
        #submit4 = form6.form_submit_button('Submit')
        #st.write(x45)
        #so = x45
        #st.write(so)
    with c6:
        st.header(".\n ;i++) {")
    if su == ("20"):
        original_title3 = '<p style="font-family:Courier; color:Blue; font-size: 20px;">True</p>'
        st.markdown(original_title3, unsafe_allow_html=True)
        x2 = int(2)
    form = st.form(key='my-form')
    submit = form.form_submit_button('Tilføj til Databasen')
    if submit:
        df2 = pd.DataFrame({'Antal forkerte svar': [x1],
                            'Antal rigtige svar': [x2]})
        # NOTE(review): the new row is appended twice -- confirm whether the
        # duplicate append is intentional.
        df = df.append((df2), ignore_index=False)
        df = df.append((df2), ignore_index=False)
        #https://pandas.pydata.org/docs/reference/api/pandas.DataFrame.to_csv.html
        compression_opts = dict(method = 'zip',
                                archive_name='out.csv')
        df.to_csv('out.zip', index=False,
                  compression=compression_opts)
        st.write(df)
# Reset all option flags when returning to the front page.
if st.sidebar.checkbox("Tilbage til forsiden"):
    option5 = True
    # NOTE(review): bare annotation -- this line is a no-op at runtime;
    # presumably meant to re-expand the sidebar -- confirm.
    initial_sidebar_state: "expanded"
    option1, option2, option3, option4, option5, usertext1 = False, False, False, False, False, "default_text"
if o:
    #form1 = st.form(key='my-form1')
    #x10 = form1.text_input('Indtast Burger')
    #submit1 = form1.form_submit_button('Submit')
    #form2 = st.form(key='my-form2')
    #x20 = form2.text_input('Indtast Butik')
    #submit2 = form2.form_submit_button('Submit')
    #form3 = st.form(key='my-form3')
    #x30 = form3.text_input('Indtast Pris')
    #submit3 = form3.form_submit_button('Submit')
    #st.write(f'hello {x1}')
    #st.write("hvilket produkt vil du tilføje?")
    #x11= st.text_input((''), key = '25')
    #st.write("Hvilket butik er produktet fra?")
    #x22= st.text_input((''), key = '24')
    #st.write("Hvad kostede produktet?")
    #x33= st.text_input((''), key = '23')
    #st.write('Press submit to have your name printed below')
    #form = st.form(key='my-form')
    #submit = form.form_submit_button('Tilføj til Databasen')
    #if submit1:
    #x1 = ({x10})
    #if submit2:
    #x2 = ({x20})
    # NOTE(review): submit3 is initialised to () above and its form is
    # commented out, so this branch can never run -- confirm.
    if submit3:
        df2 = pd.DataFrame({'Antal forkerte svar': [x1],
                            'Antal rigtige svar': [x2]})
        df = df.append((df2), ignore_index=False)
        #https://pandas.pydata.org/docs/reference/api/pandas.DataFrame.to_csv.html
        compression_opts = dict(method = 'zip',
                                archive_name='out.csv')
        df.to_csv('out.zip', index=False,
                  compression=compression_opts)
#df = df.drop([(total_rows)], axis = 0)
#df = df.drop([(total_rows)], axis = 0)
#i = st.text_input((''), key = "<25>")
#if i == str("ok"):
#hello()
# Row deletion (u == "4") and row editing (u == "6") for the results table.
submit4 = ()
if u == str("4"):
    print("Hvilket række vil du slette?")
    #x5 = int(input())
    #x5 = int(st.text_input(''),key='30')
    #df = df.drop([x5], axis = 0)
    form4 = st.form(key='my-form4')
    x40 = form4.text_input('')
    submit4 = form4.form_submit_button('Submit')
    st.write(x40)
    x5 = x40
if submit4:
    # Drop the requested row index and persist the table back to the zip.
    x5 = int(x5)
    st.write(x5)
    df = df.drop([x5], axis=0)
    compression_opts = dict(method = 'zip',
                            archive_name='out.csv')
    df.to_csv('out.zip', index=False,
              compression=compression_opts)
#df = df.drop([0], axis = 0)
#df = df.set_index("Burger")
#df = df.drop('hej', axis = 0)
x10 = ()
x11 = ()
options = [x10]
search = [x11]
x9 = ()
x8 = ()
if u == str("6"):
    st.write("Hvilken burger vil du ændre på?")
    x10 = st.text_input((''), key = '60')
    options = [x10]
    st.write("Indtast gamle værdi")
    x8 = st.text_input((''), key = '61')
    st.write("Indtast nye værdi")
    x9 = st.text_input((''), key = '62')
    if st.button('Click func too'):
        # Replace the old value with the new one on the matching product rows.
        df[df['Produkt'].isin(options)] = df[df['Produkt'].isin(options)].replace(x8,x9)
        compression_opts = dict(method = 'zip',
                                archive_name='out.csv')
        df.to_csv('out.zip', index=False,
                  compression=compression_opts)
#https://youtu.be/F-gDgQ6kuuk?t=460
#https://www.geeksforgeeks.org/selecting-rows-in-pandas-dataframe-based-on-conditions/
#rslt_df = df[df['Burger'].isin(options)]
#print (rslt_df)
#if u == str("3"):
#df = df.append((df2), ignore_index=False)
#https://pandas.pydata.org/docs/reference/api/pandas.DataFrame.to_csv.html
#compression_opts = dict(method = 'zip',
#archive_name='out.csv')
#df.to_csv('out.zip', index=False,
#compression=compression_opts)
#x5 = int(input())
# print(type(x5))
line2 = ()
def søg(l):
    """Search the module-level `df` for products containing *l* and render
    each hit (case-insensitive first, then case-sensitive) with Streamlit."""
    #st.write(df.loc[df[g]])
    df.sort_values(by='Produkt', inplace=True, key=lambda col: col.str.lower())
    for produkt in df['Produkt']:
        if produkt == '':
            continue
        if l in str(produkt).lower():
            st.write(produkt)
            st.write(df.loc[df['Produkt'] == produkt])
        elif l in str(produkt):
            st.write(df.loc[df['Produkt'] == produkt])
#st.write(df.loc(line).isin(df['Produkt']))
if u == str("5"):
    clearConsole()
#if o:
#st.title("Sorteret fra a-z")
#df.sort_values(by='Produkt',inplace=True)
#st.write(df)
ur2=()
ur=5000
# u == "1": prompt for a search term and run the product search.
if u == str("1"):
    st.title("Hvad vil ud søge på?")
    ur = st.text_input(("").lower(), key = '70' )
    søg(str(ur))
# Default landing content shown while no section button has been pressed.
if b == True:
    code = '''def hello():
print("Hello, and welcome to my portefolio website!!")'''
    st.code(code, language='python')
    st_player("https://www.youtube.com/watch?v=r5kfkpYtOiw")
#if ur == str("5"):
# = st.text_input((''), key = '77' )
# for x in range(len([df['Butik']])):
# if df['Butik'[x]] in h:
#st.write(df.loc[df['Butik'] == h])
#if ur == str("6"):
#b = (input(''))
#b = st.text_input((''), key = '71' )
#x11 = input('')
#search = [x11]
#st.write(df.loc[df['Pris'] == b])
#print(df.loc[['Pris'].isin[(search)] == b])
#print(df.loc[['Pris'] == b])
#if ur == str("7"):
#c = st.text_input((''), key = '73' )
#st.write(df.loc[df['iD'] == c])
#if ur == str("8"):
#d = st.text_input((''), key = '72' )
#st.write(df.loc[df['Produkt'] == d])
#os.system("web.py")
#input = ""
|
python
|
# RGB colour tuples used to render each person's infection state.
GREY = (0.78, 0.78, 0.78) # uninfected
RED = (0.96, 0.15, 0.15) # infected
GREEN = (0, 0.86, 0.03) # recovered
BLACK = (0, 0, 0) # dead

# Simulation parameters for COVID-19; the tuple values are presumably
# (min, max) day ranges -- confirm against the simulator that consumes them.
COVID19_PARAMS = {
    "r0": 2.28,
    "incubation": 5,
    "percent_mild": 0.8,
    "mild_recovery": (7, 14),
    "percent_severe": 0.2,
    "severe_recovery": (21, 42),
    "severe_death": (14, 56),
    "fatality_rate": 0.034,
    "serial_interval": 7,
    "population": 10000
}
|
python
|
from troposphere import FindInMap, GetAtt, Join, Output, Parameter, Ref, Template
from troposphere.awslambda import MINIMUM_MEMORY, MAXIMUM_MEMORY, Code, Function
from troposphere.cloudformation import CustomResource
from troposphere.constants import NUMBER
from troposphere.ec2 import Instance, SecurityGroup
from troposphere.iam import Policy, Role
t = Template()
t.set_version("2010-09-09")
ExistingVPC = t.add_parameter(
Parameter(
"ExistingVPC",
Type="AWS::EC2::VPC::Id",
Description=(
"The VPC ID that includes the security groups in the "
"ExistingSecurityGroups parameter."
),
)
)
InstanceType = t.add_parameter(
Parameter(
"InstanceType",
Default="t2.micro",
Type="String",
AllowedValues=["t2.micro", "m1.small"],
)
)
ExistingSecurityGroups = t.add_parameter(
Parameter(
"ExistingSecurityGroups",
Type="List<AWS::EC2::SecurityGroup::Id>",
)
)
MemorySize = t.add_parameter(
Parameter(
"LambdaMemorySize",
Type=NUMBER,
Description="Amount of memory to allocate to the Lambda Function",
Default="128",
MinValue=MINIMUM_MEMORY,
MaxValue=MAXIMUM_MEMORY,
)
)
Timeout = t.add_parameter(
Parameter(
"LambdaTimeout",
Type=NUMBER,
Description="Timeout in seconds for the Lambda function",
Default="60",
)
)
t.add_mapping(
"AWSInstanceType2Arch",
{"m1.small": {"Arch": "PV64"}, "t2.micro": {"Arch": "HVM64"}},
)
t.add_mapping(
"AWSRegionArch2AMI",
{
"ap-northeast-1": {"HVM64": "ami-cbf90ecb", "PV64": "ami-27f90e27"},
"ap-southeast-1": {"HVM64": "ami-68d8e93a", "PV64": "ami-acd9e8fe"},
"ap-southeast-2": {"HVM64": "ami-fd9cecc7", "PV64": "ami-ff9cecc5"},
"cn-north-1": {"HVM64": "ami-f239abcb", "PV64": "ami-fa39abc3"},
"eu-central-1": {"HVM64": "ami-a8221fb5", "PV64": "ami-ac221fb1"},
"eu-west-1": {"HVM64": "ami-a10897d6", "PV64": "ami-bf0897c8"},
"sa-east-1": {"HVM64": "ami-b52890a8", "PV64": "ami-bb2890a6"},
"us-east-1": {"HVM64": "ami-1ecae776", "PV64": "ami-1ccae774"},
"us-west-1": {"HVM64": "ami-d114f295", "PV64": "ami-d514f291"},
"us-west-2": {"HVM64": "ami-e7527ed7", "PV64": "ami-ff527ecf"},
},
)
code = [
"var response = require('cfn-response');",
"exports.handler = function(event, context) {",
" var responseData = {Value: event.ResourceProperties.List};",
" responseData.Value.push(event.ResourceProperties.AppendedItem);",
" response.send(event, context, response.SUCCESS, responseData);",
"};",
]
AppendItemToListFunction = t.add_resource(
Function(
"AppendItemToListFunction",
Code=Code(ZipFile=Join("", code)),
Handler="index.handler",
Role=GetAtt("LambdaExecutionRole", "Arn"),
Runtime="nodejs",
MemorySize=Ref(MemorySize),
Timeout=Ref(Timeout),
)
)
LambdaExecutionRole = t.add_resource(
Role(
"LambdaExecutionRole",
Path="/",
Policies=[
Policy(
PolicyName="root",
PolicyDocument={
"Version": "2012-10-17",
"Statement": [
{
"Action": ["logs:*"],
"Resource": "arn:aws:logs:*:*:*",
"Effect": "Allow",
}
],
},
)
],
AssumeRolePolicyDocument={
"Version": "2012-10-17",
"Statement": [
{
"Action": ["sts:AssumeRole"],
"Effect": "Allow",
"Principal": {"Service": ["lambda.amazonaws.com"]},
}
],
},
)
)
MyEC2Instance = t.add_resource(
Instance(
"MyEC2Instance",
SecurityGroupIds=GetAtt("AllSecurityGroups", "Value"),
InstanceType=Ref(InstanceType),
ImageId=FindInMap(
"AWSRegionArch2AMI",
Ref("AWS::Region"),
FindInMap("AWSInstanceType2Arch", Ref(InstanceType), "Arch"),
),
)
)
AllSecurityGroups = t.add_resource(
CustomResource(
"AllSecurityGroups",
List=Ref(ExistingSecurityGroups),
AppendedItem=Ref("SecurityGroup"),
ServiceToken=GetAtt(AppendItemToListFunction, "Arn"),
)
)
SecurityGroup = t.add_resource(
SecurityGroup(
"SecurityGroup",
SecurityGroupIngress=[
{
"ToPort": "80",
"IpProtocol": "tcp",
"CidrIp": "0.0.0.0/0",
"FromPort": "80",
}
],
VpcId=Ref(ExistingVPC),
GroupDescription="Allow HTTP traffic to the host",
SecurityGroupEgress=[
{
"ToPort": "80",
"IpProtocol": "tcp",
"CidrIp": "0.0.0.0/0",
"FromPort": "80",
}
],
)
)
AllSecurityGroups = t.add_output(
Output(
"AllSecurityGroups",
Description="Security Groups that are associated with the EC2 instance",
Value=Join(", ", GetAtt(AllSecurityGroups, "Value")),
)
)
print(t.to_json())
|
python
|
from flask import Flask, render_template, redirect
app = Flask(__name__)
# --- routes start ---
# Importing user.routes registers the user-facing routes as an import-time
# side effect (the `routes` name itself is not referenced here).
from user import routes
@app.route("/")
def home():
    # Landing page.
    return render_template('home.html')
@app.route("/login/")
def login_page():
    # Login form page.
    return render_template('login.html')
@app.route("/about/")
def about():
    # Static "about us" page.
    return render_template('aboutus.html')
@app.route("/dashboard/")
def dashboard():
    # Dashboard is hosted externally; redirect rather than render.
    return redirect("http://ml-hub.herokuapp.com")
|
python
|
# qmpy/materials/element
"""
Django models representing elements and species.
"""
from django.db import models
from qmpy.db.custom import DictField
from qmpy.utils import *
class Element(models.Model):
    """
    Core model for an element.
    Relationships:
        | :mod:`~qmpy.Atom` via atom_set
        | :mod:`~qmpy.Species` via species_set
        | :mod:`~qmpy.Structure` via structure_set
        | :mod:`~qmpy.Entry` via entry_set
        | :mod:`~qmpy.Composition` via composition_set
        | :mod:`~qmpy.Calculation` via calculation_set
        | :mod:`~qmpy.Potential` via potential_set
        | :mod:`~qmpy.Hubbard` via hubbards
        | :mod:`~qmpy.HubbardCorrection` via hubbardcorrection_set
        | :mod:`~qmpy.ReferenceEnergy` via referenceenergy_set
    Attributes:
        | **Identification**
        | z: atomic number
        | name: full atomic name
        | symbol: atomic symbol
        | group: group in the periodic table
        | period: period in the periodic table
        |
        | **Physical properties**
        | mass: Atomic mass, in AMU (float)
        | density: Density at STP, in g/cm^3 (float)
        | volume: Atomic volume at STP, in A^3/atom (float)
        | atomic_radii: in A (float)
        | van_der_waals radii: in A (float)
        | covalent_radii: in A (float)
        | scattering_factors: A dictionary of scattering factor coeffs.
        |
        | **Thermodynamic properties**
        | melt: melting point in K
        | boil: boiling point in K
        | specific_heat: C_p in J/K
        |
        | **Electronic properties**
        | electronegativity: Pauling electronegativity
        | ion_energy: First ionization energy. (eV)
        | s_elec: # of s electrons
        | p_elec: # of p electrons
        | d_elec: # of d electrons
        | f_elec: # of f electrons
        |
        | **Additional information**
        | production: Annual tons of element produced.
        | abundance: Amount in earths crust (ppm)
        | radioactive: Are all isotopes unstable?
        | HHI_P: Herfindahl-Hirschman Index for production.
        | HHI_R: Herfindahl-Hirschman Index for reserve
    Note:
        HHI values from Gaultois, M. et al. Chem. Mater. 25, 2911-2920 (2013).
    """
    ### Identification
    z = models.IntegerField()
    name = models.CharField(max_length=20)
    # NOTE: the chemical symbol is the primary key, so foreign keys to
    # Element store the symbol string, not an integer id.
    symbol = models.CharField(max_length=9, primary_key=True)
    ### Periodic table
    group = models.IntegerField()
    period = models.IntegerField()
    ### Phyical characteristics
    mass = models.FloatField()
    density = models.FloatField()
    volume = models.FloatField()
    atomic_radii = models.IntegerField()
    van_der_waals_radii = models.IntegerField()
    covalent_radii = models.IntegerField()
    scattering_factors = DictField()
    ### Thermodynamics
    melt = models.FloatField()
    boil = models.FloatField()
    specific_heat = models.FloatField()
    ### Electonic structure
    electronegativity = models.FloatField()
    first_ionization_energy = models.FloatField()
    s_elec = models.IntegerField()
    p_elec = models.IntegerField()
    d_elec = models.IntegerField()
    f_elec = models.IntegerField()
    ### misc
    HHI_P = models.FloatField(default=0)
    HHI_R = models.FloatField(default=0)
    production = models.FloatField(default=0)
    radioactive = models.BooleanField(default=False)
    class Meta:
        app_label = "qmpy"
        db_table = "elements"
    # builtins
    def __str__(self):
        """Elements render as their chemical symbol."""
        return self.symbol
    # accessor
    @classmethod
    def get(cls, value):
        """
        Return an element object. Accepts symbols and atomic numbers, or a list
        of symbols/atomic numbers.

        Note:
            Falls through (returns None) for unsupported input types.
        Examples::
            >>> Element.get('Fe')
            >>> Element.get(26)
            >>> Element.get(['Fe', 'O'])
        """
        if isinstance(value, cls):
            return value
        elif isinstance(value, list):
            # Recurse element-wise so mixed symbol/number lists work.
            return [cls.get(v) for v in value]
        elif isinstance(value, int):
            return cls.objects.get(z=value)
        elif isinstance(value, str):
            return cls.objects.get(symbol=value)
    # methods
    def species_distribution(self):
        """Map each known oxidation state of this element to the number of
        structures containing that species."""
        counts = {}
        for s in self.species_set.all():
            counts[s.ox] = s.structure_set.count()
        return counts
class Species(models.Model):
    """
    Base model for an atomic species. (Element + charge state).
    Relationships:
        | :mod:`~qmpy.Element` via element
        | :mod:`~qmpy.Entry` via entry_set
        | :mod:`~qmpy.Structure` via structure_set
    Attributes:
        | name: Species name. e.g. Fe3+, O2-
        | ox: Oxidation state (float)
    """
    name = models.CharField(max_length=8, primary_key=True)
    element = models.ForeignKey(
        Element, blank=True, null=True, on_delete=models.CASCADE
    )
    ox = models.FloatField(blank=True, null=True)
    class Meta:
        app_label = "qmpy"
        db_table = "species"
    # builtins
    def __str__(self):
        """Species render as their name, e.g. 'Fe3+'."""
        return str(self.name)
    # accessor
    @classmethod
    def get(cls, value):
        """
        Gets or creates the specified species.
        Arguments:
            value:
                Accepts multiple input types. Can be a string, e.g. Fe3+
                or a tuple of (symbol, oxidation state) pairs, e.g. (Fe, 3).
        Return:
            A :mod:`~qmpy.Species` or list of :mod:`~qmpy.Species`.
        Examples::
            >>> Species.get('Fe3+')
            >>> Species.get('Fe3')
            >>> Species.get(('Fe', 3))
            >>> Species.get([ 'Fe3+', 'O2-', 'Li1+'])
        """
        if isinstance(value, cls):
            return value
        elif isinstance(value, str):
            spec, new = cls.objects.get_or_create(name=value)
            if new:
                # Newly created row: derive the element and oxidation
                # state from the species name and persist them.
                elt, ox = parse_species(value)
                spec.element_id = elt
                spec.ox = ox
                spec.save()
            return spec
        elif isinstance(value, list):
            # BUG FIX: this previously read ``for value in list``, iterating
            # over the builtin ``list`` type itself, which raised TypeError
            # for every list input. Iterate the supplied list instead.
            return [cls.get(v) for v in value]
    @property
    def ox_format(self):
        """Oxidation state as an int when integral, 0 when unset, otherwise
        a float rounded to 3 decimal places."""
        if self.ox is None:
            return 0
        elif is_integer(self.ox):
            return int(self.ox)
        else:
            return float(round(self.ox, 3))
|
python
|
# # coding=utf-8
import unittest
import uuid
from google.appengine.ext import testbed
from application import create_app
from application.routes import create_routes
class FlaskClient(unittest.TestCase):
    """Smoke tests for app creation, route registration and 404 handling,
    run inside a Google App Engine testbed."""

    def setUp(self):
        # Activate a testbed with the stubs the application touches.
        self.tb = testbed.Testbed()
        self.tb.activate()
        self.tb.init_memcache_stub()
        self.tb.init_urlfetch_stub()
        self.app = create_app('testing')
        self.client = self.app.test_client(use_cookies=True)

    def tearDown(self):
        self.tb.deactivate()

    def test_flask(self):
        """The application factory must return a usable app object."""
        self.assertIsNotNone(self.app)

    def test_routes(self):
        """After registration the URL map holds at least the static endpoints."""
        create_routes(self.app)
        self.assertIsNotNone(self.app.url_map)
        expected_static_endpoints = 5
        registered = sum(1 for _ in self.app.url_map.iter_rules())
        self.assertGreaterEqual(registered, expected_static_endpoints)

    def test_404(self):
        """A random, never-registered path responds with 404."""
        response = self.client.get('/%s' % uuid.uuid4())
        self.assertEqual(response.status_code, 404)

    def test_static_route_for_404(self):
        """The literal '/404' path also responds with 404."""
        response = self.client.get('/404')
        self.assertEqual(response.status_code, 404)
|
python
|
# -*- coding: utf-8 -*-
from django.db import models
from django.template.defaultfilters import date
from django.core.validators import MinValueValidator
from django.urls import reverse_lazy
from decimal import Decimal
from djangosige.apps.vendas.models import TIPOS_DESCONTO_ESCOLHAS, MOD_FRETE_ESCOLHAS, STATUS_ORCAMENTO_ESCOLHAS
from djangosige.apps.estoque.models import DEFAULT_LOCAL_ID
import locale
locale.setlocale(locale.LC_ALL, '')
STATUS_PEDIDO_COMPRA_ESCOLHAS = (
(u'0', u'Aberto'),
(u'1', u'Realizado'),
(u'2', u'Cancelado'),
(u'3', u'Importado por XML'),
(u'4', u'Recebido')
)
class ItensCompra(models.Model):
    """Line item of a purchase (:class:`Compra`): product, quantity, unit
    price, discount and the per-item ICMS/IPI tax amounts.

    The ``format_*`` helpers render Decimal amounts with the process locale
    (two decimal places, grouping enabled).
    """
    produto = models.ForeignKey('cadastro.Produto', related_name="compra_produto",
                                on_delete=models.CASCADE, null=True, blank=True)
    compra_id = models.ForeignKey(
        'compras.Compra', related_name="itens_compra", on_delete=models.CASCADE)
    quantidade = models.DecimalField(max_digits=13, decimal_places=2, validators=[
                                     MinValueValidator(Decimal('0.00'))], null=True, blank=True)
    valor_unit = models.DecimalField(max_digits=13, decimal_places=2, validators=[
                                     MinValueValidator(Decimal('0.00'))], null=True, blank=True)
    tipo_desconto = models.CharField(
        max_length=1, choices=TIPOS_DESCONTO_ESCOLHAS, null=True, blank=True)
    desconto = models.DecimalField(max_digits=13, decimal_places=2, validators=[
                                   MinValueValidator(Decimal('0.00'))], null=True, blank=True)
    subtotal = models.DecimalField(max_digits=13, decimal_places=2, validators=[
                                   MinValueValidator(Decimal('0.00'))], null=True, blank=True)
    inf_ad_prod = models.CharField(max_length=500, null=True, blank=True)
    vicms = models.DecimalField(max_digits=13, decimal_places=2, validators=[
                                MinValueValidator(Decimal('0.00'))], null=True, blank=True)
    vipi = models.DecimalField(max_digits=13, decimal_places=2, validators=[
                               MinValueValidator(Decimal('0.00'))], null=True, blank=True)
    p_icms = models.DecimalField(max_digits=5, decimal_places=2, validators=[
                                 MinValueValidator(Decimal('0.00'))], null=True, blank=True)
    p_ipi = models.DecimalField(max_digits=5, decimal_places=2, validators=[
                                MinValueValidator(Decimal('0.00'))], null=True, blank=True)
    # Options controlling how taxes interact with prices.
    icms_incluido_preco = models.BooleanField(default=False)
    ipi_incluido_preco = models.BooleanField(default=False)
    incluir_bc_icms = models.BooleanField(
        default=False)  # include IPI in the ICMS tax base
    auto_calcular_impostos = models.BooleanField(default=True)

    @property
    def vprod(self):
        """Gross product value (quantity * unit price), rounded to 2 places."""
        return round(self.quantidade * self.valor_unit, 2)

    def get_total_sem_desconto(self):
        """Total before discount; tipo_desconto '0' means an absolute amount,
        any other value means a percentage."""
        if self.tipo_desconto == '0':
            return self.subtotal + self.desconto
        else:
            tot_sem_desc = (self.subtotal * 100) / (100 - self.desconto)
            return tot_sem_desc

    def get_valor_desconto(self):
        """Discount expressed as an absolute amount."""
        if self.tipo_desconto == '0':
            return self.desconto
        else:
            tot_sem_desc = self.get_total_sem_desconto()
            v_desconto = tot_sem_desc * (self.desconto / 100)
            return v_desconto

    def get_total_impostos(self):
        """Sum of the tax amounts that are set (None values are skipped)."""
        return sum(filter(None, [self.vicms, self.vipi]))

    def get_total_com_impostos(self):
        """Item subtotal plus all taxes."""
        total_com_impostos = self.subtotal + self.get_total_impostos()
        return total_com_impostos

    # NOTE: locale.format() was deprecated since Python 3.7 and removed in
    # 3.12; locale.format_string() is the drop-in replacement (same
    # signature for a single '%.2f' specifier with grouping enabled).
    def format_total_impostos(self):
        return locale.format_string(u'%.2f', self.get_total_impostos(), 1)

    def format_total_com_imposto(self):
        return locale.format_string(u'%.2f', self.get_total_com_impostos(), 1)

    def format_desconto(self):
        return '{0}'.format(locale.format_string(u'%.2f', self.get_valor_desconto(), 1))

    def format_quantidade(self):
        return locale.format_string(u'%.2f', self.quantidade, 1)

    def format_valor_unit(self):
        return locale.format_string(u'%.2f', self.valor_unit, 1)

    def format_total(self):
        return locale.format_string(u'%.2f', self.subtotal, 1)

    def format_vprod(self):
        return locale.format_string(u'%.2f', self.vprod, 1)

    def format_valor_attr(self, nome_attr):
        """Format an arbitrary numeric attribute by name; returns None when
        the attribute is unset (preserves the original silent behavior)."""
        valor = getattr(self, nome_attr)
        if valor is not None:
            return locale.format_string(u'%.2f', valor, 1)
class Compra(models.Model):
    """Base purchase document. Concrete rows are always one of the child
    models (:class:`OrcamentoCompra` or :class:`PedidoCompra`); see
    :meth:`get_child`. Monetary ``format_*`` helpers render Decimals with
    the process locale (two decimal places, grouping enabled)."""
    # Fornecedor (supplier)
    fornecedor = models.ForeignKey(
        'cadastro.Fornecedor', related_name="compra_fornecedor", on_delete=models.CASCADE)
    # Transporte (freight modality)
    mod_frete = models.CharField(
        max_length=1, choices=MOD_FRETE_ESCOLHAS, default='9')
    # Estoque (destination stock location)
    local_dest = models.ForeignKey(
        'estoque.LocalEstoque', related_name="compra_local_estoque", default=DEFAULT_LOCAL_ID, on_delete=models.PROTECT)
    movimentar_estoque = models.BooleanField(default=True)
    # Info
    data_emissao = models.DateField(null=True, blank=True)
    valor_total = models.DecimalField(max_digits=13, decimal_places=2, validators=[
                                      MinValueValidator(Decimal('0.00'))], default=Decimal('0.00'))
    tipo_desconto = models.CharField(
        max_length=1, choices=TIPOS_DESCONTO_ESCOLHAS, default='0')
    desconto = models.DecimalField(max_digits=15, decimal_places=4, validators=[
                                   MinValueValidator(Decimal('0.00'))], default=Decimal('0.00'))
    despesas = models.DecimalField(max_digits=13, decimal_places=2, validators=[
                                   MinValueValidator(Decimal('0.00'))], default=Decimal('0.00'))
    frete = models.DecimalField(max_digits=13, decimal_places=2, validators=[
                                MinValueValidator(Decimal('0.00'))], default=Decimal('0.00'))
    seguro = models.DecimalField(max_digits=13, decimal_places=2, validators=[
                                 MinValueValidator(Decimal('0.00'))], default=Decimal('0.00'))
    total_icms = models.DecimalField(max_digits=13, decimal_places=2, validators=[
                                     MinValueValidator(Decimal('0.00'))], default=Decimal('0.00'))
    total_ipi = models.DecimalField(max_digits=13, decimal_places=2, validators=[
                                    MinValueValidator(Decimal('0.00'))], default=Decimal('0.00'))
    cond_pagamento = models.ForeignKey(
        'vendas.CondicaoPagamento', related_name="compra_pagamento", on_delete=models.SET_NULL, null=True, blank=True)
    observacoes = models.CharField(max_length=1055, null=True, blank=True)

    def get_total_sem_imposto(self):
        """Grand total minus ICMS + IPI taxes."""
        total_sem_imposto = self.valor_total - self.impostos
        return total_sem_imposto

    def get_total_produtos(self):
        """Sum of the gross product value of every line item."""
        itens = ItensCompra.objects.filter(compra_id=self.id)
        tot = 0
        for it in itens:
            tot += it.vprod
        return tot

    def get_total_produtos_estoque(self):
        """Like get_total_produtos, but restricted to stock-controlled products."""
        itens = self.itens_compra.all()
        tot = 0
        for it in itens:
            if it.produto.controlar_estoque:
                tot += it.vprod
        return tot

    # NOTE: locale.format() was deprecated since Python 3.7 and removed in
    # 3.12; locale.format_string() is the drop-in replacement.
    def format_total_produtos(self):
        return locale.format_string(u'%.2f', self.get_total_produtos(), 1)

    @property
    def impostos(self):
        """Total tax burden (ICMS + IPI)."""
        return (self.total_icms + self.total_ipi)

    @property
    def format_data_emissao(self):
        return '%s' % date(self.data_emissao, "d/m/Y")

    def format_valor_total(self):
        return locale.format_string(u'%.2f', self.valor_total, 1)

    def format_frete(self):
        return locale.format_string(u'%.2f', self.frete, 1)

    def format_impostos(self):
        return locale.format_string(u'%.2f', self.impostos, 1)

    def format_vicms(self):
        return locale.format_string(u'%.2f', self.total_icms, 1)

    def format_vipi(self):
        return locale.format_string(u'%.2f', self.total_ipi, 1)

    def format_total_sem_imposto(self):
        return locale.format_string(u'%.2f', self.get_total_sem_imposto(), 1)

    def format_desconto(self):
        """Render the discount: absolute value for tipo '0', otherwise the
        percentage applied to the pre-discount total of all items."""
        if self.tipo_desconto == '0':
            return locale.format_string(u'%.2f', self.desconto, 1)
        else:
            itens = ItensCompra.objects.filter(compra_id=self.id)
            tot = 0
            for it in itens:
                tot += it.get_total_sem_desconto()
            v_desconto = tot * (self.desconto / 100)
            return locale.format_string(u'%.2f', v_desconto, 1)

    def format_seguro(self):
        return locale.format_string(u'%.2f', self.seguro, 1)

    def format_despesas(self):
        return locale.format_string(u'%.2f', self.despesas, 1)

    def format_total_sem_desconto(self):
        total_sem_desconto = self.valor_total - self.desconto
        return locale.format_string(u'%.2f', total_sem_desconto, 1)

    def get_forma_pagamento(self):
        """Display name of the payment method, or '' when unset."""
        if self.cond_pagamento:
            return self.cond_pagamento.get_forma_display()
        else:
            return ""

    def get_local_dest_id(self):
        """Id of the destination stock location, or '' when unset."""
        if self.local_dest:
            return self.local_dest.id
        else:
            return ""

    def get_child(self):
        """Resolve this row to its concrete subclass (multi-table
        inheritance shares the primary key)."""
        try:
            return PedidoCompra.objects.get(id=self.id)
        except PedidoCompra.DoesNotExist:
            return OrcamentoCompra.objects.get(id=self.id)

    def __unicode__(self):
        s = u'Compra nº %s' % (self.id)
        return s

    def __str__(self):
        s = u'Compra nº %s' % (self.id)
        return s
class OrcamentoCompra(Compra):
    """Purchase quotation: a :class:`Compra` with an expiry date and a
    quotation-specific status."""
    data_vencimento = models.DateField(null=True, blank=True)
    status = models.CharField(
        max_length=1, choices=STATUS_ORCAMENTO_ESCOLHAS, default='0')
    class Meta:
        verbose_name = "Orçamento de Compra"
    @property
    def format_data_vencimento(self):
        """Expiry date rendered as dd/mm/YYYY."""
        return '%s' % date(self.data_vencimento, "d/m/Y")
    @property
    def tipo_compra(self):
        """Human-readable document kind (mirrors PedidoCompra.tipo_compra)."""
        return 'Orçamento'
    def edit_url(self):
        """URL of the edit view for this quotation."""
        return reverse_lazy('compras:editarorcamentocompraview', kwargs={'pk': self.id})
    def __unicode__(self):
        s = u'Orçamento nº %s' % (self.id)
        return s
    def __str__(self):
        s = u'Orçamento nº %s' % (self.id)
        return s
class PedidoCompra(Compra):
    """Purchase order: a :class:`Compra` with a delivery date, an order
    status, and an optional link to the quotation that originated it."""
    orcamento = models.ForeignKey(
        'compras.OrcamentoCompra', related_name="orcamento_pedido", on_delete=models.SET_NULL, null=True, blank=True)
    data_entrega = models.DateField(null=True, blank=True)
    status = models.CharField(
        max_length=1, choices=STATUS_PEDIDO_COMPRA_ESCOLHAS, default='0')
    class Meta:
        verbose_name = "Pedido de Compra"
        # Custom permission used to gate invoicing of purchase orders.
        permissions = (
            ("faturar_pedidocompra", "Pode faturar Pedidos de Compra"),
        )
    @property
    def format_data_entrega(self):
        """Delivery date rendered as dd/mm/YYYY."""
        return '%s' % date(self.data_entrega, "d/m/Y")
    @property
    def tipo_compra(self):
        """Human-readable document kind (mirrors OrcamentoCompra.tipo_compra)."""
        return 'Pedido'
    def edit_url(self):
        """URL of the edit view for this order."""
        return reverse_lazy('compras:editarpedidocompraview', kwargs={'pk': self.id})
    def __unicode__(self):
        s = u'Pedido de compra nº %s (%s)' % (
            self.id, self.get_status_display())
        return s
    def __str__(self):
        s = u'Pedido de compra nº %s (%s)' % (
            self.id, self.get_status_display())
        return s
|
python
|
class Stack(object):
    """A simple LIFO stack backed by a Python list (top = end of list)."""

    def __init__(self):
        self.items = []

    def is_empty(self):
        """Return True when the stack holds no items, False otherwise."""
        return self.items == []

    def push(self, item):
        """Place ``item`` on top of the stack."""
        self.items.append(item)

    def pop(self):
        """Remove and return the top item."""
        return self.items.pop()

    def peek(self):
        """Return the top item without removing it."""
        return self.items[-1]

    def size(self):
        """Return the number of items currently on the stack."""
        return len(self.items)
|
python
|
# Implementacion generica de Arboles de Decisiones.
from math import log, inf
class Node:
    """A single node of the decision tree (leaf or internal split)."""
    def __init__(self, parent, X, Y, atr_types, default):
        self.parent = parent
        # Training examples that fall into this node.
        self.X = X
        # Labels of those examples (each a one-element list).
        self.Y = Y
        # Attribute types of the examples ("Catg"/"Cont", None if consumed).
        self.atr_types = atr_types
        # Mode of the labels — fallback prediction for unseen values.
        self.default = default
        self.childs = []
        # The i-th condition routes a pattern to the i-th child.
        self.cond = []
        self.leaf = True
        # Label assigned to patterns reaching this node, when it is a leaf.
        self.value = None
class DecisionTree:
    """Generic decision-tree classifier over mixed categorical ("Catg") and
    continuous ("Cont") attributes, trained by information gain using either
    Gini impurity or Shannon entropy."""

    def __init__(self, X, Y, atr_types, atr_name, atr_avail):
        # Training examples.
        self.X = X
        # Labels of the examples (each a one-element list, e.g. [[0], [1]]).
        self.Y = Y
        # Attribute types: "Catg" (categorical) or "Cont" (continuous).
        self.atr_types = atr_types
        # Human-readable attribute names (used by print_tree).
        self.atr_name = atr_name
        # Availability flags; attributes marked 0 can no longer be split on.
        self.atr_avail = atr_avail

    def gini(self, *P):
        """Gini impurity of a probability distribution."""
        return 1 - sum(p**2 for p in P)

    def entropy(self, *P):
        """Shannon entropy (base 2) of a probability distribution."""
        r = 0
        for p in P:
            if p == 1:
                return 0
            elif p > 0:
                r -= p * log(p, 2)
        return r

    def mayoria(self, Y):
        """Return the mode of a list of one-element label lists, e.g.
        [[0], [1], [0]] -> 0."""
        counts = {}
        for y in Y:
            if y[0] in counts:
                counts[y[0]] += 1
            else:
                counts[y[0]] = 1
        best = None
        max_c = 0
        for label, c in counts.items():
            if c > max_c:
                max_c = c
                best = label
        # BUG FIX: the original returned `d` — the *last* key iterated —
        # instead of `best`, the most frequent label.
        return best

    def get_values(self, X, a):
        """Distinct values attribute ``a`` takes in ``X`` (first-seen order)."""
        values = []
        for x in X:
            if not x[a] in values:
                values.append(x[a])
        return values

    def gain_catg(self, a, values, X, Y):
        """Information gain of splitting on categorical attribute ``a``."""
        # Probability of each label in the current node.
        N = len(Y)
        dic = {}
        for y in Y:
            if y[0] in dic:
                dic[y[0]] += 1 / N
            else:
                dic[y[0]] = 1 / N
        # Impurity of the current node.
        r = self.crit(*[dic[d] for d in dic])
        # Subtract the weighted impurity of each child after the split.
        for v in values:
            # Label counts restricted to examples with attribute a == v.
            dic = {}
            N_i = 0
            for i, y in enumerate(Y):
                if y[0] in dic and X[i][a] == v:
                    dic[y[0]] += 1
                    N_i += 1
                elif X[i][a] == v:
                    dic[y[0]] = 1
                    N_i += 1
            r -= N_i * self.crit(*[dic[k] / N_i for k in dic]) / N
        return r

    def gain_cont(self, a, values, X, Y):
        """Information gain of continuous attribute ``a``.

        Returns (gain, threshold) for the best binary split found.
        """
        # Probability of each label in the current node.
        N = len(Y)
        dic = {}
        for y in Y:
            if y[0] in dic:
                dic[y[0]] += 1 / N
            else:
                dic[y[0]] = 1 / N
        # Impurity of the current node.
        r = self.crit(*[dic[d] for d in dic])
        # Candidate thresholds: midpoints between consecutive sorted values.
        values.sort()
        divs = [(values[i] + values[i + 1]) / 2 for i in range(len(values) - 1)]
        # Keep the threshold with the minimal weighted child impurity.
        min_e = inf
        best_d = -1
        for d in divs:
            # Label counts among examples with attribute >= d.
            dic = {}
            N_i = 0
            for i, y in enumerate(Y):
                if y[0] in dic and X[i][a] >= d:
                    dic[y[0]] += 1
                    N_i += 1
                elif X[i][a] >= d:
                    dic[y[0]] = 1
                    N_i += 1
            e = N_i * self.crit(*[dic[k] / N_i for k in dic]) / N
            # Label counts among examples with attribute < d.
            dic = {}
            N_i = 0
            for i, y in enumerate(Y):
                if y[0] in dic and X[i][a] < d:
                    dic[y[0]] += 1
                    N_i += 1
                elif X[i][a] < d:
                    dic[y[0]] = 1
                    N_i += 1
            e += N_i * self.crit(*[dic[k] / N_i for k in dic]) / N
            if e < min_e:
                min_e = e
                best_d = d
        # Gain = current impurity minus the best split's impurity.
        return r - min_e, best_d

    def train(self, splits=-1, criterio="Gini"):
        """Build the tree with at most ``splits`` internal splits (-1 means
        unlimited) using criterion "Gini" or "Entropy"."""
        if criterio == "Entropy":
            self.crit = self.entropy
        elif criterio == "Gini":
            self.crit = self.gini
        root = Node(None, self.X, self.Y, self.atr_types, self.mayoria(self.Y))
        queue = [root]
        self.tree = root
        # NOTE: this aliases (and later mutates) self.atr_avail, so training
        # consumes categorical attributes globally — preserved as-is.
        atr_avail = self.atr_avail
        # Breadth-first construction.
        while len(queue) > 0:
            node = queue.pop(0)
            # No examples left: inherit the parent's default label.
            if len(node.X) == 0:
                node.value = node.parent.default
            # All examples share one label: pure leaf.
            elif all(node.Y[0] == y for y in node.Y):
                node.value = node.Y[0][0]
            # No usable attributes, or split budget exhausted: take the mode.
            elif all(atr == 0 for atr in atr_avail) or splits == 0:
                node.value = self.mayoria(node.Y)
            # Otherwise perform a split.
            else:
                node.leaf = False
                splits -= 1
                # Choose the attribute with the highest information gain.
                best = -1
                best_g = -1
                div = -1
                for a in range(len(node.X[0])):
                    if atr_avail[a] != 0:
                        values = self.get_values(node.X, a)
                        if node.atr_types[a] == "Catg":
                            g = self.gain_catg(a, values, node.X, node.Y)
                            if g > best_g:
                                best_g = g
                                best = a
                        else:
                            g, div = self.gain_cont(a, values, node.X, node.Y)
                            if g > best_g:
                                best_g = g
                                best = a
                                best_d = div
                if node.atr_types[best] == "Catg":
                    # Categorical split: one child per value; the attribute
                    # is consumed for the rest of the tree.
                    atr_avail[best] = 0
                    for v in self.get_values(node.X, best):
                        X_i, Y_i = [], []
                        for i in range(len(node.X)):
                            if node.X[i][best] == v:
                                x = node.X[i].copy()
                                x[best] = None
                                X_i.append(x)
                                Y_i.append(node.Y[i])
                        atr_types_i = node.atr_types.copy()
                        atr_types_i[best] = None
                        child = Node(node, X_i, Y_i, atr_types_i, self.mayoria(Y_i))
                        node.childs.append(child)
                        node.cond.append((best, v))
                        queue.append(child)
                else:
                    # Continuous split: two children around threshold best_d.
                    X_M, X_m, Y_M, Y_m = [], [], [], []
                    for i in range(len(node.X)):
                        x = node.X[i].copy()
                        if node.X[i][best] < best_d:
                            X_m.append(x)
                            Y_m.append(node.Y[i])
                        else:
                            X_M.append(x)
                            Y_M.append(node.Y[i])
                    atr_types_i = node.atr_types.copy()
                    child_m = Node(node, X_m, Y_m, atr_types_i, self.mayoria(Y_m))
                    child_M = Node(node, X_M, Y_M, atr_types_i, self.mayoria(Y_M))
                    node.childs.append(child_m)
                    node.childs.append(child_M)
                    node.cond.append((best, "<", best_d))
                    node.cond.append((best, ">=", best_d))
                    queue.append(child_m)
                    queue.append(child_M)

    def predict(self, x):
        """Predict the label of pattern ``x`` by walking the trained tree."""
        # Start from the root.
        node_i = self.tree
        x_i = x.copy()
        while not node_i.leaf:
            # Follow the child whose condition the pattern satisfies.
            cond = False
            for i, c in enumerate(node_i.cond):
                if len(c) == 2:
                    if x_i[c[0]] == c[1]:
                        node_i = node_i.childs[i]
                        cond = True
                        break
                elif (c[1] == "<" and x_i[c[0]] < c[2]) or \
                        (c[1] == ">=" and x_i[c[0]] >= c[2]):
                    node_i = node_i.childs[i]
                    cond = True
                    break
            # Unseen categorical value: fall back to this node's default.
            if not cond:
                return node_i.default
        return node_i.value

    def print_tree(self, node_i=None, level=0, atr=None):
        """Return a text rendering of the (sub)tree rooted at ``node_i``."""
        if node_i is None:
            node_i = self.tree
        if atr is None:
            atr_i = self.atr_name.copy()
        else:
            atr_i = atr.copy()
        if node_i.leaf:
            text = " -> " + str(node_i.value)
        else:
            best = node_i.cond[0][0]
            text = "\n" + level * "| " + "_ " + atr_i[best]
            for i, c in enumerate(node_i.cond):
                text += "\n" + (level + 1) * "| " + " * " + str(c[1])
                if len(c) == 3:
                    text += str(c[2])
                text += self.print_tree(node_i.childs[i], level + 1, atr_i)
            text += "\n" + (level) * "| " + "|_"
        return text
if __name__ == "__main__":
    # Demo: classify orders by UNIDADES (units, continuous) and DESTINO
    # (destination, categorical) into labels 0/1/2.
    X = [
        [5,"Esp"], [9,"Esp"], [0,"Eur"], [3,"Esp"], [8,"Eur"], [7,"Esp"],
        [11,"Esp"], [45,"Esp"], [24,"Eur"], [30,"Eur"], [25,"Eur"],
        [58,"Esp"], [60,"Esp"], [65,"Eur"], [78,"Esp"], [52,"Eur"],
        [40,"Esp"],
    ]
    Y = [[0],[0],[0],[0],[0],[0],[1],[1],[0],[0],[0],[2],[2],[1],[2],[1],[1]]
    IA = DecisionTree(X, Y,
        ["Cont", "Catg"],
        ["UNIDADES", "DESTINO"],
        [1,1]
    )
    # Train with at most 5 splits using Gini impurity, then dump the tree.
    IA.train(5, "Gini")
    print(IA.print_tree())
|
python
|
from ixnetwork_restpy.base import Base
from ixnetwork_restpy.files import Files
class ISCSIDataIn(Base):
    """Generated SDM wrapper for the iSCSI Data-In PDU template.

    Each ``Header*`` property returns a Multivalue bound to the matching
    field of the iSCSI Data-In header; ``_SDM_ATT_MAP`` maps the Python
    attribute names to their SDM field paths. This class follows the
    ixnetwork_restpy generated-code pattern and should not be hand-edited
    beyond documentation.
    """
    __slots__ = ()
    _SDM_NAME = 'iSCSI_Data_In'
    # Attribute-name -> SDM field path for every header field below.
    _SDM_ATT_MAP = {
        'HeaderOpcode': 'iSCSI_Data_In.header.Opcode-1',
        'HeaderFlags': 'iSCSI_Data_In.header.Flags-2',
        'HeaderTotalAHSLength': 'iSCSI_Data_In.header.TotalAHSLength-3',
        'HeaderUnknown': 'iSCSI_Data_In.header.Unknown-4',
        'HeaderDataSegmentLength': 'iSCSI_Data_In.header.DataSegmentLength-5',
        'HeaderLUN': 'iSCSI_Data_In.header.LUN-6',
        'HeaderInitiatorTaskTag': 'iSCSI_Data_In.header.InitiatorTaskTag-7',
        'HeaderTargetTransferTag': 'iSCSI_Data_In.header.TargetTransferTag-8',
        'HeaderStatSN': 'iSCSI_Data_In.header.StatSN-9',
        'HeaderExpCmdSN': 'iSCSI_Data_In.header.ExpCmdSN-10',
        'HeaderMaxCmdSN': 'iSCSI_Data_In.header.MaxCmdSN-11',
        'HeaderDataSN': 'iSCSI_Data_In.header.DataSN-12',
        'HeaderBufferoffset': 'iSCSI_Data_In.header.Bufferoffset-13',
        'HeaderResidualCount': 'iSCSI_Data_In.header.ResidualCount-14',
        'HeaderHeaderDigest': 'iSCSI_Data_In.header.HeaderDigest-15',
    }
    def __init__(self, parent, list_op=False):
        """Initialize the SDM wrapper under ``parent``."""
        super(ISCSIDataIn, self).__init__(parent, list_op)
    @property
    def HeaderOpcode(self):
        """
        Display Name: Opcode
        Default Value: 0x25
        Value Format: hex
        """
        from ixnetwork_restpy.multivalue import Multivalue
        return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['HeaderOpcode']))
    @property
    def HeaderFlags(self):
        """
        Display Name: Flags
        Default Value: 0x80
        Value Format: hex
        """
        from ixnetwork_restpy.multivalue import Multivalue
        return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['HeaderFlags']))
    @property
    def HeaderTotalAHSLength(self):
        """
        Display Name: TotalAHSLength
        Default Value: 0x00
        Value Format: hex
        """
        from ixnetwork_restpy.multivalue import Multivalue
        return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['HeaderTotalAHSLength']))
    @property
    def HeaderUnknown(self):
        """
        Display Name: Unknown
        Default Value: 0x0000
        Value Format: hex
        """
        from ixnetwork_restpy.multivalue import Multivalue
        return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['HeaderUnknown']))
    @property
    def HeaderDataSegmentLength(self):
        """
        Display Name: DataSegmentLength
        Default Value: 0x000016
        Value Format: hex
        """
        from ixnetwork_restpy.multivalue import Multivalue
        return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['HeaderDataSegmentLength']))
    @property
    def HeaderLUN(self):
        """
        Display Name: LUN
        Default Value: 0x0000000000000000
        Value Format: hex
        """
        from ixnetwork_restpy.multivalue import Multivalue
        return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['HeaderLUN']))
    @property
    def HeaderInitiatorTaskTag(self):
        """
        Display Name: InitiatorTaskTag
        Default Value: 0x00000010
        Value Format: hex
        """
        from ixnetwork_restpy.multivalue import Multivalue
        return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['HeaderInitiatorTaskTag']))
    @property
    def HeaderTargetTransferTag(self):
        """
        Display Name: TargetTransferTag
        Default Value: 0xFFFFFFFF
        Value Format: hex
        """
        from ixnetwork_restpy.multivalue import Multivalue
        return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['HeaderTargetTransferTag']))
    @property
    def HeaderStatSN(self):
        """
        Display Name: StatSN
        Default Value: 0x00000000
        Value Format: hex
        """
        from ixnetwork_restpy.multivalue import Multivalue
        return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['HeaderStatSN']))
    @property
    def HeaderExpCmdSN(self):
        """
        Display Name: ExpCmdSN
        Default Value: 0x00000010
        Value Format: hex
        """
        from ixnetwork_restpy.multivalue import Multivalue
        return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['HeaderExpCmdSN']))
    @property
    def HeaderMaxCmdSN(self):
        """
        Display Name: MaxCmdSN
        Default Value: 0x00000051
        Value Format: hex
        """
        from ixnetwork_restpy.multivalue import Multivalue
        return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['HeaderMaxCmdSN']))
    @property
    def HeaderDataSN(self):
        """
        Display Name: DataSN
        Default Value: 0x00000001
        Value Format: hex
        """
        from ixnetwork_restpy.multivalue import Multivalue
        return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['HeaderDataSN']))
    @property
    def HeaderBufferoffset(self):
        """
        Display Name: Bufferoffset
        Default Value: 0x00000000
        Value Format: hex
        """
        from ixnetwork_restpy.multivalue import Multivalue
        return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['HeaderBufferoffset']))
    @property
    def HeaderResidualCount(self):
        """
        Display Name: ResidualCount
        Default Value: 0x00000000
        Value Format: hex
        """
        from ixnetwork_restpy.multivalue import Multivalue
        return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['HeaderResidualCount']))
    @property
    def HeaderHeaderDigest(self):
        """
        Display Name: HeaderDigest
        Default Value: 0x5F7F4CAE
        Value Format: hex
        """
        from ixnetwork_restpy.multivalue import Multivalue
        return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['HeaderHeaderDigest']))
    def add(self):
        """Create this template on the server from the current attribute values."""
        return self._create(self._map_locals(self._SDM_ATT_MAP, locals()))
|
python
|
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# This is more akin to a .pyl/JSON file, so it's expected to be long.
# pylint: disable=too-many-lines
from gpu_tests import common_browser_args as cba
from gpu_tests import skia_gold_matching_algorithms as algo
# Process-type string for GPU crashes — presumably the key used in
# PixelTestPage.expected_per_process_crashes maps; verify at call sites.
CRASH_TYPE_GPU = 'gpu'
# Meant to be used when we know a test is going to be noisy, and we want any
# images it generates to be auto-triaged until we have enough data to calculate
# more suitable/less permissive parameters.
VERY_PERMISSIVE_SOBEL_ALGO = algo.SobelMatchingAlgorithm(
    max_different_pixels=100000000,
    pixel_delta_threshold=255,
    edge_threshold=0,
    ignored_border_thickness=1)
class PixelTestPage(object):
"""A wrapper class mimicking the functionality of the PixelTestsStorySet
from the old-style GPU tests.
"""
def __init__( # pylint: disable=too-many-arguments
self,
url,
name,
test_rect,
tolerance=2,
browser_args=None,
expected_colors=None,
gpu_process_disabled=False,
optional_action=None,
restart_browser_after_test=False,
other_args=None,
grace_period_end=None,
expected_per_process_crashes=None,
matching_algorithm=None):
super(PixelTestPage, self).__init__()
self.url = url
self.name = name
self.test_rect = test_rect
# The tolerance when comparing against the reference image.
self.tolerance = tolerance
self.browser_args = browser_args
# The expected colors can be specified as a list of dictionaries,
# in which case these specific pixels will be sampled instead of
# comparing the entire image snapshot. The format is only defined
# by contract with _CompareScreenshotSamples in
# cloud_storage_integration_test_base.py.
self.expected_colors = expected_colors
# Only a couple of tests run with the GPU process completely
# disabled. To prevent regressions, only allow the GPU information
# to be incomplete in these cases.
self.gpu_process_disabled = gpu_process_disabled
# Some of the tests require custom actions to be run. These are
# specified as a string which is the name of a method to call in
# PixelIntegrationTest. For example if the action here is
# "CrashGpuProcess" then it would be defined in a
# "_CrashGpuProcess" method in PixelIntegrationTest.
self.optional_action = optional_action
# Whether the browser should be forcibly restarted after the test
# runs. The browser is always restarted after running tests with
# optional_actions.
self.restart_browser_after_test = restart_browser_after_test
# These are used to pass additional arguments to the test harness.
# VideoPathTraceTest and OverlayModeTest support the following boolean
# arguments: expect_yuy2, zero_copy, video_is_rotated, and no_overlay.
self.other_args = other_args
# This allows a newly added test to be exempted from failures for a
# (hopefully) short period after being added. This is so that any slightly
# different but valid images that get produced by the waterfall bots can
# be triaged without turning the bots red.
# This should be a datetime.date object.
self.grace_period_end = grace_period_end
# This lets the test runner know that one or more crashes are expected as
# part of the test. Should be a map of process type (str) to expected number
# of crashes (int).
self.expected_per_process_crashes = expected_per_process_crashes or {}
# This should be a child of
# skia_gold_matching_algorithms.SkiaGoldMatchingAlgorithm. This specifies
# which matching algorithm Skia Gold should use for the test.
self.matching_algorithm = (matching_algorithm
or algo.ExactMatchingAlgorithm())
def CopyWithNewBrowserArgsAndSuffix(self, browser_args, suffix):
return PixelTestPage(self.url, self.name + suffix, self.test_rect,
self.tolerance, browser_args, self.expected_colors)
  def CopyWithNewBrowserArgsAndPrefix(self, browser_args, prefix):
    """Returns a copy of this page that uses |browser_args|, with |prefix|
    inserted into the test name directly after its first '_'-delimited
    segment (e.g. name 'Pixel_Foo' with prefix 'Bar_' -> 'Pixel_Bar_Foo')."""
    # Assuming the test name is 'Pixel'.
    # NOTE(review): split[1] raises IndexError if self.name contains no
    # underscore -- callers appear to always pass 'Prefix_Rest' style
    # names; confirm.
    split = self.name.split('_', 1)
    return PixelTestPage(self.url, split[0] + '_' + prefix + split[1],
                         self.test_rect, self.tolerance, browser_args,
                         self.expected_colors)
def CopyPagesWithNewBrowserArgsAndSuffix(pages, browser_args, suffix):
  """Returns copies of every page in |pages|, each using |browser_args| and
  with |suffix| appended to its test name."""
  return [
      page.CopyWithNewBrowserArgsAndSuffix(browser_args, suffix)
      for page in pages
  ]
def CopyPagesWithNewBrowserArgsAndPrefix(pages, browser_args, prefix):
  """Returns copies of every page in |pages|, each using |browser_args| and
  with |prefix| spliced into its test name after the first '_' segment."""
  return [
      page.CopyWithNewBrowserArgsAndPrefix(browser_args, prefix)
      for page in pages
  ]
# TODO(kbr): consider refactoring this into pixel_integration_test.py.
# Per-device display scale-factor overrides, keyed off the device_type
# reported by the bot. Consumers use these when mapping expected-color
# coordinates onto captured screenshots on devices whose reported scale
# factor is not exact.
SCALE_FACTOR_OVERRIDES = {
    "comment":
    "scale factor overrides",
    "scale_factor_overrides": [
        {
            "device_type": "Nexus 5",
            "scale_factor": 1.105
        },
        {
            "device_type": "Nexus 5X",
            "scale_factor": 1.105
        },
        {
            "device_type": "Nexus 6",
            "scale_factor": 1.47436
        },
        {
            "device_type": "Nexus 6P",
            "scale_factor": 1.472
        },
        {
            "device_type": "Nexus 9",
            "scale_factor": 1.566
        },
        {
            "comment": "NVIDIA Shield",
            "device_type": "sb_na_wf",
            "scale_factor": 1.226
        },
        {
            "device_type": "Pixel 2",
            "scale_factor": 1.1067
        },
    ]
}
class PixelTestPages(object):
  """Namespace of factory methods that build the lists of PixelTestPage
  objects run by the pixel integration tests, grouped by the browser/GPU
  configuration they exercise. |base_name| in each method is the prefix
  (e.g. 'Pixel') used to build every test's name."""
  @staticmethod
  def DefaultPages(base_name):
    """Pages run under the default browser configuration."""
    sw_compositing_args = [cba.DISABLE_GPU_COMPOSITING]
    # The optimizer script spat out pretty similar values for most MP4 tests, so
    # combine into a single set of parameters.
    general_mp4_algo = algo.SobelMatchingAlgorithm(max_different_pixels=56300,
                                                   pixel_delta_threshold=35,
                                                   edge_threshold=80)
    return [
        PixelTestPage('pixel_background_image.html',
                      base_name + '_BackgroundImage',
                      test_rect=[20, 20, 370, 370]),
        PixelTestPage('pixel_reflected_div.html',
                      base_name + '_ReflectedDiv',
                      test_rect=[0, 0, 100, 300]),
        PixelTestPage('pixel_canvas2d.html',
                      base_name + '_Canvas2DRedBox',
                      test_rect=[0, 0, 300, 300]),
        PixelTestPage('pixel_canvas2d_untagged.html',
                      base_name + '_Canvas2DUntagged',
                      test_rect=[0, 0, 257, 257]),
        PixelTestPage('pixel_css3d.html',
                      base_name + '_CSS3DBlueBox',
                      test_rect=[0, 0, 300, 300],
                      matching_algorithm=algo.SobelMatchingAlgorithm(
                          max_different_pixels=0,
                          pixel_delta_threshold=0,
                          edge_threshold=100)),
        PixelTestPage('pixel_webgl_aa_alpha.html',
                      base_name + '_WebGLGreenTriangle_AA_Alpha',
                      test_rect=[0, 0, 300, 300]),
        PixelTestPage('pixel_webgl_noaa_alpha.html',
                      base_name + '_WebGLGreenTriangle_NoAA_Alpha',
                      test_rect=[0, 0, 300, 300]),
        PixelTestPage('pixel_webgl_aa_noalpha.html',
                      base_name + '_WebGLGreenTriangle_AA_NoAlpha',
                      test_rect=[0, 0, 300, 300]),
        PixelTestPage('pixel_webgl_noaa_noalpha.html',
                      base_name + '_WebGLGreenTriangle_NoAA_NoAlpha',
                      test_rect=[0, 0, 300, 300]),
        PixelTestPage('pixel_webgl_noalpha_implicit_clear.html',
                      base_name +
                      '_WebGLTransparentGreenTriangle_NoAlpha_ImplicitClear',
                      test_rect=[0, 0, 300, 300]),
        PixelTestPage('pixel_webgl_sad_canvas.html',
                      base_name + '_WebGLSadCanvas',
                      test_rect=[0, 0, 300, 300],
                      optional_action='CrashGpuProcess'),
        PixelTestPage('pixel_scissor.html',
                      base_name + '_ScissorTestWithPreserveDrawingBuffer',
                      test_rect=[0, 0, 300, 300]),
        PixelTestPage('pixel_canvas2d_webgl.html',
                      base_name + '_2DCanvasWebGL',
                      test_rect=[0, 0, 300, 300]),
        PixelTestPage('pixel_background.html',
                      base_name + '_SolidColorBackground',
                      test_rect=[500, 500, 100, 100]),
        PixelTestPage(
            'pixel_video_mp4.html',
            base_name + '_Video_MP4',
            test_rect=[0, 0, 240, 135],
            # Most images are actually very similar, but Pixel 2
            # tends to produce images with all colors shifted by a
            # small amount.
            matching_algorithm=general_mp4_algo),
        # Surprisingly stable, does not appear to require inexact matching.
        PixelTestPage('pixel_video_mp4.html',
                      base_name + '_Video_MP4_DXVA',
                      browser_args=[cba.DISABLE_FEATURES_D3D11_VIDEO_DECODER],
                      test_rect=[0, 0, 240, 135]),
        PixelTestPage('pixel_video_mp4_four_colors_aspect_4x3.html',
                      base_name + '_Video_MP4_FourColors_Aspect_4x3',
                      test_rect=[0, 0, 240, 135],
                      matching_algorithm=algo.SobelMatchingAlgorithm(
                          max_different_pixels=41700,
                          pixel_delta_threshold=15,
                          edge_threshold=40)),
        PixelTestPage('pixel_video_mp4_four_colors_rot_90.html',
                      base_name + '_Video_MP4_FourColors_Rot_90',
                      test_rect=[0, 0, 270, 240],
                      matching_algorithm=general_mp4_algo),
        PixelTestPage('pixel_video_mp4_four_colors_rot_180.html',
                      base_name + '_Video_MP4_FourColors_Rot_180',
                      test_rect=[0, 0, 240, 135],
                      matching_algorithm=general_mp4_algo),
        PixelTestPage('pixel_video_mp4_four_colors_rot_270.html',
                      base_name + '_Video_MP4_FourColors_Rot_270',
                      test_rect=[0, 0, 270, 240],
                      matching_algorithm=general_mp4_algo),
        PixelTestPage('pixel_video_mp4_rounded_corner.html',
                      base_name + '_Video_MP4_Rounded_Corner',
                      test_rect=[0, 0, 240, 135],
                      matching_algorithm=algo.SobelMatchingAlgorithm(
                          max_different_pixels=30500,
                          pixel_delta_threshold=15,
                          edge_threshold=70)),
        PixelTestPage('pixel_video_vp9.html',
                      base_name + '_Video_VP9',
                      test_rect=[0, 0, 240, 135],
                      matching_algorithm=algo.SobelMatchingAlgorithm(
                          max_different_pixels=114000,
                          pixel_delta_threshold=30,
                          edge_threshold=20)),
        PixelTestPage('pixel_video_vp9.html',
                      base_name + '_Video_VP9_DXVA',
                      browser_args=[cba.DISABLE_FEATURES_D3D11_VIDEO_DECODER],
                      test_rect=[0, 0, 240, 135],
                      matching_algorithm=algo.SobelMatchingAlgorithm(
                          max_different_pixels=31100,
                          pixel_delta_threshold=30,
                          edge_threshold=250)),
        # The MP4 contains H.264 which is primarily hardware decoded on bots.
        PixelTestPage(
            'pixel_video_context_loss.html?src='
            '/media/test/data/four-colors.mp4',
            base_name + '_Video_Context_Loss_MP4',
            test_rect=[0, 0, 240, 135],
            # Optimizer script spat out a value of 255 for the Sobel edge
            # threshold, so use fuzzy for now since it's slightly more
            # efficient.
            matching_algorithm=algo.FuzzyMatchingAlgorithm(
                max_different_pixels=31700, pixel_delta_threshold=20),
            expected_per_process_crashes={
                CRASH_TYPE_GPU: 1,
            }),
        # The VP9 test clip is primarily software decoded on bots.
        PixelTestPage(('pixel_video_context_loss.html'
                       '?src=/media/test/data/four-colors-vp9.webm'),
                      base_name + '_Video_Context_Loss_VP9',
                      test_rect=[0, 0, 240, 135],
                      matching_algorithm=algo.SobelMatchingAlgorithm(
                          max_different_pixels=54400,
                          pixel_delta_threshold=30,
                          edge_threshold=250),
                      expected_per_process_crashes={
                          CRASH_TYPE_GPU: 1,
                      }),
        PixelTestPage('pixel_video_backdrop_filter.html',
                      base_name + '_Video_BackdropFilter',
                      test_rect=[0, 0, 240, 135],
                      matching_algorithm=algo.SobelMatchingAlgorithm(
                          max_different_pixels=1000,
                          pixel_delta_threshold=20,
                          edge_threshold=40,
                          ignored_border_thickness=1)),
        PixelTestPage('pixel_webgl_premultiplied_alpha_false.html',
                      base_name + '_WebGL_PremultipliedAlpha_False',
                      test_rect=[0, 0, 150, 150]),
        PixelTestPage('pixel_webgl2_blitframebuffer_result_displayed.html',
                      base_name + '_WebGL2_BlitFramebuffer_Result_Displayed',
                      test_rect=[0, 0, 200, 200]),
        PixelTestPage('pixel_webgl2_clearbufferfv_result_displayed.html',
                      base_name + '_WebGL2_ClearBufferfv_Result_Displayed',
                      test_rect=[0, 0, 200, 200]),
        PixelTestPage('pixel_repeated_webgl_to_2d.html',
                      base_name + '_RepeatedWebGLTo2D',
                      test_rect=[0, 0, 256, 256]),
        PixelTestPage('pixel_repeated_webgl_to_2d.html',
                      base_name + '_RepeatedWebGLTo2D_SoftwareCompositing',
                      test_rect=[0, 0, 256, 256],
                      browser_args=sw_compositing_args),
        PixelTestPage('pixel_canvas2d_tab_switch.html',
                      base_name + '_Canvas2DTabSwitch',
                      test_rect=[0, 0, 100, 100],
                      optional_action='SwitchTabs'),
        PixelTestPage('pixel_canvas2d_tab_switch.html',
                      base_name + '_Canvas2DTabSwitch_SoftwareCompositing',
                      test_rect=[0, 0, 100, 100],
                      browser_args=sw_compositing_args,
                      optional_action='SwitchTabs'),
        PixelTestPage('pixel_webgl_copy_image.html',
                      base_name + '_WebGLCopyImage',
                      test_rect=[0, 0, 200, 100]),
        PixelTestPage('pixel_webgl_read_pixels_tab_switch.html',
                      base_name + '_WebGLReadPixelsTabSwitch',
                      test_rect=[0, 0, 100, 100],
                      optional_action='SwitchTabs'),
        PixelTestPage('pixel_webgl_read_pixels_tab_switch.html',
                      base_name +
                      '_WebGLReadPixelsTabSwitch_SoftwareCompositing',
                      test_rect=[0, 0, 100, 100],
                      browser_args=sw_compositing_args,
                      optional_action='SwitchTabs'),
        PixelTestPage('pixel_offscreen_canvas_ibrc_webgl_main.html',
                      base_name + '_OffscreenCanvasIBRCWebGLMain',
                      test_rect=[0, 0, 300, 300],
                      optional_action='RunOffscreenCanvasIBRCWebGLTest'),
        PixelTestPage('pixel_offscreen_canvas_ibrc_webgl_worker.html',
                      base_name + '_OffscreenCanvasIBRCWebGLWorker',
                      test_rect=[0, 0, 300, 300],
                      optional_action='RunOffscreenCanvasIBRCWebGLTest'),
    ]
  # Pages that should be run with GPU rasterization enabled.
  @staticmethod
  def GpuRasterizationPages(base_name):
    """Pages run with GPU rasterization forced on (no software fallback)."""
    browser_args = [
        cba.FORCE_GPU_RASTERIZATION,
        cba.DISABLE_SOFTWARE_COMPOSITING_FALLBACK,
    ]
    return [
        PixelTestPage('pixel_background.html',
                      base_name + '_GpuRasterization_BlueBox',
                      test_rect=[0, 0, 220, 220],
                      browser_args=browser_args),
        PixelTestPage('concave_paths.html',
                      base_name + '_GpuRasterization_ConcavePaths',
                      test_rect=[0, 0, 100, 100],
                      browser_args=browser_args),
        PixelTestPage('pixel_precision_rounded_corner.html',
                      base_name + '_PrecisionRoundedCorner',
                      test_rect=[0, 0, 400, 400],
                      browser_args=browser_args,
                      matching_algorithm=algo.SobelMatchingAlgorithm(
                          max_different_pixels=10,
                          pixel_delta_threshold=30,
                          edge_threshold=100)),
    ]
  # Pages that should be run with off-thread paint worklet flags.
  @staticmethod
  def PaintWorkletPages(base_name):
    """Pages run with off-main-thread CSS Paint plus GPU/OOP rasterization."""
    browser_args = [
        '--enable-blink-features=OffMainThreadCSSPaint',
        '--enable-gpu-rasterization', '--enable-oop-rasterization'
    ]
    return [
        PixelTestPage(
            'pixel_paintWorklet_transform.html',
            base_name + '_PaintWorkletTransform',
            test_rect=[0, 0, 200, 200],
            browser_args=browser_args),
    ]
  # Pages that should be run with experimental canvas features.
  @staticmethod
  def ExperimentalCanvasFeaturesPages(base_name):
    """OffscreenCanvas/canvas pages run with experimental web platform
    features enabled, in accelerated and unaccelerated variants."""
    browser_args = [
        cba.ENABLE_EXPERIMENTAL_WEB_PLATFORM_FEATURES,
    ]
    accelerated_args = [
        cba.DISABLE_SOFTWARE_COMPOSITING_FALLBACK,
    ]
    unaccelerated_args = [
        cba.DISABLE_ACCELERATED_2D_CANVAS,
        cba.DISABLE_GPU_COMPOSITING,
    ]
    return [
        PixelTestPage('pixel_offscreenCanvas_transfer_after_style_resize.html',
                      base_name + '_OffscreenCanvasTransferAfterStyleResize',
                      test_rect=[0, 0, 350, 350],
                      browser_args=browser_args),
        PixelTestPage('pixel_offscreenCanvas_transfer_before_style_resize.html',
                      base_name + '_OffscreenCanvasTransferBeforeStyleResize',
                      test_rect=[0, 0, 350, 350],
                      browser_args=browser_args),
        PixelTestPage('pixel_offscreenCanvas_webgl_paint_after_resize.html',
                      base_name + '_OffscreenCanvasWebGLPaintAfterResize',
                      test_rect=[0, 0, 200, 200],
                      browser_args=browser_args),
        PixelTestPage('pixel_offscreenCanvas_transferToImageBitmap_main.html',
                      base_name + '_OffscreenCanvasTransferToImageBitmap',
                      test_rect=[0, 0, 300, 300],
                      browser_args=browser_args),
        PixelTestPage('pixel_offscreenCanvas_transferToImageBitmap_worker.html',
                      base_name + '_OffscreenCanvasTransferToImageBitmapWorker',
                      test_rect=[0, 0, 300, 300],
                      browser_args=browser_args),
        PixelTestPage('pixel_offscreenCanvas_webgl_commit_main.html',
                      base_name + '_OffscreenCanvasWebGLDefault',
                      test_rect=[0, 0, 360, 200],
                      browser_args=browser_args),
        PixelTestPage('pixel_offscreenCanvas_webgl_commit_worker.html',
                      base_name + '_OffscreenCanvasWebGLDefaultWorker',
                      test_rect=[0, 0, 360, 200],
                      browser_args=browser_args),
        PixelTestPage('pixel_offscreenCanvas_webgl_commit_main.html',
                      base_name + '_OffscreenCanvasWebGLSoftwareCompositing',
                      test_rect=[0, 0, 360, 200],
                      browser_args=browser_args +
                      [cba.DISABLE_GPU_COMPOSITING]),
        PixelTestPage(
            'pixel_offscreenCanvas_webgl_commit_worker.html',
            base_name + '_OffscreenCanvasWebGLSoftwareCompositingWorker',
            test_rect=[0, 0, 360, 200],
            browser_args=browser_args + [cba.DISABLE_GPU_COMPOSITING]),
        PixelTestPage('pixel_offscreenCanvas_2d_commit_main.html',
                      base_name + '_OffscreenCanvasAccelerated2D',
                      test_rect=[0, 0, 360, 200],
                      browser_args=browser_args + accelerated_args),
        PixelTestPage('pixel_offscreenCanvas_2d_commit_worker.html',
                      base_name + '_OffscreenCanvasAccelerated2DWorker',
                      test_rect=[0, 0, 360, 200],
                      browser_args=browser_args + accelerated_args),
        PixelTestPage('pixel_offscreenCanvas_2d_commit_main.html',
                      base_name + '_OffscreenCanvasUnaccelerated2D',
                      test_rect=[0, 0, 360, 200],
                      browser_args=browser_args + unaccelerated_args),
        PixelTestPage('pixel_offscreenCanvas_2d_commit_worker.html',
                      base_name + '_OffscreenCanvasUnaccelerated2DWorker',
                      test_rect=[0, 0, 360, 200],
                      browser_args=browser_args + unaccelerated_args),
        PixelTestPage(
            'pixel_offscreenCanvas_2d_commit_main.html',
            base_name + '_OffscreenCanvasUnaccelerated2DGPUCompositing',
            test_rect=[0, 0, 360, 200],
            browser_args=browser_args + [cba.DISABLE_ACCELERATED_2D_CANVAS]),
        PixelTestPage(
            'pixel_offscreenCanvas_2d_commit_worker.html',
            base_name + '_OffscreenCanvasUnaccelerated2DGPUCompositingWorker',
            test_rect=[0, 0, 360, 200],
            browser_args=browser_args + [cba.DISABLE_ACCELERATED_2D_CANVAS]),
        PixelTestPage('pixel_offscreenCanvas_2d_resize_on_worker.html',
                      base_name + '_OffscreenCanvas2DResizeOnWorker',
                      test_rect=[0, 0, 200, 200],
                      browser_args=browser_args),
        PixelTestPage('pixel_offscreenCanvas_webgl_resize_on_worker.html',
                      base_name + '_OffscreenCanvasWebglResizeOnWorker',
                      test_rect=[0, 0, 200, 200],
                      browser_args=browser_args),
        PixelTestPage('pixel_canvas_display_srgb.html',
                      base_name + '_CanvasDisplaySRGBAccelerated2D',
                      test_rect=[0, 0, 140, 140],
                      browser_args=browser_args + accelerated_args),
        PixelTestPage('pixel_canvas_display_srgb.html',
                      base_name + '_CanvasDisplaySRGBUnaccelerated2D',
                      test_rect=[0, 0, 140, 140],
                      browser_args=browser_args + unaccelerated_args),
        PixelTestPage(
            'pixel_canvas_display_srgb.html',
            base_name + '_CanvasDisplaySRGBUnaccelerated2DGPUCompositing',
            test_rect=[0, 0, 140, 140],
            browser_args=browser_args + [cba.DISABLE_ACCELERATED_2D_CANVAS]),
    ]
  @staticmethod
  def LowLatencyPages(base_name):
    """Low-latency (desynchronized) canvas pages, 2D and WebGL."""
    unaccelerated_args = [
        cba.DISABLE_ACCELERATED_2D_CANVAS,
        cba.DISABLE_GPU_COMPOSITING,
    ]
    return [
        PixelTestPage('pixel_canvas_low_latency_2d.html',
                      base_name + '_CanvasLowLatency2D',
                      test_rect=[0, 0, 100, 100]),
        PixelTestPage('pixel_canvas_low_latency_2d.html',
                      base_name + '_CanvasUnacceleratedLowLatency2D',
                      test_rect=[0, 0, 100, 100],
                      browser_args=unaccelerated_args),
        PixelTestPage('pixel_canvas_low_latency_webgl.html',
                      base_name + '_CanvasLowLatencyWebGL',
                      test_rect=[0, 0, 200, 200]),
        PixelTestPage('pixel_canvas_low_latency_webgl_alpha_false.html',
                      base_name + '_CanvasLowLatencyWebGLAlphaFalse',
                      test_rect=[0, 0, 200, 200]),
        PixelTestPage('pixel_canvas_low_latency_2d_draw_image.html',
                      base_name + '_CanvasLowLatency2DDrawImage',
                      test_rect=[0, 0, 200, 100]),
        PixelTestPage('pixel_canvas_low_latency_webgl_draw_image.html',
                      base_name + '_CanvasLowLatencyWebGLDrawImage',
                      test_rect=[0, 0, 200, 100]),
        PixelTestPage('pixel_canvas_low_latency_2d_image_data.html',
                      base_name + '_CanvasLowLatency2DImageData',
                      test_rect=[0, 0, 200, 100]),
    ]
  # Only add these tests on platforms where SwiftShader is enabled.
  # Currently this is Windows and Linux.
  @staticmethod
  def SwiftShaderPages(base_name):
    """Pages run with the GPU disabled so rendering falls back to
    SwiftShader (software GL)."""
    browser_args = [cba.DISABLE_GPU]
    suffix = "_SwiftShader"
    return [
        PixelTestPage('pixel_canvas2d.html',
                      base_name + '_Canvas2DRedBox' + suffix,
                      test_rect=[0, 0, 300, 300],
                      browser_args=browser_args),
        PixelTestPage('pixel_css3d.html',
                      base_name + '_CSS3DBlueBox' + suffix,
                      test_rect=[0, 0, 300, 300],
                      browser_args=browser_args),
        PixelTestPage('pixel_webgl_aa_alpha.html',
                      base_name + '_WebGLGreenTriangle_AA_Alpha' + suffix,
                      test_rect=[0, 0, 300, 300],
                      browser_args=browser_args),
        PixelTestPage('pixel_repeated_webgl_to_2d.html',
                      base_name + '_RepeatedWebGLTo2D' + suffix,
                      test_rect=[0, 0, 256, 256],
                      browser_args=browser_args),
    ]
  # Test rendering where GPU process is blocked.
  @staticmethod
  def NoGpuProcessPages(base_name):
    """Pages run with both the GPU and software rasterizer disabled, so no
    GPU process is launched at all."""
    browser_args = [cba.DISABLE_GPU, cba.DISABLE_SOFTWARE_RASTERIZER]
    suffix = "_NoGpuProcess"
    return [
        PixelTestPage(
            'pixel_canvas2d.html',
            base_name + '_Canvas2DRedBox' + suffix,
            test_rect=[0, 0, 300, 300],
            browser_args=browser_args,
            gpu_process_disabled=True),
        PixelTestPage(
            'pixel_css3d.html',
            base_name + '_CSS3DBlueBox' + suffix,
            test_rect=[0, 0, 300, 300],
            browser_args=browser_args,
            gpu_process_disabled=True),
    ]
  # Pages that should be run with various macOS specific command line
  # arguments.
  @staticmethod
  def MacSpecificPages(base_name):
    """Pages exercising macOS-specific compositing paths (IOSurface,
    non-Chromium-image WebGL, CA overlays)."""
    iosurface_2d_canvas_args = ['--enable-accelerated-2d-canvas']
    non_chromium_image_args = ['--disable-webgl-image-chromium']
    # This disables the Core Animation compositor, falling back to the
    # old GLRenderer path, but continuing to allocate IOSurfaces for
    # WebGL's back buffer.
    no_overlays_args = ['--disable-mac-overlays']
    # The filter effect tests produce images with lots of gradients and blurs
    # which don't play nicely with Sobel filters, so a fuzzy algorithm instead
    # of Sobel. The images are also relatively large (360k pixels), and large
    # portions of the image are prone to noise, hence the large max different
    # pixels value.
    filter_effect_fuzzy_algo = algo.FuzzyMatchingAlgorithm(
        max_different_pixels=57500, pixel_delta_threshold=10)
    return [
        # On macOS, test the IOSurface 2D Canvas compositing path.
        PixelTestPage('pixel_canvas2d_accelerated.html',
                      base_name + '_IOSurface2DCanvas',
                      test_rect=[0, 0, 400, 400],
                      browser_args=iosurface_2d_canvas_args),
        PixelTestPage('pixel_canvas2d_webgl.html',
                      base_name + '_IOSurface2DCanvasWebGL',
                      test_rect=[0, 0, 300, 300],
                      browser_args=iosurface_2d_canvas_args),
        # On macOS, test WebGL non-Chromium Image compositing path.
        PixelTestPage('pixel_webgl_aa_alpha.html',
                      base_name +
                      '_WebGLGreenTriangle_NonChromiumImage_AA_Alpha',
                      test_rect=[0, 0, 300, 300],
                      browser_args=non_chromium_image_args),
        PixelTestPage('pixel_webgl_noaa_alpha.html',
                      base_name +
                      '_WebGLGreenTriangle_NonChromiumImage_NoAA_Alpha',
                      test_rect=[0, 0, 300, 300],
                      browser_args=non_chromium_image_args),
        PixelTestPage('pixel_webgl_aa_noalpha.html',
                      base_name +
                      '_WebGLGreenTriangle_NonChromiumImage_AA_NoAlpha',
                      test_rect=[0, 0, 300, 300],
                      browser_args=non_chromium_image_args),
        PixelTestPage('pixel_webgl_noaa_noalpha.html',
                      base_name +
                      '_WebGLGreenTriangle_NonChromiumImage_NoAA_NoAlpha',
                      test_rect=[0, 0, 300, 300],
                      browser_args=non_chromium_image_args),
        # On macOS, test CSS filter effects with and without the CA compositor.
        PixelTestPage('filter_effects.html',
                      base_name + '_CSSFilterEffects',
                      test_rect=[0, 0, 300, 300],
                      matching_algorithm=filter_effect_fuzzy_algo),
        PixelTestPage('filter_effects.html',
                      base_name + '_CSSFilterEffects_NoOverlays',
                      test_rect=[0, 0, 300, 300],
                      tolerance=10,
                      browser_args=no_overlays_args,
                      matching_algorithm=filter_effect_fuzzy_algo),
        # Test WebGL's premultipliedAlpha:false without the CA compositor.
        PixelTestPage('pixel_webgl_premultiplied_alpha_false.html',
                      base_name + '_WebGL_PremultipliedAlpha_False_NoOverlays',
                      test_rect=[0, 0, 150, 150],
                      browser_args=no_overlays_args),
    ]
  # Pages that should be run only on dual-GPU MacBook Pros (at the
  # present time, anyway).
  @staticmethod
  def DualGPUMacSpecificPages(base_name):
    """Pages exercising discrete/integrated GPU power switching; only
    meaningful on dual-GPU MacBook Pros."""
    return [
        PixelTestPage('pixel_webgl_high_to_low_power.html',
                      base_name + '_WebGLHighToLowPower',
                      test_rect=[0, 0, 300, 300],
                      optional_action='RunTestWithHighPerformanceTab'),
        PixelTestPage('pixel_webgl_low_to_high_power.html',
                      base_name + '_WebGLLowToHighPower',
                      test_rect=[0, 0, 300, 300],
                      optional_action='RunLowToHighPowerTest'),
        PixelTestPage('pixel_webgl_low_to_high_power_alpha_false.html',
                      base_name + '_WebGLLowToHighPowerAlphaFalse',
                      test_rect=[0, 0, 300, 300],
                      optional_action='RunLowToHighPowerTest'),
        PixelTestPage(
            'pixel_offscreen_canvas_ibrc_webgl_main.html',
            base_name + '_OffscreenCanvasIBRCWebGLHighPerfMain',
            test_rect=[0, 0, 300, 300],
            optional_action='RunOffscreenCanvasIBRCWebGLHighPerfTest'),
        PixelTestPage(
            'pixel_offscreen_canvas_ibrc_webgl_worker.html',
            base_name + '_OffscreenCanvasIBRCWebGLHighPerfWorker',
            test_rect=[0, 0, 300, 300],
            optional_action='RunOffscreenCanvasIBRCWebGLHighPerfTest'),
    ]
  @staticmethod
  def DirectCompositionPages(base_name):
    """Windows DirectComposition video-overlay pages (MP4/VP9, rotated,
    fullsize, underlay, and overlay-disabled variants)."""
    browser_args = [
        '--enable-direct-composition-video-overlays',
        # All bots are connected with a power source, however, we want to
        # test with the code path that's enabled with battery power.
        cba.DISABLE_VP_SCALING,
    ]
    browser_args_YUY2 = browser_args + [
        '--disable-features=DirectCompositionPreferNV12Overlays'
    ]
    browser_args_DXVA = browser_args + [
        cba.DISABLE_FEATURES_D3D11_VIDEO_DECODER
    ]
    # Most tests fall roughly into 3 tiers of noisiness.
    # Parameter values were determined using the automated optimization script,
    # and similar values combined into a single set using the most permissive
    # value for each parameter in that tier.
    strict_dc_sobel_algorithm = algo.SobelMatchingAlgorithm(
        max_different_pixels=1000,
        pixel_delta_threshold=5,
        edge_threshold=250,
        ignored_border_thickness=1)
    permissive_dc_sobel_algorithm = algo.SobelMatchingAlgorithm(
        max_different_pixels=16800,
        pixel_delta_threshold=20,
        edge_threshold=30,
        ignored_border_thickness=1)
    very_permissive_dc_sobel_algorithm = algo.SobelMatchingAlgorithm(
        max_different_pixels=30400,
        pixel_delta_threshold=45,
        edge_threshold=10,
        ignored_border_thickness=1,
    )
    return [
        PixelTestPage('pixel_video_mp4.html',
                      base_name + '_DirectComposition_Video_MP4',
                      test_rect=[0, 0, 240, 135],
                      browser_args=browser_args,
                      matching_algorithm=permissive_dc_sobel_algorithm),
        PixelTestPage('pixel_video_mp4.html',
                      base_name + '_DirectComposition_Video_MP4_DXVA',
                      browser_args=browser_args_DXVA,
                      test_rect=[0, 0, 240, 135],
                      matching_algorithm=permissive_dc_sobel_algorithm),
        PixelTestPage('pixel_video_mp4_fullsize.html',
                      base_name + '_DirectComposition_Video_MP4_Fullsize',
                      browser_args=browser_args,
                      test_rect=[0, 0, 960, 540],
                      other_args={'zero_copy': True},
                      matching_algorithm=strict_dc_sobel_algorithm),
        PixelTestPage('pixel_video_mp4.html',
                      base_name + '_DirectComposition_Video_MP4_YUY2',
                      test_rect=[0, 0, 240, 135],
                      browser_args=browser_args_YUY2,
                      other_args={'expect_yuy2': True},
                      matching_algorithm=permissive_dc_sobel_algorithm),
        PixelTestPage('pixel_video_mp4_four_colors_aspect_4x3.html',
                      base_name +
                      '_DirectComposition_Video_MP4_FourColors_Aspect_4x3',
                      test_rect=[0, 0, 240, 135],
                      browser_args=browser_args,
                      matching_algorithm=permissive_dc_sobel_algorithm),
        PixelTestPage('pixel_video_mp4_four_colors_rot_90.html',
                      base_name +
                      '_DirectComposition_Video_MP4_FourColors_Rot_90',
                      test_rect=[0, 0, 270, 240],
                      browser_args=browser_args,
                      other_args={'video_is_rotated': True},
                      matching_algorithm=strict_dc_sobel_algorithm),
        PixelTestPage('pixel_video_mp4_four_colors_rot_180.html',
                      base_name +
                      '_DirectComposition_Video_MP4_FourColors_Rot_180',
                      test_rect=[0, 0, 240, 135],
                      browser_args=browser_args,
                      other_args={'video_is_rotated': True},
                      matching_algorithm=strict_dc_sobel_algorithm),
        PixelTestPage('pixel_video_mp4_four_colors_rot_270.html',
                      base_name +
                      '_DirectComposition_Video_MP4_FourColors_Rot_270',
                      test_rect=[0, 0, 270, 240],
                      browser_args=browser_args,
                      other_args={'video_is_rotated': True},
                      matching_algorithm=strict_dc_sobel_algorithm),
        PixelTestPage('pixel_video_vp9.html',
                      base_name + '_DirectComposition_Video_VP9',
                      test_rect=[0, 0, 240, 135],
                      browser_args=browser_args,
                      matching_algorithm=very_permissive_dc_sobel_algorithm),
        PixelTestPage('pixel_video_vp9.html',
                      base_name + '_DirectComposition_Video_VP9_DXVA',
                      browser_args=browser_args_DXVA,
                      test_rect=[0, 0, 240, 135],
                      matching_algorithm=very_permissive_dc_sobel_algorithm),
        PixelTestPage(
            'pixel_video_vp9_fullsize.html',
            base_name + '_DirectComposition_Video_VP9_Fullsize',
            test_rect=[0, 0, 960, 540],
            browser_args=browser_args,
            other_args={'zero_copy': True},
            # Much larger image than other VP9 tests.
            matching_algorithm=algo.SobelMatchingAlgorithm(
                max_different_pixels=504000,
                pixel_delta_threshold=10,
                edge_threshold=10,
                ignored_border_thickness=1,
            )),
        PixelTestPage('pixel_video_vp9.html',
                      base_name + '_DirectComposition_Video_VP9_YUY2',
                      test_rect=[0, 0, 240, 135],
                      browser_args=browser_args_YUY2,
                      other_args={'expect_yuy2': True},
                      matching_algorithm=very_permissive_dc_sobel_algorithm),
        PixelTestPage('pixel_video_vp9_i420a.html',
                      base_name + '_DirectComposition_Video_VP9_I420A',
                      test_rect=[0, 0, 240, 135],
                      browser_args=browser_args,
                      other_args={'no_overlay': True},
                      matching_algorithm=strict_dc_sobel_algorithm),
        PixelTestPage('pixel_video_underlay.html',
                      base_name + '_DirectComposition_Underlay',
                      test_rect=[0, 0, 240, 136],
                      browser_args=browser_args,
                      matching_algorithm=permissive_dc_sobel_algorithm),
        PixelTestPage('pixel_video_underlay.html',
                      base_name + '_DirectComposition_Underlay_DXVA',
                      test_rect=[0, 0, 240, 136],
                      browser_args=browser_args_DXVA,
                      matching_algorithm=permissive_dc_sobel_algorithm),
        PixelTestPage('pixel_video_underlay_fullsize.html',
                      base_name + '_DirectComposition_Underlay_Fullsize',
                      test_rect=[0, 0, 960, 540],
                      browser_args=browser_args,
                      other_args={'zero_copy': True},
                      matching_algorithm=strict_dc_sobel_algorithm),
        PixelTestPage('pixel_video_mp4_rounded_corner.html',
                      base_name + '_DirectComposition_Video_MP4_Rounded_Corner',
                      test_rect=[0, 0, 240, 135],
                      browser_args=browser_args,
                      other_args={'no_overlay': True}),
        PixelTestPage('pixel_video_backdrop_filter.html',
                      base_name + '_DirectComposition_Video_BackdropFilter',
                      test_rect=[0, 0, 240, 135],
                      browser_args=browser_args,
                      other_args={'no_overlay': True}),
        PixelTestPage(
            'pixel_video_mp4.html',
            base_name + '_DirectComposition_Video_Disable_Overlays',
            test_rect=[0, 0, 240, 135],
            browser_args=['--disable-direct-composition-video-overlays'],
            other_args={'no_overlay': True},
            matching_algorithm=very_permissive_dc_sobel_algorithm),
    ]
  @staticmethod
  def HdrTestPages(base_name):
    """Pages run with forced wide-gamut/HDR color profiles."""
    return [
        PixelTestPage(
            'pixel_canvas2d.html',
            base_name + '_Canvas2DRedBoxScrgbLinear',
            test_rect=[0, 0, 300, 300],
            browser_args=['--force-color-profile=scrgb-linear']),
        PixelTestPage(
            'pixel_canvas2d.html',
            base_name + '_Canvas2DRedBoxHdr10',
            test_rect=[0, 0, 300, 300],
            browser_args=['--force-color-profile=hdr10']),
    ]
|
python
|
import plotly.plotly as py
import plotly.graph_objs as go
import pandas as pd
import numpy as np
import math
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
#transform to [[x,y,z]...] [[x...],[y...],[z...]]
#plot
def plot_sample_list(sample_list, lim_val=10):
    """Scatter-plot 3-D samples as red circles in a cube of half-width lim_val.

    Args:
        sample_list: sequence of [x, y, z] triples (anything np.array turns
            into an (N, 3) array -- the body indexes columns 0..2).
        lim_val: half-width applied symmetrically to all three axis limits.

    Shows the figure via plt.show(), which blocks until the window closes.
    """
    sample_mat = np.array(sample_list)
    fig = plt.figure()
    ax = fig.add_subplot(111, projection='3d')
    ax.scatter(sample_mat[:, 0], sample_mat[:, 1], sample_mat[:, 2],
               c='r', marker='o')
    ax.set_xlim(-lim_val, lim_val)
    ax.set_ylim(-lim_val, lim_val)
    ax.set_zlim(-lim_val, lim_val)
    plt.show()
# Removed the stray module-level `Axes3D.plot()` call: it invoked an unbound
# method with no instance or arguments, raising a TypeError as soon as this
# module was imported.
|
python
|
### ---------------------------------------------------------------------------
from .core.startup import initialize
from .sublime import commands, events
# Version of this dependency package, kept as a tuple so callers (and the
# version() function below) can compare it structurally.
__version_tuple = (1, 0, 0)
# Human-readable dotted form of __version_tuple.
__version__ = ".".join([str(num) for num in __version_tuple])
### ---------------------------------------------------------------------------
# Public API of this package; "version" refers to the function defined at the
# bottom of this module.
__all__ = [
    "initialize",
    "commands",
    "events",
    "version"
]
### ---------------------------------------------------------------------------
def version():
    """
    Report the version of the installed dependency package as a tuple.

    The bootstrap check compares the value returned here against the one it
    recorded previously to decide whether the dependency has changed.
    """
    return __version_tuple
### ---------------------------------------------------------------------------
|
python
|
# coding:utf-8
import redis
# Lua script: atomically push ARGV[1] onto list KEYS[1], but only when it is
# not already tracked in the companion set "<key>_set" (SADD returned 1).
# Returns 1 when the value was enqueued, 0 when it was a duplicate.
SCRIPT_PUSH = '''
local q = KEYS[1]
local q_set = KEYS[1] .. "_set"
local v = redis.call("SADD", q_set, ARGV[1])
if v == 1
then
return redis.call("RPUSH", q, ARGV[1]) and 1
else
return 0
end
'''
# Lua script: atomically pop from the left of list KEYS[1] and remove the
# value from the companion set so it may be enqueued again later. Returns
# the popped value, or nil/None when the queue is empty.
SCRIPT_POP = '''
local q = KEYS[1]
local q_set = KEYS[1] .. "_set"
local v = redis.call("LPOP", q)
if v then
redis.call("SREM", q_set, v)
end
return v
'''
# NOTE(review): connects to localhost:6379 with default settings -- confirm
# this is the intended deployment target.
r = redis.Redis()
# Register the scripts once; calling the resulting objects EVALSHAs them.
queue_push = r.register_script(SCRIPT_PUSH)
queue_pop = r.register_script(SCRIPT_POP)
if __name__ == "__main__":
    mq_key = "ct:kafka:mq"
    count = 10
    # Push the same `count` keys over `count` rounds; only the first round
    # actually enqueues anything -- the Lua script's companion set rejects
    # the duplicates, which is what this exercise demonstrates. The original
    # code shadowed `i` in both loops; use a distinct outer counter.
    for _round in range(count):
        for i in range(count):
            queue_push(keys=[mq_key], args=['Test_key_{}'.format(i)])
    # Pop more entries than were enqueued to show the empty-queue result
    # (Lua LPOP returns false, surfaced as None).
    for _ in range(count + 10):
        # Parenthesized print works on Python 2 and 3; the original bare
        # `print x` statement is a SyntaxError under Python 3.
        print(queue_pop(keys=[mq_key]))
|
python
|
import os
import datetime
# Generate a fresh battery report (battery-report.html) in the CWD.
os.system("powercfg /batteryreport")
d = datetime.datetime.now()
try:
    # Archive today's report; os.rename raises if today's file already exists.
    os.rename('battery-report.html', f'battery-health/reports/battery-report-{d.month}-{d.day}-{d.year}.html')
    os.startfile(f'battery-health\\reports\\battery-report-{d.month}-{d.day}-{d.year}.html')
except WindowsError:
    print(' > Data already collected today')
# Rewrite each archived report entry in-place as a "M/D/YYYY: <capacity>"
# summary line, then dump all lines into the log file.
files = os.listdir('battery-health/reports')
for i, report_name in enumerate(files):
    # `with` closes each report handle promptly; the original leaked them.
    with open(f'battery-health/reports/{report_name}', 'r') as f:
        lines = f.readlines()
    for line in lines:
        if 'FULL CHARGE CAPACITY' in line and 'mWh' in line:
            # The capacity sits in the second <td> cell of the matching row.
            capacity = line.split('<td>')[2].replace('\n', '')
            name = files[i].split('-')
            # Filename is battery-report-M-D-YYYY.html, so fields 2..4 are
            # the date components.
            files[i] = f'{name[2]}/{name[3]}/{name[4][:4]}: {capacity}\n'
with open('battery-health/log.txt', 'w') as log:
    log.writelines(files)
print(' > Done')
|
python
|
import aspose.words as aw
from docs_examples_base import DocsExamplesBase, MY_DIR, ARTIFACTS_DIR
class WorkingWithPclSaveOptions(DocsExamplesBase):
    """Documentation examples for saving documents to PCL with Aspose.Words."""
    def test_rasterize_transformed_elements(self):
        """Saves a document to PCL with rasterization of transformed
        elements turned off."""
        #ExStart:RasterizeTransformedElements
        doc = aw.Document(MY_DIR + "Rendering.docx")
        save_options = aw.saving.PclSaveOptions()
        save_options.save_format = aw.SaveFormat.PCL
        # False => transformed (e.g. rotated) elements are not rasterized
        # when writing the PCL output.
        save_options.rasterize_transformed_elements = False
        doc.save(ARTIFACTS_DIR + "WorkingWithPclSaveOptions.rasterize_transformed_elements.pcl", save_options)
        #ExEnd:RasterizeTransformedElements
|
python
|
# MINLP written by GAMS Convert at 04/21/18 13:54:00
#
# Equation counts
# Total E G L N X C B
# 750 619 34 97 0 0 0 0
#
# Variable counts
# x b i s1s s2s sc si
# Total cont binary integer sos1 sos2 scont sint
# 785 725 60 0 0 0 0 0
# FX 10 10 0 0 0 0 0 0
#
# Nonzero counts
# Total const NL DLL
# 2445 2433 12 0
#
# Reformulation has removed 1 variable and 1 equation
from pyomo.environ import *
model = m = ConcreteModel()

# Variable declarations x1 .. x620 (the GAMS-generated original spelled out
# one `m.xN = Var(...)` line per variable).  Consecutive variables sharing
# the same (bounds, initialize) pair are collapsed into runs, and setattr
# recreates exactly the same attribute names m.x1 .. m.x620 with identical
# Var arguments, so constraints defined later in the file are unaffected.
# Each run is (first_index, last_index, bounds, initial_value).
_var_runs = [
    (1, 12, (0, None), 0),
    (13, 18, (0.1, 39.6), 1),
    (19, 24, (0.1, 49.5), 1),
    (25, 84, (0, None), 0),
    (85, 144, (0, 85), 0),
    (145, 204, (0, None), 0),
    (205, 264, (0, 35), 0),
    (265, 324, (0, None), 0),
    (325, 384, (0, 65), 0),
    (385, 444, (0, 60), 0),
    (445, 564, (0, None), 0),
    (565, 574, (0, 0), 0),
    (575, 620, (0, 6), 0),
]
for _first, _last, _bounds, _init in _var_runs:
    for _idx in range(_first, _last + 1):
        setattr(m, 'x{}'.format(_idx),
                Var(within=Reals, bounds=_bounds, initialize=_init))
m.x621 = Var(within=Reals,bounds=(0,6),initialize=0)
m.x622 = Var(within=Reals,bounds=(0,6),initialize=0)
m.x623 = Var(within=Reals,bounds=(0,6),initialize=0)
m.x624 = Var(within=Reals,bounds=(0,6),initialize=0)
# Continuous variables x626-x725: all nonnegative reals with no upper bound,
# initialized at 0. (x625 is not declared in this section of the generated
# file; the numbering gap is preserved deliberately.) Attribute assignment via
# setattr is equivalent to the original "m.xNNN = Var(...)" lines, so the
# declaration order and component names are identical.
for _i in range(626, 726):
    setattr(m, "x%d" % _i, Var(within=Reals, bounds=(0, None), initialize=0))
# Binary decision variables b726-b785, each with explicit (0, 1) bounds and
# initial value 0, exactly as in the generated per-line declarations.
# setattr preserves the component names (b726 ... b785) and their order.
for _i in range(726, 786):
    setattr(m, "b%d" % _i, Var(within=Binary, bounds=(0, 1), initialize=0))
# Maximized objective. Structure (from the expression below):
#  - Ten scenario terms, each weighted 0.1. Every scenario sums, over
#    m.x13..m.x18, the nonlinear term
#        (2.857142857... + 1.428571428...*log((40 - x)/x))*x
#    and, over m.x19..m.x24, the term (3 + log((50 - x)/x))*x, plus a
#    scenario-specific linear coefficient on each of x13..x24 (e.g.
#    -0.17779/-0.1063176 in the first scenario, -1.1945279/+4.2422342 in the
#    tenth). The log((C - x)/x) form requires 0 < x < C at evaluation time.
#  - A trailing linear cost: -0.026*(x85..x114), -0.014*(x115..x144),
#    -0.016*(x205..x234), -0.013*(x235..x264), -0.032*(x325..x384),
#    -0.1*(x505..x564), -0.003*(x565..x624).
# NOTE(review): auto-generated expression — kept byte-identical; do not
# hand-edit individual coefficients.
m.obj = Objective(expr=0.1*((2.85714285714286 + 1.42857142857143*log((40 - m.x13)/m.x13))*m.x13 - 0.17779*m.x13 + (
2.85714285714286 + 1.42857142857143*log((40 - m.x14)/m.x14))*m.x14 - 0.17779*m.x14 + (
2.85714285714286 + 1.42857142857143*log((40 - m.x15)/m.x15))*m.x15 - 0.17779*m.x15 + (
2.85714285714286 + 1.42857142857143*log((40 - m.x16)/m.x16))*m.x16 - 0.17779*m.x16 + (
2.85714285714286 + 1.42857142857143*log((40 - m.x17)/m.x17))*m.x17 - 0.17779*m.x17 + (
2.85714285714286 + 1.42857142857143*log((40 - m.x18)/m.x18))*m.x18 - 0.17779*m.x18 + (3 + log((50
- m.x19)/m.x19))*m.x19 - 0.1063176*m.x19 + (3 + log((50 - m.x20)/m.x20))*m.x20 - 0.1063176*m.x20
+ (3 + log((50 - m.x21)/m.x21))*m.x21 - 0.1063176*m.x21 + (3 + log((50 - m.x22)/m.x22))*m.x22 -
0.1063176*m.x22 + (3 + log((50 - m.x23)/m.x23))*m.x23 - 0.1063176*m.x23 + (3 + log((50 - m.x24)/
m.x24))*m.x24 - 0.1063176*m.x24) + 0.1*((2.85714285714286 + 1.42857142857143*log((40 - m.x13)/
m.x13))*m.x13 - 0.1699941*m.x13 + (2.85714285714286 + 1.42857142857143*log((40 - m.x14)/m.x14))*
m.x14 - 0.1699941*m.x14 + (2.85714285714286 + 1.42857142857143*log((40 - m.x15)/m.x15))*m.x15 -
0.1699941*m.x15 + (2.85714285714286 + 1.42857142857143*log((40 - m.x16)/m.x16))*m.x16 - 0.1699941
*m.x16 + (2.85714285714286 + 1.42857142857143*log((40 - m.x17)/m.x17))*m.x17 - 0.1699941*m.x17 +
(2.85714285714286 + 1.42857142857143*log((40 - m.x18)/m.x18))*m.x18 - 0.1699941*m.x18 + (3 + log(
(50 - m.x19)/m.x19))*m.x19 + 0.510392*m.x19 + (3 + log((50 - m.x20)/m.x20))*m.x20 + 0.510392*
m.x20 + (3 + log((50 - m.x21)/m.x21))*m.x21 + 0.510392*m.x21 + (3 + log((50 - m.x22)/m.x22))*
m.x22 + 0.510392*m.x22 + (3 + log((50 - m.x23)/m.x23))*m.x23 + 0.510392*m.x23 + (3 + log((50 -
m.x24)/m.x24))*m.x24 + 0.510392*m.x24) + 0.1*((2.85714285714286 + 1.42857142857143*log((40 -
m.x13)/m.x13))*m.x13 - 1.3723019*m.x13 + (2.85714285714286 + 1.42857142857143*log((40 - m.x14)/
m.x14))*m.x14 - 1.3723019*m.x14 + (2.85714285714286 + 1.42857142857143*log((40 - m.x15)/m.x15))*
m.x15 - 1.3723019*m.x15 + (2.85714285714286 + 1.42857142857143*log((40 - m.x16)/m.x16))*m.x16 -
1.3723019*m.x16 + (2.85714285714286 + 1.42857142857143*log((40 - m.x17)/m.x17))*m.x17 - 1.3723019
*m.x17 + (2.85714285714286 + 1.42857142857143*log((40 - m.x18)/m.x18))*m.x18 - 1.3723019*m.x18 +
(3 + log((50 - m.x19)/m.x19))*m.x19 + 3.411928*m.x19 + (3 + log((50 - m.x20)/m.x20))*m.x20 +
3.411928*m.x20 + (3 + log((50 - m.x21)/m.x21))*m.x21 + 3.411928*m.x21 + (3 + log((50 - m.x22)/
m.x22))*m.x22 + 3.411928*m.x22 + (3 + log((50 - m.x23)/m.x23))*m.x23 + 3.411928*m.x23 + (3 + log(
(50 - m.x24)/m.x24))*m.x24 + 3.411928*m.x24) + 0.1*((2.85714285714286 + 1.42857142857143*log((40
- m.x13)/m.x13))*m.x13 - 0.1737872*m.x13 + (2.85714285714286 + 1.42857142857143*log((40 - m.x14)
/m.x14))*m.x14 - 0.1737872*m.x14 + (2.85714285714286 + 1.42857142857143*log((40 - m.x15)/m.x15))*
m.x15 - 0.1737872*m.x15 + (2.85714285714286 + 1.42857142857143*log((40 - m.x16)/m.x16))*m.x16 -
0.1737872*m.x16 + (2.85714285714286 + 1.42857142857143*log((40 - m.x17)/m.x17))*m.x17 - 0.1737872
*m.x17 + (2.85714285714286 + 1.42857142857143*log((40 - m.x18)/m.x18))*m.x18 - 0.1737872*m.x18 +
(3 + log((50 - m.x19)/m.x19))*m.x19 + 2.0030265*m.x19 + (3 + log((50 - m.x20)/m.x20))*m.x20 +
2.0030265*m.x20 + (3 + log((50 - m.x21)/m.x21))*m.x21 + 2.0030265*m.x21 + (3 + log((50 - m.x22)/
m.x22))*m.x22 + 2.0030265*m.x22 + (3 + log((50 - m.x23)/m.x23))*m.x23 + 2.0030265*m.x23 + (3 +
log((50 - m.x24)/m.x24))*m.x24 + 2.0030265*m.x24) + 0.1*((2.85714285714286 + 1.42857142857143*
log((40 - m.x13)/m.x13))*m.x13 + 0.8502323*m.x13 + (2.85714285714286 + 1.42857142857143*log((40
- m.x14)/m.x14))*m.x14 + 0.8502323*m.x14 + (2.85714285714286 + 1.42857142857143*log((40 - m.x15)
/m.x15))*m.x15 + 0.8502323*m.x15 + (2.85714285714286 + 1.42857142857143*log((40 - m.x16)/m.x16))*
m.x16 + 0.8502323*m.x16 + (2.85714285714286 + 1.42857142857143*log((40 - m.x17)/m.x17))*m.x17 +
0.8502323*m.x17 + (2.85714285714286 + 1.42857142857143*log((40 - m.x18)/m.x18))*m.x18 + 0.8502323
*m.x18 + (3 + log((50 - m.x19)/m.x19))*m.x19 - 0.9911669*m.x19 + (3 + log((50 - m.x20)/m.x20))*
m.x20 - 0.9911669*m.x20 + (3 + log((50 - m.x21)/m.x21))*m.x21 - 0.9911669*m.x21 + (3 + log((50 -
m.x22)/m.x22))*m.x22 - 0.9911669*m.x22 + (3 + log((50 - m.x23)/m.x23))*m.x23 - 0.9911669*m.x23 +
(3 + log((50 - m.x24)/m.x24))*m.x24 - 0.9911669*m.x24) + 0.1*((2.85714285714286 +
1.42857142857143*log((40 - m.x13)/m.x13))*m.x13 + 0.6976087*m.x13 + (2.85714285714286 +
1.42857142857143*log((40 - m.x14)/m.x14))*m.x14 + 0.6976087*m.x14 + (2.85714285714286 +
1.42857142857143*log((40 - m.x15)/m.x15))*m.x15 + 0.6976087*m.x15 + (2.85714285714286 +
1.42857142857143*log((40 - m.x16)/m.x16))*m.x16 + 0.6976087*m.x16 + (2.85714285714286 +
1.42857142857143*log((40 - m.x17)/m.x17))*m.x17 + 0.6976087*m.x17 + (2.85714285714286 +
1.42857142857143*log((40 - m.x18)/m.x18))*m.x18 + 0.6976087*m.x18 + (3 + log((50 - m.x19)/m.x19))
*m.x19 + 0.7111006*m.x19 + (3 + log((50 - m.x20)/m.x20))*m.x20 + 0.7111006*m.x20 + (3 + log((50
- m.x21)/m.x21))*m.x21 + 0.7111006*m.x21 + (3 + log((50 - m.x22)/m.x22))*m.x22 + 0.7111006*m.x22
+ (3 + log((50 - m.x23)/m.x23))*m.x23 + 0.7111006*m.x23 + (3 + log((50 - m.x24)/m.x24))*m.x24 +
0.7111006*m.x24) + 0.1*((2.85714285714286 + 1.42857142857143*log((40 - m.x13)/m.x13))*m.x13 +
0.5499974*m.x13 + (2.85714285714286 + 1.42857142857143*log((40 - m.x14)/m.x14))*m.x14 + 0.5499974
*m.x14 + (2.85714285714286 + 1.42857142857143*log((40 - m.x15)/m.x15))*m.x15 + 0.5499974*m.x15 +
(2.85714285714286 + 1.42857142857143*log((40 - m.x16)/m.x16))*m.x16 + 0.5499974*m.x16 + (
2.85714285714286 + 1.42857142857143*log((40 - m.x17)/m.x17))*m.x17 + 0.5499974*m.x17 + (
2.85714285714286 + 1.42857142857143*log((40 - m.x18)/m.x18))*m.x18 + 0.5499974*m.x18 + (3 + log((
50 - m.x19)/m.x19))*m.x19 - 2.2692161*m.x19 + (3 + log((50 - m.x20)/m.x20))*m.x20 - 2.2692161*
m.x20 + (3 + log((50 - m.x21)/m.x21))*m.x21 - 2.2692161*m.x21 + (3 + log((50 - m.x22)/m.x22))*
m.x22 - 2.2692161*m.x22 + (3 + log((50 - m.x23)/m.x23))*m.x23 - 2.2692161*m.x23 + (3 + log((50 -
m.x24)/m.x24))*m.x24 - 2.2692161*m.x24) + 0.1*((2.85714285714286 + 1.42857142857143*log((40 -
m.x13)/m.x13))*m.x13 - 0.402732*m.x13 + (2.85714285714286 + 1.42857142857143*log((40 - m.x14)/
m.x14))*m.x14 - 0.402732*m.x14 + (2.85714285714286 + 1.42857142857143*log((40 - m.x15)/m.x15))*
m.x15 - 0.402732*m.x15 + (2.85714285714286 + 1.42857142857143*log((40 - m.x16)/m.x16))*m.x16 -
0.402732*m.x16 + (2.85714285714286 + 1.42857142857143*log((40 - m.x17)/m.x17))*m.x17 - 0.402732*
m.x17 + (2.85714285714286 + 1.42857142857143*log((40 - m.x18)/m.x18))*m.x18 - 0.402732*m.x18 + (3
+ log((50 - m.x19)/m.x19))*m.x19 + 1.7564073*m.x19 + (3 + log((50 - m.x20)/m.x20))*m.x20 +
1.7564073*m.x20 + (3 + log((50 - m.x21)/m.x21))*m.x21 + 1.7564073*m.x21 + (3 + log((50 - m.x22)/
m.x22))*m.x22 + 1.7564073*m.x22 + (3 + log((50 - m.x23)/m.x23))*m.x23 + 1.7564073*m.x23 + (3 +
log((50 - m.x24)/m.x24))*m.x24 + 1.7564073*m.x24) + 0.1*((2.85714285714286 + 1.42857142857143*
log((40 - m.x13)/m.x13))*m.x13 - 0.1915938*m.x13 + (2.85714285714286 + 1.42857142857143*log((40
- m.x14)/m.x14))*m.x14 - 0.1915938*m.x14 + (2.85714285714286 + 1.42857142857143*log((40 - m.x15)
/m.x15))*m.x15 - 0.1915938*m.x15 + (2.85714285714286 + 1.42857142857143*log((40 - m.x16)/m.x16))*
m.x16 - 0.1915938*m.x16 + (2.85714285714286 + 1.42857142857143*log((40 - m.x17)/m.x17))*m.x17 -
0.1915938*m.x17 + (2.85714285714286 + 1.42857142857143*log((40 - m.x18)/m.x18))*m.x18 - 0.1915938
*m.x18 + (3 + log((50 - m.x19)/m.x19))*m.x19 + 1.9458335*m.x19 + (3 + log((50 - m.x20)/m.x20))*
m.x20 + 1.9458335*m.x20 + (3 + log((50 - m.x21)/m.x21))*m.x21 + 1.9458335*m.x21 + (3 + log((50 -
m.x22)/m.x22))*m.x22 + 1.9458335*m.x22 + (3 + log((50 - m.x23)/m.x23))*m.x23 + 1.9458335*m.x23 +
(3 + log((50 - m.x24)/m.x24))*m.x24 + 1.9458335*m.x24) + 0.1*((2.85714285714286 +
1.42857142857143*log((40 - m.x13)/m.x13))*m.x13 - 1.1945279*m.x13 + (2.85714285714286 +
1.42857142857143*log((40 - m.x14)/m.x14))*m.x14 - 1.1945279*m.x14 + (2.85714285714286 +
1.42857142857143*log((40 - m.x15)/m.x15))*m.x15 - 1.1945279*m.x15 + (2.85714285714286 +
1.42857142857143*log((40 - m.x16)/m.x16))*m.x16 - 1.1945279*m.x16 + (2.85714285714286 +
1.42857142857143*log((40 - m.x17)/m.x17))*m.x17 - 1.1945279*m.x17 + (2.85714285714286 +
1.42857142857143*log((40 - m.x18)/m.x18))*m.x18 - 1.1945279*m.x18 + (3 + log((50 - m.x19)/m.x19))
*m.x19 + 4.2422342*m.x19 + (3 + log((50 - m.x20)/m.x20))*m.x20 + 4.2422342*m.x20 + (3 + log((50
- m.x21)/m.x21))*m.x21 + 4.2422342*m.x21 + (3 + log((50 - m.x22)/m.x22))*m.x22 + 4.2422342*m.x22
+ (3 + log((50 - m.x23)/m.x23))*m.x23 + 4.2422342*m.x23 + (3 + log((50 - m.x24)/m.x24))*m.x24 +
4.2422342*m.x24) - 0.026*m.x85 - 0.026*m.x86 - 0.026*m.x87 - 0.026*m.x88 - 0.026*m.x89
- 0.026*m.x90 - 0.026*m.x91 - 0.026*m.x92 - 0.026*m.x93 - 0.026*m.x94 - 0.026*m.x95
- 0.026*m.x96 - 0.026*m.x97 - 0.026*m.x98 - 0.026*m.x99 - 0.026*m.x100 - 0.026*m.x101
- 0.026*m.x102 - 0.026*m.x103 - 0.026*m.x104 - 0.026*m.x105 - 0.026*m.x106 - 0.026*m.x107
- 0.026*m.x108 - 0.026*m.x109 - 0.026*m.x110 - 0.026*m.x111 - 0.026*m.x112 - 0.026*m.x113
- 0.026*m.x114 - 0.014*m.x115 - 0.014*m.x116 - 0.014*m.x117 - 0.014*m.x118 - 0.014*m.x119
- 0.014*m.x120 - 0.014*m.x121 - 0.014*m.x122 - 0.014*m.x123 - 0.014*m.x124 - 0.014*m.x125
- 0.014*m.x126 - 0.014*m.x127 - 0.014*m.x128 - 0.014*m.x129 - 0.014*m.x130 - 0.014*m.x131
- 0.014*m.x132 - 0.014*m.x133 - 0.014*m.x134 - 0.014*m.x135 - 0.014*m.x136 - 0.014*m.x137
- 0.014*m.x138 - 0.014*m.x139 - 0.014*m.x140 - 0.014*m.x141 - 0.014*m.x142 - 0.014*m.x143
- 0.014*m.x144 - 0.016*m.x205 - 0.016*m.x206 - 0.016*m.x207 - 0.016*m.x208 - 0.016*m.x209
- 0.016*m.x210 - 0.016*m.x211 - 0.016*m.x212 - 0.016*m.x213 - 0.016*m.x214 - 0.016*m.x215
- 0.016*m.x216 - 0.016*m.x217 - 0.016*m.x218 - 0.016*m.x219 - 0.016*m.x220 - 0.016*m.x221
- 0.016*m.x222 - 0.016*m.x223 - 0.016*m.x224 - 0.016*m.x225 - 0.016*m.x226 - 0.016*m.x227
- 0.016*m.x228 - 0.016*m.x229 - 0.016*m.x230 - 0.016*m.x231 - 0.016*m.x232 - 0.016*m.x233
- 0.016*m.x234 - 0.013*m.x235 - 0.013*m.x236 - 0.013*m.x237 - 0.013*m.x238 - 0.013*m.x239
- 0.013*m.x240 - 0.013*m.x241 - 0.013*m.x242 - 0.013*m.x243 - 0.013*m.x244 - 0.013*m.x245
- 0.013*m.x246 - 0.013*m.x247 - 0.013*m.x248 - 0.013*m.x249 - 0.013*m.x250 - 0.013*m.x251
- 0.013*m.x252 - 0.013*m.x253 - 0.013*m.x254 - 0.013*m.x255 - 0.013*m.x256 - 0.013*m.x257
- 0.013*m.x258 - 0.013*m.x259 - 0.013*m.x260 - 0.013*m.x261 - 0.013*m.x262 - 0.013*m.x263
- 0.013*m.x264 - 0.032*m.x325 - 0.032*m.x326 - 0.032*m.x327 - 0.032*m.x328 - 0.032*m.x329
- 0.032*m.x330 - 0.032*m.x331 - 0.032*m.x332 - 0.032*m.x333 - 0.032*m.x334 - 0.032*m.x335
- 0.032*m.x336 - 0.032*m.x337 - 0.032*m.x338 - 0.032*m.x339 - 0.032*m.x340 - 0.032*m.x341
- 0.032*m.x342 - 0.032*m.x343 - 0.032*m.x344 - 0.032*m.x345 - 0.032*m.x346 - 0.032*m.x347
- 0.032*m.x348 - 0.032*m.x349 - 0.032*m.x350 - 0.032*m.x351 - 0.032*m.x352 - 0.032*m.x353
- 0.032*m.x354 - 0.032*m.x355 - 0.032*m.x356 - 0.032*m.x357 - 0.032*m.x358 - 0.032*m.x359
- 0.032*m.x360 - 0.032*m.x361 - 0.032*m.x362 - 0.032*m.x363 - 0.032*m.x364 - 0.032*m.x365
- 0.032*m.x366 - 0.032*m.x367 - 0.032*m.x368 - 0.032*m.x369 - 0.032*m.x370 - 0.032*m.x371
- 0.032*m.x372 - 0.032*m.x373 - 0.032*m.x374 - 0.032*m.x375 - 0.032*m.x376 - 0.032*m.x377
- 0.032*m.x378 - 0.032*m.x379 - 0.032*m.x380 - 0.032*m.x381 - 0.032*m.x382 - 0.032*m.x383
- 0.032*m.x384 - 0.1*m.x505 - 0.1*m.x506 - 0.1*m.x507 - 0.1*m.x508 - 0.1*m.x509 - 0.1*m.x510
- 0.1*m.x511 - 0.1*m.x512 - 0.1*m.x513 - 0.1*m.x514 - 0.1*m.x515 - 0.1*m.x516 - 0.1*m.x517
- 0.1*m.x518 - 0.1*m.x519 - 0.1*m.x520 - 0.1*m.x521 - 0.1*m.x522 - 0.1*m.x523 - 0.1*m.x524
- 0.1*m.x525 - 0.1*m.x526 - 0.1*m.x527 - 0.1*m.x528 - 0.1*m.x529 - 0.1*m.x530 - 0.1*m.x531
- 0.1*m.x532 - 0.1*m.x533 - 0.1*m.x534 - 0.1*m.x535 - 0.1*m.x536 - 0.1*m.x537 - 0.1*m.x538
- 0.1*m.x539 - 0.1*m.x540 - 0.1*m.x541 - 0.1*m.x542 - 0.1*m.x543 - 0.1*m.x544 - 0.1*m.x545
- 0.1*m.x546 - 0.1*m.x547 - 0.1*m.x548 - 0.1*m.x549 - 0.1*m.x550 - 0.1*m.x551 - 0.1*m.x552
- 0.1*m.x553 - 0.1*m.x554 - 0.1*m.x555 - 0.1*m.x556 - 0.1*m.x557 - 0.1*m.x558 - 0.1*m.x559
- 0.1*m.x560 - 0.1*m.x561 - 0.1*m.x562 - 0.1*m.x563 - 0.1*m.x564 - 0.003*m.x565 - 0.003*m.x566
- 0.003*m.x567 - 0.003*m.x568 - 0.003*m.x569 - 0.003*m.x570 - 0.003*m.x571 - 0.003*m.x572
- 0.003*m.x573 - 0.003*m.x574 - 0.003*m.x575 - 0.003*m.x576 - 0.003*m.x577 - 0.003*m.x578
- 0.003*m.x579 - 0.003*m.x580 - 0.003*m.x581 - 0.003*m.x582 - 0.003*m.x583 - 0.003*m.x584
- 0.003*m.x585 - 0.003*m.x586 - 0.003*m.x587 - 0.003*m.x588 - 0.003*m.x589 - 0.003*m.x590
- 0.003*m.x591 - 0.003*m.x592 - 0.003*m.x593 - 0.003*m.x594 - 0.003*m.x595 - 0.003*m.x596
- 0.003*m.x597 - 0.003*m.x598 - 0.003*m.x599 - 0.003*m.x600 - 0.003*m.x601 - 0.003*m.x602
- 0.003*m.x603 - 0.003*m.x604 - 0.003*m.x605 - 0.003*m.x606 - 0.003*m.x607 - 0.003*m.x608
- 0.003*m.x609 - 0.003*m.x610 - 0.003*m.x611 - 0.003*m.x612 - 0.003*m.x613 - 0.003*m.x614
- 0.003*m.x615 - 0.003*m.x616 - 0.003*m.x617 - 0.003*m.x618 - 0.003*m.x619 - 0.003*m.x620
- 0.003*m.x621 - 0.003*m.x622 - 0.003*m.x623 - 0.003*m.x624, sense=maximize)
# Constraints c1-c60: link x25..x84 to x85..x144 with a fixed 0.83 yield
# factor, i.e. x_k - 0.83*x_{k+60} == 0. setattr/getattr reproduce the
# original per-line declarations exactly (same names, same order).
for _k in range(1, 61):
    _lhs = getattr(m, "x%d" % (24 + _k))
    _rhs = getattr(m, "x%d" % (84 + _k))
    setattr(m, "c%d" % _k, Constraint(expr=_lhs - 0.83 * _rhs == 0))
# Constraints c61-c120: link x145..x204 to x205..x264 with a fixed 0.95
# factor, i.e. x_k - 0.95*x_{k+60} == 0. Same components and order as the
# generated per-line form.
for _k in range(61, 121):
    _lhs = getattr(m, "x%d" % (84 + _k))
    _rhs = getattr(m, "x%d" % (144 + _k))
    setattr(m, "c%d" % _k, Constraint(expr=_lhs - 0.95 * _rhs == 0))
# Constraints c121-c180: link x265..x324 to x325..x384 with a fixed 1.11
# factor, i.e. x_k - 1.11*x_{k+60} == 0. Same components and order as the
# generated per-line form.
for _k in range(121, 181):
    _lhs = getattr(m, "x%d" % (144 + _k))
    _rhs = getattr(m, "x%d" % (204 + _k))
    setattr(m, "c%d" % _k, Constraint(expr=_lhs - 1.11 * _rhs == 0))
m.c181 = Constraint(expr= - m.x25 + m.x385 == 0)
m.c182 = Constraint(expr= - m.x26 + m.x386 == 0)
m.c183 = Constraint(expr= - m.x27 + m.x387 == 0)
m.c184 = Constraint(expr= - m.x28 + m.x388 == 0)
m.c185 = Constraint(expr= - m.x29 + m.x389 == 0)
m.c186 = Constraint(expr= - m.x30 + m.x390 == 0)
m.c187 = Constraint(expr= - m.x31 + m.x391 == 0)
m.c188 = Constraint(expr= - m.x32 + m.x392 == 0)
m.c189 = Constraint(expr= - m.x33 + m.x393 == 0)
m.c190 = Constraint(expr= - m.x34 + m.x394 == 0)
m.c191 = Constraint(expr= - m.x35 + m.x395 == 0)
m.c192 = Constraint(expr= - m.x36 + m.x396 == 0)
m.c193 = Constraint(expr= - m.x37 + m.x397 == 0)
m.c194 = Constraint(expr= - m.x38 + m.x398 == 0)
m.c195 = Constraint(expr= - m.x39 + m.x399 == 0)
m.c196 = Constraint(expr= - m.x40 + m.x400 == 0)
m.c197 = Constraint(expr= - m.x41 + m.x401 == 0)
m.c198 = Constraint(expr= - m.x42 + m.x402 == 0)
m.c199 = Constraint(expr= - m.x43 + m.x403 == 0)
m.c200 = Constraint(expr= - m.x44 + m.x404 == 0)
m.c201 = Constraint(expr= - m.x45 + m.x405 == 0)
m.c202 = Constraint(expr= - m.x46 + m.x406 == 0)
m.c203 = Constraint(expr= - m.x47 + m.x407 == 0)
m.c204 = Constraint(expr= - m.x48 + m.x408 == 0)
m.c205 = Constraint(expr= - m.x49 + m.x409 == 0)
m.c206 = Constraint(expr= - m.x50 + m.x410 == 0)
m.c207 = Constraint(expr= - m.x51 + m.x411 == 0)
m.c208 = Constraint(expr= - m.x52 + m.x412 == 0)
m.c209 = Constraint(expr= - m.x53 + m.x413 == 0)
m.c210 = Constraint(expr= - m.x54 + m.x414 == 0)
m.c211 = Constraint(expr= - m.x55 + m.x415 == 0)
m.c212 = Constraint(expr= - m.x56 + m.x416 == 0)
m.c213 = Constraint(expr= - m.x57 + m.x417 == 0)
m.c214 = Constraint(expr= - m.x58 + m.x418 == 0)
m.c215 = Constraint(expr= - m.x59 + m.x419 == 0)
m.c216 = Constraint(expr= - m.x60 + m.x420 == 0)
m.c217 = Constraint(expr= - m.x61 + m.x421 == 0)
m.c218 = Constraint(expr= - m.x62 + m.x422 == 0)
m.c219 = Constraint(expr= - m.x63 + m.x423 == 0)
m.c220 = Constraint(expr= - m.x64 + m.x424 == 0)
m.c221 = Constraint(expr= - m.x65 + m.x425 == 0)
m.c222 = Constraint(expr= - m.x66 + m.x426 == 0)
m.c223 = Constraint(expr= - m.x67 + m.x427 == 0)
m.c224 = Constraint(expr= - m.x68 + m.x428 == 0)
m.c225 = Constraint(expr= - m.x69 + m.x429 == 0)
m.c226 = Constraint(expr= - m.x70 + m.x430 == 0)
m.c227 = Constraint(expr= - m.x71 + m.x431 == 0)
m.c228 = Constraint(expr= - m.x72 + m.x432 == 0)
m.c229 = Constraint(expr= - m.x73 + m.x433 == 0)
m.c230 = Constraint(expr= - m.x74 + m.x434 == 0)
m.c231 = Constraint(expr= - m.x75 + m.x435 == 0)
m.c232 = Constraint(expr= - m.x76 + m.x436 == 0)
m.c233 = Constraint(expr= - m.x77 + m.x437 == 0)
m.c234 = Constraint(expr= - m.x78 + m.x438 == 0)
m.c235 = Constraint(expr= - m.x79 + m.x439 == 0)
m.c236 = Constraint(expr= - m.x80 + m.x440 == 0)
m.c237 = Constraint(expr= - m.x81 + m.x441 == 0)
m.c238 = Constraint(expr= - m.x82 + m.x442 == 0)
m.c239 = Constraint(expr= - m.x83 + m.x443 == 0)
m.c240 = Constraint(expr= - m.x84 + m.x444 == 0)
m.c241 = Constraint(expr= m.x85 - m.x145 - m.x265 == 0)
m.c242 = Constraint(expr= m.x86 - m.x146 - m.x266 == 0)
m.c243 = Constraint(expr= m.x87 - m.x147 - m.x267 == 0)
m.c244 = Constraint(expr= m.x88 - m.x148 - m.x268 == 0)
m.c245 = Constraint(expr= m.x89 - m.x149 - m.x269 == 0)
m.c246 = Constraint(expr= m.x90 - m.x150 - m.x270 == 0)
m.c247 = Constraint(expr= m.x91 - m.x151 - m.x271 == 0)
m.c248 = Constraint(expr= m.x92 - m.x152 - m.x272 == 0)
m.c249 = Constraint(expr= m.x93 - m.x153 - m.x273 == 0)
m.c250 = Constraint(expr= m.x94 - m.x154 - m.x274 == 0)
m.c251 = Constraint(expr= m.x95 - m.x155 - m.x275 == 0)
m.c252 = Constraint(expr= m.x96 - m.x156 - m.x276 == 0)
m.c253 = Constraint(expr= m.x97 - m.x157 - m.x277 == 0)
m.c254 = Constraint(expr= m.x98 - m.x158 - m.x278 == 0)
m.c255 = Constraint(expr= m.x99 - m.x159 - m.x279 == 0)
m.c256 = Constraint(expr= m.x100 - m.x160 - m.x280 == 0)
m.c257 = Constraint(expr= m.x101 - m.x161 - m.x281 == 0)
m.c258 = Constraint(expr= m.x102 - m.x162 - m.x282 == 0)
m.c259 = Constraint(expr= m.x103 - m.x163 - m.x283 == 0)
m.c260 = Constraint(expr= m.x104 - m.x164 - m.x284 == 0)
m.c261 = Constraint(expr= m.x105 - m.x165 - m.x285 == 0)
m.c262 = Constraint(expr= m.x106 - m.x166 - m.x286 == 0)
m.c263 = Constraint(expr= m.x107 - m.x167 - m.x287 == 0)
m.c264 = Constraint(expr= m.x108 - m.x168 - m.x288 == 0)
m.c265 = Constraint(expr= m.x109 - m.x169 - m.x289 == 0)
m.c266 = Constraint(expr= m.x110 - m.x170 - m.x290 == 0)
m.c267 = Constraint(expr= m.x111 - m.x171 - m.x291 == 0)
m.c268 = Constraint(expr= m.x112 - m.x172 - m.x292 == 0)
m.c269 = Constraint(expr= m.x113 - m.x173 - m.x293 == 0)
m.c270 = Constraint(expr= m.x114 - m.x174 - m.x294 == 0)
m.c271 = Constraint(expr= m.x115 - m.x175 - m.x295 == 0)
m.c272 = Constraint(expr= m.x116 - m.x176 - m.x296 == 0)
m.c273 = Constraint(expr= m.x117 - m.x177 - m.x297 == 0)
m.c274 = Constraint(expr= m.x118 - m.x178 - m.x298 == 0)
m.c275 = Constraint(expr= m.x119 - m.x179 - m.x299 == 0)
m.c276 = Constraint(expr= m.x120 - m.x180 - m.x300 == 0)
m.c277 = Constraint(expr= m.x121 - m.x181 - m.x301 == 0)
m.c278 = Constraint(expr= m.x122 - m.x182 - m.x302 == 0)
m.c279 = Constraint(expr= m.x123 - m.x183 - m.x303 == 0)
m.c280 = Constraint(expr= m.x124 - m.x184 - m.x304 == 0)
m.c281 = Constraint(expr= m.x125 - m.x185 - m.x305 == 0)
m.c282 = Constraint(expr= m.x126 - m.x186 - m.x306 == 0)
m.c283 = Constraint(expr= m.x127 - m.x187 - m.x307 == 0)
m.c284 = Constraint(expr= m.x128 - m.x188 - m.x308 == 0)
m.c285 = Constraint(expr= m.x129 - m.x189 - m.x309 == 0)
m.c286 = Constraint(expr= m.x130 - m.x190 - m.x310 == 0)
m.c287 = Constraint(expr= m.x131 - m.x191 - m.x311 == 0)
m.c288 = Constraint(expr= m.x132 - m.x192 - m.x312 == 0)
m.c289 = Constraint(expr= m.x133 - m.x193 - m.x313 == 0)
m.c290 = Constraint(expr= m.x134 - m.x194 - m.x314 == 0)
m.c291 = Constraint(expr= m.x135 - m.x195 - m.x315 == 0)
m.c292 = Constraint(expr= m.x136 - m.x196 - m.x316 == 0)
m.c293 = Constraint(expr= m.x137 - m.x197 - m.x317 == 0)
m.c294 = Constraint(expr= m.x138 - m.x198 - m.x318 == 0)
m.c295 = Constraint(expr= m.x139 - m.x199 - m.x319 == 0)
m.c296 = Constraint(expr= m.x140 - m.x200 - m.x320 == 0)
m.c297 = Constraint(expr= m.x141 - m.x201 - m.x321 == 0)
m.c298 = Constraint(expr= m.x142 - m.x202 - m.x322 == 0)
m.c299 = Constraint(expr= m.x143 - m.x203 - m.x323 == 0)
m.c300 = Constraint(expr= m.x144 - m.x204 - m.x324 == 0)
m.c301 = Constraint(expr= - m.x1 + m.x205 == 0)
m.c302 = Constraint(expr= - m.x1 + m.x206 == 0)
m.c303 = Constraint(expr= - m.x1 + m.x207 == 0)
m.c304 = Constraint(expr= - m.x1 + m.x208 == 0)
m.c305 = Constraint(expr= - m.x1 + m.x209 == 0)
m.c306 = Constraint(expr= - m.x1 + m.x210 == 0)
m.c307 = Constraint(expr= - m.x1 + m.x211 == 0)
m.c308 = Constraint(expr= - m.x1 + m.x212 == 0)
m.c309 = Constraint(expr= - m.x1 + m.x213 == 0)
m.c310 = Constraint(expr= - m.x1 + m.x214 == 0)
m.c311 = Constraint(expr= - m.x2 + m.x215 == 0)
m.c312 = Constraint(expr= - m.x2 + m.x216 == 0)
m.c313 = Constraint(expr= - m.x2 + m.x217 == 0)
m.c314 = Constraint(expr= - m.x2 + m.x218 == 0)
m.c315 = Constraint(expr= - m.x2 + m.x219 == 0)
m.c316 = Constraint(expr= - m.x2 + m.x220 == 0)
m.c317 = Constraint(expr= - m.x2 + m.x221 == 0)
m.c318 = Constraint(expr= - m.x2 + m.x222 == 0)
m.c319 = Constraint(expr= - m.x2 + m.x223 == 0)
m.c320 = Constraint(expr= - m.x2 + m.x224 == 0)
m.c321 = Constraint(expr= - m.x3 + m.x225 == 0)
m.c322 = Constraint(expr= - m.x3 + m.x226 == 0)
m.c323 = Constraint(expr= - m.x3 + m.x227 == 0)
m.c324 = Constraint(expr= - m.x3 + m.x228 == 0)
m.c325 = Constraint(expr= - m.x3 + m.x229 == 0)
m.c326 = Constraint(expr= - m.x3 + m.x230 == 0)
m.c327 = Constraint(expr= - m.x3 + m.x231 == 0)
m.c328 = Constraint(expr= - m.x3 + m.x232 == 0)
m.c329 = Constraint(expr= - m.x3 + m.x233 == 0)
m.c330 = Constraint(expr= - m.x3 + m.x234 == 0)
m.c331 = Constraint(expr= - m.x4 + m.x235 == 0)
m.c332 = Constraint(expr= - m.x4 + m.x236 == 0)
m.c333 = Constraint(expr= - m.x4 + m.x237 == 0)
m.c334 = Constraint(expr= - m.x4 + m.x238 == 0)
m.c335 = Constraint(expr= - m.x4 + m.x239 == 0)
m.c336 = Constraint(expr= - m.x4 + m.x240 == 0)
m.c337 = Constraint(expr= - m.x4 + m.x241 == 0)
m.c338 = Constraint(expr= - m.x4 + m.x242 == 0)
m.c339 = Constraint(expr= - m.x4 + m.x243 == 0)
m.c340 = Constraint(expr= - m.x4 + m.x244 == 0)
m.c341 = Constraint(expr= - m.x5 + m.x245 == 0)
m.c342 = Constraint(expr= - m.x5 + m.x246 == 0)
m.c343 = Constraint(expr= - m.x5 + m.x247 == 0)
m.c344 = Constraint(expr= - m.x5 + m.x248 == 0)
m.c345 = Constraint(expr= - m.x5 + m.x249 == 0)
m.c346 = Constraint(expr= - m.x5 + m.x250 == 0)
m.c347 = Constraint(expr= - m.x5 + m.x251 == 0)
m.c348 = Constraint(expr= - m.x5 + m.x252 == 0)
m.c349 = Constraint(expr= - m.x5 + m.x253 == 0)
m.c350 = Constraint(expr= - m.x5 + m.x254 == 0)
m.c351 = Constraint(expr= - m.x6 + m.x255 == 0)
m.c352 = Constraint(expr= - m.x6 + m.x256 == 0)
m.c353 = Constraint(expr= - m.x6 + m.x257 == 0)
m.c354 = Constraint(expr= - m.x6 + m.x258 == 0)
m.c355 = Constraint(expr= - m.x6 + m.x259 == 0)
m.c356 = Constraint(expr= - m.x6 + m.x260 == 0)
m.c357 = Constraint(expr= - m.x6 + m.x261 == 0)
m.c358 = Constraint(expr= - m.x6 + m.x262 == 0)
m.c359 = Constraint(expr= - m.x6 + m.x263 == 0)
m.c360 = Constraint(expr= - m.x6 + m.x264 == 0)
m.c361 = Constraint(expr= - m.x7 + m.x325 - m.x565 == 0)
m.c362 = Constraint(expr= - m.x7 + m.x326 - m.x566 == 0)
m.c363 = Constraint(expr= - m.x7 + m.x327 - m.x567 == 0)
m.c364 = Constraint(expr= - m.x7 + m.x328 - m.x568 == 0)
m.c365 = Constraint(expr= - m.x7 + m.x329 - m.x569 == 0)
m.c366 = Constraint(expr= - m.x7 + m.x330 - m.x570 == 0)
m.c367 = Constraint(expr= - m.x7 + m.x331 - m.x571 == 0)
m.c368 = Constraint(expr= - m.x7 + m.x332 - m.x572 == 0)
m.c369 = Constraint(expr= - m.x7 + m.x333 - m.x573 == 0)
m.c370 = Constraint(expr= - m.x7 + m.x334 - m.x574 == 0)
m.c371 = Constraint(expr= - m.x8 + m.x335 + m.x565 - m.x575 == 0)
m.c372 = Constraint(expr= - m.x8 + m.x336 + m.x566 - m.x576 == 0)
m.c373 = Constraint(expr= - m.x8 + m.x337 + m.x567 - m.x577 == 0)
m.c374 = Constraint(expr= - m.x8 + m.x338 + m.x568 - m.x578 == 0)
m.c375 = Constraint(expr= - m.x8 + m.x339 + m.x569 - m.x579 == 0)
m.c376 = Constraint(expr= - m.x8 + m.x340 + m.x570 - m.x580 == 0)
m.c377 = Constraint(expr= - m.x8 + m.x341 + m.x571 - m.x581 == 0)
m.c378 = Constraint(expr= - m.x8 + m.x342 + m.x572 - m.x582 == 0)
m.c379 = Constraint(expr= - m.x8 + m.x343 + m.x573 - m.x583 == 0)
m.c380 = Constraint(expr= - m.x8 + m.x344 + m.x574 - m.x584 == 0)
m.c381 = Constraint(expr= - m.x9 + m.x345 + m.x575 - m.x585 == 0)
m.c382 = Constraint(expr= - m.x9 + m.x346 + m.x576 - m.x586 == 0)
m.c383 = Constraint(expr= - m.x9 + m.x347 + m.x577 - m.x587 == 0)
m.c384 = Constraint(expr= - m.x9 + m.x348 + m.x578 - m.x588 == 0)
m.c385 = Constraint(expr= - m.x9 + m.x349 + m.x579 - m.x589 == 0)
m.c386 = Constraint(expr= - m.x9 + m.x350 + m.x580 - m.x590 == 0)
m.c387 = Constraint(expr= - m.x9 + m.x351 + m.x581 - m.x591 == 0)
m.c388 = Constraint(expr= - m.x9 + m.x352 + m.x582 - m.x592 == 0)
m.c389 = Constraint(expr= - m.x9 + m.x353 + m.x583 - m.x593 == 0)
m.c390 = Constraint(expr= - m.x9 + m.x354 + m.x584 - m.x594 == 0)
m.c391 = Constraint(expr= - m.x10 + m.x355 + m.x585 - m.x595 == 0)
m.c392 = Constraint(expr= - m.x10 + m.x356 + m.x586 - m.x596 == 0)
m.c393 = Constraint(expr= - m.x10 + m.x357 + m.x587 - m.x597 == 0)
m.c394 = Constraint(expr= - m.x10 + m.x358 + m.x588 - m.x598 == 0)
m.c395 = Constraint(expr= - m.x10 + m.x359 + m.x589 - m.x599 == 0)
m.c396 = Constraint(expr= - m.x10 + m.x360 + m.x590 - m.x600 == 0)
m.c397 = Constraint(expr= - m.x10 + m.x361 + m.x591 - m.x601 == 0)
m.c398 = Constraint(expr= - m.x10 + m.x362 + m.x592 - m.x602 == 0)
m.c399 = Constraint(expr= - m.x10 + m.x363 + m.x593 - m.x603 == 0)
m.c400 = Constraint(expr= - m.x10 + m.x364 + m.x594 - m.x604 == 0)
m.c401 = Constraint(expr= - m.x11 + m.x365 + m.x595 - m.x605 == 0)
m.c402 = Constraint(expr= - m.x11 + m.x366 + m.x596 - m.x606 == 0)
m.c403 = Constraint(expr= - m.x11 + m.x367 + m.x597 - m.x607 == 0)
m.c404 = Constraint(expr= - m.x11 + m.x368 + m.x598 - m.x608 == 0)
m.c405 = Constraint(expr= - m.x11 + m.x369 + m.x599 - m.x609 == 0)
m.c406 = Constraint(expr= - m.x11 + m.x370 + m.x600 - m.x610 == 0)
m.c407 = Constraint(expr= - m.x11 + m.x371 + m.x601 - m.x611 == 0)
m.c408 = Constraint(expr= - m.x11 + m.x372 + m.x602 - m.x612 == 0)
m.c409 = Constraint(expr= - m.x11 + m.x373 + m.x603 - m.x613 == 0)
m.c410 = Constraint(expr= - m.x11 + m.x374 + m.x604 - m.x614 == 0)
m.c411 = Constraint(expr= - m.x12 + m.x375 + m.x605 - m.x615 == 0)
m.c412 = Constraint(expr= - m.x12 + m.x376 + m.x606 - m.x616 == 0)
m.c413 = Constraint(expr= - m.x12 + m.x377 + m.x607 - m.x617 == 0)
m.c414 = Constraint(expr= - m.x12 + m.x378 + m.x608 - m.x618 == 0)
m.c415 = Constraint(expr= - m.x12 + m.x379 + m.x609 - m.x619 == 0)
m.c416 = Constraint(expr= - m.x12 + m.x380 + m.x610 - m.x620 == 0)
m.c417 = Constraint(expr= - m.x12 + m.x381 + m.x611 - m.x621 == 0)
m.c418 = Constraint(expr= - m.x12 + m.x382 + m.x612 - m.x622 == 0)
m.c419 = Constraint(expr= - m.x12 + m.x383 + m.x613 - m.x623 == 0)
m.c420 = Constraint(expr= - m.x12 + m.x384 + m.x614 - m.x624 == 0)
m.c421 = Constraint(expr= - m.x1 + m.x13 == 0)
m.c422 = Constraint(expr= - m.x2 + m.x14 == 0)
m.c423 = Constraint(expr= - m.x3 + m.x15 == 0)
m.c424 = Constraint(expr= - m.x4 + m.x16 == 0)
m.c425 = Constraint(expr= - m.x5 + m.x17 == 0)
m.c426 = Constraint(expr= - m.x6 + m.x18 == 0)
m.c427 = Constraint(expr= - m.x7 + m.x19 == 0)
m.c428 = Constraint(expr= - m.x8 + m.x20 == 0)
m.c429 = Constraint(expr= - m.x9 + m.x21 == 0)
m.c430 = Constraint(expr= - m.x10 + m.x22 == 0)
m.c431 = Constraint(expr= - m.x11 + m.x23 == 0)
m.c432 = Constraint(expr= - m.x12 + m.x24 == 0)
m.c433 = Constraint(expr= - 1.37455*m.x445 + m.x505 - m.x626 - m.x627 - m.x628 == 0)
m.c434 = Constraint(expr= - 2.472633*m.x446 + m.x506 - m.x626 - m.x627 - m.x628 == 0)
m.c435 = Constraint(expr= - 4.976822*m.x447 + m.x507 - m.x626 - m.x627 - m.x628 == 0)
m.c436 = Constraint(expr= - 2.565652*m.x448 + m.x508 - m.x626 - m.x627 - m.x628 == 0)
m.c437 = Constraint(expr= - 3.356331*m.x449 + m.x509 - m.x626 - m.x627 - m.x628 == 0)
m.c438 = Constraint(expr= - 1.44013616*m.x450 + m.x510 - m.x626 - m.x627 - m.x628 == 0)
m.c439 = Constraint(expr= - 1.959312*m.x451 + m.x511 - m.x626 - m.x627 - m.x628 == 0)
m.c440 = Constraint(expr= - 2.5554035*m.x452 + m.x512 - m.x626 - m.x627 - m.x628 == 0)
m.c441 = Constraint(expr= - 6.121276*m.x453 + m.x513 - m.x626 - m.x627 - m.x628 == 0)
m.c442 = Constraint(expr= - 2.268122*m.x454 + m.x514 - m.x626 - m.x627 - m.x628 == 0)
m.c443 = Constraint(expr= - 4.020626*m.x455 + m.x515 - m.x629 - m.x630 - m.x631 == 0)
m.c444 = Constraint(expr= - 2.964906*m.x456 + m.x516 - m.x629 - m.x630 - m.x631 == 0)
m.c445 = Constraint(expr= - 4.504642*m.x457 + m.x517 - m.x629 - m.x630 - m.x631 == 0)
m.c446 = Constraint(expr= - 3.200062*m.x458 + m.x518 - m.x629 - m.x630 - m.x631 == 0)
m.c447 = Constraint(expr= - 2.624108*m.x459 + m.x519 - m.x629 - m.x630 - m.x631 == 0)
m.c448 = Constraint(expr= - 0.04478201*m.x460 + m.x520 - m.x629 - m.x630 - m.x631 == 0)
m.c449 = Constraint(expr= - 3.275987*m.x461 + m.x521 - m.x629 - m.x630 - m.x631 == 0)
m.c450 = Constraint(expr= - 0.9265037*m.x462 + m.x522 - m.x629 - m.x630 - m.x631 == 0)
m.c451 = Constraint(expr= - 3.760758*m.x463 + m.x523 - m.x629 - m.x630 - m.x631 == 0)
m.c452 = Constraint(expr= - 3.826681*m.x464 + m.x524 - m.x629 - m.x630 - m.x631 == 0)
m.c453 = Constraint(expr= - 5.974445*m.x465 + m.x525 - m.x632 - m.x633 - m.x634 == 0)
m.c454 = Constraint(expr= - 2.597016*m.x466 + m.x526 - m.x632 - m.x633 - m.x634 == 0)
m.c455 = Constraint(expr= - 4.248418*m.x467 + m.x527 - m.x632 - m.x633 - m.x634 == 0)
m.c456 = Constraint(expr= - 4.934691*m.x468 + m.x528 - m.x632 - m.x633 - m.x634 == 0)
m.c457 = Constraint(expr= - 5.99296*m.x469 + m.x529 - m.x632 - m.x633 - m.x634 == 0)
m.c458 = Constraint(expr= - 0.68209498*m.x470 + m.x530 - m.x632 - m.x633 - m.x634 == 0)
m.c459 = Constraint(expr= - 2.410622*m.x471 + m.x531 - m.x632 - m.x633 - m.x634 == 0)
m.c460 = Constraint(expr= - 2.4881944*m.x472 + m.x532 - m.x632 - m.x633 - m.x634 == 0)
m.c461 = Constraint(expr= - 7.781311*m.x473 + m.x533 - m.x632 - m.x633 - m.x634 == 0)
m.c462 = Constraint(expr= - 7.257567*m.x474 + m.x534 - m.x632 - m.x633 - m.x634 == 0)
m.c463 = Constraint(expr= - 1.012926*m.x475 + m.x535 - m.x635 - m.x636 - m.x637 == 0)
m.c464 = Constraint(expr= - 2.996514*m.x476 + m.x536 - m.x635 - m.x636 - m.x637 == 0)
m.c465 = Constraint(expr= - 3.493567*m.x477 + m.x537 - m.x635 - m.x636 - m.x637 == 0)
m.c466 = Constraint(expr= - 3.433273*m.x478 + m.x538 - m.x635 - m.x636 - m.x637 == 0)
m.c467 = Constraint(expr= - 4.120419*m.x479 + m.x539 - m.x635 - m.x636 - m.x637 == 0)
m.c468 = Constraint(expr= - 1.90055992*m.x480 + m.x540 - m.x635 - m.x636 - m.x637 == 0)
m.c469 = Constraint(expr= - 2.112299*m.x481 + m.x541 - m.x635 - m.x636 - m.x637 == 0)
m.c470 = Constraint(expr= - 1.4857817*m.x482 + m.x542 - m.x635 - m.x636 - m.x637 == 0)
m.c471 = Constraint(expr= - 4.199485*m.x483 + m.x543 - m.x635 - m.x636 - m.x637 == 0)
m.c472 = Constraint(expr= - 3.512231*m.x484 + m.x544 - m.x635 - m.x636 - m.x637 == 0)
m.c473 = Constraint(expr= - 5.547826*m.x485 + m.x545 - m.x638 - m.x639 - m.x640 == 0)
m.c474 = Constraint(expr= - 3.024617*m.x486 + m.x546 - m.x638 - m.x639 - m.x640 == 0)
m.c475 = Constraint(expr= - 4.285229*m.x487 + m.x547 - m.x638 - m.x639 - m.x640 == 0)
m.c476 = Constraint(expr= - 2.960692*m.x488 + m.x548 - m.x638 - m.x639 - m.x640 == 0)
m.c477 = Constraint(expr= - 4.627118*m.x489 + m.x549 - m.x638 - m.x639 - m.x640 == 0)
m.c478 = Constraint(expr= - 2.6051957*m.x490 + m.x550 - m.x638 - m.x639 - m.x640 == 0)
m.c479 = Constraint(expr= - 2.520239*m.x491 + m.x551 - m.x638 - m.x639 - m.x640 == 0)
m.c480 = Constraint(expr= - 2.207549*m.x492 + m.x552 - m.x638 - m.x639 - m.x640 == 0)
m.c481 = Constraint(expr= - 7.75634*m.x493 + m.x553 - m.x638 - m.x639 - m.x640 == 0)
m.c482 = Constraint(expr= - 8.229719*m.x494 + m.x554 - m.x638 - m.x639 - m.x640 == 0)
m.c483 = Constraint(expr= - 5.486787*m.x495 + m.x555 - m.x641 - m.x642 - m.x643 == 0)
m.c484 = Constraint(expr= - 2.461346*m.x496 + m.x556 - m.x641 - m.x642 - m.x643 == 0)
m.c485 = Constraint(expr= - 8.845282*m.x497 + m.x557 - m.x641 - m.x642 - m.x643 == 0)
m.c486 = Constraint(expr= - 5.157271*m.x498 + m.x558 - m.x641 - m.x642 - m.x643 == 0)
m.c487 = Constraint(expr= - 4.191177*m.x499 + m.x559 - m.x641 - m.x642 - m.x643 == 0)
m.c488 = Constraint(expr= - 5.13465497*m.x500 + m.x560 - m.x641 - m.x642 - m.x643 == 0)
m.c489 = Constraint(expr= - 1.290353*m.x501 + m.x561 - m.x641 - m.x642 - m.x643 == 0)
m.c490 = Constraint(expr= - 2.683989*m.x502 + m.x562 - m.x641 - m.x642 - m.x643 == 0)
m.c491 = Constraint(expr= - 10.832325*m.x503 + m.x563 - m.x641 - m.x642 - m.x643 == 0)
m.c492 = Constraint(expr= - 8.466163*m.x504 + m.x564 - m.x641 - m.x642 - m.x643 == 0)
m.c493 = Constraint(expr= m.x385 - m.x445 - m.x644 - m.x674 - m.x692 == 0)
m.c494 = Constraint(expr= m.x386 - m.x446 - m.x644 - m.x674 - m.x692 == 0)
m.c495 = Constraint(expr= m.x387 - m.x447 - m.x644 - m.x674 - m.x692 == 0)
m.c496 = Constraint(expr= m.x388 - m.x448 - m.x644 - m.x674 - m.x692 == 0)
m.c497 = Constraint(expr= m.x389 - m.x449 - m.x644 - m.x674 - m.x692 == 0)
m.c498 = Constraint(expr= m.x390 - m.x450 - m.x644 - m.x674 - m.x692 == 0)
m.c499 = Constraint(expr= m.x391 - m.x451 - m.x644 - m.x674 - m.x692 == 0)
m.c500 = Constraint(expr= m.x392 - m.x452 - m.x644 - m.x674 - m.x692 == 0)
m.c501 = Constraint(expr= m.x393 - m.x453 - m.x644 - m.x674 - m.x692 == 0)
m.c502 = Constraint(expr= m.x394 - m.x454 - m.x644 - m.x674 - m.x692 == 0)
m.c503 = Constraint(expr= m.x395 - m.x455 - m.x645 - m.x675 - m.x693 == 0)
m.c504 = Constraint(expr= m.x396 - m.x456 - m.x645 - m.x675 - m.x693 == 0)
m.c505 = Constraint(expr= m.x397 - m.x457 - m.x645 - m.x675 - m.x693 == 0)
m.c506 = Constraint(expr= m.x398 - m.x458 - m.x645 - m.x675 - m.x693 == 0)
m.c507 = Constraint(expr= m.x399 - m.x459 - m.x645 - m.x675 - m.x693 == 0)
m.c508 = Constraint(expr= m.x400 - m.x460 - m.x645 - m.x675 - m.x693 == 0)
m.c509 = Constraint(expr= m.x401 - m.x461 - m.x645 - m.x675 - m.x693 == 0)
m.c510 = Constraint(expr= m.x402 - m.x462 - m.x645 - m.x675 - m.x693 == 0)
m.c511 = Constraint(expr= m.x403 - m.x463 - m.x645 - m.x675 - m.x693 == 0)
m.c512 = Constraint(expr= m.x404 - m.x464 - m.x645 - m.x675 - m.x693 == 0)
m.c513 = Constraint(expr= m.x405 - m.x465 - m.x646 - m.x676 - m.x694 == 0)
m.c514 = Constraint(expr= m.x406 - m.x466 - m.x646 - m.x676 - m.x694 == 0)
m.c515 = Constraint(expr= m.x407 - m.x467 - m.x646 - m.x676 - m.x694 == 0)
m.c516 = Constraint(expr= m.x408 - m.x468 - m.x646 - m.x676 - m.x694 == 0)
m.c517 = Constraint(expr= m.x409 - m.x469 - m.x646 - m.x676 - m.x694 == 0)
m.c518 = Constraint(expr= m.x410 - m.x470 - m.x646 - m.x676 - m.x694 == 0)
m.c519 = Constraint(expr= m.x411 - m.x471 - m.x646 - m.x676 - m.x694 == 0)
m.c520 = Constraint(expr= m.x412 - m.x472 - m.x646 - m.x676 - m.x694 == 0)
m.c521 = Constraint(expr= m.x413 - m.x473 - m.x646 - m.x676 - m.x694 == 0)
m.c522 = Constraint(expr= m.x414 - m.x474 - m.x646 - m.x676 - m.x694 == 0)
m.c523 = Constraint(expr= m.x415 - m.x475 - m.x647 - m.x677 - m.x695 == 0)
m.c524 = Constraint(expr= m.x416 - m.x476 - m.x647 - m.x677 - m.x695 == 0)
m.c525 = Constraint(expr= m.x417 - m.x477 - m.x647 - m.x677 - m.x695 == 0)
m.c526 = Constraint(expr= m.x418 - m.x478 - m.x647 - m.x677 - m.x695 == 0)
m.c527 = Constraint(expr= m.x419 - m.x479 - m.x647 - m.x677 - m.x695 == 0)
m.c528 = Constraint(expr= m.x420 - m.x480 - m.x647 - m.x677 - m.x695 == 0)
m.c529 = Constraint(expr= m.x421 - m.x481 - m.x647 - m.x677 - m.x695 == 0)
m.c530 = Constraint(expr= m.x422 - m.x482 - m.x647 - m.x677 - m.x695 == 0)
m.c531 = Constraint(expr= m.x423 - m.x483 - m.x647 - m.x677 - m.x695 == 0)
m.c532 = Constraint(expr= m.x424 - m.x484 - m.x647 - m.x677 - m.x695 == 0)
m.c533 = Constraint(expr= m.x425 - m.x485 - m.x648 - m.x678 - m.x696 == 0)
m.c534 = Constraint(expr= m.x426 - m.x486 - m.x648 - m.x678 - m.x696 == 0)
m.c535 = Constraint(expr= m.x427 - m.x487 - m.x648 - m.x678 - m.x696 == 0)
m.c536 = Constraint(expr= m.x428 - m.x488 - m.x648 - m.x678 - m.x696 == 0)
m.c537 = Constraint(expr= m.x429 - m.x489 - m.x648 - m.x678 - m.x696 == 0)
m.c538 = Constraint(expr= m.x430 - m.x490 - m.x648 - m.x678 - m.x696 == 0)
m.c539 = Constraint(expr= m.x431 - m.x491 - m.x648 - m.x678 - m.x696 == 0)
m.c540 = Constraint(expr= m.x432 - m.x492 - m.x648 - m.x678 - m.x696 == 0)
m.c541 = Constraint(expr= m.x433 - m.x493 - m.x648 - m.x678 - m.x696 == 0)
m.c542 = Constraint(expr= m.x434 - m.x494 - m.x648 - m.x678 - m.x696 == 0)
m.c543 = Constraint(expr= m.x435 - m.x495 - m.x649 - m.x679 - m.x697 == 0)
m.c544 = Constraint(expr= m.x436 - m.x496 - m.x649 - m.x679 - m.x697 == 0)
m.c545 = Constraint(expr= m.x437 - m.x497 - m.x649 - m.x679 - m.x697 == 0)
m.c546 = Constraint(expr= m.x438 - m.x498 - m.x649 - m.x679 - m.x697 == 0)
m.c547 = Constraint(expr= m.x439 - m.x499 - m.x649 - m.x679 - m.x697 == 0)
m.c548 = Constraint(expr= m.x440 - m.x500 - m.x649 - m.x679 - m.x697 == 0)
m.c549 = Constraint(expr= m.x441 - m.x501 - m.x649 - m.x679 - m.x697 == 0)
m.c550 = Constraint(expr= m.x442 - m.x502 - m.x649 - m.x679 - m.x697 == 0)
m.c551 = Constraint(expr= m.x443 - m.x503 - m.x649 - m.x679 - m.x697 == 0)
m.c552 = Constraint(expr= m.x444 - m.x504 - m.x649 - m.x679 - m.x697 == 0)
m.c553 = Constraint(expr= m.b726 + m.b744 + m.b762 <= 1)
m.c554 = Constraint(expr= m.b727 + m.b745 + m.b763 <= 1)
m.c555 = Constraint(expr= m.b728 + m.b746 + m.b764 <= 1)
m.c556 = Constraint(expr= m.b729 + m.b747 + m.b765 <= 1)
m.c557 = Constraint(expr= m.b730 + m.b748 + m.b766 <= 1)
m.c558 = Constraint(expr= m.b731 + m.b749 + m.b767 <= 1)
m.c559 = Constraint(expr= m.x627 - 3.145*m.x650 - 2.465*m.x651 == 0)
m.c560 = Constraint(expr= m.x630 - 3.145*m.x652 - 2.465*m.x653 == 0)
m.c561 = Constraint(expr= m.x633 - 3.145*m.x654 - 2.465*m.x655 == 0)
m.c562 = Constraint(expr= m.x636 - 3.145*m.x656 - 2.465*m.x657 == 0)
m.c563 = Constraint(expr= m.x639 - 3.145*m.x658 - 2.465*m.x659 == 0)
m.c564 = Constraint(expr= m.x642 - 3.145*m.x660 - 2.465*m.x661 == 0)
m.c565 = Constraint(expr= m.x644 - m.x650 - m.x651 == 0)
m.c566 = Constraint(expr= m.x645 - m.x652 - m.x653 == 0)
m.c567 = Constraint(expr= m.x646 - m.x654 - m.x655 == 0)
m.c568 = Constraint(expr= m.x647 - m.x656 - m.x657 == 0)
m.c569 = Constraint(expr= m.x648 - m.x658 - m.x659 == 0)
m.c570 = Constraint(expr= m.x649 - m.x660 - m.x661 == 0)
m.c571 = Constraint(expr= m.x650 - m.x662 - m.x663 == 0)
m.c572 = Constraint(expr= m.x652 - m.x664 - m.x665 == 0)
m.c573 = Constraint(expr= m.x654 - m.x666 - m.x667 == 0)
m.c574 = Constraint(expr= m.x656 - m.x668 - m.x669 == 0)
m.c575 = Constraint(expr= m.x658 - m.x670 - m.x671 == 0)
m.c576 = Constraint(expr= m.x660 - m.x672 - m.x673 == 0)
m.c577 = Constraint(expr= m.x662 - 20*m.b732 <= 0)
m.c578 = Constraint(expr= m.x664 - 20*m.b734 <= 0)
m.c579 = Constraint(expr= m.x666 - 20*m.b736 <= 0)
m.c580 = Constraint(expr= m.x668 - 20*m.b738 <= 0)
m.c581 = Constraint(expr= m.x670 - 20*m.b740 <= 0)
m.c582 = Constraint(expr= m.x672 - 20*m.b742 <= 0)
m.c583 = Constraint(expr= m.x663 - 20*m.b733 == 0)
m.c584 = Constraint(expr= m.x665 - 20*m.b735 == 0)
m.c585 = Constraint(expr= m.x667 - 20*m.b737 == 0)
m.c586 = Constraint(expr= m.x669 - 20*m.b739 == 0)
m.c587 = Constraint(expr= m.x671 - 20*m.b741 == 0)
m.c588 = Constraint(expr= m.x673 - 20*m.b743 == 0)
m.c589 = Constraint(expr= m.x651 - 85*m.b733 <= 0)
m.c590 = Constraint(expr= m.x653 - 85*m.b735 <= 0)
m.c591 = Constraint(expr= m.x655 - 85*m.b737 <= 0)
m.c592 = Constraint(expr= m.x657 - 85*m.b739 <= 0)
m.c593 = Constraint(expr= m.x659 - 85*m.b741 <= 0)
m.c594 = Constraint(expr= m.x661 - 85*m.b743 <= 0)
m.c595 = Constraint(expr= - m.b726 + m.b732 + m.b733 == 0)
m.c596 = Constraint(expr= - m.b727 + m.b734 + m.b735 == 0)
m.c597 = Constraint(expr= - m.b728 + m.b736 + m.b737 == 0)
m.c598 = Constraint(expr= - m.b729 + m.b738 + m.b739 == 0)
m.c599 = Constraint(expr= - m.b730 + m.b740 + m.b741 == 0)
m.c600 = Constraint(expr= - m.b731 + m.b742 + m.b743 == 0)
m.c601 = Constraint(expr= m.x626 - 3.06*m.x680 - 2.38*m.x681 == 0)
m.c602 = Constraint(expr= m.x629 - 3.06*m.x682 - 2.38*m.x683 == 0)
m.c603 = Constraint(expr= m.x632 - 3.06*m.x684 - 2.38*m.x685 == 0)
m.c604 = Constraint(expr= m.x635 - 3.06*m.x686 - 2.38*m.x687 == 0)
m.c605 = Constraint(expr= m.x638 - 3.06*m.x688 - 2.38*m.x689 == 0)
m.c606 = Constraint(expr= m.x641 - 3.06*m.x690 - 2.38*m.x691 == 0)
m.c607 = Constraint(expr= m.x674 - m.x680 - m.x681 == 0)
m.c608 = Constraint(expr= m.x675 - m.x682 - m.x683 == 0)
m.c609 = Constraint(expr= m.x676 - m.x684 - m.x685 == 0)
m.c610 = Constraint(expr= m.x677 - m.x686 - m.x687 == 0)
m.c611 = Constraint(expr= m.x678 - m.x688 - m.x689 == 0)
m.c612 = Constraint(expr= m.x679 - m.x690 - m.x691 == 0)
m.c613 = Constraint(expr= m.x680 - 40*m.b750 <= 0)
m.c614 = Constraint(expr= m.x682 - 40*m.b752 <= 0)
m.c615 = Constraint(expr= m.x684 - 40*m.b754 <= 0)
m.c616 = Constraint(expr= m.x686 - 40*m.b756 <= 0)
m.c617 = Constraint(expr= m.x688 - 40*m.b758 <= 0)
m.c618 = Constraint(expr= m.x690 - 40*m.b760 <= 0)
m.c619 = Constraint(expr= m.x681 - 85*m.b751 <= 0)
m.c620 = Constraint(expr= m.x683 - 85*m.b753 <= 0)
m.c621 = Constraint(expr= m.x685 - 85*m.b755 <= 0)
m.c622 = Constraint(expr= m.x687 - 85*m.b757 <= 0)
m.c623 = Constraint(expr= m.x689 - 85*m.b759 <= 0)
m.c624 = Constraint(expr= m.x691 - 85*m.b761 <= 0)
m.c625 = Constraint(expr= m.x681 - 40*m.b751 >= 0)
m.c626 = Constraint(expr= m.x683 - 40*m.b753 >= 0)
m.c627 = Constraint(expr= m.x685 - 40*m.b755 >= 0)
m.c628 = Constraint(expr= m.x687 - 40*m.b757 >= 0)
m.c629 = Constraint(expr= m.x689 - 40*m.b759 >= 0)
m.c630 = Constraint(expr= m.x691 - 40*m.b761 >= 0)
m.c631 = Constraint(expr= - m.b744 + m.b750 + m.b751 == 0)
m.c632 = Constraint(expr= - m.b745 + m.b752 + m.b753 == 0)
m.c633 = Constraint(expr= - m.b746 + m.b754 + m.b755 == 0)
m.c634 = Constraint(expr= - m.b747 + m.b756 + m.b757 == 0)
m.c635 = Constraint(expr= - m.b748 + m.b758 + m.b759 == 0)
m.c636 = Constraint(expr= - m.b749 + m.b760 + m.b761 == 0)
m.c637 = Constraint(expr= m.x628 - 3.4*m.x698 == 0)
m.c638 = Constraint(expr= m.x631 - 2.04*m.x699 - 3.4*m.x700 - 2.04*m.x701 == 0)
m.c639 = Constraint(expr= m.x634 - 1.7*m.x702 - 2.04*m.x703 - 1.7*m.x704 - 3.4*m.x705 - 2.04*m.x706 - 1.7*m.x707 == 0)
m.c640 = Constraint(expr= m.x637 - 1.7*m.x708 - 2.04*m.x709 - 1.7*m.x710 - 3.4*m.x711 - 2.04*m.x712 - 1.7*m.x713 == 0)
m.c641 = Constraint(expr= m.x640 - 1.7*m.x714 - 2.04*m.x715 - 1.7*m.x716 - 3.4*m.x717 - 2.04*m.x718 - 1.7*m.x719 == 0)
m.c642 = Constraint(expr= m.x643 - 1.7*m.x720 - 2.04*m.x721 - 1.7*m.x722 - 3.4*m.x723 - 2.04*m.x724 - 1.7*m.x725 == 0)
m.c643 = Constraint(expr= m.x692 - m.x698 == 0)
m.c644 = Constraint(expr= m.x693 - m.x699 - m.x700 - m.x701 == 0)
m.c645 = Constraint(expr= m.x694 - m.x702 - m.x703 - m.x704 - m.x705 - m.x706 - m.x707 == 0)
m.c646 = Constraint(expr= m.x695 - m.x708 - m.x709 - m.x710 - m.x711 - m.x712 - m.x713 == 0)
m.c647 = Constraint(expr= m.x696 - m.x714 - m.x715 - m.x716 - m.x717 - m.x718 - m.x719 == 0)
m.c648 = Constraint(expr= m.x697 - m.x720 - m.x721 - m.x722 - m.x723 - m.x724 - m.x725 == 0)
m.c649 = Constraint(expr= m.x698 - 85*m.b768 <= 0)
m.c650 = Constraint(expr= m.x699 - 85*m.b772 <= 0)
m.c651 = Constraint(expr= m.x700 - 85*m.b771 <= 0)
m.c652 = Constraint(expr= m.x701 - 85*m.b772 <= 0)
m.c653 = Constraint(expr= m.x702 - 85*m.b776 <= 0)
m.c654 = Constraint(expr= m.x703 - 85*m.b775 <= 0)
m.c655 = Constraint(expr= m.x704 - 85*m.b776 <= 0)
m.c656 = Constraint(expr= m.x705 - 85*m.b774 <= 0)
m.c657 = Constraint(expr= m.x706 - 85*m.b775 <= 0)
m.c658 = Constraint(expr= m.x707 - 85*m.b776 <= 0)
m.c659 = Constraint(expr= m.x708 - 85*m.b779 <= 0)
m.c660 = Constraint(expr= m.x709 - 85*m.b778 <= 0)
m.c661 = Constraint(expr= m.x710 - 85*m.b779 <= 0)
m.c662 = Constraint(expr= m.x711 - 85*m.b777 <= 0)
m.c663 = Constraint(expr= m.x712 - 85*m.b778 <= 0)
m.c664 = Constraint(expr= m.x713 - 85*m.b779 <= 0)
m.c665 = Constraint(expr= m.x714 - 85*m.b782 <= 0)
m.c666 = Constraint(expr= m.x715 - 85*m.b781 <= 0)
m.c667 = Constraint(expr= m.x716 - 85*m.b782 <= 0)
m.c668 = Constraint(expr= m.x717 - 85*m.b780 <= 0)
m.c669 = Constraint(expr= m.x718 - 85*m.b781 <= 0)
m.c670 = Constraint(expr= m.x719 - 85*m.b782 <= 0)
m.c671 = Constraint(expr= m.x720 - 85*m.b785 <= 0)
m.c672 = Constraint(expr= m.x721 - 85*m.b784 <= 0)
m.c673 = Constraint(expr= m.x722 - 85*m.b785 <= 0)
m.c674 = Constraint(expr= m.x723 - 85*m.b783 <= 0)
m.c675 = Constraint(expr= m.x724 - 85*m.b784 <= 0)
m.c676 = Constraint(expr= m.x725 - 85*m.b785 <= 0)
m.c677 = Constraint(expr= m.x698 - 5*m.b768 >= 0)
m.c678 = Constraint(expr= m.x699 - 25*m.b772 >= 0)
m.c679 = Constraint(expr= m.x700 - 5*m.b771 >= 0)
m.c680 = Constraint(expr= m.x701 - 25*m.b772 >= 0)
m.c681 = Constraint(expr= m.x702 - 30*m.b776 >= 0)
m.c682 = Constraint(expr= m.x703 - 25*m.b775 >= 0)
m.c683 = Constraint(expr= m.x704 - 30*m.b776 >= 0)
m.c684 = Constraint(expr= m.x705 - 5*m.b774 >= 0)
m.c685 = Constraint(expr= m.x706 - 25*m.b775 >= 0)
m.c686 = Constraint(expr= m.x707 - 30*m.b776 >= 0)
m.c687 = Constraint(expr= m.x708 - 30*m.b779 >= 0)
m.c688 = Constraint(expr= m.x709 - 25*m.b778 >= 0)
m.c689 = Constraint(expr= m.x710 - 30*m.b779 >= 0)
m.c690 = Constraint(expr= m.x711 - 5*m.b777 >= 0)
m.c691 = Constraint(expr= m.x712 - 25*m.b778 >= 0)
m.c692 = Constraint(expr= m.x713 - 30*m.b779 >= 0)
m.c693 = Constraint(expr= m.x714 - 30*m.b782 >= 0)
m.c694 = Constraint(expr= m.x715 - 25*m.b781 >= 0)
m.c695 = Constraint(expr= m.x716 - 30*m.b782 >= 0)
m.c696 = Constraint(expr= m.x717 - 5*m.b780 >= 0)
m.c697 = Constraint(expr= m.x718 - 25*m.b781 >= 0)
m.c698 = Constraint(expr= m.x719 - 30*m.b782 >= 0)
m.c699 = Constraint(expr= m.x720 - 30*m.b785 >= 0)
m.c700 = Constraint(expr= m.x721 - 25*m.b784 >= 0)
m.c701 = Constraint(expr= m.x722 - 30*m.b785 >= 0)
m.c702 = Constraint(expr= m.x723 - 5*m.b783 >= 0)
m.c703 = Constraint(expr= m.x724 - 25*m.b784 >= 0)
m.c704 = Constraint(expr= m.x725 - 30*m.b785 >= 0)
m.c705 = Constraint(expr= - m.b762 + m.b768 + m.b769 + m.b770 == 0)
m.c706 = Constraint(expr= - m.b763 + m.b771 + m.b772 + m.b773 == 0)
m.c707 = Constraint(expr= - m.b764 + m.b774 + m.b775 + m.b776 == 0)
m.c708 = Constraint(expr= - m.b765 + m.b777 + m.b778 + m.b779 == 0)
m.c709 = Constraint(expr= - m.b766 + m.b780 + m.b781 + m.b782 == 0)
m.c710 = Constraint(expr= - m.b767 + m.b783 + m.b784 + m.b785 == 0)
m.c711 = Constraint(expr= m.b768 + m.b772 <= 1)
m.c712 = Constraint(expr= m.b769 + m.b772 <= 1)
m.c713 = Constraint(expr= m.b770 + m.b772 <= 1)
m.c714 = Constraint(expr= m.b768 + m.b776 <= 1)
m.c715 = Constraint(expr= m.b769 + m.b776 <= 1)
m.c716 = Constraint(expr= m.b770 + m.b776 <= 1)
m.c717 = Constraint(expr= m.b771 + m.b775 <= 1)
m.c718 = Constraint(expr= m.b772 + m.b775 <= 1)
m.c719 = Constraint(expr= m.b773 + m.b775 <= 1)
m.c720 = Constraint(expr= m.b771 + m.b776 <= 1)
m.c721 = Constraint(expr= m.b772 + m.b776 <= 1)
m.c722 = Constraint(expr= m.b773 + m.b776 <= 1)
m.c723 = Constraint(expr= m.b771 + m.b779 <= 1)
m.c724 = Constraint(expr= m.b772 + m.b779 <= 1)
m.c725 = Constraint(expr= m.b773 + m.b779 <= 1)
m.c726 = Constraint(expr= m.b774 + m.b778 <= 1)
m.c727 = Constraint(expr= m.b775 + m.b778 <= 1)
m.c728 = Constraint(expr= m.b776 + m.b778 <= 1)
m.c729 = Constraint(expr= m.b774 + m.b779 <= 1)
m.c730 = Constraint(expr= m.b775 + m.b779 <= 1)
m.c731 = Constraint(expr= m.b776 + m.b779 <= 1)
m.c732 = Constraint(expr= m.b774 + m.b782 <= 1)
m.c733 = Constraint(expr= m.b775 + m.b782 <= 1)
m.c734 = Constraint(expr= m.b776 + m.b782 <= 1)
m.c735 = Constraint(expr= m.b777 + m.b781 <= 1)
m.c736 = Constraint(expr= m.b778 + m.b781 <= 1)
m.c737 = Constraint(expr= m.b779 + m.b781 <= 1)
m.c738 = Constraint(expr= m.b777 + m.b782 <= 1)
m.c739 = Constraint(expr= m.b778 + m.b782 <= 1)
m.c740 = Constraint(expr= m.b779 + m.b782 <= 1)
m.c741 = Constraint(expr= m.b777 + m.b785 <= 1)
m.c742 = Constraint(expr= m.b778 + m.b785 <= 1)
m.c743 = Constraint(expr= m.b779 + m.b785 <= 1)
m.c744 = Constraint(expr= m.b780 + m.b784 <= 1)
m.c745 = Constraint(expr= m.b781 + m.b784 <= 1)
m.c746 = Constraint(expr= m.b782 + m.b784 <= 1)
m.c747 = Constraint(expr= m.b780 + m.b785 <= 1)
m.c748 = Constraint(expr= m.b781 + m.b785 <= 1)
m.c749 = Constraint(expr= m.b782 + m.b785 <= 1)
|
python
|
import sqlite3
import os
from pomodoro import Pomodoro

os.chdir("..")  # Go up one directory from working directory

# Build the path with os.path.join so it works on every OS; the original
# literal "data\pomodoros.db" only resolves correctly on Windows.
database_path = os.path.join("data", "pomodoros.db")

# Create database if it does not exist
if not os.path.exists(database_path):
    print("Creating database ...")
    # NOTE(review): this relies on the OS file association to execute the
    # script (Windows-only behavior); consider running it through
    # sys.executable for portability.
    os.system("database.py")

# Reuse the path constant instead of repeating the literal.
conn = sqlite3.connect(database_path)
cursor = conn.cursor()
pomodoro = Pomodoro(cursor)

### Main loop
while True:
    # Show the categories available
    category_id = pomodoro.show_categories()
    project_id = pomodoro.show_projects(category_id)
    pomodoro_time = input("Add the length of the pomodoro in minutes: ")
    # call for the timer
    pomodoro.timer(minutes=pomodoro_time)
    # Rest timer
    pomodoro.timer(mode="rest")
    # Ask for satisfaction
    satisfaction = input("Type how well was your pomodoro. 1=Good - 2=Bad: ")
    # Add the pomodoro to the database
    pomodoro.add_pomodoro(pomodoro_time, category_id, project_id, satisfaction)
    conn.commit()
    # Next step
    decision = pomodoro.next_decision()
    if decision == 1:
        continue
    elif decision == 2:
        pomodoro.end_project(project_id)
        conn.commit()
    elif decision == 3:
        pomodoro.cancel_project(project_id)
        conn.commit()
    else:
        break

conn.commit()
conn.close()
print("---ENDING PROGRAM---")
|
python
|
"""
*
* Author: Juarez Paulino(coderemite)
* Email: [email protected]
*
"""
w,h,k=map(int,input().split());r=0
for _ in [0]*k:
r+=(w+h<<1)-4;w-=4;h-=4
print(r)
|
python
|
# -*- coding: utf-8 -*-
# From the csv export of https://opendata.paris.fr/explore/dataset/les-titres-les-plus-pretes/
# count the number of works per 'type de document' and print the types in
# descending order of loans.
from collections import Counter
sep = ';'
cnt = Counter()
# The file contains accented characters: pin the encoding instead of
# relying on the platform default, which may not be UTF-8.
with open('les-titres-les-plus-pretes.csv', encoding='utf-8') as f_livres:
    for line in f_livres:
        # Skip the header row.
        if line.startswith("Rang;"):
            continue
        line = line.rstrip()
        cols = line.split(sep)
        # cols[1] = document type, cols[2] = loan count.
        cnt[cols[1]] += int(cols[2])
# most_common() returns the (type, count) tuples in descending order
# (https://docs.python.org/3.7/library/collections.html#collections.Counter.most_common)
for cat, nb in cnt.most_common():
    print(f"{cat} : {nb} prêts")
|
python
|
#!/usr/bin/env python
#pythonlib
import os
import sys
import math
import re
import time
import glob
import numpy
#appion
from pyami import imagefun
from appionlib import appionLoop2
from appionlib import apImage
from appionlib import apDisplay
from appionlib import apDatabase
from appionlib.apCtf import ctfdb
from appionlib import apDefocalPairs
from appionlib import appiondata
from appionlib import apParticle
from appionlib import apFile
from appionlib import apMask
from appionlib import apBoxer
from appionlib import apSizing
class ParticleExtractLoop(appionLoop2.AppionLoop):
 """
 Appion loop that walks a set of micrographs, gathers particle picks for
 each image (from a selection run, a previous stack, or a defocal pair),
 filters them by CTF / correlation / mask criteria, and hands the
 survivors to processParticles(), which subclasses must implement.
 """
 ############################################################
 ## Check pixel size
 ############################################################
 def checkPixelSize(self):
  # make sure that images all have same pixel size:
  # first get pixel size of first image:
  self.params['apix'] = None
  for imgdata in self.imgtree:
   # get pixel size
   imgname = imgdata['filename']
   if imgname in self.donedict:
    continue
   if self.params['apix'] is None:
    self.params['apix'] = apDatabase.getPixelSize(imgdata)
    apDisplay.printMsg("Stack pixelsize = %.3f A"%(self.params['apix']))
   if apDatabase.getPixelSize(imgdata) != self.params['apix']:
    apDisplay.printMsg("Image pixelsize %.3f A != Stack pixelsize %.3f A"%(apDatabase.getPixelSize(imgdata), self.params['apix']))
    apDisplay.printMsg("Problem image name: %s"%(apDisplay.short(imgdata['filename'])))
    apDisplay.printError("This particle selection run contains images of varying pixelsizes, a stack cannot be created")
 #=======================
 def getParticlesFromStack(self, stackdata,imgdata,is_defocpair=False):
  """
  For image (or defocal pair), imgdata get particles in corresponding stack
  """
  if is_defocpair is True:
   sibling, shiftpeak = apDefocalPairs.getShiftFromImage(imgdata, self.params['sessionname'])
   if shiftpeak is None:
    return [],{'shiftx':0, 'shifty':0, 'scale':1}
   shiftdata = {'shiftx':shiftpeak['shift'][0], 'shifty':shiftpeak['shift'][1], 'scale':shiftpeak['scalefactor']}
   searchimgdata = sibling
  else:
   searchimgdata = imgdata
   shiftdata = {'shiftx':0, 'shifty':0, 'scale':1}
  partq = appiondata.ApParticleData()
  partq['image'] = searchimgdata
  stackpartq = appiondata.ApStackParticleData()
  stackpartq['stack'] = stackdata
  stackpartq['particle'] = partq
  stackpartdatas = stackpartq.query()
  partdatas = []
  partorder = []
  for stackpartdata in stackpartdatas:
   # honor the --partlimit cap by particle number, if requested
   if self.params['partlimit'] and self.params['partlimit'] < stackpartdata['particleNumber']:
    continue
   partdata = stackpartdata['particle']
   partdatas.append(partdata)
   partorder.append(stackpartdata['particleNumber'])
  # reverse to ascending particleNumber order -- assumes the query
  # returned rows in descending order; confirm against appiondata.
  partdatas.reverse()
  partorder.reverse()
  self.writeStackParticleOrderFile(partorder)
  return partdatas, shiftdata
 def writeStackParticleOrderFile(self,partorder):
  # Append this image's particle numbers, one per line, to the run's
  # stackpartorder.list file.
  # NOTE(review): the file handle is never closed explicitly; consider
  # a with-statement.
  f = open(os.path.join(self.params['rundir'],'stackpartorder.list'),'a')
  if partorder:
   f.write('\n'.join(map((lambda x: '%d' % x),partorder))+'\n')
  return
 def getParticlesInImage(self,imgdata):
  """
  Collect the particle picks (and any defocal-pair shift) for one image,
  then apply the optional correlation and mask filters.
  """
  if self.params['defocpair'] is True and self.params['selectionid'] is not None:
   # using defocal pairs and particle picks
   partdatas, shiftdata = apParticle.getDefocPairParticles(imgdata, self.params['selectionid'], self.params['particlelabel'])
  elif self.params['fromstackid'] is not None:
   # using previous stack to make a new stack
   fromstackdata = appiondata.ApStackData.direct_query(self.params['fromstackid'])
   partdatas, shiftdata = self.getParticlesFromStack(fromstackdata,imgdata,self.params['defocpair'],)
  else:
   # using particle picks
   partdatas = apParticle.getParticles(imgdata, self.params['selectionid'], self.params['particlelabel'])
   shiftdata = {'shiftx':0, 'shifty':0, 'scale':1}
  apDisplay.printMsg("Found %d particles"%(len(partdatas)))
  ### apply correlation limits
  if self.params['correlationmin'] or self.params['correlationmax']:
   partdatas = self.eliminateMinMaxCCParticles(partdatas)
  ### apply masks
  if self.params['checkmask']:
   partdatas = self.eliminateMaskedParticles(partdatas, imgdata)
  return partdatas,shiftdata
 ############################################################
 ## Rejection Criteria
 ############################################################
 ############################################################
 ## image if additional criteria is not met
 ############################################################
 def rejectImage(self, imgdata):
  # Return False to reject the image, True to keep it.
  shortname = apDisplay.short(imgdata['filename'])
  if self.params['mag']:
   if not apDatabase.checkMag(imgdata, self.params['mag']):
    apDisplay.printColor(shortname+" was not at the specific magnification","cyan")
    return False
  return True
 ############################################################
 ## get CTF parameters and skip image if criteria is not met
 ############################################################
 def checkRequireCtf(self):
  # Lazily compute and cache whether any CTF-based screening was
  # requested on the command line.
  try:
   return self.params['saveRequireCtf']
  except KeyError:
   ctfres = self.params['ctfres80min'] or self.params['ctfres50min'] or self.params['ctfres80max'] or self.params['ctfres50max']
   defoc = self.params['mindefocus'] or self.params['maxdefocus']
   self.params['saveRequireCtf'] = self.params['ctfcutoff'] or ctfres or defoc
   return self.params['saveRequireCtf']
 #=======================
 def getBestCtfValue(self, imgdata, msg=False):
  # Use the explicitly requested CTF run if given, otherwise the best
  # available value for this image.
  if self.params['ctfrunid'] is not None:
   return ctfdb.getCtfValueForCtfRunId(imgdata, self.params['ctfrunid'], msg=msg)
  return ctfdb.getBestCtfValue(imgdata, sortType=self.params['ctfsorttype'], method=self.params['ctfmethod'], msg=msg)
 #=======================
 def getDefocusAmpConstForImage(self,imgdata,msg=False):
  ctfvalue = self.getBestCtfValue(imgdata, msg)
  ### This function returns defocus defined as negative underfocus
  defocus = -(abs(ctfvalue['defocus1'])+abs(ctfvalue['defocus2']))/2
  return defocus, ctfvalue['amplitude_contrast']
 #=======================
 def checkCtfParams(self, imgdata):
  """
  Return False if the image fails any requested CTF criterion
  (confidence cutoff, 0.8/0.5 resolution limits, defocus window);
  True otherwise.
  """
  shortname = apDisplay.short(imgdata['filename'])
  ctfvalue = self.getBestCtfValue(imgdata)
  ### check if we have values and if we care
  if ctfvalue is None:
   return not self.checkRequireCtf()
  ### check that CTF estimation is above confidence threshold
  conf = ctfdb.calculateConfidenceScore(ctfvalue)
  if self.params['ctfcutoff'] and conf < self.params['ctfcutoff']:
   apDisplay.printColor(shortname+" is below confidence threshold (conf="+str(round(conf,3))+")\n","cyan")
   return False
  ### check resolution requirement for CTF fit at 0.8 threshold
  if self.params['ctfres80min'] is not None or self.params['ctfres80max'] is not None:
   if not 'resolution_80_percent' in ctfvalue.keys() or ctfvalue['resolution_80_percent'] is None:
    apDisplay.printColor("%s: no 0.8 resolution found"%(shortname), "cyan")
    return False
   if self.params['ctfres80max'] and ctfvalue['resolution_80_percent'] > self.params['ctfres80max']:
    apDisplay.printColor("%s is above resolution threshold %.2f > %.2f"
     %(shortname, ctfvalue['resolution_80_percent'], self.params['ctfres80max']), "cyan")
    return False
   if self.params['ctfres80min'] and ctfvalue['resolution_80_percent'] < self.params['ctfres80min']:
    apDisplay.printColor("%s is below resolution threshold %.2f > %.2f"
     %(shortname, ctfvalue['resolution_80_percent'], self.params['ctfres80min']), "cyan")
    return False
  ### check resolution requirement for CTF fit at 0.5 threshold
  if self.params['ctfres50min'] is not None or self.params['ctfres50max'] is not None:
   if not 'resolution_50_percent' in ctfvalue.keys() or ctfvalue['resolution_50_percent'] is None:
    apDisplay.printColor("%s: no 0.5 resolution found"%(shortname), "cyan")
    return False
   if self.params['ctfres50max'] and ctfvalue['resolution_50_percent'] > self.params['ctfres50max']:
    apDisplay.printColor("%s is above resolution threshold %.2f > %.2f"
     %(shortname, ctfvalue['resolution_50_percent'], self.params['ctfres50max']), "cyan")
    return False
   if self.params['ctfres50min'] and ctfvalue['resolution_50_percent'] < self.params['ctfres50min']:
    apDisplay.printColor("%s is below resolution threshold %.2f > %.2f"
     %(shortname, ctfvalue['resolution_50_percent'], self.params['ctfres50min']), "cyan")
    return False
  if self.params['mindefocus'] is not None or self.params['maxdefocus'] is not None:
   ### get best defocus value
   ### defocus should be in negative meters
   if ctfvalue['defocus2'] is not None and ctfvalue['defocus1'] != ctfvalue['defocus2']:
    defocus = (ctfvalue['defocus1'] + ctfvalue['defocus2'])/2.0
   else:
    defocus = ctfvalue['defocus1']
   defocus = -1.0*abs(defocus)
   ### assume defocus values are ALWAYS negative but mindefocus is greater than maxdefocus
   if self.params['mindefocus']:
    self.params['mindefocus'] = -abs( self.params['mindefocus'] )
   if self.params['maxdefocus']:
    self.params['maxdefocus'] = -abs( self.params['maxdefocus'] )
   # swap the bounds if the user supplied them in the wrong order
   if self.params['mindefocus'] and self.params['maxdefocus']:
    if self.params['maxdefocus'] > self.params['mindefocus']:
     mindef = self.params['mindefocus']
     maxdef = self.params['maxdefocus']
     self.params['mindefocus'] = maxdef
     self.params['maxdefocus'] = mindef
   ### skip micrograph that have defocus above or below min & max defocus levels
   if self.params['mindefocus'] and defocus > self.params['mindefocus']:
    #apDisplay.printColor(shortname+" defocus ("+str(round(defocus*1e6,2))+\
    # " um) is less than mindefocus ("+str(self.params['mindefocus']*1e6)+" um)\n","cyan")
    return False
   if self.params['maxdefocus'] and defocus < self.params['maxdefocus']:
    #apDisplay.printColor(shortname+" defocus ("+str(round(defocus*1e6,2))+\
    # " um) is greater than maxdefocus ("+str(self.params['maxdefocus']*1e6)+" um)\n","cyan")
    return False
  return True
 #=======================
 def checkDefocus(self, defocus, shortname):
  # Sanity bounds on a defocus value (expected negative, within a sane
  # magnitude range); printError reports a fatal problem for the image.
  if defocus > 0:
   apDisplay.printError("defocus is positive "+str(defocus)+" for image "+shortname)
  elif defocus < -1.0e3:
   apDisplay.printError("defocus is very big "+str(defocus)+" for image "+shortname)
  elif defocus > -1.0e-3:
   apDisplay.printError("defocus is very small "+str(defocus)+" for image "+shortname)
 #=======================
 def eliminateMinMaxCCParticles(self, particles):
  """
  Drop particles whose correlation falls outside the requested
  [correlationmin, correlationmax] window; return the survivors.
  """
  newparticles = []
  eliminated = 0
  for prtl in particles:
   if self.params['correlationmin'] and prtl['correlation'] < self.params['correlationmin']:
    eliminated += 1
   elif self.params['correlationmax'] and prtl['correlation'] > self.params['correlationmax']:
    eliminated += 1
   else:
    newparticles.append(prtl)
  if eliminated > 0:
   apDisplay.printMsg(str(eliminated)+" particle(s) eliminated due to min or max correlation cutoff")
  return newparticles
 #=======================
 def eliminateMaskedParticles(self, particles, imgdata):
  """
  Drop particles that fall inside the assessed mask for this image;
  if no mask exists, return the particles unchanged.
  """
  newparticles = []
  eliminated = 0
  sessiondata = apDatabase.getSessionDataFromSessionName(self.params['sessionname'])
  if self.params['defocpair']:
   imgdata = apDefocalPairs.getTransformedDefocPair(imgdata,2)
  maskimg,maskbin = apMask.makeInspectedMask(sessiondata,self.params['maskassess'],imgdata)
  if maskimg is not None:
   for prtl in particles:
    # mask image is binned by maskbin; indexed as (row, col) = (y, x)
    binnedcoord = (int(prtl['ycoord']/maskbin),int(prtl['xcoord']/maskbin))
    if maskimg[binnedcoord] != 0:
     eliminated += 1
    else:
     newparticles.append(prtl)
   apDisplay.printMsg("%i particle(s) eliminated due to masking"%eliminated)
  else:
   apDisplay.printMsg("no masking")
   newparticles = particles
  return newparticles
 ############################################################
 ## Common parameters
 ############################################################
 #=======================
 def setupParserOptions(self):
  """Register the command-line options shared by all extract loops."""
  self.ctfestopts = ('ace2', 'ctffind')
  ### values
  self.parser.add_option("--bin", dest="bin", type="int", default=1,
   help="Bin the particles after extracting", metavar="#")
  self.parser.add_option("--ctfcutoff", dest="ctfcutoff", type="float",
   help="CTF confidence cut off")
  self.parser.add_option("--ctfres80min", dest="ctfres80min", type="float",
   help="min resolution requirement at 0.8 threshold (rarely used)")
  self.parser.add_option("--ctfres50min", dest="ctfres50min", type="float",
   help="min resolution requirement at 0.5 threshold (rarely used)")
  self.parser.add_option("--ctfres80max", dest="ctfres80max", type="float",
   help="max resolution requirement for CTF fit at 0.8 threshold")
  self.parser.add_option("--ctfres50max", dest="ctfres50max", type="float",
   help="max resolution requirement for CTF fit at 0.5 threshold")
  self.parser.add_option("--mincc", dest="correlationmin", type="float",
   help="particle correlation mininum")
  self.parser.add_option("--maxcc", dest="correlationmax", type="float",
   help="particle correlation maximum")
  self.parser.add_option("--mindef", dest="mindefocus", type="float",
   help="minimum defocus")
  self.parser.add_option("--maxdef", dest="maxdefocus", type="float",
   help="maximum defocus")
  self.parser.add_option("--selectionid", dest="selectionid", type="int",
   help="particle picking runid")
  self.parser.add_option("--fromstackid", dest="fromstackid", type="int",
   help="redo a stack from a previous stack")
  self.parser.add_option("--ctfrunid", dest="ctfrunid", type="int",
   help="consider only specific ctfrun")
  self.parser.add_option("--partlimit", dest="partlimit", type="int",
   help="particle limit")
  self.parser.add_option("--mag", dest="mag", type="int",
   help="process only images of magification, mag")
  self.parser.add_option("--maskassess", dest="maskassess",
   help="Assessed mask run name")
  self.parser.add_option("--label", dest="particlelabel", type="str", default=None,
   help="select particles by label within the same run name")
  self.parser.add_option("--ddstartframe", dest="startframe", type="int", default=0,
   help="starting frame for direct detector raw frame processing. The first frame is 0")
  self.parser.add_option("--ddnframe", dest="nframe", type="int",
   help="total frames to consider for direct detector raw frame processing")
  self.parser.add_option("--ddstack", dest="ddstack", type="int", default=0,
   help="gain/dark corrected ddstack id used for dd frame integration")
  self.parser.add_option("--dduseGS", dest="useGS", default=False,
   action="store_true", help="use Gram-Schmidt process to scale dark to frame images")
  self.parser.add_option("--dddriftlimit", dest="driftlimit", type="float",
   help="direct detector frame acceptable drift, in Angstroms")
  ### true/false
  self.parser.add_option("--defocpair", dest="defocpair", default=False,
   action="store_true", help="select defocal pair")
  self.parser.add_option("--checkmask", dest="checkmask", default=False,
   action="store_true", help="Check masks")
  self.parser.add_option("--keepall", dest="keepall", default=False,
   action="store_true", help="Do not delete CTF corrected MRC files when finishing")
  self.parser.add_option("--usedownmrc", dest="usedownmrc", default=False,
   action="store_true", help="Use existing *.down.mrc in processing")
  ### option based
  self.parser.add_option("--ctfmethod", dest="ctfmethod",
   help="Only use ctf values coming from this method of estimation", metavar="TYPE",
   type="choice", choices=self.ctfestopts)
 #=======================
 def checkConflicts(self):
  """Validate option combinations; printError reports fatal conflicts."""
  if self.params['description'] is None:
   apDisplay.printError("A description has to be specified")
  if (self.params['mindefocus'] is not None and
    (self.params['mindefocus'] < -1e-3 or self.params['mindefocus'] > -1e-9)):
   apDisplay.printError("min defocus is not in an acceptable range, e.g. mindefocus=-1.5e-6")
  if (self.params['maxdefocus'] is not None and
    (self.params['maxdefocus'] < -1e-3 or self.params['maxdefocus'] > -1e-9)):
   apDisplay.printError("max defocus is not in an acceptable range, e.g. maxdefocus=-1.5e-6")
  if self.params['fromstackid'] is not None and self.params['selectionid'] is not None:
   apDisplay.printError("please only specify one of either --selectionid or --fromstackid")
  if self.params['fromstackid'] is None and self.params['selectionid'] is None:
   apDisplay.printError("please specify one of either --selectionid or --fromstackid")
  if self.params['maskassess'] is None and self.params['checkmask']:
   apDisplay.printError("particle mask assessment run need to be defined to check mask")
  if self.params['maskassess'] is not None and not self.params['checkmask']:
   apDisplay.printMsg("running mask assess")
   self.params['checkmask'] = True
 def checkIsDD(self):
  """Set is_dd / is_dd_stack / is_dd_frame from the run parameters."""
  apDisplay.printWarning('Checking for dd')
  if self.params['ddstack'] > 0:
   self.is_dd_stack = True
   self.is_dd = True
  else:
   # '-a' in the preset/mrc name marks aligned dd data -- TODO confirm
   if self.params['preset'] and '-a' in self.params['preset'] and (self.params['nframe'] or self.params['driftlimit'] > 0):
    self.is_dd = True
    self.is_dd_stack = True
   elif self.params['mrcnames'] and self.params['mrcnames'].split(',')[0] and '-a' in self.params['mrcnames'].split(',')[0] and (self.params['nframe'] or self.params['driftlimit'] > 0):
    self.is_dd = True
    self.is_dd_stack = True
   elif self.params['nframe']:
    self.is_dd = True
    self.is_dd_frame = True
 #=======================
 def preLoopFunctions(self):
  """One-time setup before the image loop: dd state, timing counters,
  selection-run lookup, and pixel-size consistency check."""
  self.is_dd_frame = False
  self.is_dd_stack = False
  self.is_dd = False
  self.checkIsDD()
  self.batchboxertimes = []
  self.ctftimes = []
  self.mergestacktimes = []
  self.meanreadtimes = []
  self.insertdbtimes = []
  self.noimages = False
  self.totalpart = 0
  self.selectiondata = None
  # Different class needed depending on if ddstack is specified or available
  if self.is_dd:
   from appionlib import apDDprocess
   if self.is_dd_frame:
    apDisplay.printMsg('DD Frame Processing')
    self.dd = apDDprocess.initializeDDFrameprocess(self.params['sessionname'])
    self.dd.setUseGS(self.params['useGS'])
   if self.is_dd_stack:
    apDisplay.printMsg('DD Stack Processing')
    self.dd = apDDprocess.DDStackProcessing()
  if len(self.imgtree) == 0:
   apDisplay.printWarning("No images were found to process")
   self.noimages = True
   # Still need to set attributes if waiting for more images
   if not self.params['wait']:
    return
  if self.params['selectionid'] is not None:
   self.selectiondata = apParticle.getSelectionRunDataFromID(self.params['selectionid'])
   if self.params['particlelabel'] == 'fromtrace':
    if (not self.selectiondata['manparams'] or not self.selectiondata['manparams']['trace']):
     apDisplay.printError("Can not use traced object center to extract boxed area without tracing")
    else:
     self.params['particlelabel'] = '_trace'
  self.checkPixelSize()
  self.existingParticleNumber=0
  self.setStartingParticleNumber()
  apDisplay.printMsg("Starting at particle number: "+str(self.particleNumber))
  if self.params['partlimit'] is not None and self.particleNumber > self.params['partlimit']:
   apDisplay.printError("Number of particles in existing stack already exceeds limit!")
  self.logpeaks = 2
 def setStartingParticleNumber(self):
  # Subclasses may override to offset numbering when appending to a stack.
  self.particleNumber = self.existingParticleNumber
 def convertTraceToParticlePeaks(self,imgdata):
  # Convert traced object contours into '_trace'-labeled particle picks.
  apSizing.makeParticleFromContour(imgdata,self.selectiondata,'_trace')
 #=====================
 def reprocessImage(self, imgdata):
  """
  Returns
  True, if an image should be reprocessed
  False, if an image was processed and should NOT be reprocessed
  None, if image has not yet been processed
  e.g. a confidence less than 80%
  """
  # check to see if image is rejected by other criteria
  if self.rejectImage(imgdata) is False:
   return False
  # check CTF parameters for image and skip if criteria is not met
  if self.checkCtfParams(imgdata) is False:
   return False
  return None
 #=======================
 def processImage(self, imgdata):
  """Per-image driver: dd frame/stack bookkeeping, clear stale boxed
  files, collect particles, dispatch to processParticles(), and stop
  the loop when the particle limit is reached."""
  imgname = imgdata['filename']
  shortname = apDisplay.short(imgdata['filename'])
  # set default to work with non-dd data
  self.framelist = []
  if self.is_dd:
   if imgdata is None or imgdata['camera']['save frames'] != True:
    apDisplay.printWarning('%s skipped for no-frame-saved\n ' % imgdata['filename'])
    return
   self.dd.setImageData(imgdata)
   self.framelist = self.dd.getFrameList(self.params)
   if not self.framelist:
    apDisplay.printWarning('image rejected because no frame passes drift limit test')
    return
   if self.is_dd_stack:
    # find the ddstackrun of the image
    if not self.params['ddstack']:
     self.dd.setDDStackRun()
    else:
     self.dd.setDDStackRun(self.params['ddstack'])
    # compare image ddstackrun with the specified ddstackrun
    if self.params['ddstack'] and self.params['ddstack'] != self.dd.getDDStackRun().dbid:
     apDisplay.printWarning('ddstack image not from specified ddstack run')
     apDisplay.printWarning('Skipping this image ....')
     return None
    # This function will reset self.dd.ddstackrun for actual processing
    self.dd.setFrameStackPath(self.params['ddstack'])
  ### first remove any existing boxed files
  shortfileroot = os.path.join(self.params['rundir'], shortname)
  if not self.params['usedownmrc']:
   # remove all previous temp files
   rmfiles = glob.glob(shortfileroot+"*")
  else:
   # limit the files to be removed
   rmfiles = glob.glob(shortfileroot+".*")
  for rmfile in rmfiles:
   apFile.removeFile(rmfile)
  ### convert contours to particles
  if self.selectiondata and self.params['particlelabel'] == '_trace':
   self.convertTraceToParticlePeaks(imgdata)
  ### get particles
  partdatas,shiftdata = self.getParticlesInImage(imgdata)
  ### check if we have particles
  if len(partdatas) == 0:
   apDisplay.printColor(shortname+" has no remaining particles and has been rejected\n","cyan")
   total_processed_particles = None
  else:
   ### process partdatas
   total_processed_particles = self.processParticles(imgdata,partdatas,shiftdata)
  if total_processed_particles is None:
   self.totalpart = len(partdatas)+self.totalpart
  else:
   self.totalpart = total_processed_particles
  ### check if particle limit is met
  if self.params['partlimit'] is not None and self.totalpart > self.params['partlimit']:
   apDisplay.printWarning("reached particle number limit of "+str(self.params['partlimit'])+" now stopping")
   self.imgtree = []
   self.notdone = False
 def processParticles(self,imgdata,partdatas,shiftdata):
  """
  this is the main component
  it should return the total number of processed particles if available otherwise, it returns None
  """
  raise NotImplementedError()
 #=======================
 def loopCleanUp(self,imgdata):
  ### last remove any existing boxed files, reset global params
  shortname = apDisplay.short(imgdata['filename'])
  shortfileroot = os.path.join(self.params['rundir'], shortname)
  rmfiles = glob.glob(shortfileroot+"*")
  if not self.params['keepall']:
   for rmfile in rmfiles:
    apFile.removeFile(rmfile)
############################################################################
# ParticleExtract with Elimination of boxed particles cropped by the image
############################################################################
class ParticleBoxLoop(ParticleExtractLoop):
 """
 ParticleExtractLoop variant that also requires a box size and drops any
 particle whose box would extend past the image edge.
 """
 def setupParserOptions(self):
  super(ParticleBoxLoop,self).setupParserOptions()
  self.parser.add_option("--boxsize", dest="boxsize", type="int",
   help="particle box size in pixel")
  self.parser.add_option("--rotate", dest="rotate", default=False,
   action="store_true", help="Apply rotation angles of ,for example, helix")
 def checkConflicts(self):
  super(ParticleBoxLoop,self).checkConflicts()
  if self.params['boxsize'] is None:
   apDisplay.printError("A boxsize has to be specified")
 def preLoopFunctions(self):
  super(ParticleBoxLoop,self).preLoopFunctions()
  self.boxsize = int(self.params['boxsize'])
  if self.params['rotate'] is True:
   ### with rotate we use a bigger boxsize
   self.half_box = int(1.5*self.boxsize/2)
  else:
   self.half_box = int(math.floor(self.boxsize / 2.0))
 def getParticlesInImage(self,imgdata):
  # Extend the parent's particle pick-up with the out-of-image box filter.
  partdatas,shiftdata = super(ParticleBoxLoop,self).getParticlesInImage(imgdata)
  return self.removeBoxOutOfImage(imgdata,partdatas,shiftdata),shiftdata
 def removeBoxOutOfImage(self,imgdata,partdatas,shiftdata):
  """Keep only particles whose full box fits inside the image."""
  imgdims = imgdata['camera']['dimension']
  newpartdatas = []
  for partdata in partdatas:
   start_x,start_y = apBoxer.getBoxStartPosition(imgdata,self.half_box,partdata, shiftdata)
   if apBoxer.checkBoxInImage(imgdims,start_x,start_y,self.boxsize):
    newpartdatas.append(partdata)
  return newpartdatas
class Test(ParticleExtractLoop):
 """Minimal concrete loop that just prints particle coordinates."""
 def processParticles(self,imgdata,partdatas,shiftdata):
  # NOTE: Python 2 print statement -- this module targets Python 2.
  for partdata in partdatas:
   print partdata['xcoord'],partdata['ycoord']
  return None
 def commitToDatabase(self,imgdata):
  # Nothing to commit for the test loop.
  pass
# Run the minimal Test loop when invoked as a script.
if __name__ == '__main__':
 makeStack = Test()
 makeStack.run()
|
python
|
import torch
import torch.nn as nn
from models.pytorch_revgrad import RevGrad
from efficientnet_pytorch import EfficientNet as effnet
class EfficientNet(effnet):
 """
 EfficientNet backbone with an auxiliary adversarial head fed through
 gradient-reversal layers. forward() returns (main_out, adversarial_out)
 when train=True and only the main output otherwise.
 """
 def __init__(self, blocks_args=None, global_params=None):
  super(EfficientNet, self).__init__(blocks_args=blocks_args, global_params=global_params)
 def forward(self, x1, x2=None, train=False):
  # x1: main input batch; x2: second batch consumed by the adversarial
  # head (only used when train=True).
  if train:
   # main head (predictive)
   out, endpoints = self.predictive_network(x1)
   # additional head (adversarial)
   out_s = self.adversarial_network(endpoints, x2)
   return out, out_s
  else:
   # main head (predictive)
   out, _ = self.predictive_network(x1)
   return out
 def build_adv_model(self):
  # Probe the backbone with a dummy input to size the adversarial head.
  dummy = torch.rand(1, 3, 96, 96)
  endpoints = self.extract_endpoints(dummy)
  n_filt = 0
  for i in endpoints.keys():
   if i == 'reduction_6':
    n_filt += endpoints[i].shape[1]
  # n_filt * 4: four concatenated feature vectors (mean/std for the
  # source batch and the predictive batch) in adversarial_network.
  self.adv_fc1 = nn.Linear(n_filt * 4, 300)
  self.adv_fc2 = nn.Linear(300, 300)
  # self.adv_fc3 = nn.Linear(300, 300)
  self.adv_fc4 = nn.Linear(300, 1)
  # gradient reversal layer
  # NOTE(review): only the rever2_* layers are used below; rever1_6 and
  # rever1_12 appear unused -- confirm whether that is intentional.
  self.rever1_6 = RevGrad()
  self.rever1_12 = RevGrad()
  self.rever2_6 = RevGrad()
  self.rever2_12 = RevGrad()
  return True
 def predictive_network(self, inputs):
  # Convolution layers
  endpoints = self.extract_endpoints(inputs)
  # Pooling and final linear layer
  x = self._avg_pooling(endpoints['reduction_6'])
  if self._global_params.include_top:
   x = x.flatten(start_dim=1)
   x = self._dropout(x)
   x = self._fc(x)
  return x, endpoints
 def adversarial_network(self, endpoints, x_s):
  """Sigmoid score from reversed-gradient features of both batches."""
  # Convolution layers
  endpoints_s = self.extract_endpoints(x_s)
  # Spatial mean and std features, each passed through gradient reversal.
  x6_s = self.rever2_6(endpoints_s['reduction_6']).mean(dim=2).mean(dim=2)
  x12_s = self.rever2_12(endpoints_s['reduction_6']).std(dim=2).std(dim=2)
  x6_p = self.rever2_6(endpoints['reduction_6']).mean(dim=2).mean(dim=2)
  x12_p = self.rever2_12(endpoints['reduction_6']).std(dim=2).std(dim=2)
  x = torch.cat([x6_s, x12_s, x6_p, x12_p,], dim=1,)
  x = torch.relu(self.adv_fc1(x))
  # x = torch.relu(self.adv_fc2(x))
  # x = torch.relu(self.adv_fc3(x))
  x = torch.sigmoid(self.adv_fc4(x))
  return x
|
python
|
from idm.objects import dp, MySignalEvent, SignalEvent, __version__
from idm.utils import ment_user
from datetime import datetime
import time
@dp.signal_event_register('инфо', 'инфа', 'info')
def sinfo(event: SignalEvent) -> str:
 """Reply with bot info (version, owner, chat count, chat id and name).

 Untrusted senders receive a temporary notice that is removed after
 three seconds.
 """
 if event.msg['from_id'] not in event.db.trusted_users:
  message_id = event.send(event.responses['not_in_trusted'])
  time.sleep(3)
  # presumably op 3 deletes the notice message -- confirm against the API
  event.api.msg_op(3, msg_id=message_id)
  return "ok"
 owner = event.api('users.get', user_ids=event.db.duty_id)[0]
 # TODO: factor out a helper for formatting this info (used in three places)
 event.send(event.responses['info_duty'].format(
  версия = __version__, владелец = ment_user(owner),
  чаты = len(event.db.chats.keys()),
  ид = event.chat.iris_id, имя = event.chat.name))
 return "ok"
|
python
|
"""
Title: Video Vision Transformer
Author: [Aritra Roy Gosthipaty](https://twitter.com/ariG23498), [Ayush Thakur](https://twitter.com/ayushthakur0) (equal contribution)
Date created: 2022/01/12
Last modified: 2022/01/12
Description: A Transformer-based architecture for video classification.
"""
"""
## Introduction
Videos are sequences of images. Let's assume you have an image
representation model (CNN, ViT, etc.) and a sequence model
(RNN, LSTM, etc.) at hand. We ask you to tweak the model for video
classification. The simplest approach would be to apply the image
model to individual frames, use the sequence model to learn
sequences of image features, then apply a classification head on
the learned sequence representation.
The Keras example
[Video Classification with a CNN-RNN Architecture](https://keras.io/examples/vision/video_classification/)
explains this approach in detail. Alternatively, you can also
build a hybrid Transformer-based model for video classification as shown in the Keras example
[Video Classification with Transformers](https://keras.io/examples/vision/video_transformers/).
In this example, we minimally implement
[ViViT: A Video Vision Transformer](https://arxiv.org/abs/2103.15691)
by Arnab et al., a **pure Transformer-based** model
for video classification. The authors propose a novel embedding scheme
and a number of Transformer variants to model video clips. We implement
the embedding scheme and one of the variants of the Transformer
architecture, for simplicity.
This example requires TensorFlow 2.6 or higher, and the `medmnist`
package, which can be installed by running the code cell below.
"""
"""shell
pip install -qq medmnist
"""
"""
## Imports
"""
import os
import io
import imageio
import medmnist
import ipywidgets
import numpy as np
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import layers
# Setting seed for reproducibility
SEED = 42
# Request deterministic cuDNN kernels so GPU runs are repeatable.
os.environ["TF_CUDNN_DETERMINISTIC"] = "1"
keras.utils.set_random_seed(SEED)
"""
## Hyperparameters
The hyperparameters are chosen via hyperparameter
search. You can learn more about the process in the "conclusion" section.
"""
# DATA
DATASET_NAME = "organmnist3d"
BATCH_SIZE = 32
AUTO = tf.data.AUTOTUNE
INPUT_SHAPE = (28, 28, 28, 1)  # (frames, height, width, channels)
NUM_CLASSES = 11
# OPTIMIZER
LEARNING_RATE = 1e-4
WEIGHT_DECAY = 1e-5
# TRAINING
EPOCHS = 60
# TUBELET EMBEDDING
PATCH_SIZE = (8, 8, 8)
# Squared spatial patch count -- assumes the temporal axis is folded into
# the tubelet (Conv3D) projection; TODO confirm against the embedding layer.
NUM_PATCHES = (INPUT_SHAPE[0] // PATCH_SIZE[0]) ** 2
# ViViT ARCHITECTURE
LAYER_NORM_EPS = 1e-6
PROJECTION_DIM = 128
NUM_HEADS = 8
NUM_LAYERS = 8
"""
## Dataset
For our example we use the
[MedMNIST v2: A Large-Scale Lightweight Benchmark for 2D and 3D Biomedical Image Classification](https://medmnist.com/)
dataset. The videos are lightweight and easy to train on.
"""
def download_and_prepare_dataset(data_info: dict):
    """Download the dataset archive and split it into (videos, labels) pairs.

    Arguments:
        data_info (dict): Dataset metadata with "url" and "MD5" entries.

    Returns:
        A tuple of three (videos, labels) tuples: train, validation, test.
    """
    archive_path = keras.utils.get_file(origin=data_info["url"], md5_hash=data_info["MD5"])

    with np.load(archive_path) as arrays:
        splits = []
        for split_name in ("train", "val", "test"):
            videos = arrays[split_name + "_images"]
            labels = arrays[split_name + "_labels"].flatten()
            splits.append((videos, labels))

    return (splits[0], splits[1], splits[2])
# Get the metadata of the dataset
info = medmnist.INFO[DATASET_NAME]
# Get the dataset
# Downloads the archive and unpacks one (videos, labels) pair per split.
prepared_dataset = download_and_prepare_dataset(info)
(train_videos, train_labels) = prepared_dataset[0]
(valid_videos, valid_labels) = prepared_dataset[1]
(test_videos, test_labels) = prepared_dataset[2]
"""
### `tf.data` pipeline
"""
@tf.function
def preprocess(frames: tf.Tensor, label: tf.Tensor):
    """Preprocess the frames tensors and parse the labels.

    Adds a trailing channel axis to the frames, converts them to float32 in
    [0, 1] via `convert_image_dtype`, and casts the label to float32.
    """
    # Preprocess images
    frames = tf.image.convert_image_dtype(
        frames[
            ..., tf.newaxis
        ],  # The new axis is to help for further processing with Conv3D layers
        tf.float32,
    )
    # Parse label
    label = tf.cast(label, tf.float32)
    return frames, label
def prepare_dataloader(
    videos: np.ndarray,
    labels: np.ndarray,
    loader_type: str = "train",
    batch_size: int = BATCH_SIZE,
):
    """Build a batched, prefetching `tf.data` pipeline over (videos, labels).

    Only the training loader is shuffled, so evaluation order stays
    deterministic.
    """
    ds = tf.data.Dataset.from_tensor_slices((videos, labels))
    # Shuffle the training split with a small buffer (two batches).
    if loader_type == "train":
        ds = ds.shuffle(BATCH_SIZE * 2)
    return (
        ds.map(preprocess, num_parallel_calls=tf.data.AUTOTUNE)
        .batch(batch_size)
        .prefetch(tf.data.AUTOTUNE)
    )
trainloader = prepare_dataloader(train_videos, train_labels, "train")
validloader = prepare_dataloader(valid_videos, valid_labels, "valid")
testloader = prepare_dataloader(test_videos, test_labels, "test")
"""
## Tubelet Embedding
In ViTs, an image is divided into patches, which are then spatially
flattened, a process known as tokenization. For a video, one can
repeat this process for individual frames. **Uniform frame sampling**
as suggested by the authors is a tokenization scheme in which we
sample frames from the video clip and perform simple ViT tokenization.
|  |
| :--: |
| Uniform Frame Sampling [Source](https://arxiv.org/abs/2103.15691) |
**Tubelet Embedding** is different in terms of capturing temporal
information from the video.
First, we extract volumes from the video -- these volumes contain
patches of the frame and the temporal information as well. The volumes
are then flattened to build video tokens.
|  |
| :--: |
| Tubelet Embedding [Source](https://arxiv.org/abs/2103.15691) |
"""
class TubeletEmbedding(layers.Layer):
    """Embed a video into a sequence of tubelet tokens.

    A non-overlapping Conv3D (kernel_size == strides == patch_size) extracts
    spatio-temporal volumes and projects each one to `embed_dim`; the output
    is then reshaped to (batch, num_tokens, embed_dim).
    """
    def __init__(self, embed_dim, patch_size, **kwargs):
        super().__init__(**kwargs)
        # kernel_size == strides makes the extracted patches non-overlapping.
        self.projection = layers.Conv3D(
            filters=embed_dim,
            kernel_size=patch_size,
            strides=patch_size,
            padding="VALID",
        )
        # Collapse the three spatial/temporal axes into one token axis.
        self.flatten = layers.Reshape(target_shape=(-1, embed_dim))
    def call(self, videos):
        projected_patches = self.projection(videos)
        flattened_patches = self.flatten(projected_patches)
        return flattened_patches
"""
## Positional Embedding
This layer adds positional information to the encoded video tokens.
"""
class PositionalEncoder(layers.Layer):
    """Add learned positional embeddings to a sequence of encoded tokens."""
    def __init__(self, embed_dim, **kwargs):
        super().__init__(**kwargs)
        # Embedding width; the position table itself is created lazily in
        # build() once the token count is known.
        self.embed_dim = embed_dim
    def build(self, input_shape):
        _, num_tokens, _ = input_shape
        self.position_embedding = layers.Embedding(
            input_dim=num_tokens, output_dim=self.embed_dim
        )
        # Fixed index vector [0, 1, ..., num_tokens - 1].
        self.positions = tf.range(start=0, limit=num_tokens, delta=1)
    def call(self, encoded_tokens):
        # Encode the positions and add it to the encoded tokens
        encoded_positions = self.position_embedding(self.positions)
        encoded_tokens = encoded_tokens + encoded_positions
        return encoded_tokens
"""
## Video Vision Transformer
The authors suggest 4 variants of Vision Transformer:
- Spatio-temporal attention
- Factorized encoder
- Factorized self-attention
- Factorized dot-product attention
In this example, we will implement the **Spatio-temporal attention**
model for simplicity. The following code snippet is heavily inspired from
[Image classification with Vision Transformer](https://keras.io/examples/vision/image_classification_with_vision_transformer/).
One can also refer to the
[official repository of ViViT](https://github.com/google-research/scenic/tree/main/scenic/projects/vivit)
which contains all the variants, implemented in JAX.
"""
def create_vivit_classifier(
    tubelet_embedder,
    positional_encoder,
    input_shape=INPUT_SHAPE,
    transformer_layers=NUM_LAYERS,
    num_heads=NUM_HEADS,
    embed_dim=PROJECTION_DIM,
    layer_norm_eps=LAYER_NORM_EPS,
    num_classes=NUM_CLASSES,
):
    """Build the spatio-temporal-attention ViViT classifier.

    Arguments:
        tubelet_embedder: Layer mapping input videos to patch tokens.
        positional_encoder: Layer adding positional embeddings to tokens.
        input_shape: Shape of a single input video.
        transformer_layers: Number of Transformer encoder blocks.
        num_heads: Attention heads per block.
        embed_dim: Token embedding dimension.
        layer_norm_eps: Epsilon used by every LayerNormalization layer.
        num_classes: Number of output classes.

    Returns:
        An uncompiled `keras.Model` emitting softmax class probabilities.
    """
    # Get the input layer
    inputs = layers.Input(shape=input_shape)
    # Create patches.
    patches = tubelet_embedder(inputs)
    # Encode patches.
    encoded_patches = positional_encoder(patches)
    # Create multiple layers of the Transformer block.
    for _ in range(transformer_layers):
        # Layer normalization and MHSA.
        # Fix: use the layer_norm_eps parameter instead of a hard-coded 1e-6
        # so the epsilon argument actually takes effect (default unchanged).
        x1 = layers.LayerNormalization(epsilon=layer_norm_eps)(encoded_patches)
        attention_output = layers.MultiHeadAttention(
            num_heads=num_heads, key_dim=embed_dim // num_heads, dropout=0.1
        )(x1, x1)
        # Skip connection
        x2 = layers.Add()([attention_output, encoded_patches])
        # Layer Normalization and MLP
        x3 = layers.LayerNormalization(epsilon=layer_norm_eps)(x2)
        x3 = keras.Sequential(
            [
                layers.Dense(units=embed_dim * 4, activation=tf.nn.gelu),
                layers.Dense(units=embed_dim, activation=tf.nn.gelu),
            ]
        )(x3)
        # Skip connection
        encoded_patches = layers.Add()([x3, x2])
    # Layer normalization and Global average pooling.
    representation = layers.LayerNormalization(epsilon=layer_norm_eps)(encoded_patches)
    representation = layers.GlobalAvgPool1D()(representation)
    # Classify outputs.
    outputs = layers.Dense(units=num_classes, activation="softmax")(representation)
    # Create the Keras model.
    model = keras.Model(inputs=inputs, outputs=outputs)
    return model
"""
## Train
"""
def run_experiment():
    """Build, train and evaluate the ViViT model.

    Uses the module-level train/valid/test dataloaders and hyperparameter
    constants; prints test accuracy and top-5 accuracy, and returns the
    trained model.
    """
    # Initialize model
    model = create_vivit_classifier(
        tubelet_embedder=TubeletEmbedding(
            embed_dim=PROJECTION_DIM, patch_size=PATCH_SIZE
        ),
        positional_encoder=PositionalEncoder(embed_dim=PROJECTION_DIM),
    )
    # Compile the model with the optimizer, loss function
    # and the metrics.
    optimizer = keras.optimizers.Adam(learning_rate=LEARNING_RATE)
    model.compile(
        optimizer=optimizer,
        loss="sparse_categorical_crossentropy",
        metrics=[
            keras.metrics.SparseCategoricalAccuracy(name="accuracy"),
            keras.metrics.SparseTopKCategoricalAccuracy(5, name="top-5-accuracy"),
        ],
    )
    # Train the model.
    _ = model.fit(trainloader, epochs=EPOCHS, validation_data=validloader)
    _, accuracy, top_5_accuracy = model.evaluate(testloader)
    print(f"Test accuracy: {round(accuracy * 100, 2)}%")
    print(f"Test top 5 accuracy: {round(top_5_accuracy * 100, 2)}%")
    return model
model = run_experiment()
"""
## Inference
"""
NUM_SAMPLES_VIZ = 25
testsamples, labels = next(iter(testloader))
testsamples, labels = testsamples[:NUM_SAMPLES_VIZ], labels[:NUM_SAMPLES_VIZ]
ground_truths = []
preds = []
videos = []
for i, (testsample, label) in enumerate(zip(testsamples, labels)):
# Generate gif
with io.BytesIO() as gif:
imageio.mimsave(gif, (testsample.numpy() * 255).astype("uint8"), "GIF", fps=5)
videos.append(gif.getvalue())
# Get model prediction
output = model.predict(tf.expand_dims(testsample, axis=0))[0]
pred = np.argmax(output, axis=0)
ground_truths.append(label.numpy().astype("int"))
preds.append(pred)
def make_box_for_grid(image_widget, fit):
    """Wrap *image_widget* plus a caption derived from *fit* in a centered VBox.

    Source: https://ipywidgets.readthedocs.io/en/latest/examples/Widget%20Styling.html
    """
    # Caption text: quote real values, render None literally.
    caption = "'{}'".format(fit) if fit is not None else str(fit)
    label = ipywidgets.HTML(value="" + str(caption) + "")
    # Box holding the image widget.
    image_box = ipywidgets.widgets.Box()
    image_box.children = [image_widget]
    # Stack caption over image, centered.
    column = ipywidgets.widgets.VBox()
    column.layout.align_items = "center"
    column.children = [label, image_box]
    return column
boxes = []
for i in range(NUM_SAMPLES_VIZ):
ib = ipywidgets.widgets.Image(value=videos[i], width=100, height=100)
true_class = info["label"][str(ground_truths[i])]
pred_class = info["label"][str(preds[i])]
caption = f"T: {true_class} | P: {pred_class}"
boxes.append(make_box_for_grid(ib, caption))
ipywidgets.widgets.GridBox(
boxes, layout=ipywidgets.widgets.Layout(grid_template_columns="repeat(5, 200px)")
)
"""
## Final thoughts
With a vanilla implementation, we achieve ~79-80% Top-1 accuracy on the
test dataset.
The hyperparameters used in this tutorial were finalized by running a
hyperparameter search using
[W&B Sweeps](https://docs.wandb.ai/guides/sweeps).
You can find out our sweeps result
[here](https://wandb.ai/minimal-implementations/vivit/sweeps/66fp0lhz)
and our quick analysis of the results
[here](https://wandb.ai/minimal-implementations/vivit/reports/Hyperparameter-Tuning-Analysis--VmlldzoxNDEwNzcx).
For further improvement, you could look into the following:
- Using data augmentation for videos.
- Using a better regularization scheme for training.
- Apply different variants of the transformer model as in the paper.
We would like to thank [Anurag Arnab](https://anuragarnab.github.io/)
(first author of ViViT) for helpful discussion. We are grateful to
[Weights and Biases](https://wandb.ai/site) program for helping with
GPU credits.
You can use the trained model hosted on [Hugging Face Hub](https://huggingface.co/keras-io/video-vision-transformer)
and try the demo on [Hugging Face Spaces](https://huggingface.co/spaces/keras-io/video-vision-transformer-CT).
"""
|
python
|
import logging.config
import unittest
#import testing.mysqld
import sqlalchemy
from src.utils.Settings import Settings
# Modules to Test
from src.utils.Connector import Connector
class TestConnector(unittest.TestCase):
    '''
    Unit tests for src.utils.Connector using a locally-configured SQL database.
    '''
    @classmethod
    def tag(cls, *tags):
        '''
        Decorator to add tags to a test class or method.
        '''
        def decorator(obj):
            setattr(obj, 'tags', set(tags))
            return obj
        return decorator
    @classmethod
    def setUpClass(cls):
        '''
        Set up for class to instantiate constructor of class under test
        '''
        super(TestConnector, cls).setUpClass()
        # Silence logging noise during the test run; re-enabled in tearDownClass.
        logging.disable(logging.CRITICAL)
        # Read in config.ini for database info
        # Uses local config to not expose sensitive info
        settings = Settings(config_file='../config.ini')
        config = settings.db_config_read(section='sqldb')
        cls.username = 'test'
        cls.password = 'password'
        # Initialize Connector constructor
        cls.connector = Connector(TestConnector.username, TestConnector.password, config)
    def test_init(self):
        '''
        Test Connector __init__
        '''
        self.assertTrue(TestConnector.connector.settings_model.username is TestConnector.username,
                        'Mock value should match username')
    def test_create_engine(self):
        '''
        Test create_engine
        '''
        conn = TestConnector.connector.create_engine(TestConnector.connector.settings_model)
        # Compare against a bare pymysql engine to check the returned type only.
        mock_conn = sqlalchemy.create_engine('mysql+pymysql://')
        self.assertIsInstance(conn, type(mock_conn), 'Class type should match mock class type')
    def test_login(self):
        '''
        Test login
        '''
        mock_conn = sqlalchemy.create_engine('mysql+pymysql://')
        login = TestConnector.connector.login(mock_conn)
        self.assertIsNotNone(login, 'Pass if value is returned from server')
    @classmethod
    def tearDownClass(cls):
        '''
        Tear down for class to destroy items created during class under test
        '''
        super(TestConnector, cls).tearDownClass()
        logging.disable(logging.NOTSET)
        cls.settings = ''
        cls.username = ''
        cls.password = ''
        del cls.connector
if __name__ == "__main__":
unittest.main()
|
python
|
from rsTools.ui.menus import menu
from ... import shelf, shelfButton
from rsTools.glob import *
import os
import rsTools.utils.osUtils.enviroments as env
import maya.cmds as cmds
from rsTools.core.skeleton import skeletonAsset
class skeletonShelfButton(shelfButton.ShelfButton):
    """Maya shelf button exposing joint tools and rig-skeleton creation.

    Left-click opens the joints UI; right-click opens a context menu built
    in `created()`.
    """
    # Shelf-button presentation attributes consumed by the ShelfButton base.
    _color = (255, 157, 0)
    _title = None
    _icon = os.path.join(g_rs_path_image_shelf, "bones.svg")
    _annotation = "anim"
    _enabled = True
    # main overloading proc
    def created(self):
        # Context-menu description consumed by menu.ContextMenu; attached to
        # the right mouse button below.
        _menuItems = {"tearOff": False,
                      "enabled": True,
                      "items_provider": None,
                      "async": False,
                      "cache": 0,
                      "items": [{"label": "QuickUI", "enabled": False},
                                {"label": "joint Tool Box", "command": lambda: self._buildJointToolBox(
                                ), "toolTip": "NICE"},
                                {"label": "joint Pose UI", "command": lambda: self._buildJointPoseUI(
                                ), "toolTip": "NICE"},
                                "separator",
                                {"label": "rigSkeleton Build", "enabled": False},
                                {"label": "rigSkeleton", "actionType": "skeletonAsset", "id": "skeletonAction",
                                 "command": lambda: self._createRigSkeleton(), "toolTip": "Create rigSkeletonAsset"},
                                ]
                      }
        self._menu = menu.ContextMenu(_menuItems)
        self._menu.attachTo(self._widget, button="right")
    def _buildJointToolBox(self):
        # Imported lazily so the UI module loads only when requested.
        import rsTools.ui.interfaces.joints.jointsUI as jointUI
        jointUI.run()
    def _buildJointPoseUI(self):
        import rsTools.ui.interfaces.jointPose.jointPoseUI as jointUI
        jointUI.run()
    def _createRigSkeleton(self):
        # Builds a SkeletonAsset from the current Maya selection (no-op if
        # nothing is selected).
        ls = cmds.ls(sl=True)
        if ls:
            obb = self._menu.getActionByID("skeletonAction")
            assetName = obb.getText()
            lod = obb.getComboText()
            project = env.get_project_show()
            topNodeName = project+"_rigSkeleton_"+assetName+lod+"_GRP"
            # NOTE(review): the created asset is not stored anywhere; presumably
            # SkeletonAsset's constructor performs the scene-side work — confirm.
            rigSkeletonAsset = skeletonAsset.SkeletonAsset(topNodeName, ls[0])
            obb.setPlaceholderText("bearA")
    def mousePressed(self):
        import rsTools.ui.interfaces.joints.jointsUI as ui
        ui.run()
|
python
|
# must be first, as it does event loop patching and other "first" things
from common.page_tokens import PageTokenManager
from oozer.entities.collect_entities_iterators import (
iter_collect_entities_per_page,
iter_collect_entities_per_page_graph,
)
from tests.base.testcase import TestCase, mock
from common.enums.entity import Entity
from common.enums.reporttype import ReportType
from common.id_tools import generate_universal_id
from oozer.common.cold_storage.batch_store import ChunkDumpStore
from oozer.common.job_scope import JobScope
from oozer.common.enum import FB_PAGE_MODEL, FB_PAGE_POST_MODEL, FB_AD_VIDEO_MODEL
from tests.base import random
class TestCollectEntitiesPerPage(TestCase):
    """Verifies vendor data injected by iter_collect_entities_per_page."""
    def setUp(self):
        super().setUp()
        # Fresh random ids per test keep runs independent.
        self.sweep_id = random.gen_string_id()
        self.scope_id = random.gen_string_id()
        self.ad_account_id = random.gen_string_id()
    def test_correct_vendor_data_inserted_into_cold_store_payload_posts(self):
        """Each stored entity payload must carry the '__oprm' universal id."""
        entity_types = [Entity.PagePost, Entity.PageVideo]
        fb_model_map = {Entity.PagePost: FB_PAGE_POST_MODEL, Entity.PageVideo: FB_AD_VIDEO_MODEL}
        get_all_method_map = {Entity.PagePost: 'get_posts', Entity.PageVideo: 'get_videos'}
        for entity_type in entity_types:
            with self.subTest(f'Entity type = "{entity_type}"'):
                fbid = random.gen_string_id()
                FB_MODEL = fb_model_map[entity_type]
                get_method_name = get_all_method_map[entity_type]
                job_scope = JobScope(
                    sweep_id=self.sweep_id,
                    ad_account_id=self.ad_account_id,
                    report_type=ReportType.entity,
                    report_variant=entity_type,
                    tokens=['blah'],
                )
                # The id the collector is expected to stamp on the payload.
                universal_id_should_be = generate_universal_id(
                    ad_account_id=self.ad_account_id,
                    report_type=ReportType.entity,
                    entity_id=fbid,
                    entity_type=entity_type,
                )
                fb_data = FB_MODEL(fbid=fbid)
                fb_data['account_id'] = '0'
                entities_data = [fb_data]
                # Patch the FB fetch to return canned data and capture what
                # gets written to cold storage.
                with mock.patch.object(FB_PAGE_MODEL, get_method_name, return_value=entities_data), mock.patch.object(
                    ChunkDumpStore, 'store'
                ) as store:
                    list(iter_collect_entities_per_page(job_scope))
                assert store.called
                store_args, store_keyword_args = store.call_args
                assert not store_keyword_args
                assert len(store_args) == 1, 'Store method should be called with just 1 parameter'
                data_actual = store_args[0]
                vendor_data_key = '__oprm'
                assert (
                    vendor_data_key in data_actual and type(data_actual[vendor_data_key]) == dict
                ), 'Special vendor key is present in the returned data'
                assert data_actual[vendor_data_key] == {
                    'id': universal_id_should_be
                }, 'Vendor data is set with the right universal id'
class TestCollectEntitiesPerPageGraph(TestCase):
    """Verifies vendor data injected by iter_collect_entities_per_page_graph."""
    def setUp(self):
        super().setUp()
        # Fresh random ids per test keep runs independent.
        self.sweep_id = random.gen_string_id()
        self.scope_id = random.gen_string_id()
        self.ad_account_id = random.gen_string_id()
    def test_correct_vendor_data_inserted_into_cold_store_payload_posts(self):
        """Each stored promotable-post payload must carry the '__oprm' universal id."""
        entity_types = [Entity.PagePostPromotable]
        fb_model_map = {Entity.PagePostPromotable: FB_PAGE_POST_MODEL}
        for entity_type in entity_types:
            with self.subTest(f'Entity type - "{entity_type}"'):
                fbid = random.gen_string_id()
                FB_MODEL = fb_model_map[entity_type]
                job_scope = JobScope(
                    sweep_id=self.sweep_id,
                    ad_account_id=self.ad_account_id,
                    report_type=ReportType.entity,
                    report_variant=entity_type,
                    tokens=['user-token'],
                )
                # The id the collector is expected to stamp on the payload.
                universal_id_should_be = generate_universal_id(
                    ad_account_id=self.ad_account_id,
                    report_type=ReportType.entity,
                    entity_id=fbid,
                    entity_type=entity_type,
                )
                fb_data = FB_MODEL(fbid=fbid)
                fb_data['account_id'] = '0'
                entities_data = [fb_data]
                # Patch token lookup, both feed endpoints, cold storage, and
                # the promotability flag check; capture stored payloads.
                with mock.patch.object(
                    PageTokenManager, 'get_best_token', return_value=None
                ) as get_best_token, mock.patch.object(
                    FB_PAGE_MODEL, 'get_feed', return_value=entities_data
                ), mock.patch.object(
                    FB_PAGE_MODEL, 'get_ads_posts', return_value=entities_data
                ), mock.patch.object(
                    ChunkDumpStore, 'store'
                ) as store, mock.patch.object(
                    FB_PAGE_POST_MODEL, 'get', side_effect=lambda field: field == 'is_eligible_for_promotion'
                ):
                    list(iter_collect_entities_per_page_graph(job_scope))
                assert get_best_token.called
                assert store.called
                store_args, store_keyword_args = store.call_args
                assert not store_keyword_args
                assert len(store_args) == 1, 'Store method should be called with just 1 parameter'
                data_actual = store_args[0]
                vendor_data_key = '__oprm'
                assert (
                    vendor_data_key in data_actual and type(data_actual[vendor_data_key]) == dict
                ), 'Special vendor key is present in the returned data'
                assert data_actual[vendor_data_key] == {
                    'id': universal_id_should_be
                }, 'Vendor data is set with the right universal id'
|
python
|
"""
Model
"""
from . import attention
from . import evaluator
from . import GAN_model
from . import GAN_trainer
from . import generator
from . import trainer
|
python
|
import json
from typing import Dict, Optional
from azure.durable_functions.models.FunctionContext import FunctionContext
class DurableOrchestrationBindings:
    """Binding information.

    Provides information relevant to the creation and management of
    durable functions.
    """
    # parameter names are as defined by JSON schema and do not conform to PEP8 naming conventions
    def __init__(self, taskHubName: str, creationUrls: Dict[str, str],
                 managementUrls: Dict[str, str], rpcBaseUrl: Optional[str] = None, **kwargs):
        self._task_hub_name: str = taskHubName
        self._creation_urls: Dict[str, str] = creationUrls
        self._management_urls: Dict[str, str] = managementUrls
        # TODO: we can remove the Optional once we drop support for 1.x,
        # this is always provided in 2.x
        self._rpc_base_url: Optional[str] = rpcBaseUrl
        # Any extra JSON fields are treated as client context data.
        self._client_data = FunctionContext(**kwargs)
    @property
    def task_hub_name(self) -> str:
        """Get the name of the container that is used for orchestrations."""
        return self._task_hub_name
    @property
    def creation_urls(self) -> Dict[str, str]:
        """Get the URLs that are used for creating new orchestrations."""
        return self._creation_urls
    @property
    def management_urls(self) -> Dict[str, str]:
        """Get the URLs that are used for managing orchestrations."""
        return self._management_urls
    @property
    def rpc_base_url(self) -> Optional[str]:
        """Get the base url communication between out of proc workers and the function host."""
        return self._rpc_base_url
    @property
    def client_data(self) -> FunctionContext:
        """Get any additional client data provided within the context of the client."""
        return self._client_data
    @classmethod
    def from_json(cls, json_string: str) -> 'DurableOrchestrationBindings':
        """Convert the value passed into a new instance of the class.

        Parameters
        ----------
        json_string
            Context passed a JSON serializable value to be converted into an
            instance of the class

        Returns
        -------
        DurableOrchestrationBindings
            New instance of the durable orchestration binding class
        """
        json_dict = json.loads(json_string)
        return cls(**json_dict)
|
python
|
# Copyright 2014-2020 Free Software Foundation, Inc.
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# This file is part of the GDB testsuite. It tests the xmethods support
# in the Python extension language.
import gdb
import re
from gdb.xmethod import XMethod
from gdb.xmethod import XMethodMatcher, XMethodWorker
from gdb.xmethod import SimpleXMethodMatcher
def A_plus_A(obj, opr):
    """xmethod worker for dop::A::operator+: sum of both operands' 'a' members."""
    print('From Python <A_plus_A>:')
    lhs, rhs = obj['a'], opr['a']
    return lhs + rhs
def plus_plus_A(obj):
    """xmethod worker for dop::A::operator++: the 'a' member plus one."""
    print('From Python <plus_plus_A>:')
    current = obj['a']
    return current + 1
def A_geta(obj):
    """xmethod worker for dop::A::geta: accessor for the 'a' member."""
    print('From Python <A_geta>:')
    value = obj['a']
    return value
def A_getarrayind(obj, index):
    """xmethod worker for dop::A::getarrayind: element *index* of 'array'."""
    print('From Python <A_getarrayind>:')
    elements = obj['array']
    return elements[index]
def A_indexoper(obj, index):
    """xmethod worker for dop::A::operator[]: reference to the indexed element."""
    element = obj['array'][index]
    return element.reference_value()
def B_indexoper(obj, index):
    """xmethod worker for dop::B::operator[]: const reference to the indexed element."""
    element = obj['array'][index]
    return element.const_value().reference_value()
type_A = gdb.parse_and_eval('(dop::A *) 0').type.target()
type_B = gdb.parse_and_eval('(dop::B *) 0').type.target()
type_int = gdb.parse_and_eval('(int *) 0').type.target()
# The E class matcher and worker test two things:
# 1. xmethod returning None.
# 2. Matcher returning a list of workers.
class E_method_char_worker(XMethodWorker):
    """Worker for dop::E::method(char); exercises an xmethod returning None."""
    def __init__(self):
        pass
    def get_arg_types(self):
        # Single 'char' argument.
        return gdb.lookup_type('char')
    def get_result_type(self, obj, arg):
        return gdb.lookup_type('void')
    def __call__(self, obj, arg):
        print('From Python <E_method_char>')
        return None
class E_method_int_worker(XMethodWorker):
    """Worker for dop::E::method(int); has no get_result_type on purpose."""
    def __init__(self):
        pass
    def get_arg_types(self):
        # Single 'int' argument.
        return gdb.lookup_type('int')
    # Note: get_result_type method elided on purpose
    def __call__(self, obj, arg):
        print('From Python <E_method_int>')
        return None
class E_method_matcher(XMethodMatcher):
    """Matcher for dop::E::method; returns a LIST of workers, one per enabled
    overload, to exercise list-returning matchers."""
    def __init__(self):
        XMethodMatcher.__init__(self, 'E_methods')
        self.methods = [XMethod('method_int'), XMethod('method_char')]
    def match(self, class_type, method_name):
        class_tag = class_type.unqualified().tag
        # Only dop::E's 'method' is handled.
        if not re.match('^dop::E$', class_tag):
            return None
        if not re.match('^method$', method_name):
            return None
        workers = []
        if self.methods[0].enabled:
            workers.append(E_method_int_worker())
        if self.methods[1].enabled:
            workers.append(E_method_char_worker())
        return workers
# The G class method matcher and worker illustrate how to write
# xmethod matchers and workers for template classes and template
# methods.
class G_size_diff_worker(XMethodWorker):
    """Worker for G<T>::size_diff<T1>(): sizeof(T1) - sizeof(T)."""
    def __init__(self, class_template_type, method_template_type):
        self._class_template_type = class_template_type
        self._method_template_type = method_template_type
    def get_arg_types(self):
        # No arguments.
        pass
    def __call__(self, obj):
        print('From Python G<>::size_diff()')
        return (self._method_template_type.sizeof -
                self._class_template_type.sizeof)
class G_size_mul_worker(XMethodWorker):
    """Worker for G<T>::size_mul<N>(): sizeof(T) * N."""
    def __init__(self, class_template_type, method_template_val):
        self._class_template_type = class_template_type
        self._method_template_val = method_template_val
    def get_arg_types(self):
        # No arguments.
        pass
    def __call__(self, obj):
        print('From Python G<>::size_mul()')
        return self._class_template_type.sizeof * self._method_template_val
class G_mul_worker(XMethodWorker):
    """Worker for G<T>::mul<T1>(T1 x): obj.t * x."""
    def __init__(self, class_template_type, method_template_type):
        self._class_template_type = class_template_type
        self._method_template_type = method_template_type
    def get_arg_types(self):
        # Single argument of the method's template type.
        return self._method_template_type
    def __call__(self, obj, arg):
        print('From Python G<>::mul()')
        return obj['t'] * arg
class G_methods_matcher(XMethodMatcher):
    """Matcher for template class dop::G<T> with template methods.

    Parses the class tag and method name with regexes to recover the
    template arguments, then dispatches to the appropriate worker.
    """
    def __init__(self):
        XMethodMatcher.__init__(self, 'G_methods')
        self.methods = [XMethod('size_diff'),
                        XMethod('size_mul'),
                        XMethod('mul')]
    def _is_enabled(self, name):
        # Returns True when the named xmethod is enabled; implicitly returns
        # None (falsy) otherwise.
        for method in self.methods:
            if method.name == name and method.enabled:
                return True
    def match(self, class_type, method_name):
        class_tag = class_type.unqualified().tag
        if not re.match('^dop::G<[ ]*[_a-zA-Z][ _a-zA-Z0-9]*>$',
                        class_tag):
            return None
        # Class template argument: strip the leading 'dop::G<' and trailing '>'.
        t_name = class_tag[7:-1]
        try:
            t_type = gdb.lookup_type(t_name)
        except gdb.error:
            return None
        if re.match('^size_diff<[ ]*[_a-zA-Z][ _a-zA-Z0-9]*>$', method_name):
            if not self._is_enabled('size_diff'):
                return None
            # Method template argument: strip 'size_diff<' and '>'.
            t1_name = method_name[10:-1]
            try:
                t1_type = gdb.lookup_type(t1_name)
                return G_size_diff_worker(t_type, t1_type)
            except gdb.error:
                return None
        if re.match('^size_mul<[ ]*[0-9]+[ ]*>$', method_name):
            if not self._is_enabled('size_mul'):
                return None
            # Non-type template argument: integer between 'size_mul<' and '>'.
            m_val = int(method_name[9:-1])
            return G_size_mul_worker(t_type, m_val)
        if re.match('^mul<[ ]*[_a-zA-Z][ _a-zA-Z0-9]*>$', method_name):
            if not self._is_enabled('mul'):
                return None
            t1_name = method_name[4:-1]
            try:
                t1_type = gdb.lookup_type(t1_name)
                return G_mul_worker(t_type, t1_type)
            except gdb.error:
                return None
global_dm_list = [
SimpleXMethodMatcher(r'A_plus_A',
r'^dop::A$',
r'operator\+',
A_plus_A,
# This is a replacement, hence match the arg type
# exactly!
type_A.const().reference()),
SimpleXMethodMatcher(r'plus_plus_A',
r'^dop::A$',
r'operator\+\+',
plus_plus_A),
SimpleXMethodMatcher(r'A_geta',
r'^dop::A$',
r'^geta$',
A_geta),
SimpleXMethodMatcher(r'A_getarrayind',
r'^dop::A$',
r'^getarrayind$',
A_getarrayind,
type_int),
SimpleXMethodMatcher(r'A_indexoper',
r'^dop::A$',
r'operator\[\]',
A_indexoper,
type_int),
SimpleXMethodMatcher(r'B_indexoper',
r'^dop::B$',
r'operator\[\]',
B_indexoper,
type_int)
]
for matcher in global_dm_list:
gdb.xmethod.register_xmethod_matcher(gdb, matcher)
gdb.xmethod.register_xmethod_matcher(gdb.current_progspace(),
G_methods_matcher())
gdb.xmethod.register_xmethod_matcher(gdb.current_progspace(),
E_method_matcher())
|
python
|
"""
Unit Testing
"""
def unittest(tests=None):
    """Run a mapping of test specs and collect per-test return codes.

    Each value of *tests* is a dict with keys "function" (callable), "args"
    (argument payload; [] means call with no arguments) and "result" (the
    expected return value).

    Returns:
        Dict mapping each test id to ``[return_code, error_message]`` where
        return_code 0 means the assertion passed and 1 means it failed.
    """
    outcome = {}
    for test_id, spec in tests.items():
        func = spec["function"]
        arguments = spec["args"]
        expected = spec["result"]
        try:
            if arguments == []:
                print(f"Test function: {func}")
                print(f"Test results: {expected}")
                assert func() == expected, "Error"
            else:
                print(f"Test function: {func}")
                print(f"Test arguments: {arguments}")
                print(f"Test results: {expected}")
                # NOTE: the args payload is passed as a single value.
                assert func(arguments) == expected
            outcome[test_id] = [0, ""]
        except Exception as ex:
            print(f"Exception: {ex}")
            outcome[test_id] = [1, str(ex)]
    print(f"Result: {outcome}")
    return outcome
def design_test(functions=None, args=None, results=None):
    """Assemble parallel lists into the test-spec dict used by run_test().

    Pass None for missing scalar entries, [] for missing list entries and
    {} for missing dict entries.

    :: Params
        functions
            Description: All functions under test
            Type: List
        args
            Description: Per-function argument payloads
            Type: List
        results
            Description: Per-function expected results
            Type: List
    """
    specs = {}
    for idx, func in enumerate(functions):
        # Missing entries fall back to their neutral defaults.
        specs[idx] = {
            "function": func if func is not None else None,
            "args": args[idx] if args[idx] is not None else [],
            "result": results[idx] if results[idx] is not None else None,
        }
    return specs
def run_test(tests):
    """Execute a prepared test-spec dict via unittest() and report failures.

    :: Params
        tests
            Description: Test specs in the dict format produced by
                design_test() — each id maps to {"function", "args", "result"}.
            Type: Dictionary
    """
    # Run every test and collect [return_code, error_message] pairs.
    results = unittest(tests)
    # Gather the non-empty error messages in test order.
    failures = [results[i][1] for i in range(len(tests)) if results[i][1] != ""]
    if not failures:
        print("No Errors Found!")
    else:
        for message in failures:
            print("Error Found: {}".format(message))
def main():
    """Demo driver: designs and runs two toy tests."""
    print("Begin Test")
    # --- Internal Functions
    def test_1():
        # NOTE(review): prints and returns None, yet the expected result below
        # is "Hello world" — this demo test will record a failure; confirm
        # whether the functions were meant to *return* their strings.
        print("Hello world")
    def test_2(msg):
        # NOTE(review): "Hello!" has no format placeholder, so msg is ignored —
        # presumably "Hello! {}" was intended.
        print("Hello!".format(msg))
    # --- Input
    # Local Variables
    testspecs = {
        "functions" : [test_1, test_2],
        "args" : [None, "World"],
        "results" : ["Hello world", "Hello! World"]
    }
    tests = design_test(**testspecs)
    run_test(tests)
    # assert test_2("World") == "Hello! World"
if __name__ == "__main__":
main()
|
python
|
# Read n, then n integers on one line; print the mean of the
# non-negative values only.
n = int(input())
m = n  # count of values included in the average
line = input().split()
t = 0  # running sum of the non-negative values
for i in range(0, n):
    if int(line[i]) >= 0:
        t += int(line[i])
    else:
        m -= 1  # negative values are excluded from both sum and count
# NOTE(review): if every value is negative, m reaches 0 and this raises
# ZeroDivisionError — confirm the input guarantees at least one
# non-negative value.
a = t / m
print(a)
|
python
|
import pandas as pd
from pathlib import Path
import sys
import re
from collections import OrderedDict
import pyqtgraph as pg
from pyqtgraph.parametertree import Parameter, ParameterTree
from ephys.ephysanalysis import acq4read
from ephys.mapanalysistools import analyzeMapData as AMD
import pylibrary.tools.fileselector as FS
class ReadDataTable():
    def __init__(self):
        """Hold parameter trees for browsing a pickled acquisition table."""
        self.tree = None
        self.ptree = ParameterTree()
        # self.ptreedata = Parameter.create(name='dataset', type='group', children=self.setParams(0))
        # self.ptree.setParameters(self.ptreedata) # add the table with granularity of "cells"
        self.prottree = ParameterTree()
        # self.setProtocols() # add the protocols
        # self.ptree_layout.addWidget(self.analysis_ptree)
        # self.ptree_layout.addWidget(self.ptree) # Parameter Tree on left
        # self.ptree_layout.addWidget(self.prottree) # protocol tree just below
        #
    def get_filename(self, test=False):
        """Return the currently selected protocol path.

        NOTE(review): relies on self.filename, self.current_DSC and
        self.current_protocol being set elsewhere (not in __init__) —
        confirm callers assign these before invoking.
        """
        if test == True:
            return self.filename
        fn = Path(self.current_DSC.strip(), self.current_protocol)
        print( "filename: ", fn)
        return fn
    def readtable(self, fn=None, datainfo='protocols', listprotocols=False):
        """Read a pickled pandas table and build a day/slice/cell -> protocols tree.

        datainfo selects the 'data_complete' (protocols) or 'data_images'
        column. Returns (sorted unique days, tree dict, full dataframe).
        """
        if datainfo == 'protocols':
            dataname = 'data_complete'
        if datainfo == 'images':
            dataname = 'data_images'
        df = pd.read_pickle(open(fn, 'rb'))
        allprotocols = []
        self.tree = OrderedDict()
        alldays = sorted(set(df.index.values.tolist()))
        for day in alldays:
            # NOTE(review): df.loc[day] yields a Series for a unique index but
            # a DataFrame when a day occurs more than once, in which case the
            # scalar accesses below would misbehave — confirm index uniqueness.
            subdf = df.loc[day]
            print(subdf)
            dayn = subdf.index.values.tolist()
            slices = subdf['slice_slice']
            cells = subdf['cell_cell']
            protocols = subdf[dataname]
            date = subdf['date']
            dsc = str(Path(date, slices, cells))
            self.tree[dsc] = []
            prs = protocols.split(',')
            for i in range(len(prs)):
                if listprotocols:
                    # ANSI-colored listing of each protocol path.
                    print ("\033[0;33;40m "+ str(Path(dayn, slices, cells))+ '%s'%prs[i] + '\033[0;37;40m')
                self.tree[dsc].append(prs[i])
        print(f'Read {len(alldays):d} records with {len(self.tree):d} {dataname:s}')
        # NOTE(review): allprotocols is never populated before being sorted.
        allprotocols = sorted(set(allprotocols))
        return list(alldays), self.tree, df
class BuildGui():
    # GUI test rig: a main window with control buttons at top-left, three
    # parameter trees (analysis / dataset / protocol) stacked below them,
    # and a column of three plots on the right.
    def __init__(self, tree):
        """
        Test fixture
        """
        # tree: OrderedDict mapping day/slice/cell -> list of protocol names
        # (as produced by ReadDataTable.readtable).
        self.basename = '/Users/pbmanis/Documents/data/MRK_Pyramidal'
        self.filename = None
        self.tree = tree
        print('tree: ', tree)
        # Top-level window/widget/layout scaffolding.
        self.mainwin = pg.Qt.QtGui.QMainWindow()
        self.win = pg.Qt.QtGui.QWidget()
        self.main_layout = pg.Qt.QtGui.QGridLayout() # top level layout for the window
        self.win.setLayout(self.main_layout)
        self.mainwin.setCentralWidget(self.win)
        self.mainwin.show()
        self.mainwin.setWindowTitle('Data Selection')
        self.mainwin.setGeometry( 100 , 100 , 1400 , 900)
        # build buttons at top of controls
        # Default selection: the first day/slice/cell entry in the tree.
        self.current_DSC = list(self.tree.keys())[0]
        self.btn_read = pg.Qt.QtGui.QPushButton("Read")
        self.btn_find = pg.Qt.QtGui.QPushButton('Find and Read')
        # use a nested grid layout for the buttons
        button_layout = pg.Qt.QtGui.QGridLayout()
        button_layout.addWidget(self.btn_read, 1, 0, 1, 1)
        # button_layout.addWidget(self.btn_analyze, 0, 1, 1, 1)
        button_layout.addWidget(self.btn_find, 0, 0, 1, 1)
        # build parametertree in left column
        #
        ptreewidth = 320
        self.main_layout.setColumnMinimumWidth(0, ptreewidth)
        # analysis # empty in test rig
        params = [
            {'name': 'Analysis', 'type': 'group', 'children': [],
            }]
        self.analysis_ptree = ParameterTree()
        self.analysis_ptreedata = Parameter.create(name='params', type='group', children=params)
        self.analysis_ptree.setParameters(self.analysis_ptreedata)
        self.ptree = ParameterTree()
        self.ptreedata = Parameter.create(name='dataset', type='group', children=self.setParams(0))
        self.ptree.setParameters(self.ptreedata) # add the table with granularity of "cells"
        self.prottree = ParameterTree()
        self.setProtocols() # add the protocols
        # use a grid layout to hold the trees
        self.ptree_widget = pg.Qt.QtGui.QWidget()
        self.ptree_layout = pg.Qt.QtGui.QGridLayout()
        self.ptree_widget.setLayout(self.ptree_layout)
        self.ptree_layout.setSpacing(2)
        # ptree in row 1 col 0, 4 rows, 2 cols
        self.ptree_layout.addWidget(self.analysis_ptree)
        self.ptree_layout.addWidget(self.ptree) # Parameter Tree on left
        self.ptree_layout.addWidget(self.prottree) # protocol tree just below
        # self.ptree_layout.setColumnStretch(0, 5)
        self.ptree_layout.setRowStretch(0, 5)
        self.ptree_layout.setRowStretch(1, 1)
        self.ptree_layout.setRowStretch(2, 1)
        # build plot window
        self.plots_widget = pg.Qt.QtGui.QWidget()
        self.plots_layout = pg.Qt.QtGui.QGridLayout()
        self.plots_widget.setLayout(self.plots_layout)
        self.plots_layout.setContentsMargins(4, 4, 4, 4)
        self.plots_layout.setSpacing(2)
        self.plots = {}
        # Three stacked plots: zip of (name, starting row, row span).
        for panel in zip(['Wave', 'Average', 'PSTH'], [0, 14, 18], [1, 5, 5],):
            self.plots[panel[0]] = pg.PlotWidget()
            self.plots_layout.addWidget(self.plots[panel[0]],
                panel[1], 0, panel[2], 1)
            self.plots[panel[0]].getAxis('left').setLabel('V', color="#ff0000")
            self.plots[panel[0]].setTitle(panel[0], color="#ff0000")
            self.plots[panel[0]].getAxis('bottom').setLabel('t (sec)', color="#ff0000")
        self.main_layout.addWidget(self.plots_widget, 0, 2, 22, 1)
        self.main_layout.addLayout(button_layout, 0, 0, 1, 2)
        self.main_layout.addWidget(self.ptree_widget, 1, 0, -1, 2)
        self.retrieveAllParameters()
        # connect buttons and ptrees to actions
        self.ptreedata.sigTreeStateChanged.connect(self.update_DSC)
        self.prottreedata.sigTreeStateChanged.connect(self.get_current)
        self.btn_read.clicked.connect(self.read_run)
        # self.btn_analyze.clicked.connect(self.analyze)
        self.btn_find.clicked.connect(self.find_run)
        # print( self.MParams)

    def retrieveAllParameters(self):
        """Stub: would collect all analysis parameters from the trees."""
        pass

    def read_run(self):
        """Stub: handler for the "Read" button."""
        pass

    def find_run(self):
        """Stub: handler for the "Find and Read" button."""
        pass

    def setParams(self, isel):
        """Build the dataset parameter list, preselecting entry *isel*
        of the day/slice/cell tree; stores and returns it.
        """
        self.params = [
            {'name': 'Day', 'type': 'group', 'children':
                [{'name': 'Slices/Cells', 'type': 'list', 'values': list(self.tree.keys()), 'value': list(self.tree.keys())[isel]}]
            }
        ]
        return self.params

    def setProtocols(self):
        """
        Update the protocols to correspond to the current parameters, top protocol selected
        """
        if self.tree == None:
            raise ValueError('setProtocols: Must set up read data before setting up protocols')
        self.protocols = [
            {'name': 'Protos', 'type': 'group', 'children':
                [{'name': 'Protocols', 'type': 'list',
                'values': self.tree[self.current_DSC][:], 'value': self.tree[self.current_DSC][0]}]
            }
        ]
        # Rebuild the protocol tree for the newly selected day/slice/cell
        # and reconnect the change handler.
        self.prottreedata = Parameter.create(name='protocol', type='group', children=self.protocols)
        self.prottree.setParameters(self.prottreedata)
        self.current_protocol = self.tree[self.current_DSC][0]
        self.prottreedata.sigTreeStateChanged.connect(self.get_current)
        return self.protocols

    def get_current(self, param, changes):
        """Record the newly selected protocol from a tree-change event."""
        for param, change, data in changes:
            # path = self.prottreedata.childPath(param)
            # if path is not None:
            # childName = '.'.join(path)
            # else:
            # childName = param.name()
            self.current_protocol = data

    def update_DSC(self, param, changes):
        """Record the newly selected day/slice/cell and refresh protocols."""
        for param, change, data in changes:
            # path = self.ptreedata.childPath(param)
            # if path is not None:
            # childName = '.'.join(path)
            # else:
            # childName = param.name()
            self.current_DSC = data
        self.setProtocols()
def test():
    """Interactive smoke test: read the image table and show the GUI."""
    application = pg.mkQApp()
    reader = ReadDataTable()
    alldays, tree, df = reader.readtable(
        fn='/Users/pbmanis/Desktop/Python/mrk-nf107/NF107Ai32_Het/NF107Ai32_Het.pkl',
        datainfo='images',
    )
    gui = BuildGui(tree)
    # Start the Qt event loop only when not running under an interactive
    # interpreter (python -i), where blocking would be unwanted.
    if sys.flags.interactive != 1:
        pg.Qt.QtGui.QApplication.instance().exec_()


if __name__ == '__main__':
    test()
|
python
|
# Compose a full name from the given and family name parts and print it.
first = "Akash"
last = "Singh"
fname = " ".join((first, last))
print(fname)
|
python
|
# Generated by Django 3.1.5 on 2021-01-13 18:09
import django.db.models.deletion
import django.utils.timezone
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial migration: creates the RequestLog table used for per-request
    audit logging (method, path, user agent, user, timestamp, etc.)."""

    # First migration for this app.
    initial = True

    dependencies = [
        # The `user` FK below targets the project's (swappable) auth user model.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name="RequestLog",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                # Request metadata captured from the incoming HTTP request.
                ("session_key", models.CharField(blank=True, max_length=40)),
                ("http_method", models.CharField(max_length=10)),
                ("http_user_agent", models.TextField(default="")),
                ("http_referer", models.TextField(default="")),
                ("request_path", models.URLField(default="")),
                ("query_string", models.TextField(blank=True, default="")),
                ("remote_addr", models.CharField(default="", max_length=100)),
                # Defaults to the time the row is created.
                ("timestamp", models.DateTimeField(default=django.utils.timezone.now)),
                (
                    "category",
                    models.CharField(
                        help_text="Used to filter / group logs.", max_length=100
                    ),
                ),
                (
                    "label",
                    models.CharField(
                        help_text="Used to identify individual logs.", max_length=100
                    ),
                ),
                (
                    "user",
                    models.ForeignKey(
                        blank=True,
                        null=True,
                        on_delete=django.db.models.deletion.CASCADE,
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
            ],
            options={
                "abstract": False,
            },
        ),
    ]
|
python
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.