""" Cisco_IOS_XR_ethernet_lldp_oper This module contains a collection of YANG definitions for Cisco IOS\-XR ethernet\-lldp package operational data. This module contains definitions for the following management objects\: lldp\: Link Layer Discovery Protocol operational data Copyright (c) 2013\-2016 by Cisco Systems, Inc. All rights reserved. """ import re import collections from enum import Enum from ydk.types import Empty, YList, YLeafList, DELETE, Decimal64, FixedBitsDict from ydk.errors import YPYError, YPYModelError class LldpL3AddrProtocolEnum(Enum): """ LldpL3AddrProtocolEnum Lldp l3 addr protocol .. data:: ipv4 = 0 IPv4 .. data:: ipv6 = 1 IPv6 """ ipv4 = 0 ipv6 = 1 @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ethernet_lldp_oper as meta return meta._meta_table['LldpL3AddrProtocolEnum'] class Lldp(object): """ Link Layer Discovery Protocol operational data .. attribute:: global_lldp Global LLDP data **type**\: :py:class:`GlobalLldp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.GlobalLldp>` .. attribute:: nodes Per node LLDP operational data **type**\: :py:class:`Nodes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes>` """ _prefix = 'ethernet-lldp-oper' _revision = '2015-11-09' def __init__(self): self.global_lldp = Lldp.GlobalLldp() self.global_lldp.parent = self self.nodes = Lldp.Nodes() self.nodes.parent = self class GlobalLldp(object): """ Global LLDP data .. attribute:: lldp_info The LLDP Global Information of this box **type**\: :py:class:`LldpInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.GlobalLldp.LldpInfo>` """ _prefix = 'ethernet-lldp-oper' _revision = '2015-11-09' def __init__(self): self.parent = None self.lldp_info = Lldp.GlobalLldp.LldpInfo() self.lldp_info.parent = self class LldpInfo(object): """ The LLDP Global Information of this box .. attribute:: hold_time Length of time (in sec) that receiver must keep this packet **type**\: int **range:** 0..4294967295 .. attribute:: re_init Delay (in sec) for LLDP initialization on any interface **type**\: int **range:** 0..4294967295 .. 
attribute:: timer Rate at which LLDP packets are sent (in sec) **type**\: int **range:** 0..4294967295 """ _prefix = 'ethernet-lldp-oper' _revision = '2015-11-09' def __init__(self): self.parent = None self.hold_time = None self.re_init = None self.timer = None @property def _common_path(self): return '/Cisco-IOS-XR-ethernet-lldp-oper:lldp/Cisco-IOS-XR-ethernet-lldp-oper:global-lldp/Cisco-IOS-XR-ethernet-lldp-oper:lldp-info' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return False def _has_data(self): if not self.is_config(): return False if self.hold_time is not None: return True if self.re_init is not None: return True if self.timer is not None: return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ethernet_lldp_oper as meta return meta._meta_table['Lldp.GlobalLldp.LldpInfo']['meta_info'] @property def _common_path(self): return '/Cisco-IOS-XR-ethernet-lldp-oper:lldp/Cisco-IOS-XR-ethernet-lldp-oper:global-lldp' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return False def _has_data(self): if not self.is_config(): return False if self.lldp_info is not None and self.lldp_info._has_data(): return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ethernet_lldp_oper as meta return meta._meta_table['Lldp.GlobalLldp']['meta_info'] class Nodes(object): """ Per node LLDP operational data .. attribute:: node The LLDP operational data for a particular node **type**\: list of :py:class:`Node <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node>` """ _prefix = 'ethernet-lldp-oper' _revision = '2015-11-09' def __init__(self): self.parent = None self.node = YList() self.node.parent = self self.node.name = 'node' class Node(object): """ The LLDP operational data for a particular node .. attribute:: node_name <key> The identifier for the node **type**\: str **pattern:** ([a\-zA\-Z0\-9\_]\*\\d+/){1,2}([a\-zA\-Z0\-9\_]\*\\d+) .. attribute:: interfaces The table of interfaces on which LLDP is running on this node **type**\: :py:class:`Interfaces <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Interfaces>` .. attribute:: neighbors The LLDP neighbor tables on this node **type**\: :py:class:`Neighbors <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Neighbors>` .. attribute:: statistics The LLDP traffic statistics for this node **type**\: :py:class:`Statistics <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Statistics>` """ _prefix = 'ethernet-lldp-oper' _revision = '2015-11-09' def __init__(self): self.parent = None self.node_name = None self.interfaces = Lldp.Nodes.Node.Interfaces() self.interfaces.parent = self self.neighbors = Lldp.Nodes.Node.Neighbors() self.neighbors.parent = self self.statistics = Lldp.Nodes.Node.Statistics() self.statistics.parent = self class Neighbors(object): """ The LLDP neighbor tables on this node .. attribute:: details The detailed LLDP neighbor table **type**\: :py:class:`Details <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Neighbors.Details>` .. attribute:: devices The detailed LLDP neighbor table on this device **type**\: :py:class:`Devices <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Neighbors.Devices>` .. 
attribute:: summaries The LLDP neighbor summary table **type**\: :py:class:`Summaries <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Neighbors.Summaries>` """ _prefix = 'ethernet-lldp-oper' _revision = '2015-11-09' def __init__(self): self.parent = None self.details = Lldp.Nodes.Node.Neighbors.Details() self.details.parent = self self.devices = Lldp.Nodes.Node.Neighbors.Devices() self.devices.parent = self self.summaries = Lldp.Nodes.Node.Neighbors.Summaries() self.summaries.parent = self class Devices(object): """ The detailed LLDP neighbor table on this device .. attribute:: device Detailed information about a LLDP neighbor entry **type**\: list of :py:class:`Device <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Neighbors.Devices.Device>` """ _prefix = 'ethernet-lldp-oper' _revision = '2015-11-09' def __init__(self): self.parent = None self.device = YList() self.device.parent = self self.device.name = 'device' class Device(object): """ Detailed information about a LLDP neighbor entry .. attribute:: device_id The neighboring device identifier **type**\: str .. attribute:: interface_name The interface name **type**\: str **pattern:** (([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+\\.\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]\*\\d+))\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]+))\|([a\-zA\-Z0\-9\_\-]\*\\d+)\|([a\-zA\-Z0\-9\_\-]\*\\d+\\.\\d+)\|(mpls)\|(dwdm) .. attribute:: lldp_neighbor lldp neighbor **type**\: list of :py:class:`LldpNeighbor <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Neighbors.Devices.Device.LldpNeighbor>` """ _prefix = 'ethernet-lldp-oper' _revision = '2015-11-09' def __init__(self): self.parent = None self.device_id = None self.interface_name = None self.lldp_neighbor = YList() self.lldp_neighbor.parent = self self.lldp_neighbor.name = 'lldp_neighbor' class LldpNeighbor(object): """ lldp neighbor .. attribute:: chassis_id Chassis id **type**\: str .. attribute:: detail Detailed neighbor info **type**\: :py:class:`Detail <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Neighbors.Devices.Device.LldpNeighbor.Detail>` .. attribute:: device_id Device identifier **type**\: str .. attribute:: enabled_capabilities Enabled Capabilities **type**\: str .. attribute:: header_version Version number **type**\: int **range:** 0..255 .. attribute:: hold_time Remaining hold time **type**\: int **range:** 0..65535 .. attribute:: mib MIB nieghbor info **type**\: :py:class:`Mib <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Neighbors.Devices.Device.LldpNeighbor.Mib>` .. attribute:: platform Platform type **type**\: str .. attribute:: port_id_detail Outgoing port identifier **type**\: str .. attribute:: receiving_interface_name Interface the neighbor entry was received on **type**\: str **pattern:** (([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+\\.\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]\*\\d+))\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]+))\|([a\-zA\-Z0\-9\_\-]\*\\d+)\|([a\-zA\-Z0\-9\_\-]\*\\d+\\.\\d+)\|(mpls)\|(dwdm) .. 
attribute:: receiving_parent_interface_name Parent Interface the neighbor entry was received on **type**\: str **pattern:** (([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+\\.\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]\*\\d+))\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]+))\|([a\-zA\-Z0\-9\_\-]\*\\d+)\|([a\-zA\-Z0\-9\_\-]\*\\d+\\.\\d+)\|(mpls)\|(dwdm) """ _prefix = 'ethernet-lldp-oper' _revision = '2015-11-09' def __init__(self): self.parent = None self.chassis_id = None self.detail = Lldp.Nodes.Node.Neighbors.Devices.Device.LldpNeighbor.Detail() self.detail.parent = self self.device_id = None self.enabled_capabilities = None self.header_version = None self.hold_time = None self.mib = Lldp.Nodes.Node.Neighbors.Devices.Device.LldpNeighbor.Mib() self.mib.parent = self self.platform = None self.port_id_detail = None self.receiving_interface_name = None self.receiving_parent_interface_name = None class Detail(object): """ Detailed neighbor info .. attribute:: auto_negotiation Auto Negotiation **type**\: str .. attribute:: enabled_capabilities Enabled Capabilities **type**\: str .. attribute:: media_attachment_unit_type Media Attachment Unit type **type**\: int **range:** 0..4294967295 .. attribute:: network_addresses Management Addresses **type**\: :py:class:`NetworkAddresses <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Neighbors.Devices.Device.LldpNeighbor.Detail.NetworkAddresses>` .. attribute:: physical_media_capabilities Physical media capabilities **type**\: str .. attribute:: port_description Port Description **type**\: str .. attribute:: port_vlan_id Vlan ID **type**\: int **range:** 0..4294967295 .. attribute:: system_capabilities System Capabilities **type**\: str .. attribute:: system_description System Description **type**\: str .. attribute:: system_name System Name **type**\: str .. attribute:: time_remaining Time remaining **type**\: int **range:** 0..4294967295 """ _prefix = 'ethernet-lldp-oper' _revision = '2015-11-09' def __init__(self): self.parent = None self.auto_negotiation = None self.enabled_capabilities = None self.media_attachment_unit_type = None self.network_addresses = Lldp.Nodes.Node.Neighbors.Devices.Device.LldpNeighbor.Detail.NetworkAddresses() self.network_addresses.parent = self self.physical_media_capabilities = None self.port_description = None self.port_vlan_id = None self.system_capabilities = None self.system_description = None self.system_name = None self.time_remaining = None class NetworkAddresses(object): """ Management Addresses .. attribute:: lldp_addr_entry lldp addr entry **type**\: list of :py:class:`LldpAddrEntry <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Neighbors.Devices.Device.LldpNeighbor.Detail.NetworkAddresses.LldpAddrEntry>` """ _prefix = 'ethernet-lldp-oper' _revision = '2015-11-09' def __init__(self): self.parent = None self.lldp_addr_entry = YList() self.lldp_addr_entry.parent = self self.lldp_addr_entry.name = 'lldp_addr_entry' class LldpAddrEntry(object): """ lldp addr entry .. attribute:: address Network layer address **type**\: :py:class:`Address <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Neighbors.Devices.Device.LldpNeighbor.Detail.NetworkAddresses.LldpAddrEntry.Address>` .. attribute:: if_num Interface num **type**\: int **range:** 0..4294967295 .. 
attribute:: ma_subtype MA sub type **type**\: int **range:** 0..255 """ _prefix = 'ethernet-lldp-oper' _revision = '2015-11-09' def __init__(self): self.parent = None self.address = Lldp.Nodes.Node.Neighbors.Devices.Device.LldpNeighbor.Detail.NetworkAddresses.LldpAddrEntry.Address() self.address.parent = self self.if_num = None self.ma_subtype = None class Address(object): """ Network layer address .. attribute:: address_type AddressType **type**\: :py:class:`LldpL3AddrProtocolEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.LldpL3AddrProtocolEnum>` .. attribute:: ipv4_address IPv4 address **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? .. attribute:: ipv6_address IPv6 address **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? """ _prefix = 'ethernet-lldp-oper' _revision = '2015-11-09' def __init__(self): self.parent = None self.address_type = None self.ipv4_address = None self.ipv6_address = None @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . Cannot derive path.') return self.parent._common_path +'/Cisco-IOS-XR-ethernet-lldp-oper:address' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return False def _has_data(self): if not self.is_config(): return False if self.address_type is not None: return True if self.ipv4_address is not None: return True if self.ipv6_address is not None: return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ethernet_lldp_oper as meta return meta._meta_table['Lldp.Nodes.Node.Neighbors.Devices.Device.LldpNeighbor.Detail.NetworkAddresses.LldpAddrEntry.Address']['meta_info'] @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . Cannot derive path.') return self.parent._common_path +'/Cisco-IOS-XR-ethernet-lldp-oper:lldp-addr-entry' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return False def _has_data(self): if not self.is_config(): return False if self.address is not None and self.address._has_data(): return True if self.if_num is not None: return True if self.ma_subtype is not None: return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ethernet_lldp_oper as meta return meta._meta_table['Lldp.Nodes.Node.Neighbors.Devices.Device.LldpNeighbor.Detail.NetworkAddresses.LldpAddrEntry']['meta_info'] @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . 
Cannot derive path.') return self.parent._common_path +'/Cisco-IOS-XR-ethernet-lldp-oper:network-addresses' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return False def _has_data(self): if not self.is_config(): return False if self.lldp_addr_entry is not None: for child_ref in self.lldp_addr_entry: if child_ref._has_data(): return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ethernet_lldp_oper as meta return meta._meta_table['Lldp.Nodes.Node.Neighbors.Devices.Device.LldpNeighbor.Detail.NetworkAddresses']['meta_info'] @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . Cannot derive path.') return self.parent._common_path +'/Cisco-IOS-XR-ethernet-lldp-oper:detail' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return False def _has_data(self): if not self.is_config(): return False if self.auto_negotiation is not None: return True if self.enabled_capabilities is not None: return True if self.media_attachment_unit_type is not None: return True if self.network_addresses is not None and self.network_addresses._has_data(): return True if self.physical_media_capabilities is not None: return True if self.port_description is not None: return True if self.port_vlan_id is not None: return True if self.system_capabilities is not None: return True if self.system_description is not None: return True if self.system_name is not None: return True if self.time_remaining is not None: return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ethernet_lldp_oper as meta return meta._meta_table['Lldp.Nodes.Node.Neighbors.Devices.Device.LldpNeighbor.Detail']['meta_info'] class Mib(object): """ MIB neighbor info .. attribute:: chassis_id_len Chassis ID length **type**\: int **range:** 0..65535 .. attribute:: chassis_id_sub_type Chassis ID sub type **type**\: int **range:** 0..255 .. attribute:: combined_capabilities Supported and combined capabilities **type**\: int **range:** 0..4294967295 .. attribute:: org_def_tlv_list Org Def TLV list **type**\: :py:class:`OrgDefTlvList <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Neighbors.Devices.Device.LldpNeighbor.Mib.OrgDefTlvList>` .. attribute:: port_id_len Port ID length **type**\: int **range:** 0..65535 .. attribute:: port_id_sub_type Port ID sub type **type**\: int **range:** 0..255 .. attribute:: rem_index lldpRemIndex **type**\: int **range:** 0..4294967295 .. attribute:: rem_local_port_num LldpPortNumber **type**\: int **range:** 0..4294967295 .. attribute:: rem_time_mark TimeFilter **type**\: int **range:** 0..4294967295 .. 
attribute:: unknown_tlv_list Unknown TLV list **type**\: :py:class:`UnknownTlvList <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Neighbors.Devices.Device.LldpNeighbor.Mib.UnknownTlvList>` """ _prefix = 'ethernet-lldp-oper' _revision = '2015-11-09' def __init__(self): self.parent = None self.chassis_id_len = None self.chassis_id_sub_type = None self.combined_capabilities = None self.org_def_tlv_list = Lldp.Nodes.Node.Neighbors.Devices.Device.LldpNeighbor.Mib.OrgDefTlvList() self.org_def_tlv_list.parent = self self.port_id_len = None self.port_id_sub_type = None self.rem_index = None self.rem_local_port_num = None self.rem_time_mark = None self.unknown_tlv_list = Lldp.Nodes.Node.Neighbors.Devices.Device.LldpNeighbor.Mib.UnknownTlvList() self.unknown_tlv_list.parent = self class UnknownTlvList(object): """ Unknown TLV list .. attribute:: lldp_unknown_tlv_entry lldp unknown tlv entry **type**\: list of :py:class:`LldpUnknownTlvEntry <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Neighbors.Devices.Device.LldpNeighbor.Mib.UnknownTlvList.LldpUnknownTlvEntry>` """ _prefix = 'ethernet-lldp-oper' _revision = '2015-11-09' def __init__(self): self.parent = None self.lldp_unknown_tlv_entry = YList() self.lldp_unknown_tlv_entry.parent = self self.lldp_unknown_tlv_entry.name = 'lldp_unknown_tlv_entry' class LldpUnknownTlvEntry(object): """ lldp unknown tlv entry .. attribute:: tlv_type Unknown TLV type **type**\: int **range:** 0..255 .. attribute:: tlv_value Unknown TLV payload **type**\: str **pattern:** ([0\-9a\-fA\-F]{2}(\:[0\-9a\-fA\-F]{2})\*)? """ _prefix = 'ethernet-lldp-oper' _revision = '2015-11-09' def __init__(self): self.parent = None self.tlv_type = None self.tlv_value = None @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . Cannot derive path.') return self.parent._common_path +'/Cisco-IOS-XR-ethernet-lldp-oper:lldp-unknown-tlv-entry' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return False def _has_data(self): if not self.is_config(): return False if self.tlv_type is not None: return True if self.tlv_value is not None: return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ethernet_lldp_oper as meta return meta._meta_table['Lldp.Nodes.Node.Neighbors.Devices.Device.LldpNeighbor.Mib.UnknownTlvList.LldpUnknownTlvEntry']['meta_info'] @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . Cannot derive path.') return self.parent._common_path +'/Cisco-IOS-XR-ethernet-lldp-oper:unknown-tlv-list' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return False def _has_data(self): if not self.is_config(): return False if self.lldp_unknown_tlv_entry is not None: for child_ref in self.lldp_unknown_tlv_entry: if child_ref._has_data(): return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ethernet_lldp_oper as meta return meta._meta_table['Lldp.Nodes.Node.Neighbors.Devices.Device.LldpNeighbor.Mib.UnknownTlvList']['meta_info'] class OrgDefTlvList(object): """ Org Def TLV list .. 
attribute:: lldp_org_def_tlv_entry lldp org def tlv entry **type**\: list of :py:class:`LldpOrgDefTlvEntry <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Neighbors.Devices.Device.LldpNeighbor.Mib.OrgDefTlvList.LldpOrgDefTlvEntry>` """ _prefix = 'ethernet-lldp-oper' _revision = '2015-11-09' def __init__(self): self.parent = None self.lldp_org_def_tlv_entry = YList() self.lldp_org_def_tlv_entry.parent = self self.lldp_org_def_tlv_entry.name = 'lldp_org_def_tlv_entry' class LldpOrgDefTlvEntry(object): """ lldp org def tlv entry .. attribute:: oui Organizationally Unique Identifier **type**\: int **range:** 0..4294967295 .. attribute:: tlv_info_indes lldpRemOrgDefInfoIndex **type**\: int **range:** 0..4294967295 .. attribute:: tlv_subtype Org Def TLV subtype **type**\: int **range:** 0..255 .. attribute:: tlv_value Org Def TLV payload **type**\: str **pattern:** ([0\-9a\-fA\-F]{2}(\:[0\-9a\-fA\-F]{2})\*)? """ _prefix = 'ethernet-lldp-oper' _revision = '2015-11-09' def __init__(self): self.parent = None self.oui = None self.tlv_info_indes = None self.tlv_subtype = None self.tlv_value = None @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . Cannot derive path.') return self.parent._common_path +'/Cisco-IOS-XR-ethernet-lldp-oper:lldp-org-def-tlv-entry' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return False def _has_data(self): if not self.is_config(): return False if self.oui is not None: return True if self.tlv_info_indes is not None: return True if self.tlv_subtype is not None: return True if self.tlv_value is not None: return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ethernet_lldp_oper as meta return meta._meta_table['Lldp.Nodes.Node.Neighbors.Devices.Device.LldpNeighbor.Mib.OrgDefTlvList.LldpOrgDefTlvEntry']['meta_info'] @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . Cannot derive path.') return self.parent._common_path +'/Cisco-IOS-XR-ethernet-lldp-oper:org-def-tlv-list' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return False def _has_data(self): if not self.is_config(): return False if self.lldp_org_def_tlv_entry is not None: for child_ref in self.lldp_org_def_tlv_entry: if child_ref._has_data(): return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ethernet_lldp_oper as meta return meta._meta_table['Lldp.Nodes.Node.Neighbors.Devices.Device.LldpNeighbor.Mib.OrgDefTlvList']['meta_info'] @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . 
Cannot derive path.') return self.parent._common_path +'/Cisco-IOS-XR-ethernet-lldp-oper:mib' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return False def _has_data(self): if not self.is_config(): return False if self.chassis_id_len is not None: return True if self.chassis_id_sub_type is not None: return True if self.combined_capabilities is not None: return True if self.org_def_tlv_list is not None and self.org_def_tlv_list._has_data(): return True if self.port_id_len is not None: return True if self.port_id_sub_type is not None: return True if self.rem_index is not None: return True if self.rem_local_port_num is not None: return True if self.rem_time_mark is not None: return True if self.unknown_tlv_list is not None and self.unknown_tlv_list._has_data(): return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ethernet_lldp_oper as meta return meta._meta_table['Lldp.Nodes.Node.Neighbors.Devices.Device.LldpNeighbor.Mib']['meta_info'] @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . Cannot derive path.') return self.parent._common_path +'/Cisco-IOS-XR-ethernet-lldp-oper:lldp-neighbor' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return False def _has_data(self): if not self.is_config(): return False if self.chassis_id is not None: return True if self.detail is not None and self.detail._has_data(): return True if self.device_id is not None: return True if self.enabled_capabilities is not None: return True if self.header_version is not None: return True if self.hold_time is not None: return True if self.mib is not None and self.mib._has_data(): return True if self.platform is not None: return True if self.port_id_detail is not None: return True if self.receiving_interface_name is not None: return True if self.receiving_parent_interface_name is not None: return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ethernet_lldp_oper as meta return meta._meta_table['Lldp.Nodes.Node.Neighbors.Devices.Device.LldpNeighbor']['meta_info'] @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . Cannot derive path.') return self.parent._common_path +'/Cisco-IOS-XR-ethernet-lldp-oper:device' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return False def _has_data(self): if not self.is_config(): return False if self.device_id is not None: return True if self.interface_name is not None: return True if self.lldp_neighbor is not None: for child_ref in self.lldp_neighbor: if child_ref._has_data(): return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ethernet_lldp_oper as meta return meta._meta_table['Lldp.Nodes.Node.Neighbors.Devices.Device']['meta_info'] @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . 
Cannot derive path.') return self.parent._common_path +'/Cisco-IOS-XR-ethernet-lldp-oper:devices' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return False def _has_data(self): if not self.is_config(): return False if self.device is not None: for child_ref in self.device: if child_ref._has_data(): return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ethernet_lldp_oper as meta return meta._meta_table['Lldp.Nodes.Node.Neighbors.Devices']['meta_info'] class Details(object): """ The detailed LLDP neighbor table .. attribute:: detail Detailed information about a LLDP neighbor entry **type**\: list of :py:class:`Detail <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Neighbors.Details.Detail>` """ _prefix = 'ethernet-lldp-oper' _revision = '2015-11-09' def __init__(self): self.parent = None self.detail = YList() self.detail.parent = self self.detail.name = 'detail' class Detail(object): """ Detailed information about a LLDP neighbor entry .. attribute:: device_id The neighboring device identifier **type**\: str .. attribute:: interface_name The interface name **type**\: str **pattern:** (([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+\\.\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]\*\\d+))\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]+))\|([a\-zA\-Z0\-9\_\-]\*\\d+)\|([a\-zA\-Z0\-9\_\-]\*\\d+\\.\\d+)\|(mpls)\|(dwdm) .. attribute:: lldp_neighbor lldp neighbor **type**\: list of :py:class:`LldpNeighbor <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Neighbors.Details.Detail.LldpNeighbor>` """ _prefix = 'ethernet-lldp-oper' _revision = '2015-11-09' def __init__(self): self.parent = None self.device_id = None self.interface_name = None self.lldp_neighbor = YList() self.lldp_neighbor.parent = self self.lldp_neighbor.name = 'lldp_neighbor' class LldpNeighbor(object): """ lldp neighbor .. attribute:: chassis_id Chassis id **type**\: str .. attribute:: detail Detailed neighbor info **type**\: :py:class:`Detail_ <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Neighbors.Details.Detail.LldpNeighbor.Detail_>` .. attribute:: device_id Device identifier **type**\: str .. attribute:: enabled_capabilities Enabled Capabilities **type**\: str .. attribute:: header_version Version number **type**\: int **range:** 0..255 .. attribute:: hold_time Remaining hold time **type**\: int **range:** 0..65535 .. attribute:: mib MIB nieghbor info **type**\: :py:class:`Mib <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Neighbors.Details.Detail.LldpNeighbor.Mib>` .. attribute:: platform Platform type **type**\: str .. attribute:: port_id_detail Outgoing port identifier **type**\: str .. attribute:: receiving_interface_name Interface the neighbor entry was received on **type**\: str **pattern:** (([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+\\.\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]\*\\d+))\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]+))\|([a\-zA\-Z0\-9\_\-]\*\\d+)\|([a\-zA\-Z0\-9\_\-]\*\\d+\\.\\d+)\|(mpls)\|(dwdm) .. 
attribute:: receiving_parent_interface_name Parent Interface the neighbor entry was received on **type**\: str **pattern:** (([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+\\.\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]\*\\d+))\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]+))\|([a\-zA\-Z0\-9\_\-]\*\\d+)\|([a\-zA\-Z0\-9\_\-]\*\\d+\\.\\d+)\|(mpls)\|(dwdm) """ _prefix = 'ethernet-lldp-oper' _revision = '2015-11-09' def __init__(self): self.parent = None self.chassis_id = None self.detail = Lldp.Nodes.Node.Neighbors.Details.Detail.LldpNeighbor.Detail_() self.detail.parent = self self.device_id = None self.enabled_capabilities = None self.header_version = None self.hold_time = None self.mib = Lldp.Nodes.Node.Neighbors.Details.Detail.LldpNeighbor.Mib() self.mib.parent = self self.platform = None self.port_id_detail = None self.receiving_interface_name = None self.receiving_parent_interface_name = None class Detail_(object): """ Detailed neighbor info .. attribute:: auto_negotiation Auto Negotiation **type**\: str .. attribute:: enabled_capabilities Enabled Capabilities **type**\: str .. attribute:: media_attachment_unit_type Media Attachment Unit type **type**\: int **range:** 0..4294967295 .. attribute:: network_addresses Management Addresses **type**\: :py:class:`NetworkAddresses <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Neighbors.Details.Detail.LldpNeighbor.Detail_.NetworkAddresses>` .. attribute:: physical_media_capabilities Physical media capabilities **type**\: str .. attribute:: port_description Port Description **type**\: str .. attribute:: port_vlan_id Vlan ID **type**\: int **range:** 0..4294967295 .. attribute:: system_capabilities System Capabilities **type**\: str .. attribute:: system_description System Description **type**\: str .. attribute:: system_name System Name **type**\: str .. attribute:: time_remaining Time remaining **type**\: int **range:** 0..4294967295 """ _prefix = 'ethernet-lldp-oper' _revision = '2015-11-09' def __init__(self): self.parent = None self.auto_negotiation = None self.enabled_capabilities = None self.media_attachment_unit_type = None self.network_addresses = Lldp.Nodes.Node.Neighbors.Details.Detail.LldpNeighbor.Detail_.NetworkAddresses() self.network_addresses.parent = self self.physical_media_capabilities = None self.port_description = None self.port_vlan_id = None self.system_capabilities = None self.system_description = None self.system_name = None self.time_remaining = None class NetworkAddresses(object): """ Management Addresses .. attribute:: lldp_addr_entry lldp addr entry **type**\: list of :py:class:`LldpAddrEntry <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Neighbors.Details.Detail.LldpNeighbor.Detail_.NetworkAddresses.LldpAddrEntry>` """ _prefix = 'ethernet-lldp-oper' _revision = '2015-11-09' def __init__(self): self.parent = None self.lldp_addr_entry = YList() self.lldp_addr_entry.parent = self self.lldp_addr_entry.name = 'lldp_addr_entry' class LldpAddrEntry(object): """ lldp addr entry .. attribute:: address Network layer address **type**\: :py:class:`Address <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Neighbors.Details.Detail.LldpNeighbor.Detail_.NetworkAddresses.LldpAddrEntry.Address>` .. attribute:: if_num Interface num **type**\: int **range:** 0..4294967295 .. 
attribute:: ma_subtype MA sub type **type**\: int **range:** 0..255 """ _prefix = 'ethernet-lldp-oper' _revision = '2015-11-09' def __init__(self): self.parent = None self.address = Lldp.Nodes.Node.Neighbors.Details.Detail.LldpNeighbor.Detail_.NetworkAddresses.LldpAddrEntry.Address() self.address.parent = self self.if_num = None self.ma_subtype = None class Address(object): """ Network layer address .. attribute:: address_type AddressType **type**\: :py:class:`LldpL3AddrProtocolEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.LldpL3AddrProtocolEnum>` .. attribute:: ipv4_address IPv4 address **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? .. attribute:: ipv6_address IPv6 address **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? """ _prefix = 'ethernet-lldp-oper' _revision = '2015-11-09' def __init__(self): self.parent = None self.address_type = None self.ipv4_address = None self.ipv6_address = None @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . Cannot derive path.') return self.parent._common_path +'/Cisco-IOS-XR-ethernet-lldp-oper:address' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return False def _has_data(self): if not self.is_config(): return False if self.address_type is not None: return True if self.ipv4_address is not None: return True if self.ipv6_address is not None: return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ethernet_lldp_oper as meta return meta._meta_table['Lldp.Nodes.Node.Neighbors.Details.Detail.LldpNeighbor.Detail_.NetworkAddresses.LldpAddrEntry.Address']['meta_info'] @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . Cannot derive path.') return self.parent._common_path +'/Cisco-IOS-XR-ethernet-lldp-oper:lldp-addr-entry' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return False def _has_data(self): if not self.is_config(): return False if self.address is not None and self.address._has_data(): return True if self.if_num is not None: return True if self.ma_subtype is not None: return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ethernet_lldp_oper as meta return meta._meta_table['Lldp.Nodes.Node.Neighbors.Details.Detail.LldpNeighbor.Detail_.NetworkAddresses.LldpAddrEntry']['meta_info'] @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . 
Cannot derive path.') return self.parent._common_path +'/Cisco-IOS-XR-ethernet-lldp-oper:network-addresses' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return False def _has_data(self): if not self.is_config(): return False if self.lldp_addr_entry is not None: for child_ref in self.lldp_addr_entry: if child_ref._has_data(): return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ethernet_lldp_oper as meta return meta._meta_table['Lldp.Nodes.Node.Neighbors.Details.Detail.LldpNeighbor.Detail_.NetworkAddresses']['meta_info'] @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . Cannot derive path.') return self.parent._common_path +'/Cisco-IOS-XR-ethernet-lldp-oper:detail' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return False def _has_data(self): if not self.is_config(): return False if self.auto_negotiation is not None: return True if self.enabled_capabilities is not None: return True if self.media_attachment_unit_type is not None: return True if self.network_addresses is not None and self.network_addresses._has_data(): return True if self.physical_media_capabilities is not None: return True if self.port_description is not None: return True if self.port_vlan_id is not None: return True if self.system_capabilities is not None: return True if self.system_description is not None: return True if self.system_name is not None: return True if self.time_remaining is not None: return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ethernet_lldp_oper as meta return meta._meta_table['Lldp.Nodes.Node.Neighbors.Details.Detail.LldpNeighbor.Detail_']['meta_info'] class Mib(object): """ MIB neighbor info .. attribute:: chassis_id_len Chassis ID length **type**\: int **range:** 0..65535 .. attribute:: chassis_id_sub_type Chassis ID sub type **type**\: int **range:** 0..255 .. attribute:: combined_capabilities Supported and combined capabilities **type**\: int **range:** 0..4294967295 .. attribute:: org_def_tlv_list Org Def TLV list **type**\: :py:class:`OrgDefTlvList <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Neighbors.Details.Detail.LldpNeighbor.Mib.OrgDefTlvList>` .. attribute:: port_id_len Port ID length **type**\: int **range:** 0..65535 .. attribute:: port_id_sub_type Port ID sub type **type**\: int **range:** 0..255 .. attribute:: rem_index lldpRemIndex **type**\: int **range:** 0..4294967295 .. attribute:: rem_local_port_num LldpPortNumber **type**\: int **range:** 0..4294967295 .. attribute:: rem_time_mark TimeFilter **type**\: int **range:** 0..4294967295 .. 
attribute:: unknown_tlv_list Unknown TLV list **type**\: :py:class:`UnknownTlvList <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Neighbors.Details.Detail.LldpNeighbor.Mib.UnknownTlvList>` """ _prefix = 'ethernet-lldp-oper' _revision = '2015-11-09' def __init__(self): self.parent = None self.chassis_id_len = None self.chassis_id_sub_type = None self.combined_capabilities = None self.org_def_tlv_list = Lldp.Nodes.Node.Neighbors.Details.Detail.LldpNeighbor.Mib.OrgDefTlvList() self.org_def_tlv_list.parent = self self.port_id_len = None self.port_id_sub_type = None self.rem_index = None self.rem_local_port_num = None self.rem_time_mark = None self.unknown_tlv_list = Lldp.Nodes.Node.Neighbors.Details.Detail.LldpNeighbor.Mib.UnknownTlvList() self.unknown_tlv_list.parent = self class UnknownTlvList(object): """ Unknown TLV list .. attribute:: lldp_unknown_tlv_entry lldp unknown tlv entry **type**\: list of :py:class:`LldpUnknownTlvEntry <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Neighbors.Details.Detail.LldpNeighbor.Mib.UnknownTlvList.LldpUnknownTlvEntry>` """ _prefix = 'ethernet-lldp-oper' _revision = '2015-11-09' def __init__(self): self.parent = None self.lldp_unknown_tlv_entry = YList() self.lldp_unknown_tlv_entry.parent = self self.lldp_unknown_tlv_entry.name = 'lldp_unknown_tlv_entry' class LldpUnknownTlvEntry(object): """ lldp unknown tlv entry .. attribute:: tlv_type Unknown TLV type **type**\: int **range:** 0..255 .. attribute:: tlv_value Unknown TLV payload **type**\: str **pattern:** ([0\-9a\-fA\-F]{2}(\:[0\-9a\-fA\-F]{2})\*)? """ _prefix = 'ethernet-lldp-oper' _revision = '2015-11-09' def __init__(self): self.parent = None self.tlv_type = None self.tlv_value = None @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . Cannot derive path.') return self.parent._common_path +'/Cisco-IOS-XR-ethernet-lldp-oper:lldp-unknown-tlv-entry' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return False def _has_data(self): if not self.is_config(): return False if self.tlv_type is not None: return True if self.tlv_value is not None: return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ethernet_lldp_oper as meta return meta._meta_table['Lldp.Nodes.Node.Neighbors.Details.Detail.LldpNeighbor.Mib.UnknownTlvList.LldpUnknownTlvEntry']['meta_info'] @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . Cannot derive path.') return self.parent._common_path +'/Cisco-IOS-XR-ethernet-lldp-oper:unknown-tlv-list' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return False def _has_data(self): if not self.is_config(): return False if self.lldp_unknown_tlv_entry is not None: for child_ref in self.lldp_unknown_tlv_entry: if child_ref._has_data(): return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ethernet_lldp_oper as meta return meta._meta_table['Lldp.Nodes.Node.Neighbors.Details.Detail.LldpNeighbor.Mib.UnknownTlvList']['meta_info'] class OrgDefTlvList(object): """ Org Def TLV list .. 
attribute:: lldp_org_def_tlv_entry lldp org def tlv entry **type**\: list of :py:class:`LldpOrgDefTlvEntry <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Neighbors.Details.Detail.LldpNeighbor.Mib.OrgDefTlvList.LldpOrgDefTlvEntry>` """ _prefix = 'ethernet-lldp-oper' _revision = '2015-11-09' def __init__(self): self.parent = None self.lldp_org_def_tlv_entry = YList() self.lldp_org_def_tlv_entry.parent = self self.lldp_org_def_tlv_entry.name = 'lldp_org_def_tlv_entry' class LldpOrgDefTlvEntry(object): """ lldp org def tlv entry .. attribute:: oui Organizationally Unique Identifier **type**\: int **range:** 0..4294967295 .. attribute:: tlv_info_indes lldpRemOrgDefInfoIndex **type**\: int **range:** 0..4294967295 .. attribute:: tlv_subtype Org Def TLV subtype **type**\: int **range:** 0..255 .. attribute:: tlv_value Org Def TLV payload **type**\: str **pattern:** ([0\-9a\-fA\-F]{2}(\:[0\-9a\-fA\-F]{2})\*)? """ _prefix = 'ethernet-lldp-oper' _revision = '2015-11-09' def __init__(self): self.parent = None self.oui = None self.tlv_info_indes = None self.tlv_subtype = None self.tlv_value = None @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . Cannot derive path.') return self.parent._common_path +'/Cisco-IOS-XR-ethernet-lldp-oper:lldp-org-def-tlv-entry' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return False def _has_data(self): if not self.is_config(): return False if self.oui is not None: return True if self.tlv_info_indes is not None: return True if self.tlv_subtype is not None: return True if self.tlv_value is not None: return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ethernet_lldp_oper as meta return meta._meta_table['Lldp.Nodes.Node.Neighbors.Details.Detail.LldpNeighbor.Mib.OrgDefTlvList.LldpOrgDefTlvEntry']['meta_info'] @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . Cannot derive path.') return self.parent._common_path +'/Cisco-IOS-XR-ethernet-lldp-oper:org-def-tlv-list' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return False def _has_data(self): if not self.is_config(): return False if self.lldp_org_def_tlv_entry is not None: for child_ref in self.lldp_org_def_tlv_entry: if child_ref._has_data(): return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ethernet_lldp_oper as meta return meta._meta_table['Lldp.Nodes.Node.Neighbors.Details.Detail.LldpNeighbor.Mib.OrgDefTlvList']['meta_info'] @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . 
Cannot derive path.') return self.parent._common_path +'/Cisco-IOS-XR-ethernet-lldp-oper:mib' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return False def _has_data(self): if not self.is_config(): return False if self.chassis_id_len is not None: return True if self.chassis_id_sub_type is not None: return True if self.combined_capabilities is not None: return True if self.org_def_tlv_list is not None and self.org_def_tlv_list._has_data(): return True if self.port_id_len is not None: return True if self.port_id_sub_type is not None: return True if self.rem_index is not None: return True if self.rem_local_port_num is not None: return True if self.rem_time_mark is not None: return True if self.unknown_tlv_list is not None and self.unknown_tlv_list._has_data(): return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ethernet_lldp_oper as meta return meta._meta_table['Lldp.Nodes.Node.Neighbors.Details.Detail.LldpNeighbor.Mib']['meta_info'] @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . Cannot derive path.') return self.parent._common_path +'/Cisco-IOS-XR-ethernet-lldp-oper:lldp-neighbor' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return False def _has_data(self): if not self.is_config(): return False if self.chassis_id is not None: return True if self.detail is not None and self.detail._has_data(): return True if self.device_id is not None: return True if self.enabled_capabilities is not None: return True if self.header_version is not None: return True if self.hold_time is not None: return True if self.mib is not None and self.mib._has_data(): return True if self.platform is not None: return True if self.port_id_detail is not None: return True if self.receiving_interface_name is not None: return True if self.receiving_parent_interface_name is not None: return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ethernet_lldp_oper as meta return meta._meta_table['Lldp.Nodes.Node.Neighbors.Details.Detail.LldpNeighbor']['meta_info'] @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . Cannot derive path.') return self.parent._common_path +'/Cisco-IOS-XR-ethernet-lldp-oper:detail' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return False def _has_data(self): if not self.is_config(): return False if self.device_id is not None: return True if self.interface_name is not None: return True if self.lldp_neighbor is not None: for child_ref in self.lldp_neighbor: if child_ref._has_data(): return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ethernet_lldp_oper as meta return meta._meta_table['Lldp.Nodes.Node.Neighbors.Details.Detail']['meta_info'] @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . 
Cannot derive path.') return self.parent._common_path +'/Cisco-IOS-XR-ethernet-lldp-oper:details' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return False def _has_data(self): if not self.is_config(): return False if self.detail is not None: for child_ref in self.detail: if child_ref._has_data(): return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ethernet_lldp_oper as meta return meta._meta_table['Lldp.Nodes.Node.Neighbors.Details']['meta_info'] class Summaries(object): """ The LLDP neighbor summary table .. attribute:: summary Brief information about a LLDP neighbor entry **type**\: list of :py:class:`Summary <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Neighbors.Summaries.Summary>` """ _prefix = 'ethernet-lldp-oper' _revision = '2015-11-09' def __init__(self): self.parent = None self.summary = YList() self.summary.parent = self self.summary.name = 'summary' class Summary(object): """ Brief information about a LLDP neighbor entry .. attribute:: device_id The neighboring device identifier **type**\: str .. attribute:: interface_name The interface name **type**\: str **pattern:** (([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+\\.\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]\*\\d+))\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]+))\|([a\-zA\-Z0\-9\_\-]\*\\d+)\|([a\-zA\-Z0\-9\_\-]\*\\d+\\.\\d+)\|(mpls)\|(dwdm) .. attribute:: lldp_neighbor lldp neighbor **type**\: list of :py:class:`LldpNeighbor <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Neighbors.Summaries.Summary.LldpNeighbor>` """ _prefix = 'ethernet-lldp-oper' _revision = '2015-11-09' def __init__(self): self.parent = None self.device_id = None self.interface_name = None self.lldp_neighbor = YList() self.lldp_neighbor.parent = self self.lldp_neighbor.name = 'lldp_neighbor' class LldpNeighbor(object): """ lldp neighbor .. attribute:: chassis_id Chassis id **type**\: str .. attribute:: detail Detailed neighbor info **type**\: :py:class:`Detail <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Neighbors.Summaries.Summary.LldpNeighbor.Detail>` .. attribute:: device_id Device identifier **type**\: str .. attribute:: enabled_capabilities Enabled Capabilities **type**\: str .. attribute:: header_version Version number **type**\: int **range:** 0..255 .. attribute:: hold_time Remaining hold time **type**\: int **range:** 0..65535 .. attribute:: mib MIB nieghbor info **type**\: :py:class:`Mib <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Neighbors.Summaries.Summary.LldpNeighbor.Mib>` .. attribute:: platform Platform type **type**\: str .. attribute:: port_id_detail Outgoing port identifier **type**\: str .. attribute:: receiving_interface_name Interface the neighbor entry was received on **type**\: str **pattern:** (([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+\\.\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]\*\\d+))\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]+))\|([a\-zA\-Z0\-9\_\-]\*\\d+)\|([a\-zA\-Z0\-9\_\-]\*\\d+\\.\\d+)\|(mpls)\|(dwdm) .. 
attribute:: receiving_parent_interface_name Parent Interface the neighbor entry was received on **type**\: str **pattern:** (([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+\\.\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]\*\\d+))\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]+))\|([a\-zA\-Z0\-9\_\-]\*\\d+)\|([a\-zA\-Z0\-9\_\-]\*\\d+\\.\\d+)\|(mpls)\|(dwdm) """ _prefix = 'ethernet-lldp-oper' _revision = '2015-11-09' def __init__(self): self.parent = None self.chassis_id = None self.detail = Lldp.Nodes.Node.Neighbors.Summaries.Summary.LldpNeighbor.Detail() self.detail.parent = self self.device_id = None self.enabled_capabilities = None self.header_version = None self.hold_time = None self.mib = Lldp.Nodes.Node.Neighbors.Summaries.Summary.LldpNeighbor.Mib() self.mib.parent = self self.platform = None self.port_id_detail = None self.receiving_interface_name = None self.receiving_parent_interface_name = None class Detail(object): """ Detailed neighbor info .. attribute:: auto_negotiation Auto Negotiation **type**\: str .. attribute:: enabled_capabilities Enabled Capabilities **type**\: str .. attribute:: media_attachment_unit_type Media Attachment Unit type **type**\: int **range:** 0..4294967295 .. attribute:: network_addresses Management Addresses **type**\: :py:class:`NetworkAddresses <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Neighbors.Summaries.Summary.LldpNeighbor.Detail.NetworkAddresses>` .. attribute:: physical_media_capabilities Physical media capabilities **type**\: str .. attribute:: port_description Port Description **type**\: str .. attribute:: port_vlan_id Vlan ID **type**\: int **range:** 0..4294967295 .. attribute:: system_capabilities System Capabilities **type**\: str .. attribute:: system_description System Description **type**\: str .. attribute:: system_name System Name **type**\: str .. attribute:: time_remaining Time remaining **type**\: int **range:** 0..4294967295 """ _prefix = 'ethernet-lldp-oper' _revision = '2015-11-09' def __init__(self): self.parent = None self.auto_negotiation = None self.enabled_capabilities = None self.media_attachment_unit_type = None self.network_addresses = Lldp.Nodes.Node.Neighbors.Summaries.Summary.LldpNeighbor.Detail.NetworkAddresses() self.network_addresses.parent = self self.physical_media_capabilities = None self.port_description = None self.port_vlan_id = None self.system_capabilities = None self.system_description = None self.system_name = None self.time_remaining = None class NetworkAddresses(object): """ Management Addresses .. attribute:: lldp_addr_entry lldp addr entry **type**\: list of :py:class:`LldpAddrEntry <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Neighbors.Summaries.Summary.LldpNeighbor.Detail.NetworkAddresses.LldpAddrEntry>` """ _prefix = 'ethernet-lldp-oper' _revision = '2015-11-09' def __init__(self): self.parent = None self.lldp_addr_entry = YList() self.lldp_addr_entry.parent = self self.lldp_addr_entry.name = 'lldp_addr_entry' class LldpAddrEntry(object): """ lldp addr entry .. attribute:: address Network layer address **type**\: :py:class:`Address <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Neighbors.Summaries.Summary.LldpNeighbor.Detail.NetworkAddresses.LldpAddrEntry.Address>` .. attribute:: if_num Interface num **type**\: int **range:** 0..4294967295 .. 
attribute:: ma_subtype MA sub type **type**\: int **range:** 0..255 """ _prefix = 'ethernet-lldp-oper' _revision = '2015-11-09' def __init__(self): self.parent = None self.address = Lldp.Nodes.Node.Neighbors.Summaries.Summary.LldpNeighbor.Detail.NetworkAddresses.LldpAddrEntry.Address() self.address.parent = self self.if_num = None self.ma_subtype = None class Address(object): """ Network layer address .. attribute:: address_type AddressType **type**\: :py:class:`LldpL3AddrProtocolEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.LldpL3AddrProtocolEnum>` .. attribute:: ipv4_address IPv4 address **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? .. attribute:: ipv6_address IPv6 address **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? """ _prefix = 'ethernet-lldp-oper' _revision = '2015-11-09' def __init__(self): self.parent = None self.address_type = None self.ipv4_address = None self.ipv6_address = None @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . Cannot derive path.') return self.parent._common_path +'/Cisco-IOS-XR-ethernet-lldp-oper:address' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return False def _has_data(self): if not self.is_config(): return False if self.address_type is not None: return True if self.ipv4_address is not None: return True if self.ipv6_address is not None: return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ethernet_lldp_oper as meta return meta._meta_table['Lldp.Nodes.Node.Neighbors.Summaries.Summary.LldpNeighbor.Detail.NetworkAddresses.LldpAddrEntry.Address']['meta_info'] @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . Cannot derive path.') return self.parent._common_path +'/Cisco-IOS-XR-ethernet-lldp-oper:lldp-addr-entry' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return False def _has_data(self): if not self.is_config(): return False if self.address is not None and self.address._has_data(): return True if self.if_num is not None: return True if self.ma_subtype is not None: return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ethernet_lldp_oper as meta return meta._meta_table['Lldp.Nodes.Node.Neighbors.Summaries.Summary.LldpNeighbor.Detail.NetworkAddresses.LldpAddrEntry']['meta_info'] @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . 
Cannot derive path.') return self.parent._common_path +'/Cisco-IOS-XR-ethernet-lldp-oper:network-addresses' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return False def _has_data(self): if not self.is_config(): return False if self.lldp_addr_entry is not None: for child_ref in self.lldp_addr_entry: if child_ref._has_data(): return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ethernet_lldp_oper as meta return meta._meta_table['Lldp.Nodes.Node.Neighbors.Summaries.Summary.LldpNeighbor.Detail.NetworkAddresses']['meta_info'] @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . Cannot derive path.') return self.parent._common_path +'/Cisco-IOS-XR-ethernet-lldp-oper:detail' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return False def _has_data(self): if not self.is_config(): return False if self.auto_negotiation is not None: return True if self.enabled_capabilities is not None: return True if self.media_attachment_unit_type is not None: return True if self.network_addresses is not None and self.network_addresses._has_data(): return True if self.physical_media_capabilities is not None: return True if self.port_description is not None: return True if self.port_vlan_id is not None: return True if self.system_capabilities is not None: return True if self.system_description is not None: return True if self.system_name is not None: return True if self.time_remaining is not None: return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ethernet_lldp_oper as meta return meta._meta_table['Lldp.Nodes.Node.Neighbors.Summaries.Summary.LldpNeighbor.Detail']['meta_info'] class Mib(object): """ MIB neighbor info .. attribute:: chassis_id_len Chassis ID length **type**\: int **range:** 0..65535 .. attribute:: chassis_id_sub_type Chassis ID sub type **type**\: int **range:** 0..255 .. attribute:: combined_capabilities Supported and combined capabilities **type**\: int **range:** 0..4294967295 .. attribute:: org_def_tlv_list Org Def TLV list **type**\: :py:class:`OrgDefTlvList <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Neighbors.Summaries.Summary.LldpNeighbor.Mib.OrgDefTlvList>` .. attribute:: port_id_len Port ID length **type**\: int **range:** 0..65535 .. attribute:: port_id_sub_type Port ID sub type **type**\: int **range:** 0..255 .. attribute:: rem_index lldpRemIndex **type**\: int **range:** 0..4294967295 .. attribute:: rem_local_port_num LldpPortNumber **type**\: int **range:** 0..4294967295 .. attribute:: rem_time_mark TimeFilter **type**\: int **range:** 0..4294967295 ..
attribute:: unknown_tlv_list Unknown TLV list **type**\: :py:class:`UnknownTlvList <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Neighbors.Summaries.Summary.LldpNeighbor.Mib.UnknownTlvList>` """ _prefix = 'ethernet-lldp-oper' _revision = '2015-11-09' def __init__(self): self.parent = None self.chassis_id_len = None self.chassis_id_sub_type = None self.combined_capabilities = None self.org_def_tlv_list = Lldp.Nodes.Node.Neighbors.Summaries.Summary.LldpNeighbor.Mib.OrgDefTlvList() self.org_def_tlv_list.parent = self self.port_id_len = None self.port_id_sub_type = None self.rem_index = None self.rem_local_port_num = None self.rem_time_mark = None self.unknown_tlv_list = Lldp.Nodes.Node.Neighbors.Summaries.Summary.LldpNeighbor.Mib.UnknownTlvList() self.unknown_tlv_list.parent = self class UnknownTlvList(object): """ Unknown TLV list .. attribute:: lldp_unknown_tlv_entry lldp unknown tlv entry **type**\: list of :py:class:`LldpUnknownTlvEntry <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Neighbors.Summaries.Summary.LldpNeighbor.Mib.UnknownTlvList.LldpUnknownTlvEntry>` """ _prefix = 'ethernet-lldp-oper' _revision = '2015-11-09' def __init__(self): self.parent = None self.lldp_unknown_tlv_entry = YList() self.lldp_unknown_tlv_entry.parent = self self.lldp_unknown_tlv_entry.name = 'lldp_unknown_tlv_entry' class LldpUnknownTlvEntry(object): """ lldp unknown tlv entry .. attribute:: tlv_type Unknown TLV type **type**\: int **range:** 0..255 .. attribute:: tlv_value Unknown TLV payload **type**\: str **pattern:** ([0\-9a\-fA\-F]{2}(\:[0\-9a\-fA\-F]{2})\*)? """ _prefix = 'ethernet-lldp-oper' _revision = '2015-11-09' def __init__(self): self.parent = None self.tlv_type = None self.tlv_value = None @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . Cannot derive path.') return self.parent._common_path +'/Cisco-IOS-XR-ethernet-lldp-oper:lldp-unknown-tlv-entry' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return False def _has_data(self): if not self.is_config(): return False if self.tlv_type is not None: return True if self.tlv_value is not None: return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ethernet_lldp_oper as meta return meta._meta_table['Lldp.Nodes.Node.Neighbors.Summaries.Summary.LldpNeighbor.Mib.UnknownTlvList.LldpUnknownTlvEntry']['meta_info'] @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . Cannot derive path.') return self.parent._common_path +'/Cisco-IOS-XR-ethernet-lldp-oper:unknown-tlv-list' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return False def _has_data(self): if not self.is_config(): return False if self.lldp_unknown_tlv_entry is not None: for child_ref in self.lldp_unknown_tlv_entry: if child_ref._has_data(): return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ethernet_lldp_oper as meta return meta._meta_table['Lldp.Nodes.Node.Neighbors.Summaries.Summary.LldpNeighbor.Mib.UnknownTlvList']['meta_info'] class OrgDefTlvList(object): """ Org Def TLV list .. 
attribute:: lldp_org_def_tlv_entry lldp org def tlv entry **type**\: list of :py:class:`LldpOrgDefTlvEntry <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Neighbors.Summaries.Summary.LldpNeighbor.Mib.OrgDefTlvList.LldpOrgDefTlvEntry>` """ _prefix = 'ethernet-lldp-oper' _revision = '2015-11-09' def __init__(self): self.parent = None self.lldp_org_def_tlv_entry = YList() self.lldp_org_def_tlv_entry.parent = self self.lldp_org_def_tlv_entry.name = 'lldp_org_def_tlv_entry' class LldpOrgDefTlvEntry(object): """ lldp org def tlv entry .. attribute:: oui Organizationally Unique Identifier **type**\: int **range:** 0..4294967295 .. attribute:: tlv_info_indes lldpRemOrgDefInfoIndex **type**\: int **range:** 0..4294967295 .. attribute:: tlv_subtype Org Def TLV subtype **type**\: int **range:** 0..255 .. attribute:: tlv_value Org Def TLV payload **type**\: str **pattern:** ([0\-9a\-fA\-F]{2}(\:[0\-9a\-fA\-F]{2})\*)? """ _prefix = 'ethernet-lldp-oper' _revision = '2015-11-09' def __init__(self): self.parent = None self.oui = None self.tlv_info_indes = None self.tlv_subtype = None self.tlv_value = None @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . Cannot derive path.') return self.parent._common_path +'/Cisco-IOS-XR-ethernet-lldp-oper:lldp-org-def-tlv-entry' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return False def _has_data(self): if not self.is_config(): return False if self.oui is not None: return True if self.tlv_info_indes is not None: return True if self.tlv_subtype is not None: return True if self.tlv_value is not None: return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ethernet_lldp_oper as meta return meta._meta_table['Lldp.Nodes.Node.Neighbors.Summaries.Summary.LldpNeighbor.Mib.OrgDefTlvList.LldpOrgDefTlvEntry']['meta_info'] @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . Cannot derive path.') return self.parent._common_path +'/Cisco-IOS-XR-ethernet-lldp-oper:org-def-tlv-list' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return False def _has_data(self): if not self.is_config(): return False if self.lldp_org_def_tlv_entry is not None: for child_ref in self.lldp_org_def_tlv_entry: if child_ref._has_data(): return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ethernet_lldp_oper as meta return meta._meta_table['Lldp.Nodes.Node.Neighbors.Summaries.Summary.LldpNeighbor.Mib.OrgDefTlvList']['meta_info'] @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . 
Cannot derive path.') return self.parent._common_path +'/Cisco-IOS-XR-ethernet-lldp-oper:mib' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return False def _has_data(self): if not self.is_config(): return False if self.chassis_id_len is not None: return True if self.chassis_id_sub_type is not None: return True if self.combined_capabilities is not None: return True if self.org_def_tlv_list is not None and self.org_def_tlv_list._has_data(): return True if self.port_id_len is not None: return True if self.port_id_sub_type is not None: return True if self.rem_index is not None: return True if self.rem_local_port_num is not None: return True if self.rem_time_mark is not None: return True if self.unknown_tlv_list is not None and self.unknown_tlv_list._has_data(): return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ethernet_lldp_oper as meta return meta._meta_table['Lldp.Nodes.Node.Neighbors.Summaries.Summary.LldpNeighbor.Mib']['meta_info'] @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . Cannot derive path.') return self.parent._common_path +'/Cisco-IOS-XR-ethernet-lldp-oper:lldp-neighbor' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return False def _has_data(self): if not self.is_config(): return False if self.chassis_id is not None: return True if self.detail is not None and self.detail._has_data(): return True if self.device_id is not None: return True if self.enabled_capabilities is not None: return True if self.header_version is not None: return True if self.hold_time is not None: return True if self.mib is not None and self.mib._has_data(): return True if self.platform is not None: return True if self.port_id_detail is not None: return True if self.receiving_interface_name is not None: return True if self.receiving_parent_interface_name is not None: return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ethernet_lldp_oper as meta return meta._meta_table['Lldp.Nodes.Node.Neighbors.Summaries.Summary.LldpNeighbor']['meta_info'] @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . Cannot derive path.') return self.parent._common_path +'/Cisco-IOS-XR-ethernet-lldp-oper:summary' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return False def _has_data(self): if not self.is_config(): return False if self.device_id is not None: return True if self.interface_name is not None: return True if self.lldp_neighbor is not None: for child_ref in self.lldp_neighbor: if child_ref._has_data(): return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ethernet_lldp_oper as meta return meta._meta_table['Lldp.Nodes.Node.Neighbors.Summaries.Summary']['meta_info'] @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . 
Cannot derive path.') return self.parent._common_path +'/Cisco-IOS-XR-ethernet-lldp-oper:summaries' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return False def _has_data(self): if not self.is_config(): return False if self.summary is not None: for child_ref in self.summary: if child_ref._has_data(): return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ethernet_lldp_oper as meta return meta._meta_table['Lldp.Nodes.Node.Neighbors.Summaries']['meta_info'] @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . Cannot derive path.') return self.parent._common_path +'/Cisco-IOS-XR-ethernet-lldp-oper:neighbors' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return False def _has_data(self): if not self.is_config(): return False if self.details is not None and self.details._has_data(): return True if self.devices is not None and self.devices._has_data(): return True if self.summaries is not None and self.summaries._has_data(): return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ethernet_lldp_oper as meta return meta._meta_table['Lldp.Nodes.Node.Neighbors']['meta_info'] class Interfaces(object): """ The table of interfaces on which LLDP is running on this node .. attribute:: interface Operational data for an interface on which LLDP is running **type**\: list of :py:class:`Interface <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Interfaces.Interface>` """ _prefix = 'ethernet-lldp-oper' _revision = '2015-11-09' def __init__(self): self.parent = None self.interface = YList() self.interface.parent = self self.interface.name = 'interface' class Interface(object): """ Operational data for an interface on which LLDP is running .. attribute:: interface_name <key> The interface name **type**\: str **pattern:** (([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+\\.\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]\*\\d+))\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]+))\|([a\-zA\-Z0\-9\_\-]\*\\d+)\|([a\-zA\-Z0\-9\_\-]\*\\d+\\.\\d+)\|(mpls)\|(dwdm) .. attribute:: if_index ifIndex **type**\: int **range:** 0..4294967295 .. attribute:: interface_name_xr Interface **type**\: str **pattern:** (([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+\\.\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]\*\\d+))\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]+))\|([a\-zA\-Z0\-9\_\-]\*\\d+)\|([a\-zA\-Z0\-9\_\-]\*\\d+\\.\\d+)\|(mpls)\|(dwdm) .. attribute:: local_network_addresses Local Management Addresses **type**\: :py:class:`LocalNetworkAddresses <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Interfaces.Interface.LocalNetworkAddresses>` .. attribute:: port_description Port Description **type**\: str .. attribute:: port_id Outgoing port identifier **type**\: str .. attribute:: port_id_sub_type Port ID sub type **type**\: int **range:** 0..255 .. attribute:: rx_enabled RX Enabled **type**\: int **range:** 0..255 .. attribute:: rx_state RX State **type**\: str .. attribute:: tx_enabled TX Enabled **type**\: int **range:** 0..255 .. 
attribute:: tx_state TX State **type**\: str """ _prefix = 'ethernet-lldp-oper' _revision = '2015-11-09' def __init__(self): self.parent = None self.interface_name = None self.if_index = None self.interface_name_xr = None self.local_network_addresses = Lldp.Nodes.Node.Interfaces.Interface.LocalNetworkAddresses() self.local_network_addresses.parent = self self.port_description = None self.port_id = None self.port_id_sub_type = None self.rx_enabled = None self.rx_state = None self.tx_enabled = None self.tx_state = None class LocalNetworkAddresses(object): """ Local Management Addresses .. attribute:: lldp_addr_entry lldp addr entry **type**\: list of :py:class:`LldpAddrEntry <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Interfaces.Interface.LocalNetworkAddresses.LldpAddrEntry>` """ _prefix = 'ethernet-lldp-oper' _revision = '2015-11-09' def __init__(self): self.parent = None self.lldp_addr_entry = YList() self.lldp_addr_entry.parent = self self.lldp_addr_entry.name = 'lldp_addr_entry' class LldpAddrEntry(object): """ lldp addr entry .. attribute:: address Network layer address **type**\: :py:class:`Address <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.Lldp.Nodes.Node.Interfaces.Interface.LocalNetworkAddresses.LldpAddrEntry.Address>` .. attribute:: if_num Interface num **type**\: int **range:** 0..4294967295 .. attribute:: ma_subtype MA sub type **type**\: int **range:** 0..255 """ _prefix = 'ethernet-lldp-oper' _revision = '2015-11-09' def __init__(self): self.parent = None self.address = Lldp.Nodes.Node.Interfaces.Interface.LocalNetworkAddresses.LldpAddrEntry.Address() self.address.parent = self self.if_num = None self.ma_subtype = None class Address(object): """ Network layer address .. attribute:: address_type AddressType **type**\: :py:class:`LldpL3AddrProtocolEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_lldp_oper.LldpL3AddrProtocolEnum>` .. attribute:: ipv4_address IPv4 address **type**\: str **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)? .. attribute:: ipv6_address IPv6 address **type**\: str **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)? """ _prefix = 'ethernet-lldp-oper' _revision = '2015-11-09' def __init__(self): self.parent = None self.address_type = None self.ipv4_address = None self.ipv6_address = None @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . Cannot derive path.') return self.parent._common_path +'/Cisco-IOS-XR-ethernet-lldp-oper:address' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return False def _has_data(self): if not self.is_config(): return False if self.address_type is not None: return True if self.ipv4_address is not None: return True if self.ipv6_address is not None: return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ethernet_lldp_oper as meta return meta._meta_table['Lldp.Nodes.Node.Interfaces.Interface.LocalNetworkAddresses.LldpAddrEntry.Address']['meta_info'] @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . 
Cannot derive path.') return self.parent._common_path +'/Cisco-IOS-XR-ethernet-lldp-oper:lldp-addr-entry' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return False def _has_data(self): if not self.is_config(): return False if self.address is not None and self.address._has_data(): return True if self.if_num is not None: return True if self.ma_subtype is not None: return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ethernet_lldp_oper as meta return meta._meta_table['Lldp.Nodes.Node.Interfaces.Interface.LocalNetworkAddresses.LldpAddrEntry']['meta_info'] @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . Cannot derive path.') return self.parent._common_path +'/Cisco-IOS-XR-ethernet-lldp-oper:local-network-addresses' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return False def _has_data(self): if not self.is_config(): return False if self.lldp_addr_entry is not None: for child_ref in self.lldp_addr_entry: if child_ref._has_data(): return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ethernet_lldp_oper as meta return meta._meta_table['Lldp.Nodes.Node.Interfaces.Interface.LocalNetworkAddresses']['meta_info'] @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . Cannot derive path.') if self.interface_name is None: raise YPYModelError('Key property interface_name is None') return self.parent._common_path +'/Cisco-IOS-XR-ethernet-lldp-oper:interface[Cisco-IOS-XR-ethernet-lldp-oper:interface-name = ' + str(self.interface_name) + ']' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return False def _has_data(self): if not self.is_config(): return False if self.interface_name is not None: return True if self.if_index is not None: return True if self.interface_name_xr is not None: return True if self.local_network_addresses is not None and self.local_network_addresses._has_data(): return True if self.port_description is not None: return True if self.port_id is not None: return True if self.port_id_sub_type is not None: return True if self.rx_enabled is not None: return True if self.rx_state is not None: return True if self.tx_enabled is not None: return True if self.tx_state is not None: return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ethernet_lldp_oper as meta return meta._meta_table['Lldp.Nodes.Node.Interfaces.Interface']['meta_info'] @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . Cannot derive path.') return self.parent._common_path +'/Cisco-IOS-XR-ethernet-lldp-oper:interfaces' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return False def _has_data(self): if not self.is_config(): return False if self.interface is not None: for child_ref in self.interface: if child_ref._has_data(): return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ethernet_lldp_oper as meta return meta._meta_table['Lldp.Nodes.Node.Interfaces']['meta_info'] class Statistics(object): """ The LLDP traffic statistics for this node .. attribute:: aged_out_entries Aged out entries **type**\: int **range:** 0..4294967295 .. 
attribute:: bad_packets Bad packet received and dropped **type**\: int **range:** 0..4294967295 .. attribute:: discarded_packets Discarded packets **type**\: int **range:** 0..4294967295 .. attribute:: discarded_tl_vs Discarded TLVs **type**\: int **range:** 0..4294967295 .. attribute:: encapsulation_errors Transmission errors **type**\: int **range:** 0..4294967295 .. attribute:: out_of_memory_errors Out\-of\-memory conditions **type**\: int **range:** 0..4294967295 .. attribute:: queue_overflow_errors Queue overflows **type**\: int **range:** 0..4294967295 .. attribute:: received_packets Received packets **type**\: int **range:** 0..4294967295 .. attribute:: table_overflow_errors Table overflows **type**\: int **range:** 0..4294967295 .. attribute:: transmitted_packets Transmitted packets **type**\: int **range:** 0..4294967295 .. attribute:: unrecognized_tl_vs Unrecognized TLVs **type**\: int **range:** 0..4294967295 """ _prefix = 'ethernet-lldp-oper' _revision = '2015-11-09' def __init__(self): self.parent = None self.aged_out_entries = None self.bad_packets = None self.discarded_packets = None self.discarded_tl_vs = None self.encapsulation_errors = None self.out_of_memory_errors = None self.queue_overflow_errors = None self.received_packets = None self.table_overflow_errors = None self.transmitted_packets = None self.unrecognized_tl_vs = None @property def _common_path(self): if self.parent is None: raise YPYModelError('parent is not set . Cannot derive path.') return self.parent._common_path +'/Cisco-IOS-XR-ethernet-lldp-oper:statistics' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return False def _has_data(self): if not self.is_config(): return False if self.aged_out_entries is not None: return True if self.bad_packets is not None: return True if self.discarded_packets is not None: return True if self.discarded_tl_vs is not None: return True if self.encapsulation_errors is not None: return True if self.out_of_memory_errors is not None: return True if self.queue_overflow_errors is not None: return True if self.received_packets is not None: return True if self.table_overflow_errors is not None: return True if self.transmitted_packets is not None: return True if self.unrecognized_tl_vs is not None: return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ethernet_lldp_oper as meta return meta._meta_table['Lldp.Nodes.Node.Statistics']['meta_info'] @property def _common_path(self): if self.node_name is None: raise YPYModelError('Key property node_name is None') return '/Cisco-IOS-XR-ethernet-lldp-oper:lldp/Cisco-IOS-XR-ethernet-lldp-oper:nodes/Cisco-IOS-XR-ethernet-lldp-oper:node[Cisco-IOS-XR-ethernet-lldp-oper:node-name = ' + str(self.node_name) + ']' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return False def _has_data(self): if not self.is_config(): return False if self.node_name is not None: return True if self.interfaces is not None and self.interfaces._has_data(): return True if self.neighbors is not None and self.neighbors._has_data(): return True if self.statistics is not None and self.statistics._has_data(): return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ethernet_lldp_oper as meta return meta._meta_table['Lldp.Nodes.Node']['meta_info'] @property def _common_path(self): return 
'/Cisco-IOS-XR-ethernet-lldp-oper:lldp/Cisco-IOS-XR-ethernet-lldp-oper:nodes' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return False def _has_data(self): if not self.is_config(): return False if self.node is not None: for child_ref in self.node: if child_ref._has_data(): return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ethernet_lldp_oper as meta return meta._meta_table['Lldp.Nodes']['meta_info'] @property def _common_path(self): return '/Cisco-IOS-XR-ethernet-lldp-oper:lldp' def is_config(self): ''' Returns True if this instance represents config data else returns False ''' return False def _has_data(self): if not self.is_config(): return False if self.global_lldp is not None and self.global_lldp._has_data(): return True if self.nodes is not None and self.nodes._has_data(): return True return False @staticmethod def _meta_info(): from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ethernet_lldp_oper as meta return meta._meta_table['Lldp']['meta_info']
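# Usage sketch (not part of the generated bindings above): a minimal example
# of reading the LLDP operational tree with ydk-py's CRUD service. The device
# address and credentials are placeholders, and this assumes the standard
# ydk-py NetconfServiceProvider/CRUDService APIs that these bindings target.
from ydk.services import CRUDService
from ydk.providers import NetconfServiceProvider
from ydk.models.cisco_ios_xr import Cisco_IOS_XR_ethernet_lldp_oper as lldp_oper

provider = NetconfServiceProvider(
    address='192.0.2.1',  # placeholder router address
    port=830, username='admin', password='admin', protocol='ssh')
crud = CRUDService()

# Read the whole operational tree and walk the per-node neighbor summaries.
lldp = crud.read(provider, lldp_oper.Lldp())
for node in lldp.nodes.node:
    for summary in node.neighbors.summaries.summary:
        print(node.node_name, summary.device_id, summary.interface_name)
provider.close()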
from flask import json
from tests.test_case import *
from app import constants
from app.events.dao import users_dao as ud
from app.events.dao import applications_dao as ad


class AppsTestCase(TestCase):

    def setUp(self):
        super(AppsTestCase, self).setUp()
        Application.query.delete()
        db_session_commit()

    def tearDown(self):
        super(AppsTestCase, self).tearDown()
        Application.query.delete()
        db_session_commit()

    def test_app_methods(self):
        user1 = ud.get_user_by_email(constants.TEST_USER_EMAIL)
        test_app = ad.create_app("test1", user1.id)[1]
        self.assertEqual(test_app.id, ad.get_app_by_name("test1").id)
        self.assertEqual(test_app.secret_key, ad.get_app_by_name("test1").secret_key)
        previous_secret = test_app.secret_key
        new_secret = ad.reset_secret_key(test_app.id)
        self.assertNotEqual(previous_secret, new_secret)
        self.assertEqual(new_secret, test_app.secret_key)

    def test_app_is_owned_by_user(self):
        user1 = ud.get_user_by_email(constants.TEST_USER_EMAIL)
        test_app = ad.create_app("test2", user1.id)[1]
        self.assertTrue(ad.is_owned_by_user(test_app.id, user1.id))
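# Hypothetical sketch of the applications_dao contract exercised by the tests
# above. The real app.events.dao module is not shown here, so this in-memory
# stand-in only illustrates the inferred behaviour: create_app returns a
# (created, app) pair, and reset_secret_key returns the new key while
# mutating the app in place.
import binascii
import os


class _App(object):
    def __init__(self, app_id, name, owner_id):
        self.id, self.name, self.owner_id = app_id, name, owner_id
        self.secret_key = binascii.hexlify(os.urandom(16)).decode()


_apps = {}


def create_app(name, owner_id):
    app = _App(len(_apps) + 1, name, owner_id)
    _apps[name] = app
    return True, app


def get_app_by_name(name):
    return _apps.get(name)


def reset_secret_key(app_id):
    app = next(a for a in _apps.values() if a.id == app_id)
    app.secret_key = binascii.hexlify(os.urandom(16)).decode()
    return app.secret_key


def is_owned_by_user(app_id, user_id):
    return any(a.id == app_id and a.owner_id == user_id for a in _apps.values())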
#!/usr/bin/env python
# -*-coding:utf-8-*-

from os.path import expanduser

home = expanduser("~")

# Python 3: the original used the removed Python 2 file() builtin;
# open() with a context manager is the portable equivalent.
with open(home + "/github/luiti/luiti/README.markdown") as readme:
    orig_content = readme.read()

layout_content = """
---
layout: default
title: Home
note: This file is auto generated by /tools/generate_document_guide_page.py, don't modify this file directly.
---
"""

appended = """
<script src="{{ "/javascripts/fix_luiti_index_document.js" | prepend: site.baseurl }}" type="text/javascript"></script>
"""

new_content = (layout_content + orig_content + appended).strip()

with open("document_guide.markdown", "w") as new_file:
    new_file.write(new_content)
# In this problem we have to state the count of rotations a sorted array has
# gone through.
# For Ex:
# 4 5 6 1 2 3 4
# The above array has gone through 3 rotations

n = int(input("Enter the length of the array:\n"))
arr = []
# taking input
for i in range(0, n):
    print("Element", i + 1)
    ele = int(input())
    arr.append(ele)

c = 0
mini = float("inf")  # safe sentinel; the original fixed 1000000 failed for larger elements
min_in = 0
# This loop will find out the index of the minimum element
for ele in arr:
    if ele < mini:
        mini = ele
        min_in = c
    c = c + 1

# The index of the minimum element gives us the number of rotations
print("Number of rotations = ", min_in)

# TEST CASES
#
# 1) INPUT:
# Enter the length of the array:
# 5
# 10 20 30 1 2
# OUTPUT:
# Number of rotations = 3
#
# 2) INPUT:
# Enter the length of the array:
# 5
# 1 2 3 4 5
# OUTPUT:
# Number of rotations = 0
#
# Time Complexity: O(n)
# Space Complexity: O(n) for the input array itself; O(1) auxiliary. Here n is the length of the array
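# The linear scan above is O(n). Because the input is a rotated *sorted*
# array, the same answer can be found in O(log n) with a binary search for
# the pivot -- a sketch, assuming distinct elements:

def count_rotations(arr):
    lo, hi = 0, len(arr) - 1
    while lo < hi:
        mid = (lo + hi) // 2
        if arr[mid] > arr[hi]:   # minimum lies strictly to the right of mid
            lo = mid + 1
        else:                    # minimum is at mid or to its left
            hi = mid
    return lo  # index of the minimum element == number of rotations

# e.g. count_rotations([4, 5, 6, 1, 2, 3]) == 3
#      count_rotations([1, 2, 3, 4, 5]) == 0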
from .Common import *
from .chars import (
    Header,
    InitStrFormat,
    InitStrFormatContainDummy,
    GoalStrFormat,
    InitActionStateUpdateFormat,
    InitActionTimesUpdateFormat,
    HandsPosition,
    EndPose,
    InitState,
    SpecialDomainHeadStr,
    SpecialFuncAndPreStr,
    SpecialActionStr,
    initAction,
    DummyAction,
    ActionTransition
)
import logging
import os

from faucetconfrpc.faucetconfrpc_client_lib import FaucetConfRpcClient
from poseidon_core.helpers.config import yaml_dump


class EmptyFaucetConf(Exception):
    pass


class FaucetRemoteConfGetSetter:

    DEFAULT_CONFIG_FILE = ''

    def __init__(self, client_key=None, client_cert=None,
                 ca_cert=None, server_addr=None):
        self.client = FaucetConfRpcClient(
            client_key=client_key, client_cert=client_cert,
            ca_cert=ca_cert, server_addr=server_addr)

    @staticmethod
    def config_file_path(config_file):
        if config_file:
            return os.path.basename(config_file)
        return config_file

    def read_faucet_conf(self, config_file):
        self.faucet_conf = self.client.get_config_file(
            config_filename=self.config_file_path(config_file))
        if self.faucet_conf is None:
            logging.error('Faucet config is empty, exiting.')
            raise EmptyFaucetConf
        return self.faucet_conf

    def write_faucet_conf(self, config_file=None, faucet_conf=None, merge=False):
        if not config_file:
            config_file = self.DEFAULT_CONFIG_FILE
        if faucet_conf is None:
            faucet_conf = self.faucet_conf
        # Pass the resolved faucet_conf: the original passed self.faucet_conf
        # here, which silently ignored an explicitly supplied faucet_conf
        # argument (e.g. the merge dict from update_switch_conf).
        return self.client.set_config_file(
            faucet_conf,
            config_filename=self.config_file_path(config_file),
            merge=merge)

    def get_dps(self):
        self.read_faucet_conf(config_file=None)
        return self.faucet_conf.get('dps', {})

    def set_acls(self, acls):
        self.read_faucet_conf(config_file=None)
        self.faucet_conf['acls'] = acls
        self.write_faucet_conf(config_file=None)

    def get_port_conf(self, dp, port):
        switch_conf = self.get_switch_conf(dp)
        if not switch_conf:
            return None
        return switch_conf['interfaces'].get(port, None)

    def get_switch_conf(self, dp):
        return self.get_dps().get(dp, None)

    def get_stack_root_switch(self):
        root_stack_switch = [
            switch for switch, switch_conf in self.get_dps().items()
            if switch_conf.get('stack', {}).get('priority', None)]
        if root_stack_switch:
            return root_stack_switch[0]
        return None

    def set_port_conf(self, dp, port, port_conf):
        return self.client.set_dp_interfaces(
            [(dp, {port: yaml_dump(port_conf)})])

    def update_switch_conf(self, dp, switch_conf):
        return self.write_faucet_conf(
            faucet_conf={'dps': {dp: switch_conf}}, merge=True)

    def mirror_port(self, dp, mirror_port, port):  # pragma: no cover
        self.client.add_port_mirror(dp, port, mirror_port)

    def unmirror_port(self, dp, mirror_port, port):  # pragma: no cover
        self.client.remove_port_mirror(dp, port, mirror_port)

    def clear_mirror_port(self, dp, mirror_port):  # pragma: no cover
        self.client.clear_port_mirror(dp, mirror_port)
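# Minimal usage sketch for the class above, assuming a reachable
# faucetconfrpc server and valid TLS material at the paths shown
# (server address, cert paths, and switch/port numbers are placeholders):
if __name__ == '__main__':
    getsetter = FaucetRemoteConfGetSetter(
        client_key='/certs/client.key', client_cert='/certs/client.crt',
        ca_cert='/certs/ca.crt', server_addr='faucetconfrpc:59999')
    print(sorted(getsetter.get_dps()))
    # Mirror traffic from port 1 of switch 'sw1' to mirror port 3, then undo it.
    getsetter.mirror_port('sw1', 3, 1)
    getsetter.unmirror_port('sw1', 3, 1)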
import json
import logging
import re
import sys
from pathlib import Path
from typing import List, Optional

import requests
from slugify import slugify

from kadenze_dl.models import Session, Video

logger = logging.getLogger("utils")
logger.addHandler(logging.StreamHandler(sys.stdout))
logger.setLevel(logging.INFO)

# Raw string avoids invalid-escape-sequence warnings for \. and \? in
# recent Python versions.
filename_pattern = re.compile(r"file/(.*\.mp4)\?")


def format_course(course: str) -> str:
    formatted_course = course.split("/")[-1]
    return f"{formatted_course}"


def extract_filename(video_url: str) -> Optional[str]:
    try:
        filename = re.search(filename_pattern, video_url).group(1)
    except Exception:
        filename = None
    return filename


def get_courses_from_json(response: str) -> List[str]:
    try:
        json_string = json.loads(response)
        courses = [course["course_path"] for course in json_string["courses"]]
    except ValueError:
        logger.info("Error getting the courses list. Check that you're enrolled on selected courses.")
        courses = []
    return courses


def get_sessions_from_json(response: str, course: str) -> List[Session]:
    sessions = []
    try:
        d = json.loads(response)
        lectures = d["lectures"]
        for i, lecture in enumerate(lectures, start=1):
            try:
                session = Session(course, lecture["order"], slugify(lecture["title"]), lecture["course_session_path"])
                sessions.append(session)
            except Exception as e:
                logger.exception(f"Error while extracting session metadata from course {course} at index {i}: {e}")
    except Exception as e:
        logger.exception(f"Error while extracting session metadata from course {course}: {e}")
    return sessions


def get_videos_from_json(response: str, resolution: int, session: Session) -> List[Video]:
    videos = []
    try:
        d = json.loads(response)
        video_format = f"h264_{resolution}_url"
        vs = d["videos"]
        for i, v in enumerate(vs, start=1):
            try:
                video = Video(session, v["order"], v["title"], v[video_format])
                videos.append(video)
            except Exception as e:
                logger.exception(f"Error while extracting video metadata from session {session.name} at index {i}: {e}")
    except Exception as e:
        logger.exception(f"Error getting videos: {e}")
    return videos


def get_video_title(video_title: str, filename: str) -> str:
    try:
        slug = slugify(video_title)
        video_title = "_".join(filename.split(".")[:-1]) + "p_" + slug + "." + filename.split(".")[-1]
    except IndexError:
        video_title = filename
    return video_title


def write_video(video_url: str, full_path: str, filename: str, chunk_size: int = 4096):
    try:
        size = int(requests.head(video_url).headers["Content-Length"])
        size_on_disk = check_if_file_exists(full_path, filename)
        if size_on_disk < size:
            fd = Path(full_path)
            fd.mkdir(parents=True, exist_ok=True)
            with open(fd / filename, "wb") as f:
                r = requests.get(video_url, stream=True)
                current_size = 0
                for chunk in r.iter_content(chunk_size=chunk_size):
                    f.write(chunk)
                    current_size += chunk_size
                    s = progress(current_size, size, filename)
                    print(s, end="", flush=True)
                print(s)
        else:
            logger.info(f"{filename} already downloaded, skipping...")
    except Exception as e:
        logger.exception(f"Error while writing video to {full_path}/{filename}: {e}")


def check_if_file_exists(full_path: str, filename: str) -> int:
    f = Path(full_path + "/" + filename)
    if f.exists():
        return f.stat().st_size
    else:
        return 0


def progress(count, total, status=""):
    bar_len = 60
    filled_len = int(round(bar_len * count / float(total)))
    percents = round(100.0 * count / float(total), 1)
    bar = "=" * filled_len + "-" * (bar_len - filled_len)
    s = "[%s] %s%s filename: %s\r" % (bar, percents, "%", status)
    return s
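# Illustrative glue code for the helpers above: download every video of one
# session at a fixed resolution. The JSON payload would normally come from an
# authenticated request to kadenze.com; the attribute names on the models
# (video.title, video.url, session.course) and the output directory are
# assumptions, not confirmed by this file.
def download_session_videos(videos_json: str, session: Session, resolution: int = 720):
    for video in get_videos_from_json(videos_json, resolution, session):
        filename = extract_filename(video.url)
        if filename is None:
            continue
        out_dir = f"downloads/{format_course(session.course)}/{session.name}"
        write_video(video.url, out_dir, get_video_title(video.title, filename))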
# -*- coding: utf-8 -*-
# Generated by Django 1.11.29 on 2020-07-20 00:14
from __future__ import unicode_literals

import datetime
from django.db import migrations, models


class Migration(migrations.Migration):

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Place',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=128)),
                ('cre_id', models.CharField(max_length=32)),
                ('place_id', models.IntegerField()),
                ('x', models.FloatField()),
                ('y', models.FloatField()),
            ],
        ),
        migrations.CreateModel(
            name='plotModel',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('state', models.CharField(choices=[('AG', 'Aguascalientes'), ('BC', 'Baja California'), ('BS', 'Baja California Sur'), ('CM', 'Campeche'), ('CS', 'Chiapas'), ('CH', 'Chihuahua'), ('DF', 'Ciudad de México'), ('CO', 'Coahuila'), ('CL', 'Colima'), ('DG', 'Durango'), ('GJ', 'Guanajuato'), ('GR', 'Guerrero'), ('HG', 'Hidalgo'), ('JA', 'Jalisco'), ('MX', 'Estado de México'), ('MI', 'Michoacán'), ('NA', 'Nayarit'), ('NL', 'Nuevo Leon'), ('OA', 'Oaxaca'), ('PU', 'Puebla'), ('QT', 'Querétaro'), ('QR', 'Quintana Roo'), ('SL', 'San Luis Potosí'), ('SI', 'Sinaloa'), ('SO', 'Sonora'), ('TB', 'Tabasco'), ('TM', 'Tamaulipas'), ('TL', 'Tlaxcala'), ('VE', 'Veracruz'), ('YU', 'Yucatan'), ('ZA', 'Zacatecas')], max_length=20)),
                ('initial_date', models.DateField(default=datetime.date.today, verbose_name='Fecha de inicio')),
                ('end_date', models.DateField(default=datetime.date.today, verbose_name='Fecha final')),
            ],
        ),
        migrations.CreateModel(
            name='Prices',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('prices_place_id', models.IntegerField()),
                ('regular', models.FloatField()),
                ('premium', models.FloatField()),
                ('diesel', models.FloatField()),
            ],
        ),
    ]
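# Usage sketch: once this migration is applied (``python manage.py migrate``),
# the generated models can be queried as usual. ``myapp`` is a placeholder --
# the real app label is not shown in this migration file.
from myapp.models import Place, Prices  # placeholder import path


def cheapest_regular_price():
    # Lowest regular-gasoline price recorded across all places.
    return Prices.objects.order_by('regular').first()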
# """Test Classification Manager Module.""" # import pytest # from geniepy.errors import ClassifierError # import geniepy.datamgmt.daos as daos # import geniepy.datamgmt.repositories as dr # from geniepy.datamgmt.tables import PUBMED_PROPTY, CTD_PROPTY, CLSFR_PROPTY # from geniepy.datamgmt import DaoManager # from geniepy.datamgmt.parsers import ClassifierParser # from tests.resources.mock import MOCK_CLSFRMGR, TEST_CHUNKSIZE # import tests.resources.mock as mock # class TestClassMgr: # """PyTest Class to test Classification manager.""" # # Create and configure mock ctd dao # ctd_dao = daos.CtdDao(dr.SqlRepository("sqlite://", CTD_PROPTY)) # # pylint: disable=protected-access # ctd_dao._parser.scraper = mock.MockCtdScraper() # # Create and configure mock pubmed dao # pubmed_dao = daos.PubMedDao(dr.SqlRepository("sqlite://", PUBMED_PROPTY)) # # pylint: disable=protected-access # pubmed_dao._parser.scraper = mock.MockPubMedScraper() # # Create and configure mock pubmed dao # classifier_dao = daos.ClassifierDao(dr.SqlRepository("sqlite://", CLSFR_PROPTY)) # # pylint: disable=protected-access # # Construct mock dao manager for testing # dao_mgr = DaoManager( # ctd_dao=ctd_dao, pubmed_dao=pubmed_dao, classifier_dao=classifier_dao # ) # def test_constructor(self): # """Test obj construction.""" # assert MOCK_CLSFRMGR is not None # def test_predict_records(self): # """ # Test prediction of records. # Records are fed into the classifier to be predicted and classification manager # returns a dataframe containing the corresponding predictions. # """ # # Generate records to be fed into classifiers # self.dao_mgr.download(TEST_CHUNKSIZE) # gen_df = self.dao_mgr.gen_records(TEST_CHUNKSIZE) # raw_df = next(gen_df) # predicted_df = MOCK_CLSFRMGR.predict(raw_df) # # Make sure predicted all rows # expected_rows = raw_df.shape[0] # actual_rows = predicted_df.shape[0] # assert actual_rows == expected_rows # # Make sure predicted df is valid (should return no errors) # assert not ClassifierParser.validate(predicted_df) # # Make sure one prediction per classifier # cols = predicted_df.columns # # Make sure has a digest column # assert "digest" in cols # # Make sure has one prediction column per classifier # for classifier in MOCK_CLSFRMGR._classifiers: # assert classifier.name in cols # # TODO validate classifier predicted dataframe # def test_predict_invalid_records(self): # """Test attempting to predict with invalid records.""" # with pytest.raises(ClassifierError): # MOCK_CLSFRMGR.predict(None) # def test_predict_invalid_records(self): # """Test attempting to predict with invalid records.""" # with pytest.raises(ClassifierError): # MOCK_CLSFRMGR.predict(None)
from flask import render_template

from app import app
from .request import get_sources, get_news
# from .models import Source, Article


@app.route('/')
def index():
    '''
    View root page function that returns the index page and its data
    '''
    # Getting popular news
    title = 'News Highlight'
    general_sources = get_sources('general')
    business_sources = get_sources('business')
    sports_sources = get_sources('sports')
    technology_sources = get_sources('technology')

    return render_template('index.html',
                           title=title,
                           general=general_sources,
                           business=business_sources,
                           sports=sports_sources,
                           technology=technology_sources)


@app.route('/news/<id>')
def news(id):
    '''View a specific source page and its news'''
    news = get_news(id)
    title = f'{id}'

    # The original computed title but never passed it to the template;
    # forwarding it matches the clear intent of the line above.
    return render_template('news.html', id=id, news=news, title=title)
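# The .request helpers imported above are not shown in this file. The sketch
# below is one plausible shape for get_sources, assuming the app wraps the
# public newsapi.org /v2/sources endpoint; the API-key handling and return
# shape are assumptions, not the app's confirmed implementation.
import json as _json
import urllib.request

NEWSAPI_KEY = 'YOUR_API_KEY'  # placeholder


def get_sources_sketch(category):
    url = (f'https://newsapi.org/v2/sources?category={category}'
           f'&language=en&apiKey={NEWSAPI_KEY}')
    with urllib.request.urlopen(url) as resp:
        return _json.loads(resp.read()).get('sources', [])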
"""Run a system command in its own working directory.""" # ============================================================================= # CONTENTS # ----------------------------------------------------------------------------- # phlsys_workingdircommand # # Public Classes: # CommandWithWorkingDirectory # # ----------------------------------------------------------------------------- # (this contents block is generated, edits will be lost) # ============================================================================= from __future__ import absolute_import from __future__ import division from __future__ import print_function import os import phlsys_subprocess class CommandWithWorkingDirectory(object): def __init__(self, command_path, working_dir_path): self._working_dir_path = os.path.abspath(working_dir_path) self._command_path = os.path.abspath(command_path) def __call__(self, *args, **kwargs): stdin = kwargs.pop("stdin", None) assert not kwargs result = phlsys_subprocess.run( self._command_path, *args, stdin=stdin, workingDir=self._working_dir_path) return result.stdout # ----------------------------------------------------------------------------- # Copyright (C) 2015 Bloomberg Finance L.P. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ------------------------------ END-OF-FILE ----------------------------------
"""Retrieve the path of the parent module to dynamically build the name of FastAPI app.""" import pathlib parent_module = pathlib.Path(__file__).parent.name
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function

import dials_data
import dials_data.datasets
import dials_data.download
import mock


def test_all_datasets_can_be_parsed():
    assert dials_data.datasets.definition


def test_repository_location():
    rl = dials_data.datasets.repository_location()
    assert rl.check(dir=1)


def test_fetching_undefined_datasets_does_not_crash():
    df = dials_data.download.DataFetcher(read_only=True)
    assert df("aardvark") is False


def test_requests_for_future_datasets_can_be_intercepted():
    df = dials_data.download.DataFetcher(read_only=True)
    df.result_filter = mock.Mock()
    df.result_filter.return_value = False
    assert df("aardvark") is False
    df.result_filter.assert_called_once_with(result=False)
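# Usage sketch for DataFetcher outside the test suite: requesting a defined
# dataset returns a directory handle for the downloaded files ("x4wide" is
# one of the datasets shipped with dials_data; the first call needs network
# access to fetch it).
def fetch_x4wide():
    df = dials_data.download.DataFetcher(read_only=False)
    return df("x4wide")  # py.path-style location of the dataset on disk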
# Generated from IEC61131Parser.g4 by ANTLR 4.9.1 from antlr4 import * from io import StringIO from typing.io import TextIO import sys def serializedATN(): with StringIO() as buf: buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\2\u0118") buf.write("\u0a1a\b\1\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7") buf.write("\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\4\f\t\f\4\r\t\r") buf.write("\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22\4\23") buf.write("\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30") buf.write("\4\31\t\31\4\32\t\32\4\33\t\33\4\34\t\34\4\35\t\35\4\36") buf.write("\t\36\4\37\t\37\4 \t \4!\t!\4\"\t\"\4#\t#\4$\t$\4%\t%") buf.write("\4&\t&\4\'\t\'\4(\t(\4)\t)\4*\t*\4+\t+\4,\t,\4-\t-\4.") buf.write("\t.\4/\t/\4\60\t\60\4\61\t\61\4\62\t\62\4\63\t\63\4\64") buf.write("\t\64\4\65\t\65\4\66\t\66\4\67\t\67\48\t8\49\t9\4:\t:") buf.write("\4;\t;\4<\t<\4=\t=\4>\t>\4?\t?\4@\t@\4A\tA\4B\tB\4C\t") buf.write("C\4D\tD\4E\tE\4F\tF\4G\tG\4H\tH\4I\tI\4J\tJ\4K\tK\4L\t") buf.write("L\4M\tM\4N\tN\4O\tO\4P\tP\4Q\tQ\4R\tR\4S\tS\4T\tT\4U\t") buf.write("U\4V\tV\4W\tW\4X\tX\4Y\tY\4Z\tZ\4[\t[\4\\\t\\\4]\t]\4") buf.write("^\t^\4_\t_\4`\t`\4a\ta\4b\tb\4c\tc\4d\td\4e\te\4f\tf\4") buf.write("g\tg\4h\th\4i\ti\4j\tj\4k\tk\4l\tl\4m\tm\4n\tn\4o\to\4") buf.write("p\tp\4q\tq\4r\tr\4s\ts\4t\tt\4u\tu\4v\tv\4w\tw\4x\tx\4") buf.write("y\ty\4z\tz\4{\t{\4|\t|\4}\t}\4~\t~\4\177\t\177\4\u0080") buf.write("\t\u0080\4\u0081\t\u0081\4\u0082\t\u0082\4\u0083\t\u0083") buf.write("\4\u0084\t\u0084\4\u0085\t\u0085\4\u0086\t\u0086\4\u0087") buf.write("\t\u0087\4\u0088\t\u0088\4\u0089\t\u0089\4\u008a\t\u008a") buf.write("\4\u008b\t\u008b\4\u008c\t\u008c\4\u008d\t\u008d\4\u008e") buf.write("\t\u008e\4\u008f\t\u008f\4\u0090\t\u0090\4\u0091\t\u0091") buf.write("\4\u0092\t\u0092\4\u0093\t\u0093\4\u0094\t\u0094\4\u0095") buf.write("\t\u0095\4\u0096\t\u0096\4\u0097\t\u0097\4\u0098\t\u0098") buf.write("\4\u0099\t\u0099\4\u009a\t\u009a\4\u009b\t\u009b\4\u009c") buf.write("\t\u009c\4\u009d\t\u009d\4\u009e\t\u009e\4\u009f\t\u009f") buf.write("\4\u00a0\t\u00a0\4\u00a1\t\u00a1\4\u00a2\t\u00a2\4\u00a3") buf.write("\t\u00a3\4\u00a4\t\u00a4\4\u00a5\t\u00a5\4\u00a6\t\u00a6") buf.write("\4\u00a7\t\u00a7\4\u00a8\t\u00a8\4\u00a9\t\u00a9\4\u00aa") buf.write("\t\u00aa\4\u00ab\t\u00ab\4\u00ac\t\u00ac\4\u00ad\t\u00ad") buf.write("\4\u00ae\t\u00ae\4\u00af\t\u00af\4\u00b0\t\u00b0\4\u00b1") buf.write("\t\u00b1\4\u00b2\t\u00b2\4\u00b3\t\u00b3\4\u00b4\t\u00b4") buf.write("\4\u00b5\t\u00b5\4\u00b6\t\u00b6\4\u00b7\t\u00b7\4\u00b8") buf.write("\t\u00b8\4\u00b9\t\u00b9\4\u00ba\t\u00ba\4\u00bb\t\u00bb") buf.write("\4\u00bc\t\u00bc\4\u00bd\t\u00bd\4\u00be\t\u00be\4\u00bf") buf.write("\t\u00bf\4\u00c0\t\u00c0\4\u00c1\t\u00c1\4\u00c2\t\u00c2") buf.write("\4\u00c3\t\u00c3\4\u00c4\t\u00c4\4\u00c5\t\u00c5\4\u00c6") buf.write("\t\u00c6\4\u00c7\t\u00c7\4\u00c8\t\u00c8\4\u00c9\t\u00c9") buf.write("\4\u00ca\t\u00ca\4\u00cb\t\u00cb\4\u00cc\t\u00cc\4\u00cd") buf.write("\t\u00cd\4\u00ce\t\u00ce\4\u00cf\t\u00cf\4\u00d0\t\u00d0") buf.write("\4\u00d1\t\u00d1\4\u00d2\t\u00d2\4\u00d3\t\u00d3\4\u00d4") buf.write("\t\u00d4\4\u00d5\t\u00d5\4\u00d6\t\u00d6\4\u00d7\t\u00d7") buf.write("\4\u00d8\t\u00d8\4\u00d9\t\u00d9\4\u00da\t\u00da\4\u00db") buf.write("\t\u00db\4\u00dc\t\u00dc\4\u00dd\t\u00dd\4\u00de\t\u00de") buf.write("\4\u00df\t\u00df\4\u00e0\t\u00e0\4\u00e1\t\u00e1\4\u00e2") buf.write("\t\u00e2\4\u00e3\t\u00e3\4\u00e4\t\u00e4\4\u00e5\t\u00e5") buf.write("\4\u00e6\t\u00e6\4\u00e7\t\u00e7\4\u00e8\t\u00e8\4\u00e9") 
buf.write("\t\u00e9\4\u00ea\t\u00ea\4\u00eb\t\u00eb\4\u00ec\t\u00ec") buf.write("\4\u00ed\t\u00ed\4\u00ee\t\u00ee\4\u00ef\t\u00ef\4\u00f0") buf.write("\t\u00f0\4\u00f1\t\u00f1\4\u00f2\t\u00f2\4\u00f3\t\u00f3") buf.write("\4\u00f4\t\u00f4\4\u00f5\t\u00f5\4\u00f6\t\u00f6\4\u00f7") buf.write("\t\u00f7\4\u00f8\t\u00f8\4\u00f9\t\u00f9\4\u00fa\t\u00fa") buf.write("\4\u00fb\t\u00fb\4\u00fc\t\u00fc\4\u00fd\t\u00fd\4\u00fe") buf.write("\t\u00fe\4\u00ff\t\u00ff\4\u0100\t\u0100\4\u0101\t\u0101") buf.write("\4\u0102\t\u0102\4\u0103\t\u0103\4\u0104\t\u0104\4\u0105") buf.write("\t\u0105\4\u0106\t\u0106\4\u0107\t\u0107\4\u0108\t\u0108") buf.write("\4\u0109\t\u0109\4\u010a\t\u010a\4\u010b\t\u010b\4\u010c") buf.write("\t\u010c\4\u010d\t\u010d\4\u010e\t\u010e\4\u010f\t\u010f") buf.write("\4\u0110\t\u0110\4\u0111\t\u0111\4\u0112\t\u0112\4\u0113") buf.write("\t\u0113\4\u0114\t\u0114\4\u0115\t\u0115\4\u0116\t\u0116") buf.write("\4\u0117\t\u0117\4\u0118\t\u0118\4\u0119\t\u0119\4\u011a") buf.write("\t\u011a\4\u011b\t\u011b\4\u011c\t\u011c\4\u011d\t\u011d") buf.write("\4\u011e\t\u011e\4\u011f\t\u011f\4\u0120\t\u0120\4\u0121") buf.write("\t\u0121\4\u0122\t\u0122\4\u0123\t\u0123\4\u0124\t\u0124") buf.write("\4\u0125\t\u0125\4\u0126\t\u0126\4\u0127\t\u0127\4\u0128") buf.write("\t\u0128\4\u0129\t\u0129\4\u012a\t\u012a\4\u012b\t\u012b") buf.write("\4\u012c\t\u012c\4\u012d\t\u012d\4\u012e\t\u012e\4\u012f") buf.write("\t\u012f\4\u0130\t\u0130\4\u0131\t\u0131\3\2\3\2\3\3\3") buf.write("\3\3\4\3\4\3\4\3\5\3\5\3\6\3\6\3\6\3\7\3\7\3\b\3\b\3\t") buf.write("\3\t\3\n\3\n\3\13\3\13\3\f\3\f\3\r\3\r\3\16\3\16\3\16") buf.write("\3\17\3\17\3\17\3\20\3\20\3\21\3\21\3\22\3\22\3\23\3\23") buf.write("\3\23\3\24\3\24\3\25\3\25\3\26\3\26\3\27\3\27\3\30\3\30") buf.write("\3\31\3\31\3\32\3\32\3\33\3\33\3\34\3\34\3\35\3\35\3\36") buf.write("\3\36\3\37\3\37\3 \3 \3!\3!\3\"\3\"\3#\3#\3$\3$\3%\3%") buf.write("\3&\3&\3\'\3\'\3(\3(\3)\3)\3*\3*\3+\3+\3,\3,\3-\3-\3.") buf.write("\3.\3/\3/\3\60\3\60\3\61\3\61\3\62\3\62\3\63\3\63\3\64") buf.write("\3\64\3\65\3\65\3\65\3\65\3\65\7\65\u02d4\n\65\f\65\16") buf.write("\65\u02d7\13\65\3\65\3\65\3\65\3\65\3\65\3\66\3\66\3\66") buf.write("\3\66\3\66\7\66\u02e3\n\66\f\66\16\66\u02e6\13\66\3\66") buf.write("\3\66\3\66\3\66\3\66\3\67\3\67\3\67\3\67\7\67\u02f1\n") buf.write("\67\f\67\16\67\u02f4\13\67\3\67\3\67\3\67\3\67\38\38\3") buf.write("8\38\38\38\38\38\38\38\38\38\38\38\38\39\39\39\39\39\3") buf.write("9\39\39\39\39\39\39\39\39\39\3:\3:\3:\3:\3:\3:\3:\3:\3") buf.write(":\3:\3:\3:\3:\3:\3;\3;\3;\3;\3;\3;\3;\3;\3;\3;\3;\3;\3") buf.write(";\3;\3<\3<\3<\3<\3<\3<\3<\3<\3<\3<\3<\3<\3<\3<\3=\3=\3") buf.write("=\3=\3=\3=\3=\3=\3=\3=\3=\3=\3=\3=\3>\3>\3>\3>\3>\3>\3") buf.write(">\3>\3>\3>\3>\3>\3>\3>\3?\3?\3?\3?\3?\3?\3?\3?\3?\3?\3") buf.write("?\3?\3?\3?\3@\3@\3@\3@\3@\3@\3@\3@\3@\3@\3@\3@\3@\3@\3") buf.write("A\3A\3A\3A\3A\3A\3A\3A\3A\3A\3A\3A\3A\3A\3B\3B\3B\3B\3") buf.write("B\3B\3B\3B\3B\3B\3B\3B\3B\3C\3C\3C\3C\3C\3C\3C\3C\3C\3") buf.write("C\3C\3C\3C\3D\3D\3D\3D\3D\3D\3D\3D\3D\3D\3D\3D\3D\3E\3") buf.write("E\3E\3E\3E\3E\3E\3E\3E\3E\3E\3E\3E\3F\3F\3F\3F\3F\3F\3") buf.write("F\3F\3F\3F\3F\3F\3F\3G\3G\3G\3G\3G\3G\3G\3G\3G\3G\3G\3") buf.write("G\3G\3H\3H\3H\3H\3H\3H\3H\3H\3H\3H\3H\3H\3H\3I\3I\3I\3") buf.write("I\3I\3I\3I\3I\3I\3I\3I\3I\3I\3J\3J\3J\3J\3J\3J\3J\3J\3") buf.write("J\3J\3J\3J\3J\3K\3K\3K\3K\3K\3K\3K\3K\3K\3K\3K\3K\3K\3") buf.write("L\3L\3L\3L\3L\3L\3L\3L\3L\3L\3L\3L\3L\3M\3M\3M\3M\3M\3") buf.write("M\3M\3M\3M\3M\3M\3M\3N\3N\3N\3N\3N\3N\3N\3N\3N\3N\3N\3") 
# [serializedATN() body elided: a long machine-generated run of
#  buf.write("...") calls encoding the lexer's ATN. In this extraction the
#  statements were fused onto a handful of overlong lines, several string
#  literals were split across line breaks, and the final call is truncated,
#  so the blob cannot be reproduced faithfully here; regenerating the lexer
#  from its grammar with the ANTLR tool emits it verbatim.]
\2\u0926\u01f4\3\2\2\2\u0927\u0928\5\65\33\2\u0928") buf.write("\u0929\5\67\34\2\u0929\u092a\5Y-\2\u092a\u01f6\3\2\2\2") buf.write("\u092b\u092c\5K&\2\u092c\u092d\5Q)\2\u092d\u092e\5A!\2") buf.write("\u092e\u01f8\3\2\2\2\u092f\u0930\5=\37\2\u0930\u0931\5") buf.write("c\62\2\u0931\u0932\5S*\2\u0932\u01fa\3\2\2\2\u0933\u0934") buf.write("\5Y-\2\u0934\u0935\5E#\2\u0935\u0936\5O(\2\u0936\u01fc") buf.write("\3\2\2\2\u0937\u0938\59\35\2\u0938\u0939\5Q)\2\u0939\u093a") buf.write("\5Y-\2\u093a\u01fe\3\2\2\2\u093b\u093c\5[.\2\u093c\u093d") buf.write("\5\65\33\2\u093d\u093e\5O(\2\u093e\u0200\3\2\2\2\u093f") buf.write("\u0940\5Y-\2\u0940\u0941\5C\"\2\u0941\u0942\5K&\2\u0942") buf.write("\u0202\3\2\2\2\u0943\u0944\5Y-\2\u0944\u0945\5C\"\2\u0945") buf.write("\u0946\5W,\2\u0946\u0204\3\2\2\2\u0947\u0948\5W,\2\u0948") buf.write("\u0949\5Q)\2\u0949\u094a\5K&\2\u094a\u0206\3\2\2\2\u094b") buf.write("\u094c\5W,\2\u094c\u094d\5Q)\2\u094d\u094e\5W,\2\u094e") buf.write("\u0208\3\2\2\2\u094f\u0950\5Y-\2\u0950\u0951\5=\37\2\u0951") buf.write("\u0952\5K&\2\u0952\u020a\3\2\2\2\u0953\u0954\5M\'\2\u0954") buf.write("\u0955\5\65\33\2\u0955\u0956\5c\62\2\u0956\u020c\3\2\2") buf.write("\2\u0957\u0958\5M\'\2\u0958\u0959\5E#\2\u0959\u095a\5") buf.write("O(\2\u095a\u020e\3\2\2\2\u095b\u095c\5M\'\2\u095c\u095d") buf.write("\5]/\2\u095d\u095e\5c\62\2\u095e\u0210\3\2\2\2\u095f\u0960") buf.write("\5K&\2\u0960\u0961\5=\37\2\u0961\u0962\5O(\2\u0962\u0212") buf.write("\3\2\2\2\u0963\u0964\5M\'\2\u0964\u0965\5E#\2\u0965\u0966") buf.write("\5;\36\2\u0966\u0214\3\2\2\2\u0967\u0968\5[.\2\u0968\u0969") buf.write("\5S*\2\u0969\u0216\3\2\2\2\u096a\u096b\5Y-\2\u096b\u096c") buf.write("\5W,\2\u096c\u0218\3\2\2\2\u096d\u096e\5W,\2\u096e\u096f") buf.write("\5Y-\2\u096f\u021a\3\2\2\2\u0970\u0971\5\67\34\2\u0971") buf.write("\u0972\5e\63\2\u0972\u021c\3\2\2\2\u0973\u0974\5;\36\2") buf.write("\u0974\u0975\5Q)\2\u0975\u021e\3\2\2\2\u0976\u0977\5Y") buf.write("-\2\u0977\u0978\5;\36\2\u0978\u0220\3\2\2\2\u0979\u097a") buf.write("\5;\36\2\u097a\u097b\5Y-\2\u097b\u0222\3\2\2\2\u097c\u097d") buf.write("\5Y-\2\u097d\u097e\5K&\2\u097e\u0224\3\2\2\2\u097f\u0980") buf.write("\5;\36\2\u0980\u0981\5[.\2\u0981\u0226\3\2\2\2\u0982\u0983") buf.write("\5\65\33\2\u0983\u0984\5[.\2\u0984\u0228\3\2\2\2\u0985") buf.write("\u0986\59\35\2\u0986\u0987\5]/\2\u0987\u022a\3\2\2\2\u0988") buf.write("\u0989\5S*\2\u0989\u098a\5_\60\2\u098a\u022c\3\2\2\2\u098b") buf.write("\u098c\5S*\2\u098c\u098d\5[.\2\u098d\u022e\3\2\2\2\u098e") buf.write("\u098f\5E#\2\u098f\u0990\5O(\2\u0990\u0230\3\2\2\2\u0991") buf.write("\u0992\5Q)\2\u0992\u0993\5? \2\u0993\u0232\3\2\2\2\u0994") buf.write("\u0995\5K&\2\u0995\u0996\5;\36\2\u0996\u0234\3\2\2\2\u0997") buf.write("\u0998\5[.\2\u0998\u0999\5Q)\2\u0999\u0236\3\2\2\2\u099a") buf.write("\u099b\5Q)\2\u099b\u099c\5O(\2\u099c\u0238\3\2\2\2\u099d") buf.write("\u099e\5Y-\2\u099e\u099f\5[.\2\u099f\u023a\3\2\2\2\u09a0") buf.write("\u09a1\59\35\2\u09a1\u09a2\5;\36\2\u09a2\u023c\3\2\2\2") buf.write("\u09a3\u09a4\5Q)\2\u09a4\u09a5\5W,\2\u09a5\u023e\3\2\2") buf.write("\2\u09a6\u09a7\5A!\2\u09a7\u09a8\5[.\2\u09a8\u0240\3\2") buf.write("\2\2\u09a9\u09aa\5A!\2\u09aa\u09ab\5=\37\2\u09ab\u0242") buf.write("\3\2\2\2\u09ac\u09ad\5=\37\2\u09ad\u09ae\5U+\2\u09ae\u0244") buf.write("\3\2\2\2\u09af\u09b0\5K&\2\u09b0\u09b1\5[.\2\u09b1\u0246") buf.write("\3\2\2\2\u09b2\u09b3\5K&\2\u09b3\u09b4\5=\37\2\u09b4\u0248") buf.write("\3\2\2\2\u09b5\u09b6\5O(\2\u09b6\u09b7\5=\37\2\u09b7\u024a") buf.write("\3\2\2\2\u09b8\u09b9\5E#\2\u09b9\u09ba\5? 
\2\u09ba\u024c") buf.write("\3\2\2\2\u09bb\u09bc\5K&\2\u09bc\u09bd\5O(\2\u09bd\u024e") buf.write("\3\2\2\2\u09be\u09bf\t\35\2\2\u09bf\u09c1\t\36\2\2\u09c0") buf.write("\u09c2\t\37\2\2\u09c1\u09c0\3\2\2\2\u09c1\u09c2\3\2\2") buf.write("\2\u09c2\u09c4\3\2\2\2\u09c3\u09c5\t \2\2\u09c4\u09c3") buf.write("\3\2\2\2\u09c4\u09c5\3\2\2\2\u09c5\u09c7\3\2\2\2\u09c6") buf.write("\u09c8\t\34\2\2\u09c7\u09c6\3\2\2\2\u09c8\u09c9\3\2\2") buf.write("\2\u09c9\u09c7\3\2\2\2\u09c9\u09ca\3\2\2\2\u09ca\u09d3") buf.write("\3\2\2\2\u09cb\u09cd\7\60\2\2\u09cc\u09ce\t\34\2\2\u09cd") buf.write("\u09cc\3\2\2\2\u09ce\u09cf\3\2\2\2\u09cf\u09cd\3\2\2\2") buf.write("\u09cf\u09d0\3\2\2\2\u09d0\u09d2\3\2\2\2\u09d1\u09cb\3") buf.write("\2\2\2\u09d2\u09d5\3\2\2\2\u09d3\u09d1\3\2\2\2\u09d3\u09d4") buf.write("\3\2\2\2\u09d4\u0250\3\2\2\2\u09d5\u09d3\3\2\2\2\u09d6") buf.write("\u09da\t!\2\2\u09d7\u09d9\t\"\2\2\u09d8\u09d7\3\2\2\2") buf.write("\u09d9\u09dc\3\2\2\2\u09da\u09d8\3\2\2\2\u09da\u09db\3") buf.write("\2\2\2\u09db\u0252\3\2\2\2\u09dc\u09da\3\2\2\2\u09dd\u09de") buf.write("\t!\2\2\u09de\u0254\3\2\2\2\u09df\u09e1\t\34\2\2\u09e0") buf.write("\u09df\3\2\2\2\u09e1\u09e2\3\2\2\2\u09e2\u09e0\3\2\2\2") buf.write("\u09e2\u09e3\3\2\2\2\u09e3\u0256\3\2\2\2\u09e4\u09e5\7") buf.write("\64\2\2\u09e5\u09e6\7%\2\2\u09e6\u09eb\3\2\2\2\u09e7\u09e9") buf.write("\7a\2\2\u09e8\u09e7\3\2\2\2\u09e8\u09e9\3\2\2\2\u09e9") buf.write("\u09ea\3\2\2\2\u09ea\u09ec\t#\2\2\u09eb\u09e8\3\2\2\2") buf.write("\u09ec\u09ed\3\2\2\2\u09ed\u09eb\3\2\2\2\u09ed\u09ee\3") buf.write("\2\2\2\u09ee\u0258\3\2\2\2\u09ef\u09f0\7:\2\2\u09f0\u09f1") buf.write("\7%\2\2\u09f1\u09f6\3\2\2\2\u09f2\u09f4\7a\2\2\u09f3\u09f2") buf.write("\3\2\2\2\u09f3\u09f4\3\2\2\2\u09f4\u09f5\3\2\2\2\u09f5") buf.write("\u09f7\t$\2\2\u09f6\u09f3\3\2\2\2\u09f7\u09f8\3\2\2\2") buf.write("\u09f8\u09f6\3\2\2\2\u09f8\u09f9\3\2\2\2\u09f9\u025a\3") buf.write("\2\2\2\u09fa\u09fb\7\63\2\2\u09fb\u09fc\78\2\2\u09fc\u09fd") buf.write("\7%\2\2\u09fd\u0a02\3\2\2\2\u09fe\u0a00\7a\2\2\u09ff\u09fe") buf.write("\3\2\2\2\u09ff\u0a00\3\2\2\2\u0a00\u0a01\3\2\2\2\u0a01") buf.write("\u0a03\t%\2\2\u0a02\u09ff\3\2\2\2\u0a03\u0a04\3\2\2\2") buf.write("\u0a04\u0a02\3\2\2\2\u0a04\u0a05\3\2\2\2\u0a05\u025c\3") buf.write("\2\2\2\u0a06\u0a08\t&\2\2\u0a07\u0a06\3\2\2\2\u0a08\u0a09") buf.write("\3\2\2\2\u0a09\u0a07\3\2\2\2\u0a09\u0a0a\3\2\2\2\u0a0a") buf.write("\u0a0b\3\2\2\2\u0a0b\u0a0c\b\u012f\3\2\u0a0c\u025e\3\2") buf.write("\2\2\u0a0d\u0a11\7}\2\2\u0a0e\u0a10\13\2\2\2\u0a0f\u0a0e") buf.write("\3\2\2\2\u0a10\u0a13\3\2\2\2\u0a11\u0a12\3\2\2\2\u0a11") buf.write("\u0a0f\3\2\2\2\u0a12\u0a14\3\2\2\2\u0a13\u0a11\3\2\2\2") buf.write("\u0a14\u0a15\7\177\2\2\u0a15\u0a16\3\2\2\2\u0a16\u0a17") buf.write("\b\u0130\2\2\u0a17\u0260\3\2\2\2\u0a18\u0a19\13\2\2\2") buf.write("\u0a19\u0262\3\2\2\2\33\2\u02d3\u02d5\u02e2\u02e4\u02f2") buf.write("\u04ae\u04e1\u06b3\u071e\u09c1\u09c4\u09c9\u09cf\u09d3") buf.write("\u09da\u09e2\u09e8\u09ed\u09f3\u09f8\u09ff\u0a04\u0a09") buf.write("\u0a11\4\2\3\2\b\2\2") return buf.getvalue() class IEC61131ParserLexer(Lexer): atn = ATNDeserializer().deserialize(serializedATN()) decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ] T__0 = 1 T__1 = 2 T__2 = 3 T__3 = 4 T__4 = 5 T__5 = 6 T__6 = 7 T__7 = 8 T__8 = 9 T__9 = 10 T__10 = 11 T__11 = 12 T__12 = 13 T__13 = 14 T__14 = 15 T__15 = 16 T__16 = 17 T__17 = 18 T__18 = 19 T__19 = 20 T__20 = 21 T__21 = 22 T__22 = 23 T__23 = 24 T__24 = 25 IEC_COMMENT = 26 C_COMMENT = 27 LINE_COMMENT = 28 UDINT_TO_USINT = 29 USINT_TO_UDINT = 30 DINT_TO_UDINT = 31 
UDINT_TO_DINT = 32 UINT_TO_USINT = 33 USINT_TO_BYTE = 34 BYTE_TO_USINT = 35 USINT_TO_UINT = 36 USINT_TO_DINT = 37 DINT_TO_USINT = 38 BYTE_TO_WORD = 39 BYTE_TO_UINT = 40 WORD_TO_BYTE = 41 WORD_TO_UINT = 42 REAL_TO_UINT = 43 INT_TO_USINT = 44 UINT_TO_BOOL = 45 UINT_TO_WORD = 46 UINT_TO_REAL = 47 DINT_TO_UINT = 48 UINT_TO_DINT = 49 WORD_TO_INT = 50 REAL_TO_INT = 51 INT_TO_BOOL = 52 BOOL_TO_INT = 53 INT_TO_WORD = 54 INT_TO_REAL = 55 INT_TO_UINT = 56 UINT_TO_INT = 57 END_FUNCTION_BLOCK = 58 END_CONFIGURATION = 59 END_TRANSITION = 60 LOC_PARTLY_VAR = 61 FUNCTION_BLOCK = 62 END_INTERFACE = 63 CONFIGURATION = 64 MULTIBIT_PART = 65 DATE_AND_TIME = 66 END_NAMESPACE = 67 VAR_EXTERNAL = 68 END_FUNCTION = 69 END_RESOURCE = 70 INITIAL_STEP = 71 TIME_OF_DAY = 72 END_PROGRAM = 73 END_ACTION = 74 END_METHOD = 75 TRANSITION = 76 VAR_GLOBAL = 77 NON_RETAIN = 78 NAMESPACE = 79 VAR_OUTPUT = 80 VAR_IN_OUT = 81 VAR_ACCESS = 82 END_STRUCT = 83 READ_WRITE = 84 IMPLEMENTS = 85 VAR_CONFIG = 86 END_REPEAT = 87 END_WHILE = 88 READ_ONLY = 89 PROTECTED = 90 VAR_INPUT = 91 END_CLASS = 92 INTERFACE = 93 ABSTRACT = 94 FUNCTION = 95 END_CASE = 96 RESOURCE = 97 INTERNAL = 98 CONTINUE = 99 PRIORITY = 100 BOOL_EXP = 101 END_STEP = 102 CONSTANT = 103 OVERRIDE = 104 VAR_TEMP = 105 END_TYPE = 106 INTERVAL = 107 EXTENDS = 108 PRIVATE = 109 TIME_MS = 110 PROGRAM = 111 END_VAR = 112 WSTRING = 113 OVERLAP = 114 END_FOR = 115 REPLACE = 116 PUBLIC = 117 METHOD = 118 ACTION = 119 RETURN = 120 STRING = 121 STRUCT = 122 RETAIN = 123 TIME_S = 124 R_EDGE = 125 F_EDGE = 126 R_TRIG = 127 F_TRIG = 128 REF_TO = 129 SINGLE = 130 END_IF = 131 REPEAT = 132 INSERT = 133 DELETE = 134 CONCAT = 135 FINAL = 136 SUPER = 137 ARRAY = 138 WCHAR = 139 USING = 140 CLASS = 141 FALSE = 142 DWORD = 143 LWORD = 144 USINT = 145 UDINT = 146 ULINT = 147 LREAL = 148 LTIME = 149 LDATE = 150 CALCN = 151 RETCN = 152 JMPCN = 153 ELSIF = 154 WHILE = 155 UNTIL = 156 RIGHT = 157 LIMIT = 158 TRUNC = 159 ATAN2 = 160 EXIT = 161 CASE = 162 THIS = 163 TASK = 164 REAL = 165 TIME = 166 DATE = 167 LTOD = 168 BYTE = 169 WORD = 170 CALC = 171 TRUE = 172 BOOL = 173 WITH = 174 STEP = 175 CHAR = 176 TYPE = 177 NULL = 178 FROM = 179 UINT = 180 SINT = 181 DINT = 182 LINT = 183 ANDN = 184 XORN = 185 RETC = 186 JMPC = 187 THEN = 188 ELSE = 189 CTUD = 190 SQRT = 191 ASIN = 192 ACOS = 193 ATAN = 194 EXPT = 195 MOVE = 196 LEFT = 197 FIND = 198 FOR = 199 INT = 200 NOT = 201 MUL = 202 ADD = 203 TOD = 204 LDT = 205 VAR = 206 CAL = 207 CLK = 208 STN = 209 LDN = 210 AND = 211 XOR = 212 ORN = 213 SUB = 214 MOD = 215 DIV = 216 RET = 217 REF = 218 JMP = 219 CTU = 220 CTD = 221 TON = 222 TOF = 223 ABS = 224 LOG = 225 EXP = 226 SIN = 227 COS = 228 TAN = 229 SHL = 230 SHR = 231 ROL = 232 ROR = 233 SEL = 234 MAX = 235 MIN = 236 MUX = 237 LEN = 238 MID = 239 TP = 240 SR = 241 RS = 242 BY = 243 DO = 244 SD = 245 DS = 246 SL = 247 DT = 248 AT = 249 CU = 250 PV = 251 PT = 252 IN = 253 OF = 254 LD = 255 TO = 256 ON = 257 ST = 258 CD = 259 OR = 260 GT = 261 GE = 262 EQ = 263 LT = 264 LE = 265 NE = 266 IF = 267 LN = 268 DIRECTVARIABLE = 269 IDENTIFIER = 270 LETTER = 271 DIGITS = 272 BINARY_INT = 273 OCTAL_INT = 274 HEX_INT = 275 WS = 276 PRAGMA = 277 ErrorCharacter = 278 channelNames = [ u"DEFAULT_TOKEN_CHANNEL", u"HIDDEN" ] modeNames = [ "DEFAULT_MODE" ] literalNames = [ "<INVALID>", "';'", "':'", "':='", "'['", "'..'", "']'", "'('", "')'", "'R'", "'S'", "'.'", "','", "'>='", "'<='", "'='", "'<'", "'>'", "'<>'", "'*'", "'/'", "'+'", "'-'", "'#'", "'_'", "'E'" ] symbolicNames = [ "<INVALID>", "IEC_COMMENT", 
"C_COMMENT", "LINE_COMMENT", "UDINT_TO_USINT", "USINT_TO_UDINT", "DINT_TO_UDINT", "UDINT_TO_DINT", "UINT_TO_USINT", "USINT_TO_BYTE", "BYTE_TO_USINT", "USINT_TO_UINT", "USINT_TO_DINT", "DINT_TO_USINT", "BYTE_TO_WORD", "BYTE_TO_UINT", "WORD_TO_BYTE", "WORD_TO_UINT", "REAL_TO_UINT", "INT_TO_USINT", "UINT_TO_BOOL", "UINT_TO_WORD", "UINT_TO_REAL", "DINT_TO_UINT", "UINT_TO_DINT", "WORD_TO_INT", "REAL_TO_INT", "INT_TO_BOOL", "BOOL_TO_INT", "INT_TO_WORD", "INT_TO_REAL", "INT_TO_UINT", "UINT_TO_INT", "END_FUNCTION_BLOCK", "END_CONFIGURATION", "END_TRANSITION", "LOC_PARTLY_VAR", "FUNCTION_BLOCK", "END_INTERFACE", "CONFIGURATION", "MULTIBIT_PART", "DATE_AND_TIME", "END_NAMESPACE", "VAR_EXTERNAL", "END_FUNCTION", "END_RESOURCE", "INITIAL_STEP", "TIME_OF_DAY", "END_PROGRAM", "END_ACTION", "END_METHOD", "TRANSITION", "VAR_GLOBAL", "NON_RETAIN", "NAMESPACE", "VAR_OUTPUT", "VAR_IN_OUT", "VAR_ACCESS", "END_STRUCT", "READ_WRITE", "IMPLEMENTS", "VAR_CONFIG", "END_REPEAT", "END_WHILE", "READ_ONLY", "PROTECTED", "VAR_INPUT", "END_CLASS", "INTERFACE", "ABSTRACT", "FUNCTION", "END_CASE", "RESOURCE", "INTERNAL", "CONTINUE", "PRIORITY", "BOOL_EXP", "END_STEP", "CONSTANT", "OVERRIDE", "VAR_TEMP", "END_TYPE", "INTERVAL", "EXTENDS", "PRIVATE", "TIME_MS", "PROGRAM", "END_VAR", "WSTRING", "OVERLAP", "END_FOR", "REPLACE", "PUBLIC", "METHOD", "ACTION", "RETURN", "STRING", "STRUCT", "RETAIN", "TIME_S", "R_EDGE", "F_EDGE", "R_TRIG", "F_TRIG", "REF_TO", "SINGLE", "END_IF", "REPEAT", "INSERT", "DELETE", "CONCAT", "FINAL", "SUPER", "ARRAY", "WCHAR", "USING", "CLASS", "FALSE", "DWORD", "LWORD", "USINT", "UDINT", "ULINT", "LREAL", "LTIME", "LDATE", "CALCN", "RETCN", "JMPCN", "ELSIF", "WHILE", "UNTIL", "RIGHT", "LIMIT", "TRUNC", "ATAN2", "EXIT", "CASE", "THIS", "TASK", "REAL", "TIME", "DATE", "LTOD", "BYTE", "WORD", "CALC", "TRUE", "BOOL", "WITH", "STEP", "CHAR", "TYPE", "NULL", "FROM", "UINT", "SINT", "DINT", "LINT", "ANDN", "XORN", "RETC", "JMPC", "THEN", "ELSE", "CTUD", "SQRT", "ASIN", "ACOS", "ATAN", "EXPT", "MOVE", "LEFT", "FIND", "FOR", "INT", "NOT", "MUL", "ADD", "TOD", "LDT", "VAR", "CAL", "CLK", "STN", "LDN", "AND", "XOR", "ORN", "SUB", "MOD", "DIV", "RET", "REF", "JMP", "CTU", "CTD", "TON", "TOF", "ABS", "LOG", "EXP", "SIN", "COS", "TAN", "SHL", "SHR", "ROL", "ROR", "SEL", "MAX", "MIN", "MUX", "LEN", "MID", "TP", "SR", "RS", "BY", "DO", "SD", "DS", "SL", "DT", "AT", "CU", "PV", "PT", "IN", "OF", "LD", "TO", "ON", "ST", "CD", "OR", "GT", "GE", "EQ", "LT", "LE", "NE", "IF", "LN", "DIRECTVARIABLE", "IDENTIFIER", "LETTER", "DIGITS", "BINARY_INT", "OCTAL_INT", "HEX_INT", "WS", "PRAGMA", "ErrorCharacter" ] ruleNames = [ "T__0", "T__1", "T__2", "T__3", "T__4", "T__5", "T__6", "T__7", "T__8", "T__9", "T__10", "T__11", "T__12", "T__13", "T__14", "T__15", "T__16", "T__17", "T__18", "T__19", "T__20", "T__21", "T__22", "T__23", "T__24", "A", "B", "C", "D", "E", "F", "G", "H", "I", "J", "K", "L", "M", "N", "O", "P", "Q", "R", "S", "T", "U", "V", "W", "X", "Y", "Z", "IEC_COMMENT", "C_COMMENT", "LINE_COMMENT", "UDINT_TO_USINT", "USINT_TO_UDINT", "DINT_TO_UDINT", "UDINT_TO_DINT", "UINT_TO_USINT", "USINT_TO_BYTE", "BYTE_TO_USINT", "USINT_TO_UINT", "USINT_TO_DINT", "DINT_TO_USINT", "BYTE_TO_WORD", "BYTE_TO_UINT", "WORD_TO_BYTE", "WORD_TO_UINT", "REAL_TO_UINT", "INT_TO_USINT", "UINT_TO_BOOL", "UINT_TO_WORD", "UINT_TO_REAL", "DINT_TO_UINT", "UINT_TO_DINT", "WORD_TO_INT", "REAL_TO_INT", "INT_TO_BOOL", "BOOL_TO_INT", "INT_TO_WORD", "INT_TO_REAL", "INT_TO_UINT", "UINT_TO_INT", "END_FUNCTION_BLOCK", "END_CONFIGURATION", "END_TRANSITION", 
"LOC_PARTLY_VAR", "FUNCTION_BLOCK", "END_INTERFACE", "CONFIGURATION", "MULTIBIT_PART", "DATE_AND_TIME", "END_NAMESPACE", "VAR_EXTERNAL", "END_FUNCTION", "END_RESOURCE", "INITIAL_STEP", "TIME_OF_DAY", "END_PROGRAM", "END_ACTION", "END_METHOD", "TRANSITION", "VAR_GLOBAL", "NON_RETAIN", "NAMESPACE", "VAR_OUTPUT", "VAR_IN_OUT", "VAR_ACCESS", "END_STRUCT", "READ_WRITE", "IMPLEMENTS", "VAR_CONFIG", "END_REPEAT", "END_WHILE", "READ_ONLY", "PROTECTED", "VAR_INPUT", "END_CLASS", "INTERFACE", "ABSTRACT", "FUNCTION", "END_CASE", "RESOURCE", "INTERNAL", "CONTINUE", "PRIORITY", "BOOL_EXP", "END_STEP", "CONSTANT", "OVERRIDE", "VAR_TEMP", "END_TYPE", "INTERVAL", "EXTENDS", "PRIVATE", "TIME_MS", "PROGRAM", "END_VAR", "WSTRING", "OVERLAP", "END_FOR", "REPLACE", "PUBLIC", "METHOD", "ACTION", "RETURN", "STRING", "STRUCT", "RETAIN", "TIME_S", "R_EDGE", "F_EDGE", "R_TRIG", "F_TRIG", "REF_TO", "SINGLE", "END_IF", "REPEAT", "INSERT", "DELETE", "CONCAT", "FINAL", "SUPER", "ARRAY", "WCHAR", "USING", "CLASS", "FALSE", "DWORD", "LWORD", "USINT", "UDINT", "ULINT", "LREAL", "LTIME", "LDATE", "CALCN", "RETCN", "JMPCN", "ELSIF", "WHILE", "UNTIL", "RIGHT", "LIMIT", "TRUNC", "ATAN2", "EXIT", "CASE", "THIS", "TASK", "REAL", "TIME", "DATE", "LTOD", "BYTE", "WORD", "CALC", "TRUE", "BOOL", "WITH", "STEP", "CHAR", "TYPE", "NULL", "FROM", "UINT", "SINT", "DINT", "LINT", "ANDN", "XORN", "RETC", "JMPC", "THEN", "ELSE", "CTUD", "SQRT", "ASIN", "ACOS", "ATAN", "EXPT", "MOVE", "LEFT", "FIND", "FOR", "INT", "NOT", "MUL", "ADD", "TOD", "LDT", "VAR", "CAL", "CLK", "STN", "LDN", "AND", "XOR", "ORN", "SUB", "MOD", "DIV", "RET", "REF", "JMP", "CTU", "CTD", "TON", "TOF", "ABS", "LOG", "EXP", "SIN", "COS", "TAN", "SHL", "SHR", "ROL", "ROR", "SEL", "MAX", "MIN", "MUX", "LEN", "MID", "TP", "SR", "RS", "BY", "DO", "SD", "DS", "SL", "DT", "AT", "CU", "PV", "PT", "IN", "OF", "LD", "TO", "ON", "ST", "CD", "OR", "GT", "GE", "EQ", "LT", "LE", "NE", "IF", "LN", "DIRECTVARIABLE", "IDENTIFIER", "LETTER", "DIGITS", "BINARY_INT", "OCTAL_INT", "HEX_INT", "WS", "PRAGMA", "ErrorCharacter" ] grammarFileName = "IEC61131Parser.g4" def __init__(self, input=None, output:TextIO = sys.stdout): super().__init__(input, output) self.checkVersion("4.9.1") self._interp = LexerATNSimulator(self, self.atn, self.decisionsToDFA, PredictionContextCache()) self._actions = None self._predicates = None
python
#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""
test_quickD3map
----------------------------------

Tests for `quickD3map` module.
"""
import nose.tools as nt
from nose.tools import raises
import pandas as pd
import numpy as np
from itertools import combinations
import geojson
from quickD3map import PointMap, LineMap
from quickD3map.utilities import latitude, longitude, projections
from quickD3map.check_data import check_column, check_center, check_for_NA

# To add:
# Data-checking tests.
# Map-writing tests.
# These aren't super great, but they at least run data through each of the
# three current classes.

## Tests That Check DataFrames
#######################################################
@raises(ValueError)
def test_for_Lat_Lon1():
    df = pd.DataFrame(np.random.randn(3, 2), columns=["A", "B"])
    check_column(df, latitude, "Latitude")

def test_for_Lat_Lon2():
    df = pd.DataFrame(np.random.randn(3, 2), columns=["Latitude", "Longitude"])
    nt.assert_equal(check_column(df, latitude, "Latitude"), "Latitude")

@raises(ValueError)
def test_for_NAs1():
    df = pd.DataFrame(np.random.randn(3, 2), columns=["Latitude", "Longitude"])
    df.loc[3, 'Latitude'] = np.nan  # .loc replaces the long-deprecated .ix
    print(df)
    check_for_NA(df, "Latitude", "Longitude")

def test_check_center():
    # was a class whose asserts ran at import time; a plain test function
    # lets the test runner collect and report it properly
    nt.assert_equals((100, 0), check_center((100, 0)))
    nt.assert_equals([100, 0], check_center([100, 0]))
    nt.assert_equals(None, check_center([100, 0, 10]))

## Tests That Check GeoJSON Conversion
#######################################################
# def test_PointMap_to_geojson():
#     df = pd.DataFrame({"Latitude": [82.85, 87.65, -83.03],
#                        "Longitude": [41.68, 41.62, -41.12]})
#     pm = PointMap(df)
#     expected_output = """{"type": "FeatureCollection", "features": [
#     {"geometry": {"type": "Point", "coordinates": [82.85, 41.68]}, "type": "Feature", "id": null, "properties": {}},
#     {"geometry": {"type": "Point", "coordinates": [87.67, 41.62]}, "type": "Feature", "id": null, "properties": {}},
#     {"geometry": {"type": "Point", "coordinates": [-83.03, -41.12]}, "type": "Feature", "id": null, "properties": {}}] }
#     """
#     geojson_out = pm.convert_to_geojson()
#     nt.assert_equal(geojson.loads(expected_output), geojson.loads(geojson_out))
#
#     # Fails because of differences in the length of the numbers: native
#     # Python keeps the full float, but the hand-typed answer has only two
#     # digits. Should I add rounding/decimals to the program, or use a
#     # different test?

## Tests That Check BaseMap Object Functionality
#######################################################

## Tests That Check Map Object Functionality
#######################################################
def testPointMap():
    df = pd.read_csv('../examples/data/omdf.csv')
    p = PointMap(df)
    nt.assert_is_instance(p, PointMap)

def testWeather_data():
    df = pd.read_csv('../examples/data/weatherstations.csv')
    mc = PointMap(df, columns=['LAT', 'LON', 'ELEV'], scale_exp=3)
    nt.assert_is_instance(mc, PointMap)

def testPopulation_data():
    df = pd.read_csv('../examples/data/city_population.csv')
    # DataFrame.sort was removed in pandas 0.20; sort_values is the modern API
    smalldf = df.sort_values('population_2010', ascending=False)[:15]

    def return_top(group):
        return group[:1]

    smalldf = smalldf.groupby('city').apply(return_top)
    top_comb = combinations(list(smalldf.city), 2)
    comb = [[c[0], c[1], 1] for c in top_comb]
    distance_df = pd.DataFrame(comb)
    lm = LineMap(smalldf, "city", distance_df)
    nt.assert_is_instance(lm, LineMap)
python
# Copyright Contributors to the Packit project.
# SPDX-License-Identifier: MIT

from enum import Enum
from logging import getLogger

logger = getLogger(__name__)


class TaskName(str, Enum):
    copr_build_start = "task.run_copr_build_start_handler"
    copr_build_end = "task.run_copr_build_end_handler"
    copr_build = "task.run_copr_build_handler"
    dist_git_pr = "task.run_dist_git_pr_handler"
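

# --- Usage sketch (illustrative; not part of the original module) ---
# Mixing `str` into the Enum means members compare equal to, and serialize
# as, their plain string values, which is what lets them stand in directly
# for task-queue task names.
if __name__ == "__main__":
    assert TaskName.copr_build == "task.run_copr_build_handler"
    assert TaskName("task.run_dist_git_pr_handler") is TaskName.dist_git_pr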
python
from collections.abc import Mapping  # collections.Mapping was removed in Python 3.10

from colorama import Fore, Style


def log(msg):
    print("{}{}".format(Style.RESET_ALL, msg))


def log_highlight(msg):
    print("{}{}".format(Fore.GREEN, msg))


def info(msg):
    print("{}[INFO] {}".format(Fore.CYAN, msg))


def warn(msg):
    print("{}[WARN] {}".format(Fore.YELLOW, msg))


def error(msg):
    print("{}[ERROR] {}".format(Fore.RED, msg))


def deep_merge(dct, merge_dct):
    """Recursively merge merge_dct into dct, mutating dct in place."""
    for k, v in merge_dct.items():
        if (k in dct and isinstance(dct[k], dict)
                and isinstance(v, Mapping)):
            deep_merge(dct[k], v)
        else:
            dct[k] = v
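

# --- Usage sketch (illustrative; not part of the original module) ---
# deep_merge recurses only where both sides hold mappings; any other value,
# lists included, is overwritten wholesale.
if __name__ == "__main__":
    base = {"db": {"host": "localhost", "port": 5432}, "debug": False}
    deep_merge(base, {"db": {"port": 5433}, "debug": True})
    assert base == {"db": {"host": "localhost", "port": 5433}, "debug": True}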
python
# -*- coding: utf-8 -*-

from openelevationservice import SETTINGS
from openelevationservice.server.api import api_exceptions
from openelevationservice.server.utils import logger, convert, codec
from openelevationservice.server.api import querybuilder, validator
from openelevationservice.server.api.response import ResponseBuilder
from openelevationservice.server.api.api_exceptions import InvalidUsage

from shapely import wkt
from shapely.errors import WKTReadingError

import json

from flask import Blueprint, request, jsonify

log = logger.get_logger(__name__)

main_blueprint = Blueprint('main', __name__)


@main_blueprint.route('/elevation/line', methods=['POST'])
def elevationline():
    """
    Function called when the user posts to /elevation/line.

    :raises InvalidUsage: internal HTTP 500 error with a more detailed description.

    :returns: elevation response
    :rtype: Response
    """
    # Cerberus validates and returns a processed arg dict
    req_args = validator.validate_request(request)

    # Incoming parameters
    geometry_str = req_args['geometry']
    format_in = req_args['format_in']
    format_out = req_args['format_out']
    dataset = req_args['dataset']

    # Get the geometry
    if format_in == 'geojson':
        geom = convert.geojson_to_geometry(geometry_str)
    elif format_in in ['encodedpolyline', 'encodedpolyline5']:
        geom = codec.decode(geometry_str, precision=5, is3d=False)
    elif format_in == 'encodedpolyline6':
        geom = codec.decode(geometry_str, precision=6, is3d=False)
    elif format_in == 'polyline':
        geom = convert.polyline_to_geometry(geometry_str)
    else:
        raise api_exceptions.InvalidUsage(400, 4000, f'Invalid format_in value "{format_in}"')

    if len(list(geom.coords)) > SETTINGS['maximum_nodes']:
        raise api_exceptions.InvalidUsage(status_code=400,
                                          error_code=4003,
                                          message='Maximum number of nodes exceeded.')

    results = ResponseBuilder().__dict__
    geom_queried = querybuilder.line_elevation(geom, format_out, dataset)

    # decision tree for format_out
    if format_out != 'geojson':
        try:
            geom_out = wkt.loads(geom_queried)
        except WKTReadingError:
            raise InvalidUsage(404, 4002,
                               f"Only 1 point has elevation in {dataset}, "
                               "not possible to generate a LineString.")
        coords = geom_out.coords
        if format_out in ['encodedpolyline', 'encodedpolyline5']:
            results['geometry'] = codec.encode(coords, precision=5, is3d=True)
        elif format_out == 'encodedpolyline6':
            results['geometry'] = codec.encode(coords, precision=6, is3d=True)
        else:
            results['geometry'] = list(coords)
    else:
        results['geometry'] = json.loads(geom_queried)
        coords = results['geometry']['coordinates']

    if len(coords) != len(geom.coords):
        raise InvalidUsage(404, 4002,
                           f"{len(geom.coords) - len(coords)} points have no elevation in {dataset}")

    return jsonify(results)


@main_blueprint.route('/elevation/point', methods=['POST', 'GET'])
def elevationpoint():
    """
    Function called when the user posts to or gets /elevation/point.

    :raises InvalidUsage: internal HTTP 500 error with a more detailed description.

    :returns: elevation response
    :rtype: Response
    """
    req_args = validator.validate_request(request)
    log.debug(req_args)

    if request.method == 'POST':
        # Check incoming parameters
        req_geometry = req_args['geometry']
        format_in = req_args['format_in']
        format_out = req_args['format_out']
        dataset = req_args['dataset']

        # Get the geometry
        if format_in == 'geojson':
            geom = convert.geojson_to_geometry(req_geometry)
        elif format_in == 'point':
            geom = convert.point_to_geometry(req_geometry)
        else:
            raise api_exceptions.InvalidUsage(400, 4000, f"Invalid format_in value {format_in}")
    else:
        req_geometry = req_args['geometry']
        format_out = req_args['format_out']
        dataset = req_args['dataset']
        try:
            # Catch errors when parsing the input string
            point_coords = [float(x) for x in req_geometry.split(',')]
        except ValueError:
            raise api_exceptions.InvalidUsage(
                500, 4000,
                '{} is not a comma-separated list of long, lat'.format(req_geometry))
        geom = convert.point_to_geometry(point_coords)

    # Build response with attribution etc.
    results = ResponseBuilder().__dict__
    geom_queried = querybuilder.point_elevation(geom, format_out, dataset)

    if format_out == 'point':
        geom_out = wkt.loads(geom_queried)
        results['geometry'] = list(geom_out.coords[0])
    elif format_out == 'geojson':
        results['geometry'] = json.loads(geom_queried)
    else:
        raise api_exceptions.InvalidUsage(400, 4000, f'Invalid format_out value "{format_out}"')

    return jsonify(results)
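

# --- Client sketch (hypothetical; assumes the service runs on localhost:5000) ---
# Neither the host nor the payload values below come from this module; they
# only illustrate the request shapes the two endpoints above accept.
#
# import requests
#
# # GET /elevation/point with a comma-separated "long,lat" pair:
# requests.get("http://localhost:5000/elevation/point",
#              params={"geometry": "13.4050,52.5200"})
#
# # POST /elevation/line with a GeoJSON LineString:
# requests.post("http://localhost:5000/elevation/line",
#               json={"format_in": "geojson",
#                     "format_out": "geojson",
#                     "geometry": {"type": "LineString",
#                                  "coordinates": [[13.4, 52.5], [13.5, 52.6]]}})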
python
from bs4 import BeautifulSoup
import requests


def parse_a_website(url) -> BeautifulSoup:
    """Fetch a URL and return its parsed HTML as a BeautifulSoup object."""
    response = requests.get(url)
    data = response.text
    soup = BeautifulSoup(data, 'html.parser')
    return soup
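

# --- Usage sketch (example.com is just a placeholder URL) ---
if __name__ == "__main__":
    soup = parse_a_website("https://example.com")
    # BeautifulSoup exposes the parsed DOM, e.g. the page title:
    print(soup.title.string if soup.title else "no <title> found")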
python
from sqlalchemy.sql.functions import func
from model.db import db
import json
from controller.logicTopoBasin import LogicTopoBasin
from controller.logicTopoLivingArea import LogicTopoLivingArea
from controller.logicTopoAgricultureArea import LogicTopoAgricultureArea
from controller.logicTopoWaterwork import LogicTopoWaterwork
from controller.logicTopoWaterin import LogicTopoWaterin
from controller.logicTopoFlowPath import LogicTopoFlowPath
from controller.logicTopoCatchment import LogicTopoCatchment
from controller.logicTopoPollution import LogicTopoPollution
from controller.logicTopoIndustryArea import LogicTopoIndustryArea
from controller.logicTopoFactory import LogicTopoFactory
from controller.logicTopoSewageTreatmentPlant import LogicTopoSewageTreatmentPlant
from controller.logicTopoReservoir import LogicTopoReservoir
from controller.logicTopoDebris import LogicTopoDebris
from controller.logicTopoRainStation import LogicTopoRainStation
from controller.logicTopoFloodStation import LogicTopoFloodStation
from controller.logicTopoWaterLevelStation import LogicTopoWaterLevelStation
# NOTE: LogicTopoPlace is used by FindNodeByKind below but was not imported
# in the original file; the import path is assumed to follow the same
# pattern as the other controller modules.
from controller.logicTopoPlace import LogicTopoPlace
from controller.util import GetSInfoPoint


class LogicTopoController():
    def ListKind(self):
        sql = "select * from s_topology_kind"
        rows = db.engine.execute(sql)
        result = [dict(r) for r in rows]
        return result

    def ListTransfer(self, kind=None):
        sql = "select * from s_topology_transfer"
        if kind is not None:
            sql += " where from_類別='%s'" % kind
        rows = db.engine.execute(sql)
        result = [dict(r) for r in rows]
        return result

    def FindNodeByKind(self, param):
        if "kind" not in param:
            return {"error": "no kind parameter"}
        kind = param["kind"]
        if kind == "流域":
            return LogicTopoBasin().FindBasinByID(param)
        elif kind == "地點":
            return LogicTopoPlace().FindVillageByLatLng(param)
        elif kind == "淨水場":
            return LogicTopoWaterwork().FindWaterworkByID(param)
        else:
            return {"error": "not implemented"}

    def FindNodeByTransfer(self, param):
        if "kind" not in param:
            return {"error": "no kind parameter"}
        if "transfer" not in param:
            return {"error": "no transfer parameter"}
        kind = param["kind"]
        transfer = param["transfer"]
        if kind == "流域":
            ltb = LogicTopoBasin()
            if transfer == "流域範圍":
                return ltb.FindBasinByID(param)
            elif transfer in ["主要河道", "源頭到海洋路徑"]:
                return ltb.FindMainRiverByID(param)
            elif transfer == "所有河道":
                return ltb.FindStreams(param)
            elif transfer == "流域分區":
                return ltb.FindSubBasins(param)
            elif transfer == "生活區域":
                return ltb.FindLivingArea(param)
            elif transfer == "農業區域":
                return ltb.FindAgricultureArea(param)
            elif transfer == "工業區域":
                return ltb.FindIndustryArea(param)
            elif transfer == "水庫堰壩":
                return ltb.FindReservoir(param)
            elif transfer == "水質水量保護區":
                return ltb.FindProtectArea(param)
            elif transfer == "淹水潛勢圖":
                return ltb.FindFloodArea(param)
            elif transfer == "土石流潛勢溪流":
                return ltb.FindDebris(param)
            elif transfer in ["雨水下水道", "污水下水道", "圳路"]:
                return {"error": "無開放資料"}
        elif kind == "流路":
            ltfp = LogicTopoFlowPath()
            if transfer == "上游集水區":
                return ltfp.FindUpstreamCatchment(param)
            elif transfer == "下游入海線":
                return ltfp.FindDownstreamPath(param)
            elif transfer == "所屬流域":
                return ltfp.FindBasin(param)
            elif transfer == "鳥覽流路":
                return ltfp.BirdView(param)
        elif kind == "生活區域":
            ltla = LogicTopoLivingArea()
            if transfer == "淨水廠為何":
                return ltla.FindVillageWaterwork(param)
            elif transfer == "水源在哪":
                return ltla.FindVillageWaterin(param)
            elif transfer == "有哪些污染源":
                return ltla.FindVillagePollution(param)
            elif transfer == "用水統計(三級經濟區)":
                return ltla.FindWaterUse(param)
        elif kind == "農業區域":
            ltaa = LogicTopoAgricultureArea()
            if transfer == "有哪些污染源":
                return ltaa.FindAgriculturePollution(param)
            elif transfer == "有哪些農作物":
                return ltaa.FindCrop(param)
        elif kind == "淨水場":
            ltww = LogicTopoWaterwork()
            if transfer == "取水口為何":
                return ltww.FindWaterinByID(param)
            elif transfer == "淨水場水質":
                return ltww.FindWaterworkQuality(param)
            elif transfer == "淨水場供水量":
                return ltww.FindWaterworkQuantity(param)
            elif transfer == "供給哪些區域":
                return ltww.FindSupplyLivingArea(param)
        elif kind == "取水口":
            ltwi = LogicTopoWaterin()
            if transfer == "集水區為何":
                return ltwi.FindCatchmentByID(param)
            elif transfer == "取水量":
                return ltwi.FindWaterinQuantity(param)
            elif transfer == "生活供給範圍":
                return ltwi.FindSupplyLivingArea(param)
        elif kind == "集水區":
            ltc = LogicTopoCatchment()
            if transfer == "有哪些污染源":
                return ltc.FindCatchmentPollution(param)
            elif transfer == "雨量站":
                return ltc.FindRainStation(param)
            elif transfer == "河川水位站":
                return ltc.FindWaterLevelStation(param)
            elif transfer == "淹水感測站":
                return ltc.FindFloodStation(param)
        elif kind == "鄰近污染源":
            ltp = LogicTopoPollution()
            if transfer == "工廠":
                return ltp.FindFactory(param)
            elif transfer == "環境保護列管對象":
                return ltp.FindEPAFactoryBase(param)
            elif transfer == "工業區域":
                return ltp.FindIndustryArea(param)
            elif transfer == "工業污水處理廠":
                return ltp.FindSewageTreatmentPlant(param)
            elif transfer == "農地工廠":
                return ltp.FindFactoryInFarm(param)
            elif transfer == "水污染源放流口":
                return ltp.FindWaterpRecord(param)
        elif kind == "工業區域":
            ltia = LogicTopoIndustryArea()
            if transfer == "哪個污水廠":
                return ltia.FindSewageTreatmentPlant(param)
            elif transfer == "有哪些工廠":
                return ltia.FindFactory(param)
        elif kind == "工廠":
            ltf = LogicTopoFactory()
            if transfer == "哪個污水廠":
                return ltf.FindSewageTreatmentPlant(param)
            elif transfer == "屬於哪個工業區":
                return ltf.FindIndustryArea(param)
        elif kind == "工業污水處理廠":
            ltstp = LogicTopoSewageTreatmentPlant()
            if transfer == "處理範圍":
                return ltstp.FindProcessingArea(param)
        elif kind == "水庫":
            ltr = LogicTopoReservoir()
            if transfer == "蓄水範圍":
                return ltr.FindStorageArea(param)
            elif transfer == "集水區為何":
                return ltr.FindCatchment(param)
            elif transfer == "水質水量保護區":
                return ltr.FindProtectArea(param)
        elif kind == "土石流":
            ltd = LogicTopoDebris()
            if transfer == "集水區為何":
                return ltd.FindCatchment(param)
            elif transfer == "影響範圍":
                return ltd.FindInfluence(param)
            elif transfer == "歷史影像":
                return ltd.FindHistoryPhoto(param)
            elif transfer == "流路":
                return ltd.FindFlowPath(param)
        elif kind == "雨量站":
            ltrs = LogicTopoRainStation()
            if transfer == "雨量資料":
                return ltrs.FindRainData(param)
            elif transfer == "鄰近河川水位站":
                return ltrs.FindWaterLevelStation(param)
            elif transfer == "鄰近淹水感測站":
                return ltrs.FindFloodStation(param)
            elif transfer == "淹水潛勢圖":
                return ltrs.FindFloodArea(param)
        elif kind == "河川水位站":
            ltwls = LogicTopoWaterLevelStation()
            if transfer == "水位資料":
                return ltwls.FindWaterLevelData(param)
            elif transfer == "鄰近雨量站":
                return ltwls.FindRainStation(param)
            elif transfer == "鄰近淹水感測站":
                return ltwls.FindFloodStation(param)
            elif transfer == "淹水潛勢圖":
                return ltwls.FindFloodArea(param)
        elif kind == "淹水感測站":
            ltfs = LogicTopoFloodStation()
            if transfer == "淹水資料":
                return ltfs.FindFloodData(param)
            elif transfer == "鄰近雨量站":
                return ltfs.FindRainStation(param)
            elif transfer == "鄰近河川水位站":
                return ltfs.FindWaterLevelStation(param)
            elif transfer == "淹水潛勢圖":
                return ltfs.FindFloodArea(param)
        return {"error": "not implemented"}

    def GetNodeInfo(self, param):
        if "kind" not in param:
            return {"error": "no kind parameter"}
        kind = param["kind"]
        nodeName = None
        if "nodeName" in param:
            nodeName = param["nodeName"]
        if nodeName is None:
            return {"error": "no nodeName parameter"}
        info = GetSInfoPoint(kind, nodeName)
        if info is None:
            return {"error": "查無基本資料"}
        else:
            return info
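

# --- Refactoring sketch (illustrative only; not part of the controller) ---
# The long if/elif ladder in FindNodeByTransfer can be collapsed into a
# (kind, transfer) -> callable dispatch table; two entries are shown here,
# reusing the same controller classes and method names as above.
_TRANSFER_DISPATCH = {
    ("流域", "流域範圍"): lambda p: LogicTopoBasin().FindBasinByID(p),
    ("流路", "上游集水區"): lambda p: LogicTopoFlowPath().FindUpstreamCatchment(p),
}

def find_node_by_transfer(param):
    handler = _TRANSFER_DISPATCH.get((param.get("kind"), param.get("transfer")))
    return handler(param) if handler else {"error": "not implemented"}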
python
# coding: utf-8

# In[3]:

import cv2
import numpy as np
import sys
sys.path.append('../')
from Mod.utils import *
from tqdm import tqdm


# In[4]:

def py_nms(dets, thresh):
    '''Drop boxes that overlap too much (non-maximum suppression).'''
    x1 = dets[:, 0]
    y1 = dets[:, 1]
    x2 = dets[:, 2]
    y2 = dets[:, 3]
    scores = dets[:, 4]

    areas = (x2 - x1 + 1) * (y2 - y1 + 1)
    # sort scores in descending order
    order = scores.argsort()[::-1]

    keep = []
    while order.size > 0:
        i = order[0]
        keep.append(i)
        xx1 = np.maximum(x1[i], x1[order[1:]])
        yy1 = np.maximum(y1[i], y1[order[1:]])
        xx2 = np.minimum(x2[i], x2[order[1:]])
        yy2 = np.minimum(y2[i], y2[order[1:]])

        w = np.maximum(0.0, xx2 - xx1 + 1)
        h = np.maximum(0.0, yy2 - yy1 + 1)
        inter = w * h
        ovr = inter / (areas[i] + areas[order[1:]] - inter + 1e-10)

        # keep the indices below the threshold; order[0] was taken out for
        # the comparison, so inds + 1 maps back to positions in order
        inds = np.where(ovr <= thresh)[0]
        order = order[inds + 1]
    return keep


# In[ ]:

class MtcnnDetector:
    '''Generates face boxes (and landmarks) from images.'''

    def __init__(self,
                 detectors,
                 min_face_size=20,
                 stride=2,
                 threshold=[0.6, 0.7, 0.7],
                 scale_factor=0.79  # shrink ratio of the image pyramid
                 ):
        self.pnet_detector = detectors[0]
        self.rnet_detector = detectors[1]
        self.onet_detector = detectors[2]
        self.min_face_size = min_face_size
        self.stride = stride
        self.thresh = threshold
        self.scale_factor = scale_factor

    def detect_face(self, test_data):
        all_boxes = []
        landmarks = []
        batch_idx = 0
        num_of_img = test_data.size
        empty_array = np.array([])
        for databatch in tqdm(test_data):
            batch_idx += 1
            im = databatch
            if self.pnet_detector:
                boxes, boxes_c, landmark = self.detect_pnet(im)
                if boxes_c is None:
                    all_boxes.append(empty_array)
                    landmarks.append(empty_array)
                    continue
            if self.rnet_detector:
                boxes, boxes_c, landmark = self.detect_rnet(im, boxes_c)
                if boxes_c is None:
                    all_boxes.append(empty_array)
                    landmarks.append(empty_array)
                    continue
            if self.onet_detector:
                boxes, boxes_c, landmark = self.detect_onet(im, boxes_c)
                if boxes_c is None:
                    all_boxes.append(empty_array)
                    landmarks.append(empty_array)
                    continue
            all_boxes.append(boxes_c)
            landmark = [1]
            landmarks.append(landmark)
        return all_boxes, landmarks

    def detect_pnet(self, im):
        '''Filter boxes and landmarks with PNet.

        Args:
            im: input image [h, w, 3]
        '''
        h, w, c = im.shape
        net_size = 12
        # ratio between the network input size and the minimum face size
        current_scale = float(net_size) / self.min_face_size
        im_resized = self.processed_image(im, current_scale)
        current_height, current_width, _ = im_resized.shape
        all_boxes = list()
        # image pyramid
        while min(current_height, current_width) > net_size:
            # class map and boxes
            cls_cls_map, reg = self.pnet_detector.predict(im_resized)
            boxes = self.generate_bbox(cls_cls_map[:, :, 1], reg, current_scale, self.thresh[0])
            current_scale *= self.scale_factor  # keep shrinking for the pyramid
            im_resized = self.processed_image(im, current_scale)
            current_height, current_width, _ = im_resized.shape

            if boxes.size == 0:
                continue
            # non-maximum suppression keeps the boxes with low overlap
            keep = py_nms(boxes[:, :5], 0.5)
            boxes = boxes[keep]
            all_boxes.append(boxes)
        if len(all_boxes) == 0:
            return None, None, None
        all_boxes = np.vstack(all_boxes)
        # apply NMS again to the boxes gathered across the whole pyramid
        keep = py_nms(all_boxes[:, 0:5], 0.7)
        all_boxes = all_boxes[keep]
        boxes = all_boxes[:, :5]
        # box width and height
        bbw = all_boxes[:, 2] - all_boxes[:, 0] + 1
        bbh = all_boxes[:, 3] - all_boxes[:, 1] + 1
        # box coordinates and scores mapped back to the original image
        boxes_c = np.vstack([all_boxes[:, 0] + all_boxes[:, 5] * bbw,
                             all_boxes[:, 1] + all_boxes[:, 6] * bbh,
                             all_boxes[:, 2] + all_boxes[:, 7] * bbw,
                             all_boxes[:, 3] + all_boxes[:, 8] * bbh,
                             all_boxes[:, 4]])
        boxes_c = boxes_c.T
        return boxes, boxes_c, None

    def detect_rnet(self, im, dets):
        '''Select boxes with RNet.

        Args:
            im: input image
            dets: boxes chosen by PNet, in absolute coordinates of the original image
        Returns:
            boxes in absolute coordinates
        '''
        h, w, c = im.shape
        # turn the PNet boxes into enclosing squares to avoid losing information
        dets = convert_to_square(dets)
        dets[:, 0:4] = np.round(dets[:, 0:4])
        # adjust boxes that extend beyond the image
        [dy, edy, dx, edx, y, ey, x, ex, tmpw, tmph] = self.pad(dets, w, h)
        delete_size = np.ones_like(tmpw) * 20
        ones = np.ones_like(tmpw)
        zeros = np.zeros_like(tmpw)
        num_boxes = np.sum(np.where((np.minimum(tmpw, tmph) >= delete_size), ones, zeros))
        cropped_ims = np.zeros((num_boxes, 24, 24, 3), dtype=np.float32)
        for i in range(num_boxes):
            # crop each PNet box out of the original image, zero-padding the
            # parts that fall outside
            if tmph[i] < 20 or tmpw[i] < 20:
                continue
            tmp = np.zeros((tmph[i], tmpw[i], 3), dtype=np.uint8)
            tmp[dy[i]:edy[i] + 1, dx[i]:edx[i] + 1, :] = im[y[i]:ey[i] + 1, x[i]:ex[i] + 1, :]
            cropped_ims[i, :, :, :] = (cv2.resize(tmp, (24, 24)) - 127.5) / 128
        cls_scores, reg, _ = self.rnet_detector.predict(cropped_ims)
        cls_scores = cls_scores[:, 1]
        keep_inds = np.where(cls_scores > self.thresh[1])[0]
        if len(keep_inds) > 0:
            boxes = dets[keep_inds]
            boxes[:, 4] = cls_scores[keep_inds]
            reg = reg[keep_inds]
        else:
            return None, None, None

        keep = py_nms(boxes, 0.6)
        boxes = boxes[keep]
        # calibrate the PNet crops, producing RNet face boxes in absolute
        # coordinates of the original image
        boxes_c = self.calibrate_box(boxes, reg[keep])
        return boxes, boxes_c, None

    def detect_onet(self, im, dets):
        '''Refine the RNet boxes further; mostly the same as detect_rnet,
        but also returns landmarks.'''
        h, w, c = im.shape
        dets = convert_to_square(dets)
        dets[:, 0:4] = np.round(dets[:, 0:4])
        [dy, edy, dx, edx, y, ey, x, ex, tmpw, tmph] = self.pad(dets, w, h)
        num_boxes = dets.shape[0]
        cropped_ims = np.zeros((num_boxes, 48, 48, 3), dtype=np.float32)
        for i in range(num_boxes):
            tmp = np.zeros((tmph[i], tmpw[i], 3), dtype=np.uint8)
            tmp[dy[i]:edy[i] + 1, dx[i]:edx[i] + 1, :] = im[y[i]:ey[i] + 1, x[i]:ex[i] + 1, :]
            cropped_ims[i, :, :, :] = (cv2.resize(tmp, (48, 48)) - 127.5) / 128
        cls_scores, reg, landmark = self.onet_detector.predict(cropped_ims)
        cls_scores = cls_scores[:, 1]
        keep_inds = np.where(cls_scores > self.thresh[2])[0]
        if len(keep_inds) > 0:
            boxes = dets[keep_inds]
            boxes[:, 4] = cls_scores[keep_inds]
            reg = reg[keep_inds]
            landmark = landmark[keep_inds]
        else:
            return None, None, None

        w = boxes[:, 2] - boxes[:, 0] + 1
        h = boxes[:, 3] - boxes[:, 1] + 1
        landmark[:, 0::2] = (np.tile(w, (5, 1)) * landmark[:, 0::2].T + np.tile(boxes[:, 0], (5, 1)) - 1).T
        landmark[:, 1::2] = (np.tile(h, (5, 1)) * landmark[:, 1::2].T + np.tile(boxes[:, 1], (5, 1)) - 1).T
        boxes_c = self.calibrate_box(boxes, reg)

        boxes = boxes[py_nms(boxes, 0.6)]
        keep = py_nms(boxes_c, 0.6)
        boxes_c = boxes_c[keep]
        landmark = landmark[keep]
        return boxes, boxes_c, landmark

    def processed_image(self, img, scale):
        '''Rescale the image and normalize pixels to [-1, 1].'''
        height, width, channels = img.shape
        new_height = int(height * scale)
        new_width = int(width * scale)
        new_dim = (new_width, new_height)
        img_resized = cv2.resize(img, new_dim, interpolation=cv2.INTER_LINEAR)
        img_resized = (img_resized - 127.5) / 128
        return img_resized

    def generate_bbox(self, cls_map, reg, scale, threshold):
        """Get box coordinates in the original image, class scores and box offsets."""
        # PNet shrinks the image by roughly a factor of 2
        stride = 2
        cellsize = 12
        # keep positions with a high enough confidence
        t_index = np.where(cls_map > threshold)
        # no face found
        if t_index[0].size == 0:
            return np.array([])
        # offsets
        dx1, dy1, dx2, dy2 = [reg[t_index[0], t_index[1], i] for i in range(4)]
        reg = np.array([dx1, dy1, dx2, dy2])
        score = cls_map[t_index[0], t_index[1]]
        # box coordinates in the original image, class scores, box offsets
        boundingbox = np.vstack([np.round((stride * t_index[1]) / scale),
                                 np.round((stride * t_index[0]) / scale),
                                 np.round((stride * t_index[1] + cellsize) / scale),
                                 np.round((stride * t_index[0] + cellsize) / scale),
                                 score,
                                 reg])
        # shape [n, 9]
        return boundingbox.T

    def pad(self, bboxes, w, h):
        '''Handle boxes that extend beyond the image.

        Args:
            bboxes: face boxes
            w, h: image width and height
        Returns:
            dy, dx:     top-left corner of the adjusted box, relative to the original box
            edy, edx:   bottom-right corner of the adjusted box, relative to the original box
            y, x:       top-left corner of the adjusted box in the original image
            ey, ex:     bottom-right corner of the adjusted box in the original image
            tmph, tmpw: height and width of the original box
        '''
        # box width and height
        tmpw, tmph = bboxes[:, 2] - bboxes[:, 0] + 1, bboxes[:, 3] - bboxes[:, 1] + 1
        num_box = bboxes.shape[0]

        dx, dy = np.zeros((num_box,)), np.zeros((num_box,))
        edx, edy = tmpw.copy() - 1, tmph.copy() - 1
        # top-left and bottom-right coordinates of the boxes
        x, y, ex, ey = bboxes[:, 0], bboxes[:, 1], bboxes[:, 2], bboxes[:, 3]

        # find boxes past the bottom-right border and clamp ex, ey to w, h;
        # edx, edy become the box-relative coordinates after adjustment
        tmp_index = np.where(ex > w - 1)
        edx[tmp_index] = tmpw[tmp_index] + w - 2 - ex[tmp_index]
        ex[tmp_index] = w - 1

        tmp_index = np.where(ey > h - 1)
        edy[tmp_index] = tmph[tmp_index] + h - 2 - ey[tmp_index]
        ey[tmp_index] = h - 1

        # find boxes past the top-left corner and clamp x, y to 0;
        # dx, dy become the box-relative coordinates after adjustment
        tmp_index = np.where(x < 0)
        dx[tmp_index] = 0 - x[tmp_index]
        x[tmp_index] = 0

        tmp_index = np.where(y < 0)
        dy[tmp_index] = 0 - y[tmp_index]
        y[tmp_index] = 0

        return_list = [dy, edy, dx, edx, y, ey, x, ex, tmpw, tmph]
        return_list = [item.astype(np.int32) for item in return_list]
        return return_list

    def calibrate_box(self, bbox, reg):
        '''Calibrate boxes.

        Args:
            bbox: boxes generated by PNet
            reg:  box offsets generated by RNet
        Returns:
            adjusted boxes, in absolute coordinates of the original image
        '''
        bbox_c = bbox.copy()
        w = bbox[:, 2] - bbox[:, 0] + 1
        w = np.expand_dims(w, 1)
        h = bbox[:, 3] - bbox[:, 1] + 1
        h = np.expand_dims(h, 1)
        reg_m = np.hstack([w, h, w, h])
        aug = reg_m * reg
        bbox_c[:, 0:4] = bbox_c[:, 0:4] + aug
        return bbox_c

    def detect(self, img):
        '''Detect faces in a single image (used for testing).'''
        boxes = None
        # pnet
        if self.pnet_detector:
            boxes, boxes_c, _ = self.detect_pnet(img)
            if boxes_c is None:
                return np.array([]), np.array([])
        # rnet
        if self.rnet_detector:
            boxes, boxes_c, _ = self.detect_rnet(img, boxes_c)
            if boxes_c is None:
                return np.array([]), np.array([])
        # onet
        if self.onet_detector:
            boxes, boxes_c, landmark = self.detect_onet(img, boxes_c)
            if boxes_c is None:
                return np.array([]), np.array([])
        return boxes_c, landmark
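

# --- Usage sketch for py_nms (synthetic boxes, illustrative only) ---
# Each row is [x1, y1, x2, y2, score]. The second box overlaps the first
# with IoU ~0.83 > 0.5, so it is suppressed; the third box is disjoint.
if __name__ == "__main__":
    dets = np.array([
        [10.0, 10.0, 50.0, 50.0, 0.9],
        [12.0, 12.0, 52.0, 52.0, 0.8],
        [100.0, 100.0, 140.0, 140.0, 0.7],
    ])
    assert py_nms(dets, 0.5) == [0, 2]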
python
from flask import render_template
from flask_login import login_required

from .blueprint import web


@web.route("/")
def index():
    return render_template("index.html")


@web.route("/customer")
@login_required
def customer():
    return render_template("customer.html")
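

# --- Usage sketch (hypothetical app factory; not part of this module) ---
# from flask import Flask
#
# app = Flask(__name__)
# app.register_blueprint(web)  # serves "/" and the login-protected "/customer"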
python
from peewee import *

from cdm_souffleur.model.baseModel import BaseModel


class mapped_concept(BaseModel):
    id = AutoField()
    name = CharField()
    codes_and_mapped_concepts = TextField()
    username = CharField()
    created_on = DateTimeField()
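

# --- Usage sketch (illustrative; assumes the database is bound and migrated) ---
# query = (mapped_concept
#          .select()
#          .where(mapped_concept.username == "some_user")
#          .order_by(mapped_concept.created_on.desc()))
# for row in query:
#     payload = row.codes_and_mapped_concepts  # serialized mapping stored as text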
python
# coding=utf-8
'''
Created on 2016-03-03
'''
import zmq
from exception import UnimplementedException, Zmqf404
import logging
import json

__author__ = 'chenjian'


class ZmqfPattern(object):
    '''Supported messaging patterns.'''
    MPBS = 'MPBS'  # Multi Publisher -- Broker -- Multi Subscriber


class ZmqfApplication(object):
    '''Maps URIs to handler classes.'''

    def __init__(self, *args, **kwargs):
        '''Constructor.'''
        self.handlers = dict()
        for uri, hdr in kwargs['handlers']:
            uri = '/%s/' % uri.strip('/')
            if uri == '//':
                uri = '/'
            self.handlers[uri] = hdr


class ZmqfServer(object):
    '''Subscribes to a broker and dispatches messages to handlers.'''

    def __init__(self, application, addr, pattern=ZmqfPattern.MPBS):
        self.application = application
        self.addr = addr

    def start(self):
        '''Run the receive loop (blocks forever).'''
        context = zmq.Context()
        subscriber = context.socket(zmq.SUB)  # @UndefinedVariable
        subscriber.connect(self.addr)
        subscriber.setsockopt(zmq.SUBSCRIBE, b"")  # @UndefinedVariable

        while True:
            try:
                [uri, headers, body] = subscriber.recv_multipart()
                uri = uri.decode('utf-8')  # frames arrive as bytes in Python 3
                uri = '/%s/' % uri.strip('/')
                if uri == '//':
                    uri = '/'

                # .get() instead of [] so an unknown URI raises Zmqf404
                # rather than an unhandled KeyError
                handler_cls = self.application.handlers.get(uri)
                if not handler_cls:
                    raise Zmqf404()

                # build the request object
                # TODO: rename this class
                request = ZmqfRequest(uri=uri, headers=headers, body=body)

                # instantiate and run the handler
                handler = handler_cls(self.application, request)
                handler.handle()
            except Exception as e:  # Python 3 syntax; was `except Exception, e`
                logging.exception(e)


class ZmqfRequest(object):
    '''A single received message.'''

    def __init__(self, **kwargs):
        self.uri = kwargs['uri']
        self.headers = json.loads(kwargs['headers'])
        self.body = kwargs['body']


class ZmqfHandler(object):
    '''Base class for message handlers.'''

    def __init__(self, application, request, **kwargs):
        try:
            super(ZmqfHandler, self).__init__(application, request)
        except TypeError:
            try:
                super(ZmqfHandler, self).__init__()
            except TypeError:
                pass
        self.application = application
        self.request = request

    def handle(self):
        raise UnimplementedException()
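

# --- Usage sketch (hypothetical handler and broker address) ---
# EchoHandler and the tcp endpoint below are illustrative; only the
# ZmqfApplication/ZmqfServer/ZmqfHandler classes above are real.
class EchoHandler(ZmqfHandler):
    def handle(self):
        logging.info("got %s on %s", self.request.body, self.request.uri)

app = ZmqfApplication(handlers=[("/echo", EchoHandler)])
# ZmqfServer(app, "tcp://127.0.0.1:5556").start()  # blocks forever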
python
from django import forms

from . import models
from ..base.forms import SentryProjectInput
from ..checklists.forms import TagInput
from ..repos.forms import RepoInput


class ServiceForm(forms.ModelForm):
    class Meta:
        model = models.Service
        fields = [
            "owner",
            "name",
            "impact",
            "status",
            "slack_channel",
            "sentry_project",
            "sonarqube_project",
            "repository",
            "pagerduty_url",
            "dashboard_url",
            "docs_url",
            "service_url",
            "tags",
        ]
        labels = {
            "pagerduty_url": "PagerDuty URL",
            "dashboard_url": "Dashboard URL",
            "docs_url": "Documentation URL",
            "service_url": "Service URL",
            "sonarqube_project": "Sonarqube project Key",
        }
        widgets = {
            "repository": RepoInput(),
            "sentry_project": SentryProjectInput(),
            "tags": TagInput(),
        }
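

# --- Usage sketch (illustrative view code; field values are made up) ---
# form = ServiceForm(data={
#     "owner": some_team.pk,
#     "name": "billing-api",
#     "impact": "high",
#     "status": "active",
# })
# if form.is_valid():
#     service = form.save()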
python
from functools import partial from itertools import chain from typing import (Optional, Sequence) from clipping.planar import (complete_intersect_multisegment_with_polygon, complete_intersect_polygons, complete_intersect_regions, complete_intersect_segment_with_polygon, subtract_multipolygon_from_polygon, subtract_polygon_from_multisegment, subtract_polygon_from_segment, subtract_polygons, symmetric_subtract_polygon_from_multisegment, symmetric_subtract_polygon_from_segment, symmetric_subtract_polygons, unite_multisegment_with_polygon, unite_polygons, unite_segment_with_polygon) from ground.hints import Scalar from locus import segmental from orient.planar import (multisegment_in_polygon, point_in_polygon, polygon_in_polygon, region_in_multiregion, segment_in_polygon) from reprit.base import generate_repr from sect.decomposition import Graph from sect.triangulation import Triangulation from .angle import Angle from .compound import (Compound, Indexable, Linear, Location, Relation, Shaped) from .contour import Contour from .geometry import (Coordinate, Geometry) from .iterable import (flatten, non_negative_min) from .multipoint import Multipoint from .packing import pack_mix from .point import Point from .segment import Segment from .utils import (to_point_nearest_segment, to_segment_nearest_segment) Triangulation = Triangulation class Polygon(Indexable[Coordinate], Shaped[Coordinate]): __slots__ = ('_border', '_holes', '_holes_set', '_locate', '_point_nearest_edge', '_segment_nearest_edge') def __init__(self, border: Contour[Coordinate], holes: Optional[Sequence[Contour[Coordinate]]] = None ) -> None: """ Initializes polygon. Time complexity: ``O(vertices_count)`` Memory complexity: ``O(vertices_count)`` where .. code-block:: python vertices_count = (len(self.border.vertices) + sum(len(hole.vertices)\ for hole in self.holes)) """ if holes is None: holes = [] self._border, self._holes, self._holes_set = (border, holes, frozenset(holes)) context = self._context self._locate = partial(point_in_polygon, polygon=self, context=context) edges = self.edges self._point_nearest_edge, self._segment_nearest_edge = ( partial(to_point_nearest_segment, context, edges), partial(to_segment_nearest_segment, context, edges)) __repr__ = generate_repr(__init__) def __and__(self, other: Compound) -> Compound: """ Returns intersection of the polygon with the other geometry. Time complexity: ``O(vertices_count * log vertices_count)`` Memory complexity: ``O(vertices_count)`` where .. code-block:: python vertices_count = (len(self.border.vertices) + sum(len(hole.vertices)\ for hole in self.holes)) >>> from gon.base import Contour, Point, Polygon >>> polygon = Polygon(Contour([Point(0, 0), Point(6, 0), Point(6, 6), ... Point(0, 6)]), ... [Contour([Point(2, 2), Point(2, 4), Point(4, 4), ... Point(4, 2)])]) >>> polygon & polygon == polygon True """ return (complete_intersect_segment_with_polygon(other, self, context=self._context) if isinstance(other, Segment) else (complete_intersect_multisegment_with_polygon( other, self, context=self._context) if isinstance(other, Linear) else ((complete_intersect_polygons(self, other, context=self._context) if self.holes or other.holes else complete_intersect_regions(self.border, other.border, context=self._context)) if isinstance(other, Polygon) else NotImplemented))) __rand__ = __and__ def __contains__(self, point: Point) -> bool: """ Checks if the polygon contains the point. 
Time complexity: ``O(log vertices_count)`` expected after indexing, ``O(vertices_count)`` worst after indexing or without it Memory complexity: ``O(1)`` where .. code-block:: python vertices_count = (len(self.border.vertices) + sum(len(hole.vertices)\ for hole in self.holes)) >>> from gon.base import Contour, Point, Polygon >>> polygon = Polygon(Contour([Point(0, 0), Point(6, 0), Point(6, 6), ... Point(0, 6)]), ... [Contour([Point(2, 2), Point(2, 4), Point(4, 4), ... Point(4, 2)])]) >>> Point(0, 0) in polygon True >>> Point(1, 1) in polygon True >>> Point(2, 2) in polygon True >>> Point(3, 3) in polygon False >>> Point(4, 3) in polygon True >>> Point(5, 2) in polygon True >>> Point(6, 1) in polygon True >>> Point(7, 0) in polygon False """ return bool(self.locate(point)) def __eq__(self, other: 'Polygon') -> bool: """ Checks if polygons are equal. Time complexity: ``O(vertices_count)`` Memory complexity: ``O(1)`` where .. code-block:: python vertices_count = (len(self.border.vertices) + sum(len(hole.vertices)\ for hole in self.holes)) >>> from gon.base import Contour, Point, Polygon >>> polygon = Polygon(Contour([Point(0, 0), Point(6, 0), Point(6, 6), ... Point(0, 6)]), ... [Contour([Point(2, 2), Point(2, 4), Point(4, 4), ... Point(4, 2)])]) >>> polygon == polygon True """ return self is other or (self.border == other.border and self._holes_set == other._holes_set if isinstance(other, Polygon) else NotImplemented) def __ge__(self, other: Compound) -> bool: """ Checks if the polygon is a superset of the other geometry. Time complexity: ``O(vertices_count * log vertices_count)`` Memory complexity: ``O(1)`` where .. code-block:: python vertices_count = (len(self.border.vertices) + sum(len(hole.vertices)\ for hole in self.holes)) >>> from gon.base import Contour, Point, Polygon >>> polygon = Polygon(Contour([Point(0, 0), Point(6, 0), Point(6, 6), ... Point(0, 6)]), ... [Contour([Point(2, 2), Point(2, 4), Point(4, 4), ... Point(4, 2)])]) >>> polygon >= polygon True """ return (other is self._context.empty or self == other or (self.relate(other) in (Relation.EQUAL, Relation.COMPONENT, Relation.ENCLOSED, Relation.WITHIN) if isinstance(other, Compound) else NotImplemented)) def __gt__(self, other: Compound) -> bool: """ Checks if the polygon is a strict superset of the other geometry. Time complexity: ``O(vertices_count * log vertices_count)`` Memory complexity: ``O(1)`` where .. code-block:: python vertices_count = (len(self.border.vertices) + sum(len(hole.vertices)\ for hole in self.holes)) >>> from gon.base import Contour, Point, Polygon >>> polygon = Polygon(Contour([Point(0, 0), Point(6, 0), Point(6, 6), ... Point(0, 6)]), ... [Contour([Point(2, 2), Point(2, 4), Point(4, 4), ... Point(4, 2)])]) >>> polygon > polygon False """ return (other is self._context.empty or self != other and (self.relate(other) in (Relation.COMPONENT, Relation.ENCLOSED, Relation.WITHIN) if isinstance(other, Compound) else NotImplemented)) def __hash__(self) -> int: """ Returns hash value of the polygon. Time complexity: ``O(vertices_count)`` Memory complexity: ``O(1)`` where .. code-block:: python vertices_count = (len(self.border.vertices) + sum(len(hole.vertices)\ for hole in self.holes)) >>> from gon.base import Contour, Point, Polygon >>> polygon = Polygon(Contour([Point(0, 0), Point(6, 0), Point(6, 6), ... Point(0, 6)]), ... [Contour([Point(2, 2), Point(2, 4), Point(4, 4), ... 
Point(4, 2)])]) >>> hash(polygon) == hash(polygon) True """ return hash((self.border, self._holes_set)) def __le__(self, other: Compound) -> bool: """ Checks if the polygon is a subset of the other geometry. Time complexity: ``O(vertices_count * log vertices_count)`` Memory complexity: ``O(1)`` where .. code-block:: python vertices_count = (len(self.border.vertices) + sum(len(hole.vertices)\ for hole in self.holes)) >>> from gon.base import Contour, Point, Polygon >>> polygon = Polygon(Contour([Point(0, 0), Point(6, 0), Point(6, 6), ... Point(0, 6)]), ... [Contour([Point(2, 2), Point(2, 4), Point(4, 4), ... Point(4, 2)])]) >>> polygon <= polygon True """ return (self == other or not isinstance(other, (Multipoint, Linear)) and (self.relate(other) in (Relation.COVER, Relation.ENCLOSES, Relation.COMPOSITE, Relation.EQUAL) if isinstance(other, Shaped) else NotImplemented)) def __lt__(self, other: Compound) -> bool: """ Checks if the polygon is a strict subset of the other geometry. Time complexity: ``O(vertices_count * log vertices_count)`` Memory complexity: ``O(1)`` where .. code-block:: python vertices_count = (len(self.border.vertices) + sum(len(hole.vertices)\ for hole in self.holes)) >>> from gon.base import Contour, Point, Polygon >>> polygon = Polygon(Contour([Point(0, 0), Point(6, 0), Point(6, 6), ... Point(0, 6)]), ... [Contour([Point(2, 2), Point(2, 4), Point(4, 4), ... Point(4, 2)])]) >>> polygon < polygon False """ return (self != other and not isinstance(other, (Multipoint, Linear)) and (self.relate(other) in (Relation.COVER, Relation.ENCLOSES, Relation.COMPOSITE) if isinstance(other, Shaped) else NotImplemented)) def __or__(self, other: Compound) -> Compound: """ Returns union of the polygon with the other geometry. Time complexity: ``O(vertices_count * log vertices_count)`` Memory complexity: ``O(vertices_count)`` where .. code-block:: python vertices_count = (len(self.border.vertices) + sum(len(hole.vertices)\ for hole in self.holes)) >>> from gon.base import Multipolygon >>> from gon.base import Contour, Point, Polygon >>> polygon = Polygon(Contour([Point(0, 0), Point(6, 0), Point(6, 6), ... Point(0, 6)]), ... [Contour([Point(2, 2), Point(2, 4), Point(4, 4), ... Point(4, 2)])]) >>> polygon | polygon == polygon True """ return (self._unite_with_multipoint(other) if isinstance(other, Multipoint) else (unite_segment_with_polygon(other, self, context=self._context) if isinstance(other, Segment) else (unite_multisegment_with_polygon(other, self, context=self._context) if isinstance(other, Linear) else (unite_polygons(self, other, context=self._context) if isinstance(other, Polygon) else NotImplemented)))) __ror__ = __or__ def __rsub__(self, other: Compound) -> Compound: """ Returns difference of the other geometry with the polygon. Time complexity: ``O(vertices_count * log vertices_count)`` Memory complexity: ``O(vertices_count)`` where .. code-block:: python vertices_count = (len(self.border.vertices) + sum(len(hole.vertices)\ for hole in self.holes)) """ return (subtract_polygon_from_segment(other, self, context=self._context) if isinstance(other, Segment) else (subtract_polygon_from_multisegment(other, self, context=self._context) if isinstance(other, Linear) else NotImplemented)) def __sub__(self, other: Compound) -> Compound: """ Returns difference of the polygon with the other geometry. Time complexity: ``O(vertices_count * log vertices_count)`` Memory complexity: ``O(vertices_count)`` where .. 
code-block:: python vertices_count = (len(self.border.vertices) + sum(len(hole.vertices)\ for hole in self.holes)) >>> from gon.base import EMPTY, Contour, Point, Polygon >>> polygon = Polygon(Contour([Point(0, 0), Point(6, 0), Point(6, 6), ... Point(0, 6)]), ... [Contour([Point(2, 2), Point(2, 4), Point(4, 4), ... Point(4, 2)])]) >>> polygon - polygon is EMPTY True """ return (self if isinstance(other, (Linear, Multipoint)) else (subtract_polygons(self, other, context=self._context) if isinstance(other, Polygon) else NotImplemented)) def __xor__(self, other: Compound) -> Compound: """ Returns symmetric difference of the polygon with the other geometry. Time complexity: ``O(vertices_count * log vertices_count)`` Memory complexity: ``O(vertices_count)`` where .. code-block:: python vertices_count = (len(self.border.vertices) + sum(len(hole.vertices)\ for hole in self.holes)) >>> from gon.base import EMPTY, Contour, Point, Polygon >>> polygon = Polygon(Contour([Point(0, 0), Point(6, 0), Point(6, 6), ... Point(0, 6)]), ... [Contour([Point(2, 2), Point(2, 4), Point(4, 4), ... Point(4, 2)])]) >>> polygon ^ polygon is EMPTY True """ return (self._unite_with_multipoint(other) if isinstance(other, Multipoint) else (symmetric_subtract_polygon_from_segment(other, self, context=self._context) if isinstance(other, Segment) else (symmetric_subtract_polygon_from_multisegment( other, self, context=self._context) if isinstance(other, Linear) else (symmetric_subtract_polygons(self, other, context=self._context) if isinstance(other, Polygon) else NotImplemented)))) __rxor__ = __xor__ @property def area(self) -> Coordinate: """ Returns area of the polygon. Time complexity: ``O(vertices_count)`` Memory complexity: ``O(1)`` where .. code-block:: python vertices_count = (len(self.border.vertices) + sum(len(hole.vertices)\ for hole in self.holes)) >>> from gon.base import Contour, Point, Polygon >>> polygon = Polygon(Contour([Point(0, 0), Point(6, 0), Point(6, 6), ... Point(0, 6)]), ... [Contour([Point(2, 2), Point(2, 4), Point(4, 4), ... Point(4, 2)])]) >>> polygon.area == 32 True """ region_signed_measure = self._context.region_signed_area return (abs(region_signed_measure(self.border)) - sum(abs(region_signed_measure(hole)) for hole in self.holes)) @property def border(self) -> Contour: """ Returns border of the polygon. Time complexity: ``O(1)`` Memory complexity: ``O(1)`` >>> from gon.base import Contour, Point, Polygon >>> polygon = Polygon(Contour([Point(0, 0), Point(6, 0), Point(6, 6), ... Point(0, 6)]), ... [Contour([Point(2, 2), Point(2, 4), Point(4, 4), ... Point(4, 2)])]) >>> polygon.border == Contour([Point(0, 0), Point(6, 0), Point(6, 6), ... Point(0, 6)]) True """ return self._border @property def centroid(self) -> Point: """ Returns centroid of the polygon. Time complexity: ``O(vertices_count)`` Memory complexity: ``O(1)`` where .. code-block:: python vertices_count = (len(self.border.vertices) + sum(len(hole.vertices)\ for hole in self.holes)) >>> from gon.base import Contour, Point, Polygon >>> polygon = Polygon(Contour([Point(0, 0), Point(6, 0), Point(6, 6), ... Point(0, 6)]), ... [Contour([Point(2, 2), Point(2, 4), Point(4, 4), ... Point(4, 2)])]) >>> polygon.centroid == Point(3, 3) True """ return self._context.polygon_centroid(self) @property def convex_hull(self) -> 'Polygon': """ Returns convex hull of the polygon. 
Time complexity: ``O(border_vertices_count)`` if convex already, ``O(border_vertices_count * log border_vertices_count)`` -- otherwise Memory complexity: ``O(1)`` if convex already, ``O(border_vertices_count)`` -- otherwise where ``border_vertices_count = len(self.border.vertices)``. >>> from gon.base import Contour, Point, Polygon >>> polygon = Polygon(Contour([Point(0, 0), Point(6, 0), Point(6, 6), ... Point(0, 6)]), ... [Contour([Point(2, 2), Point(2, 4), Point(4, 4), ... Point(4, 2)])]) >>> polygon.convex_hull == Polygon(polygon.border, []) True """ context = self._context return (self if self.is_convex else context.polygon_cls( context.contour_cls(context.points_convex_hull( self.border.vertices)), [])) @property def edges(self) -> Sequence[Segment]: """ Returns edges of the polygon. Time complexity: ``O(vertices_count)`` Memory complexity: ``O(vertices_count)`` where .. code-block:: python vertices_count = (len(self.border.vertices) + sum(len(hole.vertices)\ for hole in self.holes)) >>> from gon.base import Contour, Point, Polygon, Segment >>> polygon = Polygon(Contour([Point(0, 0), Point(6, 0), Point(6, 6), ... Point(0, 6)]), ... [Contour([Point(2, 2), Point(2, 4), Point(4, 4), ... Point(4, 2)])]) >>> polygon.edges == [Segment(Point(0, 6), Point(0, 0)), ... Segment(Point(0, 0), Point(6, 0)), ... Segment(Point(6, 0), Point(6, 6)), ... Segment(Point(6, 6), Point(0, 6)), ... Segment(Point(4, 2), Point(2, 2)), ... Segment(Point(2, 2), Point(2, 4)), ... Segment(Point(2, 4), Point(4, 4)), ... Segment(Point(4, 4), Point(4, 2))] True """ return list(chain(self.border.segments, flatten(hole.segments for hole in self.holes))) @property def holes(self) -> Sequence[Contour]: """ Returns holes of the polygon. Time complexity: ``O(holes_count)`` Memory complexity: ``O(holes_count)`` where ``holes_count = len(self.holes)``. >>> from gon.base import Contour, Point, Polygon >>> polygon = Polygon(Contour([Point(0, 0), Point(6, 0), Point(6, 6), ... Point(0, 6)]), ... [Contour([Point(2, 2), Point(2, 4), Point(4, 4), ... Point(4, 2)])]) >>> polygon.holes == [Contour([Point(2, 2), Point(2, 4), Point(4, 4), ... Point(4, 2)])] True """ return list(self._holes) @property def is_convex(self) -> bool: """ Checks if the polygon is convex. Time complexity: ``O(len(self.border.vertices))`` Memory complexity: ``O(1)`` >>> from gon.base import Contour, Point, Polygon >>> polygon = Polygon(Contour([Point(0, 0), Point(6, 0), Point(6, 6), ... Point(0, 6)]), ... [Contour([Point(2, 2), Point(2, 4), Point(4, 4), ... Point(4, 2)])]) >>> polygon.is_convex False >>> polygon.convex_hull.is_convex True """ return not self.holes and self._context.is_region_convex(self.border) @property def perimeter(self) -> Scalar: """ Returns perimeter of the polygon. Time complexity: ``O(vertices_count)`` Memory complexity: ``O(1)`` where .. code-block:: python vertices_count = (len(self.border.vertices) + sum(len(hole.vertices)\ for hole in self.holes)) >>> from gon.base import Contour, Point, Polygon >>> polygon = Polygon(Contour([Point(0, 0), Point(6, 0), Point(6, 6), ... Point(0, 6)]), ... [Contour([Point(2, 2), Point(2, 4), Point(4, 4), ... Point(4, 2)])]) >>> polygon.perimeter == 32 True """ return self.border.length + sum(hole.length for hole in self.holes) def distance_to(self, other: Geometry) -> Scalar: """ Returns distance between the polygon and the other geometry. Time complexity: ``O(vertices_count)`` Memory complexity: ``O(1)`` where .. 
code-block:: python vertices_count = (len(self.border.vertices) + sum(len(hole.vertices)\ for hole in self.holes)) >>> from gon.base import Contour, Point, Polygon >>> polygon = Polygon(Contour([Point(0, 0), Point(6, 0), Point(6, 6), ... Point(0, 6)]), ... [Contour([Point(2, 2), Point(2, 4), Point(4, 4), ... Point(4, 2)])]) >>> polygon.distance_to(polygon) == 0 True """ return (self._distance_to_point(other) if isinstance(other, Point) else (non_negative_min(self._distance_to_point(point) for point in other.points) if isinstance(other, Multipoint) else (self._distance_to_segment(other) if isinstance(other, Segment) else (non_negative_min(self._distance_to_segment(segment) for segment in other.segments) if isinstance(other, Linear) else ((non_negative_min(self._linear_distance_to_segment(edge) for edge in other.edges) if self.disjoint(other) else 0) if isinstance(other, Polygon) else other.distance_to(self)))))) def index(self) -> None: """ Pre-processes the polygon to potentially improve queries. Time complexity: ``O(vertices_count * log vertices_count)`` expected, ``O(vertices_count ** 2)`` worst Memory complexity: ``O(vertices_count)`` where .. code-block:: python vertices_count = (len(self.border.vertices) + sum(len(hole.vertices)\ for hole in self.holes)) >>> from gon.base import Contour, Point, Polygon >>> polygon = Polygon(Contour([Point(0, 0), Point(6, 0), Point(6, 6), ... Point(0, 6)]), ... [Contour([Point(2, 2), Point(2, 4), Point(4, 4), ... Point(4, 2)])]) >>> polygon.index() """ self._locate = Graph.from_polygon(self, context=self._context).locate tree = segmental.Tree(self.edges) self._point_nearest_edge, self._segment_nearest_edge = ( tree.nearest_to_point_segment, tree.nearest_segment) def locate(self, point: Point) -> Location: """ Finds location of the point relative to the polygon. Time complexity: ``O(log vertices_count)`` expected after indexing, ``O(vertices_count)`` worst after indexing or without it Memory complexity: ``O(1)`` where .. code-block:: python vertices_count = (len(self.border.vertices) + sum(len(hole.vertices)\ for hole in self.holes)) >>> from gon.base import Contour, Point, Polygon >>> polygon = Polygon(Contour([Point(0, 0), Point(6, 0), Point(6, 6), ... Point(0, 6)]), ... [Contour([Point(2, 2), Point(2, 4), Point(4, 4), ... Point(4, 2)])]) >>> polygon.locate(Point(0, 0)) is Location.BOUNDARY True >>> polygon.locate(Point(1, 1)) is Location.INTERIOR True >>> polygon.locate(Point(2, 2)) is Location.BOUNDARY True >>> polygon.locate(Point(3, 3)) is Location.EXTERIOR True >>> polygon.locate(Point(4, 3)) is Location.BOUNDARY True >>> polygon.locate(Point(5, 2)) is Location.INTERIOR True >>> polygon.locate(Point(6, 1)) is Location.BOUNDARY True >>> polygon.locate(Point(7, 0)) is Location.EXTERIOR True """ return self._locate(point) def relate(self, other: Compound) -> Relation: """ Finds relation between the polygon and the other geometry. Time complexity: ``O(vertices_count * log vertices_count)`` Memory complexity: ``O(vertices_count)`` where .. code-block:: python vertices_count = (len(self.border.vertices) + sum(len(hole.vertices)\ for hole in self.holes)) >>> from gon.base import Contour, Point, Polygon >>> polygon = Polygon(Contour([Point(0, 0), Point(6, 0), Point(6, 6), ... Point(0, 6)]), ... [Contour([Point(2, 2), Point(2, 4), Point(4, 4), ... 
Point(4, 2)])]) >>> polygon.relate(polygon) is Relation.EQUAL True """ return (segment_in_polygon(other, self) if isinstance(other, Segment) else (multisegment_in_polygon(other, self) if isinstance(other, Linear) else (polygon_in_polygon(other, self) if isinstance(other, Polygon) else other.relate(self).complement))) def rotate(self, angle: Angle, point: Optional[Point] = None) -> 'Polygon': """ Rotates the polygon by given angle around given point. Time complexity: ``O(vertices_count)`` Memory complexity: ``O(vertices_count)`` where .. code-block:: python vertices_count = (len(self.border.vertices) + sum(len(hole.vertices)\ for hole in self.holes)) >>> from gon.base import Angle, Contour, Point, Polygon >>> polygon = Polygon(Contour([Point(0, 0), Point(6, 0), Point(6, 6), ... Point(0, 6)]), ... [Contour([Point(2, 2), Point(2, 4), Point(4, 4), ... Point(4, 2)])]) >>> polygon.rotate(Angle(1, 0)) == polygon True >>> (polygon.rotate(Angle(0, 1), Point(1, 1)) ... == Polygon(Contour([Point(2, 0), Point(2, 6), Point(-4, 6), ... Point(-4, 0)]), ... [Contour([Point(0, 2), Point(-2, 2), Point(-2, 4), ... Point(0, 4)])])) True """ return (self._context.rotate_polygon_around_origin(self, angle.cosine, angle.sine) if point is None else self._context.rotate_polygon(self, angle.cosine, angle.sine, point)) def scale(self, factor_x: Scalar, factor_y: Optional[Scalar] = None) -> 'Polygon': """ Scales the polygon by given factor. Time complexity: ``O(vertices_count)`` Memory complexity: ``O(vertices_count)`` where .. code-block:: python vertices_count = (len(self.border.vertices) + sum(len(hole.vertices)\ for hole in self.holes)) >>> from gon.base import Contour, Point, Polygon >>> polygon = Polygon(Contour([Point(0, 0), Point(6, 0), Point(6, 6), ... Point(0, 6)]), ... [Contour([Point(2, 2), Point(2, 4), Point(4, 4), ... Point(4, 2)])]) >>> polygon.scale(1) == polygon True >>> (polygon.scale(1, 2) ... == Polygon(Contour([Point(0, 0), Point(6, 0), Point(6, 12), ... Point(0, 12)]), ... [Contour([Point(2, 4), Point(2, 8), Point(4, 8), ... Point(4, 4)])])) True """ return self._context.scale_polygon( self, factor_x, factor_x if factor_y is None else factor_y) def translate(self, step_x: Scalar, step_y: Scalar ) -> 'Polygon[Coordinate]': """ Translates the polygon by given step. Time complexity: ``O(vertices_count)`` Memory complexity: ``O(vertices_count)`` where .. code-block:: python vertices_count = (len(self.border.vertices) + sum(len(hole.vertices)\ for hole in self.holes)) >>> from gon.base import Contour, Point, Polygon >>> polygon = Polygon(Contour([Point(0, 0), Point(6, 0), Point(6, 6), ... Point(0, 6)]), ... [Contour([Point(2, 2), Point(2, 4), Point(4, 4), ... Point(4, 2)])]) >>> (polygon.translate(1, 2) ... == Polygon(Contour([Point(1, 2), Point(7, 2), Point(7, 8), ... Point(1, 8)]), ... [Contour([Point(3, 4), Point(3, 6), Point(5, 6), ... Point(5, 4)])])) True """ return self._context.translate_polygon(self, step_x, step_y) def triangulate(self) -> Triangulation: """ Returns triangulation of the polygon. Time complexity: ``O(vertices_count ** 2)`` Memory complexity: ``O(vertices_count)`` where .. code-block:: python vertices_count = (len(self.border.vertices) + sum(len(hole.vertices)\ for hole in self.holes)) >>> from gon.base import Contour, Point, Polygon >>> polygon = Polygon(Contour([Point(0, 0), Point(6, 0), Point(6, 6), ... Point(0, 6)]), ... [Contour([Point(2, 2), Point(2, 4), Point(4, 4), ... Point(4, 2)])]) >>> triangulation = polygon.triangulate() >>> (triangulation.triangles() ... 
== [Contour([Point(4, 4), Point(6, 0), Point(6, 6)]), ... Contour([Point(4, 2), Point(6, 0), Point(4, 4)]), ... Contour([Point(0, 6), Point(4, 4), Point(6, 6)]), ... Contour([Point(0, 0), Point(2, 2), Point(0, 6)]), ... Contour([Point(0, 0), Point(6, 0), Point(4, 2)]), ... Contour([Point(0, 6), Point(2, 4), Point(4, 4)]), ... Contour([Point(0, 6), Point(2, 2), Point(2, 4)]), ... Contour([Point(0, 0), Point(4, 2), Point(2, 2)])]) True """ return Triangulation.constrained_delaunay(self, context=self._context) def validate(self) -> None: """ Checks if the polygon is valid. Time complexity: ``O(vertices_count * log (vertices_count))`` Memory complexity: ``O(vertices_count)`` where .. code-block:: python vertices_count = (len(self.border.vertices) + sum(len(hole.vertices)\ for hole in self.holes)) >>> from gon.base import Contour, Point, Polygon >>> polygon = Polygon(Contour([Point(0, 0), Point(6, 0), Point(6, 6), ... Point(0, 6)]), ... [Contour([Point(2, 2), Point(2, 4), Point(4, 4), ... Point(4, 2)])]) >>> polygon.validate() """ self.border.validate() if self.holes: for hole in self.holes: hole.validate() context = self._context relation = region_in_multiregion(self.border, self.holes, context=context) if not (relation is Relation.COVER or relation is Relation.ENCLOSES): raise ValueError('Holes should lie inside the border.') border_minus_holes = ( subtract_multipolygon_from_polygon( context.polygon_cls(self.border, []), context.multipolygon_cls([context.polygon_cls(hole, []) for hole in self.holes])) if len(self.holes) > 1 else subtract_polygons( context.polygon_cls(self.border, []), context.polygon_cls(self.holes[0], []))) if border_minus_holes != self: raise ValueError('Holes should not tear polygon apart.') def _distance_to_point(self, other: Point) -> Scalar: return self._context.sqrt( self._squared_distance_to_exterior_point(other) if self._locate(other) is Location.EXTERIOR else 0) def _distance_to_segment(self, other: Segment) -> Scalar: return (self._linear_distance_to_segment(other) if (self._locate(other.start) is Location.EXTERIOR and self._locate(other.end) is Location.EXTERIOR) else 0) def _linear_distance_to_segment(self, other: Segment) -> Scalar: return self._context.segments_squared_distance( self._segment_nearest_edge(other), other) def _squared_distance_to_exterior_point(self, other: Point) -> Scalar: return self._context.segment_point_squared_distance( self._point_nearest_edge(other), other) def _unite_with_multipoint(self, other: Multipoint) -> Compound: return pack_mix(other - self, self._context.empty, self, self._context.empty, self._context.mix_cls)
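# An end-to-end sketch of the Polygon API documented above, using only constructors
# and operations shown in the docstrings; the square-with-a-square-hole fixture
# mirrors the doctests.
from gon.base import Contour, Point, Polygon

polygon = Polygon(Contour([Point(0, 0), Point(6, 0), Point(6, 6), Point(0, 6)]),
                  [Contour([Point(2, 2), Point(2, 4), Point(4, 4), Point(4, 2)])])
polygon.validate()      # raises ValueError e.g. when holes escape the border
polygon.index()         # optional: speeds up locate()/__contains__ to expected O(log n)
assert Point(1, 1) in polygon        # interior point
assert Point(3, 3) not in polygon    # inside the hole
assert polygon.area == 32            # 6 * 6 border minus 2 * 2 hole
assert polygon.perimeter == 32       # 24 (border) + 8 (hole)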
python
# This package will contain the spiders of your Scrapy project # # Please refer to the documentation for information on how to create and manage # your spiders. import scrapy import json import sys from scrapy.http import Request from Links.items import DSItem from __builtin__ import any as b_any class DSSpider(scrapy.Spider): name = "netbackup" allowed_domains = [] args=sys.argv[3] args2=args.split('=') start_urls = [ args2[1] ] item = DSItem() specific=[] links=[] links2=[] def parse(self, response): '''if (self.start_urls[0].endswith(".com")!=0): print "wrong data source." ''' for sel in response.xpath('//ul/li'): ''' 1. get all the links as they are. ''' title=sel.xpath('a/text()').extract() url = sel.xpath('a/@href').extract() ''' 2. now for each link, get only those whose titles match the product name[0] ''' products = [] products.append("NetBackup") if( b_any(products[0] in x for x in sel.xpath('a/text()').extract())): ''' now check if link is relative, if yes append the domain name and then change it in specific. ''' if ( url[0].find("http") == -1): url[0]= self.start_urls[0]+url[0] self.item['url'] = url self.item['title']=title self.specific.append(url) self.links.append(self.item['url'][0]+','+self.item['title'][0]) self.links2.append(len(self.specific)) self.links2.append(products[0]) for link in self.links: self.links2.append(link) for data in self.links2: print data
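# The spider above reads its start URL from sys.argv[3], which only works for one
# exact command-line layout, and it accumulates results in class-level lists instead
# of yielding items. A more conventional sketch uses Scrapy spider arguments
# (`scrapy crawl netbackup_sketch -a start_url=https://example.com/`); the class
# name, attribute names and the hard-coded product string are illustrative
# assumptions, not part of the original module.
import scrapy


class DSSpiderSketch(scrapy.Spider):
    name = "netbackup_sketch"

    def __init__(self, start_url=None, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.start_urls = [start_url] if start_url else []

    def parse(self, response):
        for sel in response.xpath('//ul/li/a'):
            title = sel.xpath('text()').get()
            url = sel.xpath('@href').get()
            if title and "NetBackup" in title:
                # response.urljoin resolves relative links against the page URL,
                # replacing the manual "http" prefix check in the original.
                yield {"url": response.urljoin(url), "title": title}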
python
from typing import Callable, List


class Route:
    def __init__(self, url_path: str, fn: Callable, methods: List[str]):
        self.url_path = url_path
        self.fn = fn
        self.methods = methods
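# A minimal usage sketch for the Route container above; the handler, the route table
# and the resolve() helper are illustrative assumptions, not part of the original
# module.
def index(request):
    return "hello"

routes = [Route("/", index, ["GET"])]

def resolve(url_path: str, method: str):
    # Linear scan is fine for a handful of routes.
    for route in routes:
        if route.url_path == url_path and method in route.methods:
            return route.fn
    return None

assert resolve("/", "GET") is index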
python
import unittest from collections import MutableMapping, MutableSequence from mock import MagicMock, Mock, patch, sentinel from unittest_expander import expand, foreach, param from rabbit_tools.delete import DelQueueTool from rabbit_tools.purge import PurgeQueueTool tested_tools = [ param(tool=DelQueueTool), param(tool=PurgeQueueTool), ] @expand @foreach(tested_tools) class TestRabbitTools(unittest.TestCase): sample_get_queues_result = [ { 'name': 'queue1', 'test_attr1': 'sample value 1', 'test_attr2': 'sample value 2', }, { 'name': 'queue2', 'test_attr1': 'sample value 3', 'test_attr2': 'sample value 4', }, { 'name': 'queue3', 'test_attr1': 'sample value 5', 'test_attr2': 'sample value 6', }, ] sample_mapping = { 1: 'queue1', 3: 'queue2', 6: 'queue3', 7: 'queue4', } choose_queues_input_to_expected_output = [ param( user_input='1', expected_result={ 1: 'queue1', }, ), param( user_input='all', expected_result=sample_mapping, ), param( user_input=' AlL ', expected_result=sample_mapping, ), param( user_input='0-6', expected_result={ 1: 'queue1', 3: 'queue2', 6: 'queue3', }, ), param( user_input=' 1 - 128 ', expected_result=sample_mapping, ), param( user_input='0, 1,2,7', expected_result={ 1: 'queue1', 7: 'queue4', }, ), ] choose_queues_wrong_inputs = ['0', '1-2-8', '1-32-', '-123' 'abc', '3a', 'a3'] parsed_input_to_expected_result = [ param( user_input='123', expected_result=[123], ), param( user_input='12-13', expected_result=[12, 13], ), param( user_input='12 - 18', expected_result=range(12, 19), ), param( user_input='1, 0, 4, 9, 128', expected_result=[0, 1, 4, 9, 128], ), param( user_input='10-3', expected_result=[], ), ] parsed_input_wrong_to_expected_none = [ ' 1 ', 'a1', '-1-3', '3-8-9', '123-', '1,,2', ',1,2', ' 12-19 ', ] logger_patch = patch('rabbit_tools.base.logger') def setUp(self): self._tested_tool = self.tool.__new__(self.tool) self._tested_tool.config = MagicMock() self._tested_tool.client = Mock() self._tested_tool.client.get_queues.return_value = self.sample_get_queues_result self._tested_tool._parsed_args = Mock() self._tested_tool._vhost = sentinel.vhost self._tested_tool._method_to_call = Mock() self._tested_tool._chosen_numbers = set() def test__get_queue_mapping_first_run(self): queue_mapping = self._tested_tool._get_queue_mapping() self.assertIsInstance(queue_mapping, MutableMapping) self.assertItemsEqual([1, 2, 3], queue_mapping.keys()) self.assertItemsEqual(['queue1', 'queue2', 'queue3'], queue_mapping.values()) def test__get_queue_mapping_another_run(self): self._tested_tool._chosen_numbers = {2, 4} queue_mapping = self._tested_tool._get_queue_mapping() self.assertIsInstance(queue_mapping, MutableMapping) self.assertItemsEqual([1, 3, 5], queue_mapping.keys()) self.assertItemsEqual(['queue1', 'queue2', 'queue3'], queue_mapping.values()) @foreach(choose_queues_input_to_expected_output) def test__choose_queues(self, user_input, expected_result): with patch('__builtin__.raw_input', return_value=user_input),\ self.logger_patch as log_moc: result = self._tested_tool._get_valid_numbers(self.sample_mapping) self.assertFalse(log_moc.called) self.assertIsInstance(result, MutableMapping) self.assertItemsEqual(expected_result, result) @foreach(choose_queues_wrong_inputs) def test__choose_queues_wrong_inputs(self, first_val): with patch('__builtin__.raw_input', side_effect=[first_val, '1']),\ self.logger_patch as log_moc: result = self._tested_tool._get_valid_numbers(self.sample_mapping) # self.assertTrue(log_moc.error.called) # log_moc.error.assert_called_with('***') 
self.assertIsInstance(result, MutableMapping) self.assertItemsEqual({1: 'queue1'}, result) @foreach(parsed_input_to_expected_result) def test__parse_input(self, user_input, expected_result): result = self._tested_tool._parse_input(user_input) self.assertIsInstance(result, MutableSequence) self.assertItemsEqual(expected_result, result) @foreach(parsed_input_wrong_to_expected_none) def test__parse_input_wrong_values(self, user_input): result = self._tested_tool._parse_input(user_input) self.assertIsNone(result) @foreach(['q', 'Q', 'QUIT', 'quit', 'QuIt', ' eXit ', ' e', 'E ']) def test_quit_command(self, command): with patch('__builtin__.raw_input', return_value=command): result = self._tested_tool._get_valid_numbers(self.sample_mapping) self.assertIsNone(result) def test_queue_from_args(self): sample_queue_name = 'some queue' self._tested_tool._parsed_args.queue_name = sample_queue_name self._tested_tool.run() self._tested_tool._method_to_call.assert_called_with(sentinel.vhost, sample_queue_name) def test_queue_chosen_by_user(self): self._tested_tool._parsed_args.queue_name = None with patch('__builtin__.raw_input', side_effect=['2', 'q']): self._tested_tool.run() self._tested_tool._method_to_call.assert_called_once_with(sentinel.vhost, 'queue2') def test_queue_chosen_by_user_next_choice(self): self._tested_tool._parsed_args.queue_name = None self._tested_tool._chosen_numbers = {2} with patch('__builtin__.raw_input', side_effect=['2', 'q']): self._tested_tool.run() self.assertFalse(self._tested_tool._method_to_call.called)
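# The parametrized cases above pin down `_parse_input`'s contract: a single number,
# an inclusive "N-M" range (empty when N > M), a comma-separated list, and None for
# anything malformed (including leading/trailing whitespace). A sketch consistent
# with those expectations; the actual rabbit_tools implementation may differ. Note
# also that in `choose_queues_wrong_inputs` the adjacent literals `'-123' 'abc'` are
# implicitly concatenated into the single string '-123abc', which looks like a
# missing comma.
import re

def parse_input_sketch(user_input):
    if re.match(r'^\d+$', user_input):
        return [int(user_input)]
    range_match = re.match(r'^(\d+)\s*-\s*(\d+)$', user_input)
    if range_match:
        start, stop = map(int, range_match.groups())
        return list(range(start, stop + 1))  # inclusive range; empty when start > stop
    if re.match(r'^\d+(,\s*\d+)*$', user_input):
        return sorted(int(part) for part in user_input.split(','))
    return None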
python
''' Created on 30.08.2015 @author: mEDI ''' from PySide import QtCore, QtGui, QtSvg from datetime import datetime class guitools(object): def __init__(self, parent): self.parent = parent def getPixmapFromSvg(self, svgfile, w=48, h=48): svg_renderer = QtSvg.QSvgRenderer(svgfile) image = QtGui.QImage(w, h, QtGui.QImage.Format_ARGB32) image.fill(0x00000000) svg_renderer.render(QtGui.QPainter(image)) pixmap = QtGui.QPixmap.fromImage(image) return pixmap def getIconFromsvg(self, svgfile, w=48, h=48): pixmap = self.getPixmapFromSvg(svgfile, w, h) icon = QtGui.QIcon(pixmap) return icon def setSystemComplete(self, station, editor): rawSysList = self.parent.mydb.getSystemsWithStationName(station) mylist = [] for system in rawSysList: mylist.append(system["System"]) completer = QtGui.QCompleter(mylist) completer.ModelSorting(QtGui.QCompleter.CaseSensitivelySortedModel) completer.setMaxVisibleItems(20) completer.setCaseSensitivity(QtCore.Qt.CaseInsensitive) editor.setCompleter(completer) def setStationComplete(self, system, editor): rawsystemlist = self.parent.mydb.getStationsFromSystem(system) mylist = [] for system in rawsystemlist: mylist.append(system[1]) completer = QtGui.QCompleter(mylist) completer.ModelSorting(QtGui.QCompleter.CaseSensitivelySortedModel) completer.setMaxVisibleItems(20) completer.setCaseSensitivity(QtCore.Qt.CaseInsensitive) completer.setCompletionMode(QtGui.QCompleter.UnfilteredPopupCompletion) editor.setCompleter(completer) def copyToClipboard(self): ''' copy a multi select column/row to clipboard''' indexes = self.parent.listView.selectedIndexes() clip = [] lastRowCount = None for item in indexes: if lastRowCount is None: lastRowCount = item.row() elif lastRowCount != item.row(): lastRowCount = item.row() clip.append( "\n" ) if item.data(): if isinstance( item.data(), str): clip.append( item.data() ) elif isinstance( item.data(), QtCore.QDateTime): clip.append( item.data().toString("dd.MM.yyyy hh:mm:ss") ) else: #print(type(item.data())) clip.append( str(item.data()) ) # print(type(item.data())) if clip: string = ", ".join(clip) self.parent.main.clipboard.setText( string.replace(", \n, ", "\n") ) class LineEdit(QtGui.QLineEdit): def __init__(self, parent=None): QtGui.QLineEdit.__init__(self, parent) def focusInEvent(self, event): QtGui.QLineEdit.focusInEvent(self, event) self.completer().complete() def convertDateimeToAgeStr(dt=datetime.utcnow() ): age = datetime.utcnow() - dt if age.days >= 1: return "%dd" % age.days elif age.seconds / 60 / 60 >= 1: return "%dh" % (age.seconds / 60 / 60) else: return "%dm" % (age.seconds / 60) def createCenteredWidget( wg ): widget = QtGui.QWidget() layout = QtGui.QVBoxLayout() layout.setAlignment(QtCore.Qt.AlignVCenter | QtCore.Qt.AlignCenter) layout.addWidget( wg ) widget.setLayout(layout) return widget def getChildByType(obj, myType): for child in obj.children(): if isinstance(child, myType): return child def isInt(s): try: int(s) return True except ValueError: return False
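# Two pitfalls in the module above, sketched with fixes:
# 1. `dt=datetime.utcnow()` as a default argument is evaluated once at import time,
#    so the default age is frozen at process start; use a None default instead.
# 2. `completer.ModelSorting(...)` merely constructs an enum value and discards it;
#    `completer.setModelSorting(QtGui.QCompleter.CaseSensitivelySortedModel)` is
#    presumably what was intended.
from datetime import datetime

def convert_datetime_to_age_str(dt=None):
    if dt is None:
        dt = datetime.utcnow()
    age = datetime.utcnow() - dt
    if age.days >= 1:
        return "%dd" % age.days
    elif age.seconds // 3600 >= 1:
        return "%dh" % (age.seconds // 3600)
    return "%dm" % (age.seconds // 60)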
python
from animal import *
from species import *  # Betta is presumably defined here; the wildcard imports hide its origin
from habitat import *
from transport import *

bob = Betta('orange', 'Bob')
betty = Betta('blue', 'Betty')
python
# Generated by Django 3.1.5 on 2021-01-18 09:57 from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('courses', '0004_delete_card'), ] operations = [ migrations.CreateModel( name='Card', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('title', models.CharField(help_text='Введите заголовок карточки', max_length=100, verbose_name='Заголовок')), ('text', models.TextField(help_text='Введите контент карточки', max_length=1000, verbose_name='Контент карточки')), ('queue', models.IntegerField(help_text='Введите место в очереди', verbose_name='Место в очереди')), ('course', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='courses.course')), ], options={ 'ordering': ['queue'], }, ), ]
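# A sketch of the model that would generate the migration above (the module location
# and the referenced Course model are assumptions; the verbose_name/help_text strings
# from the migration are omitted for brevity).
from django.db import models

class Card(models.Model):
    title = models.CharField(max_length=100)
    text = models.TextField(max_length=1000)
    queue = models.IntegerField()
    # SET_NULL requires null=True, as in the generated migration.
    course = models.ForeignKey('courses.Course', null=True, on_delete=models.SET_NULL)

    class Meta:
        ordering = ['queue']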
python
"""Test suites for numerical compatibility with librosa""" import os import unittest import torch import torchaudio import torchaudio.functional as F from torchaudio._internal.module_utils import is_module_available from parameterized import parameterized, param LIBROSA_AVAILABLE = is_module_available('librosa') if LIBROSA_AVAILABLE: import librosa import scipy from torchaudio_unittest import common_utils def _load_audio_asset(*asset_paths, **kwargs): file_path = common_utils.get_asset_path(*asset_paths) sound, sample_rate = torchaudio.load(file_path, **kwargs) return sound, sample_rate @unittest.skipIf(not LIBROSA_AVAILABLE, "Librosa not available") class TestTransforms(common_utils.TorchaudioTestCase): """Test suite for functions in `transforms` module.""" @parameterized.expand([ param(n_fft=400, hop_length=200, power=2.0), param(n_fft=600, hop_length=100, power=2.0), param(n_fft=400, hop_length=200, power=3.0), param(n_fft=200, hop_length=50, power=2.0), ]) def test_spectrogram(self, n_fft, hop_length, power): sample_rate = 16000 sound = common_utils.get_sinusoid(n_channels=1, sample_rate=sample_rate) sound_librosa = sound.cpu().numpy().squeeze() spect_transform = torchaudio.transforms.Spectrogram( n_fft=n_fft, hop_length=hop_length, power=power) out_librosa, _ = librosa.core.spectrum._spectrogram( y=sound_librosa, n_fft=n_fft, hop_length=hop_length, power=power) out_torch = spect_transform(sound).squeeze().cpu() self.assertEqual(out_torch, torch.from_numpy(out_librosa), atol=1e-5, rtol=1e-5) @parameterized.expand([ param(norm=norm, **p.kwargs) for p in [ param(n_fft=400, hop_length=200, n_mels=128), param(n_fft=600, hop_length=100, n_mels=128), param(n_fft=200, hop_length=50, n_mels=128), ] for norm in [None, 'slaney'] ]) def test_mel_spectrogram(self, n_fft, hop_length, n_mels, norm): sample_rate = 16000 sound = common_utils.get_sinusoid(n_channels=1, sample_rate=sample_rate) sound_librosa = sound.cpu().numpy().squeeze() melspect_transform = torchaudio.transforms.MelSpectrogram( sample_rate=sample_rate, window_fn=torch.hann_window, hop_length=hop_length, n_mels=n_mels, n_fft=n_fft, norm=norm) librosa_mel = librosa.feature.melspectrogram( y=sound_librosa, sr=sample_rate, n_fft=n_fft, hop_length=hop_length, n_mels=n_mels, htk=True, norm=norm) librosa_mel_tensor = torch.from_numpy(librosa_mel) torch_mel = melspect_transform(sound).squeeze().cpu() self.assertEqual( torch_mel.type(librosa_mel_tensor.dtype), librosa_mel_tensor, atol=5e-3, rtol=1e-5) @parameterized.expand([ param(norm=norm, **p.kwargs) for p in [ param(n_fft=400, hop_length=200, power=2.0, n_mels=128), param(n_fft=600, hop_length=100, power=2.0, n_mels=128), param(n_fft=400, hop_length=200, power=3.0, n_mels=128), # NOTE: Test passes offline, but fails on TravisCI (and CircleCI), see #372. 
param(n_fft=200, hop_length=50, power=2.0, n_mels=128, skip_ci=True), ] for norm in [None, 'slaney'] ]) def test_s2db(self, n_fft, hop_length, power, n_mels, norm, skip_ci=False): if skip_ci and 'CI' in os.environ: self.skipTest('Test is known to fail on CI') sample_rate = 16000 sound = common_utils.get_sinusoid(n_channels=1, sample_rate=sample_rate) sound_librosa = sound.cpu().numpy().squeeze() spect_transform = torchaudio.transforms.Spectrogram( n_fft=n_fft, hop_length=hop_length, power=power) out_librosa, _ = librosa.core.spectrum._spectrogram( y=sound_librosa, n_fft=n_fft, hop_length=hop_length, power=power) melspect_transform = torchaudio.transforms.MelSpectrogram( sample_rate=sample_rate, window_fn=torch.hann_window, hop_length=hop_length, n_mels=n_mels, n_fft=n_fft, norm=norm) librosa_mel = librosa.feature.melspectrogram( y=sound_librosa, sr=sample_rate, n_fft=n_fft, hop_length=hop_length, n_mels=n_mels, htk=True, norm=norm) power_to_db_transform = torchaudio.transforms.AmplitudeToDB('power', 80.) power_to_db_torch = power_to_db_transform(spect_transform(sound)).squeeze().cpu() power_to_db_librosa = librosa.core.spectrum.power_to_db(out_librosa) self.assertEqual(power_to_db_torch, torch.from_numpy(power_to_db_librosa), atol=5e-3, rtol=1e-5) mag_to_db_transform = torchaudio.transforms.AmplitudeToDB('magnitude', 80.) mag_to_db_torch = mag_to_db_transform(torch.abs(sound)).squeeze().cpu() mag_to_db_librosa = librosa.core.spectrum.amplitude_to_db(sound_librosa) self.assertEqual(mag_to_db_torch, torch.from_numpy(mag_to_db_librosa), atol=5e-3, rtol=1e-5) power_to_db_torch = power_to_db_transform(melspect_transform(sound)).squeeze().cpu() db_librosa = librosa.core.spectrum.power_to_db(librosa_mel) db_librosa_tensor = torch.from_numpy(db_librosa) self.assertEqual( power_to_db_torch.type(db_librosa_tensor.dtype), db_librosa_tensor, atol=5e-3, rtol=1e-5) @parameterized.expand([ param(n_fft=400, hop_length=200, n_mels=128, n_mfcc=40), param(n_fft=600, hop_length=100, n_mels=128, n_mfcc=20), param(n_fft=200, hop_length=50, n_mels=128, n_mfcc=50), ]) def test_mfcc(self, n_fft, hop_length, n_mels, n_mfcc): sample_rate = 16000 sound = common_utils.get_sinusoid(n_channels=1, sample_rate=sample_rate) sound_librosa = sound.cpu().numpy().squeeze() librosa_mel = librosa.feature.melspectrogram( y=sound_librosa, sr=sample_rate, n_fft=n_fft, hop_length=hop_length, n_mels=n_mels, htk=True, norm=None) db_librosa = librosa.core.spectrum.power_to_db(librosa_mel) # librosa.feature.mfcc doesn't pass kwargs properly since some of the # kwargs for melspectrogram and mfcc are the same. 
We just follow the # function body in # https://librosa.github.io/librosa/_modules/librosa/feature/spectral.html#melspectrogram # to mirror this function call with correct args: # # librosa_mfcc = librosa.feature.mfcc( # y=sound_librosa, sr=sample_rate, n_mfcc = n_mfcc, # hop_length=hop_length, n_fft=n_fft, htk=True, norm=None, n_mels=n_mels) librosa_mfcc = scipy.fftpack.dct(db_librosa, axis=0, type=2, norm='ortho')[:n_mfcc] librosa_mfcc_tensor = torch.from_numpy(librosa_mfcc) melkwargs = {'hop_length': hop_length, 'n_fft': n_fft} mfcc_transform = torchaudio.transforms.MFCC( sample_rate=sample_rate, n_mfcc=n_mfcc, norm='ortho', melkwargs=melkwargs) torch_mfcc = mfcc_transform(sound).squeeze().cpu() self.assertEqual( torch_mfcc.type(librosa_mfcc_tensor.dtype), librosa_mfcc_tensor, atol=5e-3, rtol=1e-5) @parameterized.expand([ param(n_fft=400, hop_length=200), param(n_fft=600, hop_length=100), param(n_fft=200, hop_length=50), ]) def test_spectral_centroid(self, n_fft, hop_length): sample_rate = 16000 sound = common_utils.get_sinusoid(n_channels=1, sample_rate=sample_rate) sound_librosa = sound.cpu().numpy().squeeze() spect_centroid = torchaudio.transforms.SpectralCentroid( sample_rate=sample_rate, n_fft=n_fft, hop_length=hop_length) out_torch = spect_centroid(sound).squeeze().cpu() out_librosa = librosa.feature.spectral_centroid( y=sound_librosa, sr=sample_rate, n_fft=n_fft, hop_length=hop_length) out_librosa = torch.from_numpy(out_librosa)[0] self.assertEqual(out_torch.type(out_librosa.dtype), out_librosa, atol=1e-5, rtol=1e-5) def test_MelScale(self): """MelScale transform is comparable to that of librosa""" n_fft = 2048 n_mels = 256 hop_length = n_fft // 4 sample_rate = 44100 sound = common_utils.get_whitenoise(sample_rate=sample_rate, duration=60) sound = sound.mean(dim=0, keepdim=True) spec_ta = F.spectrogram( sound, pad=0, window=torch.hann_window(n_fft), n_fft=n_fft, hop_length=hop_length, win_length=n_fft, power=2, normalized=False) spec_lr = spec_ta.cpu().numpy().squeeze() # Perform MelScale with torchaudio and librosa melspec_ta = torchaudio.transforms.MelScale(n_mels=n_mels, sample_rate=sample_rate)(spec_ta) melspec_lr = librosa.feature.melspectrogram( S=spec_lr, sr=sample_rate, n_fft=n_fft, hop_length=hop_length, win_length=n_fft, center=True, window='hann', n_mels=n_mels, htk=True, norm=None) # Note: Using relaxed rtol instead of atol self.assertEqual(melspec_ta, torch.from_numpy(melspec_lr[None, ...]), atol=1e-8, rtol=1e-3) def test_InverseMelScale(self): """InverseMelScale transform is comparable to that of librosa""" n_fft = 2048 n_mels = 256 n_stft = n_fft // 2 + 1 hop_length = n_fft // 4 # Prepare mel spectrogram input. We use torchaudio to compute one. 
        path = common_utils.get_asset_path('steam-train-whistle-daniel_simon.wav')
        sound, sample_rate = common_utils.load_wav(path)
        sound = sound[:, 2**10:2**10 + 2**14]
        sound = sound.mean(dim=0, keepdim=True)
        spec_orig = F.spectrogram(
            sound, pad=0, window=torch.hann_window(n_fft), n_fft=n_fft,
            hop_length=hop_length, win_length=n_fft, power=2, normalized=False)
        melspec_ta = torchaudio.transforms.MelScale(n_mels=n_mels, sample_rate=sample_rate)(spec_orig)
        melspec_lr = melspec_ta.cpu().numpy().squeeze()
        # Perform InverseMelScale with torchaudio and librosa
        spec_ta = torchaudio.transforms.InverseMelScale(
            n_stft, n_mels=n_mels, sample_rate=sample_rate)(melspec_ta)
        spec_lr = librosa.feature.inverse.mel_to_stft(
            melspec_lr, sr=sample_rate, n_fft=n_fft, power=2.0, htk=True, norm=None)
        spec_lr = torch.from_numpy(spec_lr[None, ...])  # Align dimensions
        # librosa's mel_to_stft returns a magnitude spectrogram, while torchaudio's
        # InverseMelScale stays in the power domain, so take the square root of the
        # power spectrograms before comparing.
        spec_orig = spec_orig.sqrt()
        spec_ta = spec_ta.sqrt()

        threshold = 2.0
        # This threshold was chosen empirically, based on the following observation
        #
        # torch.dist(spec_lr, spec_ta, p=float('inf'))
        # >>> tensor(1.9666)
        #
        # The spectrograms reconstructed by librosa and torchaudio are not comparable
        # elementwise. This is because they use different approximation algorithms and
        # the resulting values can live in different magnitudes (although most of them
        # are very close).
        # See
        # https://github.com/pytorch/audio/pull/366 for the discussion of the choice of algorithm
        # https://github.com/pytorch/audio/pull/448/files#r385747021 for the distribution of P-inf
        # distance over frequencies.
        self.assertEqual(spec_ta, spec_lr, atol=threshold, rtol=1e-5)

        threshold = 1700.0
        # This threshold was chosen empirically, based on the following observations
        #
        # torch.dist(spec_orig, spec_ta, p=1)
        # >>> tensor(1644.3516)
        # torch.dist(spec_orig, spec_lr, p=1)
        # >>> tensor(1420.7103)
        # torch.dist(spec_lr, spec_ta, p=1)
        # >>> tensor(943.2759)
        assert torch.dist(spec_orig, spec_ta, p=1) < threshold
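# The tests above rely on `htk=True, norm=None` making librosa's mel filterbank match
# torchaudio's. A standalone sketch of that parity check, assuming a torchaudio
# version (as in this file) where MelScale precomputes its `fb` buffer when `n_stft`
# is passed; exact tolerances are illustrative.
import torch
import torchaudio
import librosa

sample_rate, n_fft, n_mels = 16000, 400, 64
fb_ta = torchaudio.transforms.MelScale(
    n_mels=n_mels, sample_rate=sample_rate, n_stft=n_fft // 2 + 1).fb  # (n_freqs, n_mels)
fb_lr = librosa.filters.mel(
    sr=sample_rate, n_fft=n_fft, n_mels=n_mels, htk=True, norm=None)   # (n_mels, n_freqs)
assert torch.allclose(fb_ta, torch.from_numpy(fb_lr).t().to(fb_ta.dtype), atol=1e-6)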
python
# Copyright 2016 Mirantis Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from cloudferry import model from cloudferry.model import identity from cloudferry.model import image as image_model from cloudferry.model import storage @model.type_alias('flavors') class Flavor(model.Model): object_id = model.PrimaryKey() flavor_id = model.String(required=True) is_deleted = model.Boolean(required=True) is_disabled = model.Boolean(required=True) is_public = model.Boolean(required=True) name = model.String(required=True) vcpus = model.Integer(required=True) memory_mb = model.Integer(required=True) root_gb = model.Integer(required=True) ephemeral_gb = model.Integer(required=True) swap_mb = model.Integer(required=True) vcpu_weight = model.Integer(allow_none=True, missing=None) rxtx_factor = model.Float(required=True) extra_specs = model.Dict(missing=dict) def equals(self, other): # pylint: disable=no-member if super(Flavor, self).equals(other): return True return (self.is_public == other.is_public and self.is_disabled == other.is_disabled and self.name == other.name and self.vcpus == other.vcpus and self.memory_mb == other.memory_mb and self.root_gb == self.root_gb and self.ephemeral_gb == self.ephemeral_gb and self.swap_mb == self.swap_mb and self.vcpu_weight == self.vcpu_weight and self.rxtx_factor == self.rxtx_factor and model.Dict.equals(self.extra_specs, other.extra_specs)) @model.type_alias('compute_nodes') class ComputeNode(model.Model): object_id = model.PrimaryKey() interfaces = model.Dict(required=True, missing=list) def equals(self, other): return False class SecurityGroup(model.Model): name = model.String(required=True) class EphemeralDisk(model.Model): path = model.String(required=True) size = model.Integer(required=True) format = model.String(required=True) base_path = model.String(required=True, allow_none=True) base_size = model.Integer(required=True, allow_none=True) base_format = model.String(required=True, allow_none=True) @model.type_alias('vms') class Server(model.Model): object_id = model.PrimaryKey() name = model.String(required=True) security_groups = model.Nested(SecurityGroup, many=True, missing=list) status = model.String(required=True) tenant = model.Dependency(identity.Tenant) image = model.Dependency(image_model.Image, allow_none=True) image_membership = model.Dependency(image_model.ImageMember, allow_none=True) user_id = model.String(required=True) # TODO: user reference key_name = model.String(required=True, allow_none=True) flavor = model.Dependency(Flavor) config_drive = model.String(required=True) availability_zone = model.String(required=True, allow_none=True) host = model.String(required=True) hypervisor_hostname = model.String(required=True) instance_name = model.String(required=True) metadata = model.Dict(missing=dict) ephemeral_disks = model.Nested(EphemeralDisk, many=True, missing=list) attached_volumes = model.Dependency(storage.Attachment, many=True, missing=list) compute_node = model.Reference(ComputeNode, required=True, ensure_existence=True) # TODO: ports def 
equals(self, other): # pylint: disable=no-member if super(Server, self).equals(other): return True # TODO: consider comparing metadata # TODO: consider comparing security_groups if not self.tenant.equals(other.tenant): return False if not self.flavor.equals(other.flavor): return False if not self.image.equals(other.image): return False if self.key_name != other.key_name or self.name != other.name: return False return True
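# Note on Flavor.equals above: `self.root_gb == self.root_gb` and
# `self.ephemeral_gb == self.ephemeral_gb` compare the fields with themselves and are
# always true, so flavors differing only in root or ephemeral disk size still compare
# equal. The intended checks are presumably against `other`:
#
#     self.root_gb == other.root_gb and
#     self.ephemeral_gb == other.ephemeral_gb and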
python
import augument as myaug from loader.fb_image_gen_pre import * from settings import * from utils import getMinMax import numpy as np import time from models.resnet50Reg import * def plot_images(imlist): imlen= len(imlist) plt.figure(figsize=(6, 2)) for i in range(imlen): plt.subplot(1, imlen,i+1) plt.imshow(imlist[i]) if i==0: plt.title("original") else: plt.title("augumented") plt.show() def mytest_augumentation(): data = get_triplet(0, mode ='train') plot_triplets(data)# this will only add noise #test1: idx=0 a = read_image(Q_List[idx]) aug_im = myaug.apply_augumentaion_wrapper(a) getMinMax(a,"original") getMinMax(aug_im, "augumented") plot_images([a, aug_im]) #for i in range(10): #plot_images([a, myaug.apply_augumentaion_wrapper(a)]) transform1 = myaug.A.Compose([ myaug.A.RandomBrightnessContrast(contrast_limit=0.3,brightness_limit=0.3,brightness_by_max=True, p=1.0) ]) transform2 = myaug.A.Compose([ myaug.A.ElasticTransform(p=1.0 ) ]) transform3 = myaug.A.Compose([ myaug.A.RandomSnow(p=1.0, brightness_coeff=1.5) ])#inverted type transform4 = myaug.A.Compose([ myaug.A.RandomGridShuffle(p=1.0,grid=(1,1)) ])#lower grid size(default also good) '''transform5 = myaug.A.Compose([ myaug.A.RandomSunFlare(p=1.0,src_color=(50,60,80), num_flare_circles_lower=1, num_flare_circles_upper=6) ])#redice it ''' transform5 = myaug.A.Compose([ myaug.A.ShiftScaleRotate(shift_limit=0.0625, scale_limit=0.50, rotate_limit=45, p=1.0) ])# lower grid size(default also good) transform6 = myaug.A.Compose([ myaug.A.ColorJitter(p=1.0) ]) b1 = transform1(image=a)["image"] b2 = transform2(image=a)["image"] b3 = transform3(image=a)["image"] b4 = transform4(image=a)["image"] b5 = transform5(image=a)["image"] b6 = transform6(image=a)["image"] plot_images([a, b1,b2,b3]) plot_images([a,b4,b5,b6]) def mytest_hdf5loader(): myHd5File = './data/image/im_subset_query.hdf5' hdf5loader = Hdf5Sequence(myHd5File, idlist='', batch_size=2) batchdata= hdf5loader[0] batchdata1 = hdf5loader[1] plot_batches(batchdata) def mytest_train_hdf5_loader(): #''' train_generator = generate_triplets_train_hdfseq(start=0, stop=40000, batch_sz=1)#sz=1 will have same anchor and neg data = next(train_generator) i = 0 # 0->>>>>>99 plot_triplets_batch(data) test_generator = generate_triplets_train_hdfseq(start=0, stop=40000, batch_sz=100, forcePrep = False) data = next(test_generator) i = 0 # 0->>>>>>99 plot_triplets_batch(data) #''' test_generator = generate_triplets_hdfseq(batch_sz=1) data = next(test_generator) plot_triplets_batch(data) test_generator = generate_triplets_hdfseq( batch_sz=100, forcePrep = False) data = next(test_generator) plot_triplets_batch(data) base_model = embedding_model() triplets, labels = get_batch_semihardNeg(base_model, test_generator, draw_batch_size=100, actual_batch_size=16, alpha=1.0) plot_triplets_batch((triplets, labels)) def main(): #mytest_augumentation() #mergeHdf5Files() mytest_train_hdf5_loader() def dummy(): import h5py import os d_names = ['./data/image/image0.hdf5', './data/image/image1.hdf5'] d_struct = {} # Here we will store the database structure for i in d_names: f = h5py.File(i, 'r+') print("filename: ", i) d_struct[i] = f.keys() #print("keys: ",d_struct[i]) f.close() for i in d_names: for j in d_struct[i]: os.system('h5copy -i %s -o output.h5 -s %s -d %s' % (i, j, j)) def mergeHdf5Files(): import h5py import os d_names = ['./data/image/image_extended_Ref.hdf5', './data/image/image_full_ref_0.hdf5', './data/image/image_full_ref_1.hdf5','./data/image/image_full_ref_2.hdf5'] outfilename= 
'./data/image/mergedRefExtended0_2_chunk100_cont.hdf5' print("creating merged filename with name: ", outfilename) timeStart = time.time() with h5py.File(outfilename, mode='w') as h5fw: row1 = 0 file_ids =[] for h5name in d_names: h5fr = h5py.File(h5name, 'r') dset1 = list(h5fr.keys())[1]# 1->vectors; 2->image_names #arr_data = h5fr['vectors'][:] dslen = h5fr['vectors'].shape[0] dsshape = h5fr['vectors'].shape if row1 == 0: maxrows = dslen+(len(d_names)-1)*50000 chunksz = (100,160,160,3) h5fw.create_dataset('vectors', dtype='uint8', shape=dsshape, maxshape=(maxrows, 160,160,3), chunks=chunksz) if row1 + dslen <= len(h5fw['vectors']): h5fw['vectors'][row1:row1 + dslen, :] = np.ascontiguousarray(h5fr['vectors'], dtype='uint8')#[:] #im_names= np.array(myfile["image_names"][:]).astype(str).tolist() else: h5fw['vectors'].resize((row1 + dslen, 160,160,3)) h5fw['vectors'][row1:row1 + dslen, :,:] = np.ascontiguousarray(h5fr['vectors'], dtype='uint8') row1 += dslen im_names = np.array(h5fr["image_names"][:]).astype(str).tolist() file_ids.extend(im_names) image_names = np.array([bytes(name, "ascii") for name in file_ids]) h5fw.create_dataset("image_names", data=image_names) print("========completeing writing merged file") timestop = time.time() print("Time for creatinf file {} mins".format((timestop - timeStart) / 60)) if __name__ == '__main__': main()
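# mergeHdf5Files above relies on h5py's resizable datasets: create the dataset with a
# `maxshape`, then `resize()` it before appending each chunk. A minimal self-contained
# sketch of that pattern (file and dataset names are illustrative):
import h5py
import numpy as np

with h5py.File('merged_sketch.hdf5', 'w') as out:
    dset = out.create_dataset('vectors', shape=(0, 4), maxshape=(None, 4),
                              chunks=(100, 4), dtype='uint8')
    for chunk in (np.ones((3, 4), dtype='uint8'), np.zeros((2, 4), dtype='uint8')):
        row = dset.shape[0]
        dset.resize((row + len(chunk), 4))  # grow along the first axis
        dset[row:row + len(chunk)] = chunk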
python
constants = { "L": { "short_name": "L", "description": "Canopy background adjustment", "default": 1.0, }, "g": { "short_name": "g", "description": "Gain factor", "default": 2.5 }, "C1": { "short_name": "C1", "description": "Coefficient 1 for the aerosol resistance term", "default": 6.0, }, "C2": { "short_name": "C2", "description": "Coefficient 2 for the aerosol resistance term", "default": 7.5, }, "cexp": { "short_name": "cexp", "description": "Exponent used for OCVI", "default": 1.16, }, "nexp": { "short_name": "nexp", "description": "Exponent used for GDVI", "default": 2.0, }, "alpha": { "short_name": "alpha", "description": "Weighting coefficient used for WDRVI", "default": 0.1, }, "beta": { "short_name": "beta", "description": "Calibration parameter used for NDSInw", "default": 0.05, }, "gamma": { "short_name": "gamma", "description": "Weighting coefficient used for ARVI", "default": 1.0, }, "omega": { "short_name": "omega", "description": "Weighting coefficient used for MBWI", "default": 2.0, }, "k": { "short_name": "k", "description": "Slope parameter by soil used for NIRvH2", "default": 0.0, }, "PAR": { "short_name": "PAR", "description": "Photosynthetically Active Radiation", "default": None, }, "lambdaG": { "short_name": "lambdaG", "description": "Green wavelength (nm) used for NDGI", "default": None, }, "lambdaR": { "short_name": "lambdaR", "description": "Red wavelength (nm) used for NIRvH2 and NDGI", "default": None, }, "lambdaN": { "short_name": "lambdaN", "description": "NIR wavelength (nm) used for NIRvH2 and NDGI", "default": None, }, "sla": { "short_name": "sla", "description": "Soil line slope", "default": 1.0, }, "slb": { "short_name": "slb", "description": "Soil line intercept", "default": 0.0, }, "sigma": { "short_name": "sigma", "description": "Length-scale parameter in the RBF kernel", "default": 0.5, }, "p": { "short_name": "p", "description": "Kernel degree in the polynomial kernel", "default": 2.0, }, "c": { "short_name": "c", "description": "Trade-off parameter in the polynomial kernel", "default": 1.0, }, }
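# A small usage sketch for the constants table above: collapse it to a plain
# name -> default mapping. Entries whose default is None (such as PAR, lambdaG,
# lambdaR, lambdaN) have no sensible default and must be supplied by the caller.
defaults = {name: spec["default"] for name, spec in constants.items()}
assert defaults["g"] == 2.5 and defaults["PAR"] is None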
python
##########################
# Test script to check if advisors have duplicated idea tokens
# By Pelmen, https://github.com/Pelmen323
##########################
import re

from ..test_classes.generic_test_class import ResultsReporter
from ..test_classes.characters_class import Characters


def test_check_advisors_duplicate_idea_tokens(test_runner: object):
    advisors, paths = Characters.get_all_advisors(test_runner=test_runner, return_paths=True)
    idea_tokens = []
    results = []
    for adv in advisors:
        if "characters" not in paths[adv]:
            # Skip advisors defined outside "characters" files - those can legitimately be defined multiple times
            continue
        try:
            token = re.findall("idea_token = (.+)", adv)[0]
        except IndexError:
            results.append((adv, paths[adv], "Advisor with missing idea token encountered"))
            continue
        idea_tokens.append(token)

    duplicated_tokens = sorted(set(i for i in idea_tokens if idea_tokens.count(i) > 1))
    for i in duplicated_tokens:
        results.append((i, "Duplicated advisor token encountered"))
    ResultsReporter.report_results(results=results, message="Advisors with non-unique idea tokens were encountered. Check console output")
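# The `idea_tokens.count(i)` scan above is O(n**2) in the number of advisors; a
# collections.Counter computes the same duplicated-token set in a single pass:
from collections import Counter

def duplicated_tokens_sketch(tokens):
    return sorted(token for token, count in Counter(tokens).items() if count > 1)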
python
from setuptools import setup
from distutils.util import convert_path

# Additional keyword arguments for setup
kwargs = {}

d = {}
# exec() instead of the Python 2-only execfile(), so the version can be read on Python 3 too.
with open(convert_path('cinspect/__init__.py')) as f:
    exec(f.read(), d)
kwargs['version'] = d['__version__']

with open('README.md') as f:
    kwargs['long_description'] = f.read()

packages = [
    'cinspect',
    'cinspect.index',
    'cinspect.tests',
    'cinspect.vendor.clang',
]

package_data = {}

setup(
    name="cinspect",
    author="Puneeth Chaganti",
    author_email="[email protected]",
    url="https://github.com/punchagan/cinspect",
    license="BSD",
    description="C-source introspection for packages.",
    packages=packages,
    package_data=package_data,
    entry_points={
        "console_scripts": [
            "cinspect-index = cinspect.index.writer:main",
        ],
    },
    **kwargs
)
python
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for hostprovider update"""

import allure
import coreapi
import pytest
from adcm_client.objects import ADCMClient
from adcm_pytest_plugin.utils import get_data_dir

from tests.library.errorcodes import UPGRADE_ERROR


@allure.step('Create host')
def create_host(hostprovider):
    """Create host"""
    return hostprovider.host_create('localhost')


# pylint: disable=too-many-locals
def test_upgrade_with_two_hostproviders(sdk_client_fs: ADCMClient):
    """Upgrade a hostprovider when two hostproviders with hosts were created from one bundle.

    Scenario:
    1. Create two hostproviders from one bundle
    2. Upload upgradable bundle
    3. Create host for each hostprovider
    4. Upgrade first hostprovider
    5. Check that only the first hostprovider and its hosts were upgraded
    """
    with allure.step('Create two hostproviders from one bundle. Upload upgradable bundle'):
        bundle = sdk_client_fs.upload_from_fs(get_data_dir(__file__, 'hostprovider'))
        sdk_client_fs.upload_from_fs(get_data_dir(__file__, 'upgradable_hostprovider'))
        hostprovider_first = bundle.provider_create("hp_first")
        hostprovider_first_proto_before = hostprovider_first.prototype()
        hostprovider_first_id_before = hostprovider_first.id

        hostprovider_second = bundle.provider_create("hp_second")
        hostprovider_second_proto_before = hostprovider_second.prototype()
        hostprovider_second_id_before = hostprovider_second.id
    with allure.step('Create host for each hostprovider'):
        hp1_host1 = hostprovider_first.host_create(fqdn="localhost")
        hp1_host1_id_before = hp1_host1.id
        hp1_host1_proto_before = hp1_host1.prototype()
        hp1_host2 = hostprovider_first.host_create(fqdn="localhost2")
        hp1_host3 = hostprovider_first.host_create(fqdn="localhost3")

        hp2_host1 = hostprovider_second.host_create(fqdn="hp2-localhost")
        hp2_host1_proto_before = hp2_host1.prototype()
        hp2_host1_id_before = hp2_host1.id
        hp2_host2 = hostprovider_second.host_create(fqdn="hp2-localhost2")
        hp2_host3 = hostprovider_second.host_create(fqdn="hp2-localhost3")
    with allure.step('Upgrade first hostprovider'):
        upgr = hostprovider_first.upgrade(name='upgrade to 2.0')
        upgr.do()
    with allure.step('Check that only the first hostprovider and its hosts were upgraded'):
        hostprovider_first.reread()
        hostprovider_second.reread()
        hp1_host1.reread()
        hp1_host2.reread()
        hp1_host3.reread()
        hp2_host1.reread()
        hp2_host2.reread()
        hp2_host3.reread()
        hp_first_proto_after = hostprovider_first.prototype()
        hp1_host_proto_after = hp1_host1.prototype()
        hp_second_proto_after = hostprovider_second.prototype()
        hp2_host1_proto_after = hp2_host1.prototype()

        assert hostprovider_first.prototype().version == '2.0'
        assert hp1_host1.prototype().version == '00.10'
        assert hostprovider_second.prototype().version == '1.0'
        assert hp2_host1.prototype().version == '00.09'

        assert hostprovider_first_id_before == hostprovider_first.id
        assert hp1_host1_id_before == hp1_host1.id
        assert hostprovider_first_proto_before.id != hp_first_proto_after.id
        assert hp1_host1_proto_before.id != hp1_host_proto_after.id

        assert hostprovider_second_id_before == hostprovider_second.id
        assert hp2_host1_id_before == hp2_host1.id
        assert hostprovider_second_proto_before.id == hp_second_proto_after.id
        assert hp2_host1_proto_before.id == hp2_host1_proto_after.id


def test_check_prototype(sdk_client_fs: ADCMClient):
    """Check prototype for provider and host after upgrade"""
    with allure.step('Create upgradable hostprovider and get id'):
        bundle = sdk_client_fs.upload_from_fs(get_data_dir(__file__, 'hostprovider'))
        sdk_client_fs.upload_from_fs(get_data_dir(__file__, 'upgradable_hostprovider'))
        hostprovider = bundle.provider_create("test")
        host = hostprovider.host_create(fqdn="localhost")
        hostprovider_proto_before = hostprovider.prototype()
        hp_id_before = hostprovider.id

        host_proto_before = host.prototype()
        ht_id_before = host.id
    with allure.step('Upgrade hostprovider to 2.0'):
        upgr = hostprovider.upgrade(name='upgrade to 2.0')
        upgr.do()
    with allure.step('Check prototype for provider and host after upgrade'):
        hostprovider.reread()
        host.reread()
        hostprovider_proto_after = hostprovider.prototype()
        host_proto_after = host.prototype()

        assert hp_id_before == hostprovider.id
        assert ht_id_before == host.id
        assert hostprovider_proto_before.id != hostprovider_proto_after.id
        assert host_proto_before.id != host_proto_after.id


def test_multiple_upgrade_bundles(sdk_client_fs: ADCMClient):
    """Upgrade hostprovider multiple times, from one version to another"""
    with allure.step('Create upgradable hostprovider'):
        bundle = sdk_client_fs.upload_from_fs(get_data_dir(__file__, 'hostprovider'))
        sdk_client_fs.upload_from_fs(get_data_dir(__file__, 'upgradable_hostprovider'))
        hostprovider = bundle.provider_create("test")
    with allure.step('First upgrade hostprovider to 2.0'):
        upgr = hostprovider.upgrade(name='upgrade to 2.0')
        upgr.do()
        hostprovider.reread()
    with allure.step('Second upgrade hostprovider to 2'):
        upgr = hostprovider.upgrade(name='upgrade 2')
        upgr.do()
    with allure.step('Check hostprovider state'):
        hostprovider.reread()
        assert hostprovider.state == 'ver2.4'


def test_change_config(sdk_client_fs: ADCMClient):
    """Upgrade hostprovider with a changed config"""
    with allure.step('Create upgradable hostprovider with new change values'):
        bundle = sdk_client_fs.upload_from_fs(get_data_dir(__file__, 'hostprovider'))
        sdk_client_fs.upload_from_fs(get_data_dir(__file__, 'upgradable_hostprovider_new_change_values'))
        hostprovider = bundle.provider_create("test")
        host = create_host(hostprovider)
    hostprovider_config_before = hostprovider.config()
    host_config_before = host.config()
    hostprovider_config_before['required'] = 25
    hostprovider_config_before['str-key'] = "new_value"
    host_config_before['str_param'] = "str_param_new"
    with allure.step('Set config'):
        hostprovider.config_set(hostprovider_config_before)
        host.config_set(host_config_before)
    with allure.step('Upgrade hostprovider with the changed config'):
        upgr = hostprovider.upgrade(name='upgrade to 2.0')
        upgr.do()
    with allure.step('Check hostprovider config'):
        hostprovider.reread()
        host.reread()
        hostprovider_config_after = hostprovider.config()
        host_config_after = host.config()
        assert len(hostprovider_config_before.keys()) == len(hostprovider_config_after.keys())
        for key in hostprovider_config_before:
            assert hostprovider_config_before[key] == hostprovider_config_after[key]
        for key in host_config_before:
            assert host_config_before[key] == host_config_after[key]


def test_cannot_upgrade_with_state(sdk_client_fs: ADCMClient):
    """Upgrade hostprovider from an unsupported state"""
    with allure.step('Create hostprovider with unsupported state'):
        bundle = sdk_client_fs.upload_from_fs(get_data_dir(__file__, 'hostprovider'))
        sdk_client_fs.upload_from_fs(get_data_dir(__file__, 'upgradable_hostprovider_unsupported_state'))
        hostprovider = bundle.provider_create("test")
    with allure.step('Upgrade hostprovider from unsupported state'):
        upgr = hostprovider.upgrade(name='upgrade to 2.0')
        upgr.do()
        hostprovider.reread()
        upgr = hostprovider.upgrade(name='upgrade 2')
        with pytest.raises(coreapi.exceptions.ErrorMessage) as e:
            upgr.do()
    with allure.step('Check error: provider state is not in available states list'):
        UPGRADE_ERROR.equal(e, 'provider state', 'is not in available states list')
import typing as _t

from django.contrib.auth import get_user_model, update_session_auth_hash
from django.contrib.auth.password_validation import validate_password
from django.contrib.auth.models import AbstractUser
from django.db import transaction
from django_filters import BooleanFilter, CharFilter
from rest_framework import serializers, exceptions, request as drf_request

from vstutils.api import fields, base, permissions, responses, decorators as deco
from vstutils.api.filters import DefaultIDFilter, name_filter, name_help
from vstutils.api.serializers import VSTSerializer, DataSerializer

User = get_user_model()


class ChangePasswordPermission(permissions.IsAuthenticatedOpenApiRequest):

    def has_object_permission(self, request: drf_request.Request, view: base.GenericViewSet, obj: User):  # type: ignore
        return request.user.is_superuser or (
            isinstance(obj, request.user.__class__) and request.user.pk == obj.pk
        )


class UserSerializer(VSTSerializer):
    is_active = serializers.BooleanField(default=True)
    is_staff = serializers.BooleanField(default=False)
    email = serializers.EmailField(required=False)

    class UserExist(exceptions.ValidationError):
        status_code = 409

    class Meta:
        model = User
        fields = (
            'id',
            'username',
            'is_active',
            'is_staff',
            'email',
        )
        read_only_fields = ('is_superuser',)
        ref_name = 'User'

    def create(self, data):
        """Create user from validated data."""
        if not self.context['request'].user.is_staff:
            raise exceptions.PermissionDenied  # nocv
        valid_fields = [
            'username', 'password', 'is_active', 'is_staff',
            "email", "first_name", "last_name"
        ]
        credentials = {
            d: data[d] for d in valid_fields
            if data.get(d, None) is not None
        }
        raw_passwd = self.initial_data.get("raw_password", "False")
        user = super().create(credentials)
        if not raw_passwd == "True":
            user.set_password(credentials['password'])
            user.save()
        return user

    def is_valid(self, raise_exception=False):
        if self.instance is None:
            try:
                initial_data = self.initial_data
                User.objects.get(username=initial_data.get('username', None))
                raise self.UserExist({'username': ["Already exists."]})
            except User.DoesNotExist:
                pass
        return super().is_valid(raise_exception)

    def update(self, instance, validated_data):
        if not self.context['request'].user.is_staff and instance.id != self.context['request'].user.id:
            # can't be tested because PATCH from a non-privileged user to another
            # user fails at self.get_object() in the View
            raise exceptions.PermissionDenied  # nocv
        instance.username = validated_data.get('username', instance.username)
        instance.is_active = validated_data.get('is_active', instance.is_active)
        instance.email = validated_data.get('email', instance.email)
        instance.first_name = validated_data.get('first_name', instance.first_name)
        instance.last_name = validated_data.get('last_name', instance.last_name)
        instance.is_staff = validated_data.get('is_staff', instance.is_staff)
        instance.save()
        return instance


class OneUserSerializer(UserSerializer):

    class Meta:
        model = User
        fields: _t.Union[_t.List[_t.Text], _t.Tuple] = (
            'id',
            'username',
            'is_active',
            'is_staff',
            'first_name',
            'last_name',
            'email',
        )
        read_only_fields = (
            'is_superuser',
            'date_joined',
        )


class CreateUserSerializer(OneUserSerializer):
    password = fields.VSTCharField(write_only=True)
    password2 = fields.VSTCharField(write_only=True, label='Repeat password')

    class Meta(OneUserSerializer.Meta):
        fields = list(OneUserSerializer.Meta.fields) + ['password', 'password2']

    def run_validation(self, data=serializers.empty):
        validated_data = super().run_validation(data)
        if validated_data['password'] != validated_data.pop('password2', None):
            raise exceptions.ValidationError('Passwords do not match.')
        return validated_data


class ChangePasswordSerializer(DataSerializer):
    old_password = fields.PasswordField(required=True)
    password = fields.PasswordField(required=True, label='New password')
    password2 = fields.PasswordField(required=True, label='Confirm new password')

    def update(self, instance, validated_data):
        if not instance.check_password(validated_data['old_password']):
            raise exceptions.PermissionDenied('Password is not correct.')
        if validated_data['password'] != validated_data['password2']:
            raise exceptions.ValidationError("New passwords' values are not equal.")
        validate_password(validated_data['password'])
        instance.set_password(validated_data['password'])
        instance.save()
        return instance

    def to_representation(self, value):
        return {
            'old_password': '***',
            'password': '***',
            'password2': '***'
        }


class UserFilter(DefaultIDFilter):
    is_active = BooleanFilter(help_text='Boolean value meaning status of user.')
    first_name = CharFilter(help_text='Users first name.')
    last_name = CharFilter(help_text='Users last name.')
    email = CharFilter(help_text="Users e-mail value.")
    username__not = CharFilter(method=name_filter, help_text=name_help)
    username = CharFilter(method=name_filter, help_text=name_help)

    class Meta:
        model = User
        fields = (
            'id',
            'username',
            'is_active',
            'first_name',
            'last_name',
            'email',
        )


class UserViewSet(base.ModelViewSet):
    '''
    API endpoint that allows users to be viewed or edited.
    '''
    # pylint: disable=invalid-name
    model: _t.Type[AbstractUser] = User
    serializer_class: _t.Type[UserSerializer] = UserSerializer
    serializer_class_one: _t.Type[OneUserSerializer] = OneUserSerializer
    serializer_class_create: _t.Type[CreateUserSerializer] = CreateUserSerializer
    serializer_class_change_password: _t.Type[DataSerializer] = ChangePasswordSerializer
    filterset_class = UserFilter
    permission_classes = (permissions.SuperUserPermission,)

    def destroy(self, request: drf_request.Request, *args, **kwargs):
        user = self.get_object()
        if user == request.user:
            return responses.HTTP_409_CONFLICT("Could not remove yourself.")
        return super().destroy(request, *args, **kwargs)  # pylint: disable=no-member

    @transaction.atomic
    def partial_update(self, request: drf_request.Request, *args, **kwargs):
        return self.update(request, partial=True)

    @transaction.atomic
    def update(self, request: drf_request.Request, *args, **kwargs):
        partial = kwargs.pop('partial', False)
        instance = self.get_object()
        serializer = self.get_serializer(instance, data=request.data, partial=partial)
        serializer.is_valid(raise_exception=True)
        self.perform_update(serializer)
        return responses.HTTP_200_OK(serializer.data)

    @deco.action(["post"], detail=True, permission_classes=(ChangePasswordPermission,))
    def change_password(self, request: drf_request.Request, *args, **kwargs):
        user = self.get_object()
        serializer = self.get_serializer(user, data=request.data)
        serializer.is_valid(raise_exception=True)
        serializer.save()
        update_session_auth_hash(request, user)
        return responses.HTTP_201_CREATED(serializer.data)
from utils import get_attributes, get_classes
from sklearn.neighbors import KNeighborsClassifier
from sklearn.metrics import classification_report
from sklearn.metrics import confusion_matrix

train_X = get_attributes('train_binary.csv')
train_Y = get_classes('train_binary.csv')
test_X = get_attributes('test_binary.csv')
test_Y = get_classes('test_binary.csv')

neigh = KNeighborsClassifier(n_neighbors=10)
neigh.fit(train_X, train_Y)

y_pred_benchmark = neigh.predict(test_X)
print(classification_report(test_Y, y_pred_benchmark))
print(confusion_matrix(test_Y, y_pred_benchmark))
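# The two helpers imported from the local utils module are not shown in this
# dump. A minimal sketch of what they might look like, assuming (hypothetical
# layout) that each CSV stores the feature columns first and the class label
# in the last column:
#
#     import pandas as pd
#
#     def get_attributes(path):
#         """Return the feature columns of the dataset as a 2-D array."""
#         return pd.read_csv(path).iloc[:, :-1].values
#
#     def get_classes(path):
#         """Return the class-label column of the dataset as a 1-D array."""
#         return pd.read_csv(path).iloc[:, -1].values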
"""This file contains functions to handle /delete_webhook command.""" from aiohttp import web from jinja2 import Environment from webhook_telegram_bot.database.backends.types import DatabaseWrapperImpl from webhook_telegram_bot.database.exceptions import ChatNotFound from webhook_telegram_bot.database.models import Chat from webhook_telegram_bot.telegram.commands import Command from webhook_telegram_bot.telegram.constants import ( TELEGRAM_TEMPLATE_CHAT_NOT_FOUND, TELEGRAM_TEMPLATE_WEBHOOK_DELETED, ) from webhook_telegram_bot.telegram.telegram_api import TelegramAPI async def delete_webhook_command_handler( chat_id: int, webhook_id: str, db: DatabaseWrapperImpl, telegram_api: TelegramAPI, template_engine: Environment, ) -> web.Response: """ Return message about webhook deletion. :param chat_id: chat identification number :param webhook_id: chat identification string :param db: DatabaseWrapper implementation instance :param telegram_api: TelegramAPI instance :param template_engine: template engine instance :return: bot response """ try: chat: Chat = await db.get_chat_by_chat_id(chat_id) chat.delete_webhook_by_id(webhook_id) await db.save_chat(chat) template = template_engine.get_template(TELEGRAM_TEMPLATE_WEBHOOK_DELETED) text = template.render() inline_keyboard = [ [ { 'text': '🔙 Back', 'callback_data': Command.EDIT_WEBHOOKS if len(chat.webhooks) else Command.START, } ] ] except ChatNotFound: template = template_engine.get_template(TELEGRAM_TEMPLATE_CHAT_NOT_FOUND) text = template.render() inline_keyboard = [ [ { 'text': '➕ Add Webhook', 'callback_data': Command.ADD_WEBHOOK, } ] ] return telegram_api.send_message_as_response( chat_id=chat_id, text=text, parse_mode='HTML', disable_notification=True, reply_markup={'inline_keyboard': inline_keyboard}, )
from ..estimators.estimator_base import H2OEstimator
from h2o.utils.typechecks import Enum
from h2o.utils.typechecks import assert_is_type


class H2OPCA(H2OEstimator):
    """Principal Component Analysis"""

    algo = "pca"

    def __init__(self, model_id=None, k=None, max_iterations=None, seed=None,
                 transform="NONE", use_all_factor_levels=False,
                 pca_method="GramSVD", pca_impl="mtj_evd_symmmatrix",
                 ignore_const_cols=True, impute_missing=False, compute_metrics=True):
        """
        Principal Components Analysis

        :param str model_id: The unique hex key assigned to the resulting model. Automatically generated if
            none is provided.
        :param int k: The number of principal components to be computed. This must be between ``1`` and
            ``min(ncol(training_frame), nrow(training_frame))`` inclusive.
        :param str transform: A character string that indicates how the training data should be transformed
            before running PCA. Possible values are:

            - ``"NONE"``: for no transformation,
            - ``"DEMEAN"``: for subtracting the mean of each column,
            - ``"DESCALE"``: for dividing by the standard deviation of each column,
            - ``"STANDARDIZE"``: for demeaning and descaling, and
            - ``"NORMALIZE"``: for demeaning and dividing each column by its range (max - min).

        :param int seed: Random seed used to initialize the right singular vectors at the beginning of each
            power method iteration.
        :param int max_iterations: The maximum number of iterations when pca_method is "Power".
        :param bool use_all_factor_levels: A logical value indicating whether all factor levels should be included
            in each categorical column expansion. If False, the indicator column corresponding to the first factor
            level of every categorical variable will be dropped. Default is False.
        :param str pca_method: A character string that indicates how PCA should be calculated. Possible values are:

            - ``"GramSVD"``: distributed computation of the Gram matrix followed by a local SVD
              using the JAMA package,
            - ``"Power"``: computation of the SVD using the power iteration method,
            - ``"GLRM"``: fit a generalized low rank model with an l2 loss function (no regularization) and
              solve for the SVD using local matrix algebra,
            - ``"Randomized"``: computation of the SVD using the randomized method from the thesis of
              Nathan P. Halko, "Randomized methods for computing low-rank approximations of matrices".

        :param str pca_impl: A character string that indicates the implementation to use for computing PCA
            (via SVD or EVD):

            - ``"mtj_evd_densematrix"``: eigenvalue decomposition for a dense matrix using MTJ,
            - ``"mtj_evd_symmmatrix"``: eigenvalue decomposition for a symmetric matrix using MTJ,
            - ``"mtj_svd_densematrix"``: singular-value decomposition for a dense matrix using MTJ,
            - ``"jama"``: eigenvalue decomposition for a dense matrix using JAMA.

            References:

            - JAMA: http://math.nist.gov/javanumerics/jama/
            - MTJ: https://github.com/fommil/matrix-toolkits-java/

            One of the following implementations is available: ``"mtj_evd_densematrix"``,
            ``"mtj_evd_symmmatrix"``, ``"mtj_svd_densematrix"``, ``"jama"``
            (default: ``"mtj_evd_symmmatrix"``).
        :param bool ignore_const_cols: If true, will ignore constant columns. Default is True.
        :param bool impute_missing: whether to impute NA/missing values.
        :param bool compute_metrics: whether to compute metrics on training data. Default is True.

        :returns: A new instance of H2OPCA.
        """
        super(H2OPCA, self).__init__()
        self._parms = locals()
        self._parms = {k: v for k, v in self._parms.items() if k != "self"}

        assert_is_type(pca_method, Enum("GramSVD", "Power", "GLRM", "Randomized"))
        self._parms["pca_method"] = pca_method
        assert_is_type(pca_impl, Enum("MTJ_EVD_DENSEMATRIX", "MTJ_EVD_SYMMMATRIX",
                                      "MTJ_SVD_DENSEMATRIX", "JAMA"))
        self._parms["pca_impl"] = pca_impl
        assert_is_type(transform, Enum("NONE", "DEMEAN", "DESCALE", "STANDARDIZE", "NORMALIZE"))
        self._parms["transform"] = transform

    def fit(self, X, y=None, **params):
        return super(H2OPCA, self).fit(X)

    def transform(self, X, y=None, **params):
        """
        Transform the given H2OFrame with the fitted PCA model.

        :param H2OFrame X: May contain NAs and/or categorical data.
        :param H2OFrame y: Ignored for PCA. Should be None.
        :param params: Ignored.

        :returns: The input H2OFrame transformed by the Principal Components.
        """
        return self.predict(X)


class H2OSVD(H2OEstimator):
    """Singular Value Decomposition"""

    algo = "svd"

    def __init__(self, nv=None, max_iterations=None, transform=None, seed=None,
                 use_all_factor_levels=None, svd_method="GramSVD"):
        """
        Singular value decomposition of an H2OFrame.

        :param int nv: The number of right singular vectors to be computed. This must be between 1 and
            min(ncol(training_frame), nrow(training_frame)) inclusive.
        :param int max_iterations: The maximum number of iterations to run each power iteration loop. Must be
            between 1 and 1e6 inclusive.
        :param str transform: A character string that indicates how the training data should be transformed
            before running SVD. Possible values are:

            - ``"NONE"``: for no transformation,
            - ``"DEMEAN"``: for subtracting the mean of each column,
            - ``"DESCALE"``: for dividing by the standard deviation of each column,
            - ``"STANDARDIZE"``: for demeaning and descaling, and
            - ``"NORMALIZE"``: for demeaning and dividing each column by its range (max - min).

        :param int seed: Random seed used to initialize the right singular vectors at the beginning of each
            power method iteration.
        :param bool use_all_factor_levels: A logical value indicating whether all factor levels should be included
            in each categorical column expansion. If False, the indicator column corresponding to the first factor
            level of every categorical variable will be dropped. Defaults to True.
        :param str svd_method: A character string that indicates how SVD should be calculated. Possible values are:

            - ``"GramSVD"``: distributed computation of the Gram matrix followed by a local SVD
              using the JAMA package,
            - ``"Power"``: computation of the SVD using the power iteration method,
            - ``"Randomized"``: approximate SVD by projecting onto a random subspace.

        :returns: a new H2OSVD model
        """
        super(H2OSVD, self).__init__()
        self._parms = locals()
        self._parms = {k: v for k, v in self._parms.items() if k != "self"}

        assert_is_type(svd_method, Enum("GramSVD", "Power", "GLRM", "Randomized"))
        self._parms["svd_method"] = svd_method
        assert_is_type(transform, Enum("NONE", "DEMEAN", "DESCALE", "STANDARDIZE", "NORMALIZE"))
        self._parms["transform"] = transform
        self._parms['_rest_version'] = 99

    def fit(self, X, y=None, **params):
        return super(H2OSVD, self).fit(X)

    def transform(self, X, y=None, **params):
        """
        Transform the given H2OFrame with the fitted SVD model.

        :param H2OFrame X: May contain NAs and/or categorical data.
        :param H2OFrame y: Ignored for SVD. Should be None.
        :param params: Ignored.

        :returns: The input H2OFrame transformed by the SVD.
        """
        return self.predict(X)
#!/usr/bin/env python
import os
import sys

from django.conf import settings
import django

DIRNAME = os.path.dirname(__file__)

settings.configure(
    DEBUG=True,
    DATABASES={
        'default': {
            'ENGINE': 'django.db.backends.sqlite3',
        }
    },
    ROOT_URLCONF='feedme.urls',
    INSTALLED_APPS=(
        'django.contrib.auth',
        'django.contrib.contenttypes',
        'django.contrib.sessions',
        'django.contrib.admin',
        'feedme',
    ),
    USE_TZ=True,
    MIDDLEWARE_CLASSES=(
        'django.middleware.common.CommonMiddleware',
        'django.middleware.csrf.CsrfViewMiddleware',
    ),
)

django.setup()

from django.test.runner import DiscoverRunner

test_runner = DiscoverRunner(verbosity=1)
failures = test_runner.run_tests(['feedme'])
if failures:
    sys.exit(failures)
# -*- coding: utf-8 -*-
N = int(input())
numbers = list(map(int, input().split()))

smallest = min(numbers)
print("Menor valor: %d" % smallest)
print("Posicao: %d" % numbers.index(smallest))
#!/usr/bin/python
"""
 * Copyright 2015 Alibaba Group Holding Limited
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
"""

try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup

config = {
    'name': 'alibabacloud-nls-python-sdk',
    'version': '2.0.0',
    'description': 'ali_speech python sdk',
    'author': 'Alibaba Cloud NLS Team',
    'author_email': '[email protected]',
    'license': 'Apache License 2.0',
    'url': 'https://github.com/aliyun/alibabacloud-nls-python-sdk.git',
    'install_requires': ['websocket-client', 'requests'],
    'packages': ['ali_speech'],
    'classifiers': (
        'Development Status :: 5 - Production/Stable',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: Apache Software License',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
        'Topic :: Software Development',
    )
}

setup(**config)
#! /usr/bin/env python3
import rospy
from sensor_msgs.msg import PointCloud2
import pcl
import pcl_helper


def do_euclidian_clustering(cloud):
    # Euclidean Clustering
    white_cloud = pcl_helper.XYZRGB_to_XYZ(cloud)  # <type 'pcl._pcl.PointCloud'>
    tree = white_cloud.make_kdtree()               # <type 'pcl._pcl.KdTree'>

    ec = white_cloud.make_EuclideanClusterExtraction()
    ec.set_ClusterTolerance(0.02)  # for hammer
    ec.set_MinClusterSize(10)
    ec.set_MaxClusterSize(250)
    ec.set_SearchMethod(tree)
    cluster_indices = ec.Extract()  # indices for each cluster (a list of lists)

    # Assign a color to each cluster
    cluster_color = pcl_helper.random_color_gen()
    # cluster_color = pcl_helper.get_color_list(len(cluster_indices))

    color_cluster_point_list = []
    for j, indices in enumerate(cluster_indices):
        for i, indice in enumerate(indices):
            color_cluster_point_list.append([
                white_cloud[indice][0],
                white_cloud[indice][1],
                white_cloud[indice][2],
                pcl_helper.rgb_to_float(cluster_color)
            ])

    # Create new cloud containing all clusters, each with a unique color
    cluster_cloud = pcl.PointCloud_PointXYZRGB()
    cluster_cloud.from_list(color_cluster_point_list)

    # publish to cloud
    ros_cluster_cloud = pcl_helper.pcl_to_ros(cluster_cloud)

    return cluster_cloud


def callback(input_ros_msg):
    cloud = pcl_helper.ros_to_pcl(input_ros_msg)
    cloud = do_euclidian_clustering(cloud)
    cloud_new = pcl_helper.pcl_to_ros(cloud)
    pub.publish(cloud_new)


if __name__ == '__main__':
    rospy.init_node("euclidean", anonymous=True)
    rospy.Subscriber("/velodyne", PointCloud2, callback)
    pub = rospy.Publisher("/velodyne_new", PointCloud2, queue_size=1)
    rospy.spin()
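# Note on the color assignment above: random_color_gen() yields a single color,
# so every cluster is painted identically. The commented-out get_color_list
# line suggests the per-cluster variant; a hypothetical drop-in replacement for
# the color-assignment block would be:
#
#     cluster_color = pcl_helper.get_color_list(len(cluster_indices))
#     ...
#         pcl_helper.rgb_to_float(cluster_color[j])  # index by cluster id j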
# -*- coding: utf-8 -*-
# /*
#  * Copyright (c) 2022 Renwei
#  *
#  * This is a free software; you can redistribute it and/or modify
#  * it under the terms of the MIT license. See LICENSE for details.
#  */
import pickle


# =====================================================================


def t_class_save(file_path, class_body):
    # serialize the object and write it to disk
    with open(file_path, 'wb') as output_hal:
        output_hal.write(pickle.dumps(class_body))
    return


def t_class_load(file_path):
    # read the file back and reconstruct the object
    class_body = None
    with open(file_path, 'rb') as file:
        class_body = pickle.loads(file.read())
    return class_body
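# Usage sketch: round-trip an arbitrary picklable object through the two
# helpers above (the file path is illustrative).
if __name__ == '__main__':
    class Point:
        def __init__(self, x, y):
            self.x, self.y = x, y

    t_class_save('/tmp/point.pkl', Point(1, 2))
    restored = t_class_load('/tmp/point.pkl')
    print(restored.x, restored.y)  # -> 1 2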
# Copyright 2020 Xilinx Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Module for Pyxir IO APIs"""

import io
import os
import json
import zipfile

from pyxir.graph.xgraph import XGraph
from pyxir.graph.io.xgraph_io import XGraphIO
from pyxir.opaque_func_registry import register_opaque_func, OpaqueFuncRegistry
from pyxir.type import TypeCode
from pyxir.shared.container import StrContainer, BytesContainer

from .util import zip_dir


def visualize(xgraph, pngfile='xgraph.png'):
    # type: (XGraph, str) -> None
    xgraph.visualize(pngfile)


def save(xgraph, filename):
    # type: (XGraph, str) -> None
    """
    Save this XGraph to disk. The network graph information is written to
    json and the network parameters are written to an h5 file.

    Arguments
    ---------
    xgraph: XGraph
        the XGraph to be saved
    filename: str
        the name of the files storing the graph information and network
        parameters; the graph information is stored in `filename`.json and
        the network parameters are stored in `filename`.h5
    """
    XGraphIO.save(xgraph, filename)


@register_opaque_func('pyxir.io.save', [TypeCode.XGraph, TypeCode.Str])
def save_opaque_func(xg, filename):
    save(xg, filename)


def load(net_file, params_file):
    # type: (str, str) -> XGraph
    """
    Load the graph network information and weights from the json network file
    and the h5 parameters file, respectively.

    Arguments
    ---------
    net_file: str
        the path to the file containing the network graph information
    params_file: str
        the path to the file containing the network weights
    """
    xgraph = XGraphIO.load(net_file, params_file)
    return xgraph


@register_opaque_func('pyxir.io.load', [TypeCode.Str, TypeCode.Str, TypeCode.XGraph])
def load_opaque_func(net_file, params_file, xg_callback):
    xg_callback.copy_from(load(net_file, params_file))


@register_opaque_func('pyxir.io.load_scheduled_xgraph_from_meta',
                      [TypeCode.Str, TypeCode.XGraph])
def load_scheduled_xgraph_opaque_func(build_dir: str, cb_scheduled_xgraph: XGraph):
    """
    Expose the load scheduled xgraph function as an opaque function
    so it can be called in a language agnostic way.

    Arguments
    ---------
    build_dir: str
        the path to the build directory containing a meta.json file
    cb_scheduled_xgraph: XGraph
        return the scheduled XGraph
    """
    meta_file = os.path.join(build_dir, 'meta.json')

    if not os.path.isfile(meta_file):
        raise ValueError("Could not find meta file at: {}".format(meta_file))

    with open(meta_file) as json_file:
        meta_d = json.load(json_file)

    px_net_file = meta_d['px_model']
    px_params_file = meta_d['px_params']

    if not os.path.isabs(px_net_file):
        px_net_file = os.path.join(build_dir, px_net_file)

    if not os.path.isabs(px_params_file):
        px_params_file = os.path.join(build_dir, px_params_file)

    scheduled_xgraph = load(px_net_file, px_params_file)
    cb_scheduled_xgraph.copy_from(scheduled_xgraph)


@register_opaque_func('pyxir.io.to_string',
                      [TypeCode.XGraph, TypeCode.BytesContainer, TypeCode.BytesContainer])
def write_to_string(xg, xgraph_json_str_callback, xgraph_params_str_callback):
    graph_str, data_str = XGraphIO.to_string(xg)
    xgraph_json_str_callback.set_bytes(graph_str)
    xgraph_params_str_callback.set_bytes(data_str)


def get_xgraph_str(xg: XGraph):
    # graph_str, data_str = XGraphIO.to_string(xg)
    # return " " + str(len(graph_str)) + " " + graph_str + " " + str(len(data_str) + 1) + " " + data_str
    of = OpaqueFuncRegistry.Get("pyxir.io.get_serialized_xgraph")
    s = BytesContainer(b"")
    of(xg, s)
    return s.get_bytes()


def read_xgraph_str(xg_str: bytes):
    of = OpaqueFuncRegistry.Get("pyxir.io.deserialize_xgraph")
    xg = XGraph()
    s = BytesContainer(xg_str)
    of(xg, s)
    return xg


@register_opaque_func('pyxir.io.from_string',
                      [TypeCode.XGraph, TypeCode.Byte, TypeCode.Byte])
def read_from_string(xg, xgraph_json_str, xgraph_params_str):
    # graph_str, data_str = xgraph_str.split(";")
    xg_load = XGraphIO.from_string(xgraph_json_str, xgraph_params_str)
    xg.copy_from(xg_load)


@register_opaque_func('pyxir.io.serialize_dir',
                      [TypeCode.Str, TypeCode.BytesContainer])
def serialize_dir(dir_path, serial_str_cb):
    if not os.path.isdir(dir_path):
        serial_str_cb.set_bytes(b"")
    else:
        bio = io.BytesIO()
        with zipfile.ZipFile(bio, 'w', zipfile.ZIP_DEFLATED) as zip_f:
            zip_dir(dir_path, zip_f)

        s = bio.getvalue()  # .hex()
        serial_str_cb.set_bytes(s)


@register_opaque_func('pyxir.io.deserialize_dir',
                      [TypeCode.Str, TypeCode.Byte])
def deserialize_dir(dir_path, serial_str):
    if serial_str != b"":
        bio = io.BytesIO(serial_str)
        with zipfile.ZipFile(bio, 'r') as zip_f:
            zip_f.extractall(dir_path)

    # If an empty directory got zipped, recreate the empty directory
    if not os.path.exists(dir_path):
        os.makedirs(dir_path)
# Copyright (c) 2022 OpenCyphal
# This software is distributed under the terms of the MIT License.
# Author: Pavel Kirienko <[email protected]>

from __future__ import annotations
import asyncio
import time
from typing import Any
import json
import tempfile
from pathlib import Path
from pprint import pprint
import pytest
from tests.dsdl import OUTPUT_DIR
from tests.transport import TransportFactory
from tests.subprocess import execute_cli, Subprocess


@pytest.mark.asyncio
async def _unittest_caller(compiled_dsdl: Any) -> None:
    from pycyphal.transport.loopback import LoopbackTransport
    import pycyphal.application
    from pycyphal.application.register import ValueProxy, Natural64, Value, String
    from yakut.cmd.register_batch._directive import Directive
    from yakut.cmd.register_batch._caller import Skipped, Timeout, TypeCoercionFailure, do_calls

    _ = compiled_dsdl
    node = pycyphal.application.make_node(pycyphal.application.NodeInfo(), transport=LoopbackTransport(10))
    try:
        node.registry.clear()
        node.registry["a"] = ValueProxy("a")
        node.registry["b"] = ValueProxy(Natural64([1, 2, 3]))
        node.registry["c"] = ValueProxy(Natural64([3, 2, 1]))
        node.start()

        res = await do_calls(
            node,
            lambda x: print("Progress:", x),
            timeout=1.0,
            directive=Directive(
                registers_per_node={
                    10: {
                        "c": lambda _: None,  # Type coercion failure does not interrupt further processing.
                        "a": Value(string=String("z")),
                        "d": Value(string=String("n")),  # No such register.
                        "b": lambda v: v,
                    },
                    11: {
                        "y": lambda _: None,
                        "z": lambda _: None,
                    },
                }
            ),
        )
        pprint(res.responses_per_node)
        assert res.responses_per_node.keys() == {10, 11}
        assert res.responses_per_node[10]["a"].value.string.value.tobytes().decode() == "z"  # type: ignore
        assert list(res.responses_per_node[10]["b"].value.natural64.value) == [1, 2, 3]  # type: ignore
        assert isinstance(res.responses_per_node[10]["c"], TypeCoercionFailure)
        assert res.responses_per_node[10]["d"].value.empty  # type: ignore
        assert res.responses_per_node[11]["y"] == Timeout()
        assert res.responses_per_node[11]["z"] == Skipped()
    finally:
        node.close()
        await asyncio.sleep(1)


def _unittest_cmd(compiled_dsdl: Any, transport_factory: TransportFactory) -> None:
    _ = compiled_dsdl
    file = Path(tempfile.mktemp("yakut_register_batch_test.yaml"))
    # Run dummy nodes which we can query.
    bg_nodes = [
        Subprocess.cli(
            "sub",
            "1000:uavcan.primitive.empty",
            environment_variables={
                **transport_factory(10 + idx).environment,
                "YAKUT_PATH": str(OUTPUT_DIR),
            },
        )
        for idx in range(2)
    ]
    time.sleep(1)
    try:
        # READ INPUT KEYED
        file.write_text("{10: [uavcan.node.id, uavcan.node.description], 11: [uavcan.node.id]}")
        status, stdout, _ = execute_cli(
            "register-batch",
            f"--file={file}",
            environment_variables={
                **transport_factory(100).environment,
                "YAKUT_PATH": str(OUTPUT_DIR),
            },
        )
        assert status == 0
        data = json.loads(stdout.strip())
        print(json.dumps(data, indent=4))
        assert len(data) == 2
        assert data["10"]["uavcan.node.id"] == 10
        assert data["10"]["uavcan.node.description"] == ""
        assert data["11"]["uavcan.node.id"] == 11

        # MODIFY INPUT KEYED
        file.write_text("{10: {uavcan.node.description: TEN}, 11: {uavcan.node.description: ELEVEN}}")
        status, stdout, _ = execute_cli(
            "register-batch",
            f"--file={file}",
            environment_variables={
                **transport_factory(100).environment,
                "YAKUT_PATH": str(OUTPUT_DIR),
            },
        )
        assert status == 0
        data = json.loads(stdout.strip())
        print(json.dumps(data, indent=4))
        assert len(data) == 2
        assert data["10"]["uavcan.node.description"] == "TEN"
        assert data["11"]["uavcan.node.description"] == "ELEVEN"

        # READ INPUT FLAT, OUTPUT FLAT
        file.write_text("[uavcan.node.id, uavcan.node.description]")
        status, stdout, _ = execute_cli(
            "register-batch",
            f"--file={file}",
            "10",
            environment_variables={
                **transport_factory(100).environment,
                "YAKUT_PATH": str(OUTPUT_DIR),
            },
        )
        assert status == 0
        data = json.loads(stdout.strip())
        print(json.dumps(data, indent=4))
        assert len(data) == 2
        assert data["uavcan.node.id"] == 10
        assert data["uavcan.node.description"] == "TEN"

        # MODIFY INPUT FLAT, OUTPUT KEYED
        file.write_text("{uavcan.node.description: 'TEN OR ELEVEN'}")
        status, stdout, _ = execute_cli(
            "register-batch",
            f"--file={file}",
            "10,11",
            environment_variables={
                **transport_factory(100).environment,
                "YAKUT_PATH": str(OUTPUT_DIR),
            },
        )
        assert status == 0
        data = json.loads(stdout.strip())
        print(json.dumps(data, indent=4))
        assert len(data) == 2
        assert data["10"]["uavcan.node.description"] == "TEN OR ELEVEN"
        assert data["11"]["uavcan.node.description"] == "TEN OR ELEVEN"

        # MODIFY INPUT FLAT, OUTPUT KEYED, ONE TIMED OUT WITH ERROR
        file.write_text("{uavcan.node.description: XXX}")
        status, stdout, _ = execute_cli(
            "register-batch",
            f"--file={file}",
            "10-13",
            environment_variables={
                **transport_factory(100).environment,
                "YAKUT_PATH": str(OUTPUT_DIR),
            },
            ensure_success=False,
        )
        assert status != 0
        data = json.loads(stdout.strip())
        print(json.dumps(data, indent=4))
        assert len(data) == 3
        assert data["10"]["uavcan.node.description"] == "XXX"
        assert data["11"]["uavcan.node.description"] == "XXX"
        assert not data["12"]

        # MODIFY INPUT FLAT, OUTPUT KEYED, NO SUCH REGISTER ERROR
        file.write_text("{nonexistent.register: 123}")
        status, stdout, _ = execute_cli(
            "register-batch",
            f"--file={file}",
            "10,11",
            environment_variables={
                **transport_factory(100).environment,
                "YAKUT_PATH": str(OUTPUT_DIR),
            },
            ensure_success=False,
        )
        assert status != 0
        data = json.loads(stdout.strip())
        print(json.dumps(data, indent=4))
        assert len(data) == 2
        assert data["10"]["nonexistent.register"] is None
        assert data["11"]["nonexistent.register"] is None

        # MODIFY INPUT FLAT, OUTPUT KEYED, NO SUCH REGISTER, ERROR IGNORED
        file.write_text("{nonexistent.register: 123}")
        status, stdout, _ = execute_cli(
            "register-batch",
            f"--file={file}",
            "10,11",
            "--optional-register",
            environment_variables={
                **transport_factory(100).environment,
                "YAKUT_PATH": str(OUTPUT_DIR),
            },
        )
        assert status == 0
        data = json.loads(stdout.strip())
        print(json.dumps(data, indent=4))
        assert len(data) == 2
        assert data["10"]["nonexistent.register"] is None
        assert data["11"]["nonexistent.register"] is None

        # MODIFY INPUT FLAT, OUTPUT FLAT, DETAILED
        file.write_text("[uavcan.node.id]")
        status, stdout, _ = execute_cli(
            "register-batch",
            f"--file={file}",
            "10",
            "--detailed",
            environment_variables={
                **transport_factory(100).environment,
                "YAKUT_PATH": str(OUTPUT_DIR),
            },
        )
        assert status == 0
        data = json.loads(stdout.strip())
        print(json.dumps(data, indent=4))
        assert len(data) == 1
        assert data["uavcan.node.id"]["natural16"]["value"] == [10]

        # MODIFY INPUT FLAT, OUTPUT FLAT, DETAILED, FILTERED EMPTY
        file.write_text("[uavcan.node.id]")
        status, stdout, _ = execute_cli(
            "register-batch",
            f"--file={file}",
            "10",
            "--only=iv",  # The requested register is not immutable-volatile so it will be skipped.
            environment_variables={
                **transport_factory(100).environment,
                "YAKUT_PATH": str(OUTPUT_DIR),
            },
        )
        assert status == 0
        data = json.loads(stdout.strip())
        print(json.dumps(data, indent=4))
        assert data == {}
    finally:
        for bg in bg_nodes:
            bg.wait(10, interrupt=True)
        file.unlink()
from .mesh_adv_dataset import MeshAdversarialDataset
from .mesh_h36m_dataset import MeshH36MDataset
from .mesh_mix_dataset import MeshMixDataset
from .mosh_dataset import MoshDataset

__all__ = [
    'MeshH36MDataset', 'MoshDataset', 'MeshMixDataset', 'MeshAdversarialDataset'
]
import numpy as np

from matplotlib.testing.decorators import image_comparison
import matplotlib.pyplot as plt
import matplotlib.patheffects as path_effects
from matplotlib.path import Path
import matplotlib.patches as patches


@image_comparison(['patheffect1'], remove_text=True)
def test_patheffect1():
    ax1 = plt.subplot()
    ax1.imshow([[1, 2], [2, 3]])
    txt = ax1.annotate("test", (1., 1.), (0., 0),
                       arrowprops=dict(arrowstyle="->",
                                       connectionstyle="angle3", lw=2),
                       size=20, ha="center",
                       path_effects=[path_effects.withStroke(linewidth=3,
                                                             foreground="w")])
    txt.arrow_patch.set_path_effects([path_effects.Stroke(linewidth=5,
                                                          foreground="w"),
                                      path_effects.Normal()])

    pe = [path_effects.withStroke(linewidth=3, foreground="w")]
    ax1.grid(True, linestyle="-", path_effects=pe)


@image_comparison(['patheffect2'], remove_text=True, style='mpl20')
def test_patheffect2():
    ax2 = plt.subplot()
    arr = np.arange(25).reshape((5, 5))
    ax2.imshow(arr, interpolation='nearest')
    cntr = ax2.contour(arr, colors="k")

    plt.setp(cntr.collections,
             path_effects=[path_effects.withStroke(linewidth=3,
                                                   foreground="w")])

    clbls = ax2.clabel(cntr, fmt="%2.0f", use_clabeltext=True)
    plt.setp(clbls,
             path_effects=[path_effects.withStroke(linewidth=3,
                                                   foreground="w")])


@image_comparison(['patheffect3'])
def test_patheffect3():
    p1, = plt.plot([1, 3, 5, 4, 3], 'o-b', lw=4)
    p1.set_path_effects([path_effects.SimpleLineShadow(),
                         path_effects.Normal()])
    plt.title(
        r'testing$^{123}$',
        path_effects=[path_effects.withStroke(linewidth=1, foreground="r")])
    leg = plt.legend([p1], [r'Line 1$^2$'], fancybox=True, loc='upper left')
    leg.legendPatch.set_path_effects([path_effects.withSimplePatchShadow()])

    text = plt.text(2, 3, 'Drop test', color='white',
                    bbox={'boxstyle': 'circle,pad=0.1', 'color': 'red'})
    pe = [path_effects.Stroke(linewidth=3.75, foreground='k'),
          path_effects.withSimplePatchShadow((6, -3), shadow_rgbFace='blue')]
    text.set_path_effects(pe)
    text.get_bbox_patch().set_path_effects(pe)

    pe = [path_effects.PathPatchEffect(offset=(4, -4), hatch='xxxx',
                                       facecolor='gray'),
          path_effects.PathPatchEffect(edgecolor='white', facecolor='black',
                                       lw=1.1)]

    t = plt.gcf().text(0.02, 0.1, 'Hatch shadow', fontsize=75, weight=1000,
                       va='center')
    t.set_path_effects(pe)


@image_comparison(['stroked_text.png'])
def test_patheffects_stroked_text():
    text_chunks = [
        'A B C D E F G H I J K L',
        'M N O P Q R S T U V W',
        'X Y Z a b c d e f g h i j',
        'k l m n o p q r s t u v',
        'w x y z 0123456789',
        r"!@#$%^&*()-=_+[]\;'",
        ',./{}|:"<>?'
    ]
    font_size = 50

    ax = plt.axes([0, 0, 1, 1])
    for i, chunk in enumerate(text_chunks):
        text = ax.text(x=0.01, y=(0.9 - i * 0.13), s=chunk,
                       fontdict={'ha': 'left', 'va': 'center',
                                 'size': font_size, 'color': 'white'})

        text.set_path_effects([path_effects.Stroke(linewidth=font_size / 10,
                                                   foreground='black'),
                               path_effects.Normal()])

    ax.set_xlim(0, 1)
    ax.set_ylim(0, 1)
    ax.axis('off')


def test_PathEffect_points_to_pixels():
    fig = plt.figure(dpi=150)
    p1, = plt.plot(range(10))
    p1.set_path_effects([path_effects.SimpleLineShadow(),
                         path_effects.Normal()])
    renderer = fig.canvas.get_renderer()
    pe_renderer = path_effects.PathEffectRenderer(
        p1.get_path_effects(), renderer)
    # Confirm that using a path effects renderer maintains point sizes
    # appropriately. Otherwise rendered font would be the wrong size.
    assert renderer.points_to_pixels(15) == pe_renderer.points_to_pixels(15)


def test_SimplePatchShadow_offset():
    pe = path_effects.SimplePatchShadow(offset=(4, 5))
    assert pe._offset == (4, 5)


@image_comparison(['collection'], tol=0.03, style='mpl20')
def test_collection():
    x, y = np.meshgrid(np.linspace(0, 10, 150), np.linspace(-5, 5, 100))
    data = np.sin(x) + np.cos(y)
    cs = plt.contour(data)
    pe = [path_effects.PathPatchEffect(edgecolor='black', facecolor='none',
                                       linewidth=12),
          path_effects.Stroke(linewidth=5)]

    for collection in cs.collections:
        collection.set_path_effects(pe)

    for text in plt.clabel(cs, colors='white'):
        text.set_path_effects([path_effects.withStroke(foreground='k',
                                                       linewidth=3)])
        text.set_bbox({'boxstyle': 'sawtooth', 'facecolor': 'none',
                       'edgecolor': 'blue'})


@image_comparison(['tickedstroke'], remove_text=True, extensions=['png'])
def test_tickedstroke():
    fig, (ax1, ax2, ax3) = plt.subplots(1, 3, figsize=(12, 4))
    path = Path.unit_circle()
    patch = patches.PathPatch(path, facecolor='none', lw=2, path_effects=[
        path_effects.withTickedStroke(angle=-90, spacing=10, length=1)])

    ax1.add_patch(patch)
    ax1.axis('equal')
    ax1.set_xlim(-2, 2)
    ax1.set_ylim(-2, 2)

    ax2.plot([0, 1], [0, 1], label=' ',
             path_effects=[path_effects.withTickedStroke(spacing=7,
                                                         angle=135)])
    nx = 101
    x = np.linspace(0.0, 1.0, nx)
    y = 0.3 * np.sin(x * 8) + 0.4
    ax2.plot(x, y, label=' ', path_effects=[path_effects.withTickedStroke()])

    ax2.legend()

    nx = 101
    ny = 105

    # Set up survey vectors
    xvec = np.linspace(0.001, 4.0, nx)
    yvec = np.linspace(0.001, 4.0, ny)

    # Set up survey matrices. Design disk loading and gear ratio.
    x1, x2 = np.meshgrid(xvec, yvec)

    # Evaluate some stuff to plot
    g1 = -(3 * x1 + x2 - 5.5)
    g2 = -(x1 + 2 * x2 - 4)
    g3 = .8 + x1 ** -3 - x2

    cg1 = ax3.contour(x1, x2, g1, [0], colors=('k',))
    plt.setp(cg1.collections,
             path_effects=[path_effects.withTickedStroke(angle=135)])

    cg2 = ax3.contour(x1, x2, g2, [0], colors=('r',))
    plt.setp(cg2.collections,
             path_effects=[path_effects.withTickedStroke(angle=60, length=2)])

    cg3 = ax3.contour(x1, x2, g3, [0], colors=('b',))
    plt.setp(cg3.collections,
             path_effects=[path_effects.withTickedStroke(spacing=7)])

    ax3.set_xlim(0, 4)
    ax3.set_ylim(0, 4)
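# Illustrative sketch (not one of the original tests): the minimal pattern the
# tests above exercise is attaching a stroke outline to a text artist so it
# stays legible over any background.
def _example_stroked_text():
    fig, ax = plt.subplots()
    ax.imshow([[1, 2], [2, 3]])
    txt = ax.text(0.5, 0.5, 'outlined', color='black', size=24, ha='center')
    txt.set_path_effects([path_effects.withStroke(linewidth=3, foreground='w')])
    return fig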
import sys

import irefindex_parser
reload(irefindex_parser)  # Python 2 builtin; this module targets Python 2
from irefindex_parser import *

import metrics_nx
reload(metrics_nx)
from metrics_nx import *

try:
    import metrics_gt
    reload(metrics_gt)
except ImportError:
    sys.stderr.write("[warning] Cannot import graph_tool\n")
import asyncio
import logging
import re
import time
import traceback

import discord

from musicbot import _func_, _get_variable, exceptions, factory
from musicbot.bot import MusicBot
from musicbot.constructs import Response
from musicbot.opus_loader import load_opus_lib
from musicbot.utils import fixg, ftimedelta

load_opus_lib()

log = logging.getLogger(__name__)


class PlayCmd:
    async def Run(self, bot: MusicBot, message: discord.Message, player, channel, author,
                  permissions, leftover_args, song_url):
        """
        Usage:
            {command_prefix}play song_link
            {command_prefix}play text to search for
            {command_prefix}play spotify_uri

        Adds the song to the playlist.  If a link is not provided, the first
        result from a youtube search is added to the queue.

        If enabled in the config, the bot will also support Spotify URIs, however
        it will use the metadata (e.g song name and artist) to find a YouTube
        equivalent of the song. Streaming from Spotify is not possible.
        """
        song_url = song_url.strip('<>')

        await bot.send_typing(channel)

        if leftover_args:
            song_url = ' '.join([song_url, *leftover_args])
        leftover_args = None  # prevent some crazy shit happening down the line

        # Make sure forward slashes work properly in search queries
        linksRegex = '((http(s)*:[/][/]|www.)([a-z]|[A-Z]|[0-9]|[/.]|[~])*)'
        pattern = re.compile(linksRegex)
        matchUrl = pattern.match(song_url)
        song_url = song_url.replace('/', '%2F') if matchUrl is None else song_url

        # Rewrite YouTube playlist URLs if the wrong URL type is given
        playlistRegex = r'watch\?v=.+&(list=[^&]+)'
        matches = re.search(playlistRegex, song_url)
        groups = matches.groups() if matches is not None else []
        song_url = "https://www.youtube.com/playlist?" + groups[0] if len(groups) > 0 else song_url

        if bot.config._spotify:
            if 'open.spotify.com' in song_url:
                song_url = 'spotify:' + re.sub('(http[s]?:\/\/)?(open.spotify.com)\/', '', song_url).replace('/', ':')
                # remove session id (and other query stuff)
                song_url = re.sub('\?.*', '', song_url)
            if song_url.startswith('spotify:'):
                parts = song_url.split(":")
                try:
                    if 'track' in parts:
                        res = await bot.spotify.get_track(parts[-1])
                        song_url = res['artists'][0]['name'] + ' ' + res['name']

                    elif 'album' in parts:
                        res = await bot.spotify.get_album(parts[-1])
                        await bot._do_playlist_checks(permissions, player, author, res['tracks']['items'])
                        procmesg = await bot.safe_send_message(
                            channel,
                            bot.str.get('cmd-play-spotify-album-process',
                                        'Processing album `{0}` (`{1}`)').format(res['name'], song_url))
                        for i in res['tracks']['items']:
                            song_url = i['name'] + ' ' + i['artists'][0]['name']
                            log.debug('Processing {0}'.format(song_url))
                            await bot.cmd_play(message, player, channel, author, permissions,
                                               leftover_args, song_url)
                        await bot.safe_delete_message(procmesg)
                        return Response(
                            bot.str.get('cmd-play-spotify-album-queued',
                                        "Enqueued `{0}` with **{1}** songs.").format(
                                res['name'], len(res['tracks']['items'])))

                    elif 'playlist' in parts:
                        res = []
                        r = await bot.spotify.get_playlist_tracks(parts[-1])
                        while True:
                            res.extend(r['items'])
                            if r['next'] is not None:
                                r = await bot.spotify.make_spotify_req(r['next'])
                                continue
                            else:
                                break
                        await bot._do_playlist_checks(permissions, player, author, res)
                        procmesg = await bot.safe_send_message(
                            channel,
                            bot.str.get('cmd-play-spotify-playlist-process',
                                        'Processing playlist `{0}` (`{1}`)').format(parts[-1], song_url))
                        for i in res:
                            song_url = i['track']['name'] + ' ' + i['track']['artists'][0]['name']
                            log.debug('Processing {0}'.format(song_url))
                            await bot.cmd_play(message, player, channel, author, permissions,
                                               leftover_args, song_url)
                        await bot.safe_delete_message(procmesg)
                        return Response(
                            bot.str.get('cmd-play-spotify-playlist-queued',
                                        "Enqueued `{0}` with **{1}** songs.").format(parts[-1], len(res)))

                    else:
                        raise exceptions.CommandError(
                            bot.str.get('cmd-play-spotify-unsupported',
                                        'That is not a supported Spotify URI.'),
                            expire_in=30)
                except exceptions.SpotifyError:
                    raise exceptions.CommandError(
                        bot.str.get('cmd-play-spotify-invalid',
                                    'You either provided an invalid URI, or there was a problem.'))

        # This lock prevents spamming the play command to add entries that exceed the time limit / maximum song limit
        async with bot.aiolocks[_func_() + ':' + str(author.id)]:
            if permissions.max_songs and player.playlist.count_for_user(author) >= permissions.max_songs:
                raise exceptions.PermissionsError(
                    bot.str.get('cmd-play-limit',
                                "You have reached your enqueued song limit ({0})").format(permissions.max_songs),
                    expire_in=30
                )

            if player.karaoke_mode and not permissions.bypass_karaoke_mode:
                raise exceptions.PermissionsError(
                    bot.str.get('karaoke-enabled',
                                "Karaoke mode is enabled, please try again when its disabled!"),
                    expire_in=30
                )

            # Try to determine entry type, if _type is playlist then there should be entries
            while True:
                try:
                    info = await bot.downloader.extract_info(player.playlist.loop, song_url,
                                                             download=False, process=False)
                    # If an exception arises when processing we go on and let extract_info down the
                    # line report it, because info might be a playlist and the thing that broke it
                    # might be an individual entry
                    try:
                        info_process = await bot.downloader.extract_info(player.playlist.loop, song_url,
                                                                         download=False)
                    except:
                        info_process = None

                    log.debug(info)

                    if info_process and info and info_process.get('_type', None) == 'playlist' \
                            and 'entries' not in info and not info.get('url', '').startswith('ytsearch'):
                        use_url = info_process.get('webpage_url', None) or info_process.get('url', None)
                        if use_url == song_url:
                            log.warning(
                                "Determined incorrect entry type, but suggested url is the same. Help.")
                            # If we break here it will break things down the line and give
                            # "This is a playlist" exception as a result
                            break

                        log.debug("Assumed url \"%s\" was a single entry, was actually a playlist" % song_url)
                        log.debug("Using \"%s\" instead" % use_url)
                        song_url = use_url
                    else:
                        break
                except Exception as e:
                    if 'unknown url type' in str(e):
                        # it's probably not actually an extractor
                        song_url = song_url.replace(':', '')
                        info = await bot.downloader.extract_info(player.playlist.loop, song_url,
                                                                 download=False, process=False)
                    else:
                        raise exceptions.CommandError(e, expire_in=30)

            if not info:
                raise exceptions.CommandError(
                    bot.str.get('cmd-play-noinfo',
                                "That video cannot be played. Try using the {0}stream command.").format(
                        bot.config.command_prefix),
                    expire_in=30
                )

            if info.get('extractor', '') not in permissions.extractors and permissions.extractors:
                raise exceptions.PermissionsError(
                    bot.str.get('cmd-play-badextractor',
                                "You do not have permission to play media from this service."),
                    expire_in=30
                )

            # abstract the search handling away from the user
            # our ytdl options allow us to use search strings as input urls
            if info.get('url', '').startswith('ytsearch'):
                # print("[Command:play] Searching for \"%s\"" % song_url)
                info = await bot.downloader.extract_info(
                    player.playlist.loop,
                    song_url,
                    download=False,
                    process=True,    # ASYNC LAMBDAS WHEN
                    on_error=lambda e: asyncio.ensure_future(
                        bot.safe_send_message(channel, "```\n%s\n```" % e, expire_in=120),
                        loop=bot.loop),
                    retry_on_error=True
                )

                if not info:
                    raise exceptions.CommandError(
                        bot.str.get('cmd-play-nodata',
                                    "Error extracting info from search string, youtubedl returned no data. "
                                    "You may need to restart the bot if this continues to happen."),
                        expire_in=30
                    )

                if not all(info.get('entries', [])):
                    # empty list, no data
                    log.debug("Got empty list, no data")
                    return

                # TODO: handle 'webpage_url' being 'ytsearch:...' or extractor type
                song_url = info['entries'][0]['webpage_url']
                info = await bot.downloader.extract_info(player.playlist.loop, song_url,
                                                         download=False, process=False)
                # Now I could just do: return await bot.cmd_play(player, channel, author, song_url)
                # But this is probably fine

            # If it's a playlist
            if 'entries' in info:
                await bot._do_playlist_checks(permissions, player, author, info['entries'])

                num_songs = sum(1 for _ in info['entries'])

                if info['extractor'].lower() in ['youtube:playlist', 'soundcloud:set', 'bandcamp:album']:
                    try:
                        return await bot._cmd_play_playlist_async(player, channel, author, permissions,
                                                                  song_url, info['extractor'])
                    except exceptions.CommandError:
                        raise
                    except Exception as e:
                        log.error("Error queuing playlist", exc_info=True)
                        raise exceptions.CommandError(
                            bot.str.get('cmd-play-playlist-error',
                                        "Error queuing playlist:\n`{0}`").format(e),
                            expire_in=30)

                t0 = time.time()

                # My test was 1.2 seconds per song, but we maybe should fudge it a bit, unless we can
                # monitor it and edit the message with the estimated time, but that's some ADVANCED SHIT
                # I don't think we can hook into it anyways, so this will have to do.
                # It would probably be a thread to check a few playlists and get the speed from that
                # Different playlists might download at different speeds though
                wait_per_song = 1.2

                procmesg = await bot.safe_send_message(
                    channel,
                    bot.str.get('cmd-play-playlist-gathering-1',
                                'Gathering playlist information for {0} songs{1}').format(
                        num_songs,
                        bot.str.get('cmd-play-playlist-gathering-2',
                                    ', ETA: {0} seconds').format(fixg(num_songs * wait_per_song))
                        if num_songs >= 10 else '.'))

                # We don't have a pretty way of doing this yet.  We need either a loop
                # that sends these every 10 seconds or a nice context manager.
                await bot.send_typing(channel)

                # TODO: I can create an event emitter object instead, add event functions, and every play list might be asyncified
                # Also have a "verify_entry" hook with the entry as an arg and returns the entry if its ok

                entry_list, position = await player.playlist.import_from(song_url, channel=channel, author=author)

                tnow = time.time()
                ttime = tnow - t0
                listlen = len(entry_list)
                drop_count = 0

                if permissions.max_song_length:
                    for e in entry_list.copy():
                        if e.duration > permissions.max_song_length:
                            player.playlist.entries.remove(e)
                            entry_list.remove(e)
                            drop_count += 1
                            # Im pretty sure there's no situation where this would ever break
                            # Unless the first entry starts being played, which would make this a race condition
                    if drop_count:
                        print("Dropped %s songs" % drop_count)

                log.info("Processed {} songs in {} seconds at {:.2f}s/song, {:+.2g}/song from expected ({}s)".format(
                    listlen,
                    fixg(ttime),
                    ttime / listlen if listlen else 0,
                    ttime / listlen - wait_per_song if listlen - wait_per_song else 0,
                    fixg(wait_per_song * num_songs))
                )

                await bot.safe_delete_message(procmesg)

                if not listlen - drop_count:
                    raise exceptions.CommandError(
                        bot.str.get('cmd-play-playlist-maxduration',
                                    "No songs were added, all songs were over max duration (%ss)")
                        % permissions.max_song_length,
                        expire_in=30
                    )

                reply_text = bot.str.get('cmd-play-playlist-reply',
                                         "Enqueued **%s** songs to be played. Position in queue: %s")
                btext = str(listlen - drop_count)

            # If it's an entry
            else:
                # youtube:playlist extractor but it's actually an entry
                if info.get('extractor', '').startswith('youtube:playlist'):
                    try:
                        info = await bot.downloader.extract_info(
                            player.playlist.loop,
                            'https://www.youtube.com/watch?v=%s' % info.get('url', ''),
                            download=False,
                            process=False)
                    except Exception as e:
                        raise exceptions.CommandError(e, expire_in=30)

                if permissions.max_song_length and info.get('duration', 0) > permissions.max_song_length:
                    raise exceptions.PermissionsError(
                        bot.str.get('cmd-play-song-limit',
                                    "Song duration exceeds limit ({0} > {1})").format(
                            info['duration'], permissions.max_song_length),
                        expire_in=30
                    )

                entry, position = await player.playlist.add_entry(song_url, channel=channel, author=author)

                reply_text = bot.str.get('cmd-play-song-reply',
                                         "Enqueued `%s` to be played. Position in queue: %s")
                btext = entry.title

            if position == 1 and player.is_stopped:
                position = bot.str.get('cmd-play-next', 'Up next!')
                reply_text %= (btext, position)
            else:
                try:
                    time_until = await player.playlist.estimate_time_until(position, player)
                    reply_text += bot.str.get('cmd-play-eta', ' - estimated time until playing: %s')
                except:
                    traceback.print_exc()
                    time_until = ''

                reply_text %= (btext, position, ftimedelta(time_until))

        return Response(reply_text, delete_after=30)


def register() -> None:
    factory.register("play", PlayCmd, ["p", ])
def const_ver():
    return "v8.0"


def is_gpvdm_next():
    return False
from setuptools import setup, find_packages
import codecs
import os
import re
import sys

here = os.path.abspath(os.path.dirname(__file__))

min_requires = [
    "pycarol>=2.45.0",
    "pandas"
]

extras_require = {
}

extras_require["complete"] = sorted(
    {v for req in extras_require.values() for v in req}
)


def read(*parts):
    # intentionally *not* adding an encoding option to open, See:
    # https://github.com/pypa/virtualenv/issues/201#issuecomment-3145690
    with codecs.open(os.path.join(here, *parts), 'r') as fp:
        return fp.read()


readme_note = """\
.. note::

   For the latest source, discussion, etc, please visit the
   `GitHub repository <https://github.com/jnefoussi/pytechfin>`_\n\n
"""

with open('README.rst') as fobj:
    long_description = readme_note + fobj.read()


def find_version(*file_paths):
    version_file = read(*file_paths)
    version_match = re.search(
        r"^__version__ = ['\"]([^'\"]*)['\"]",
        version_file,
        re.M,
    )
    if version_match:
        return version_match.group(1)

    raise RuntimeError("Unable to find version string.")


setup(
    name='pytechfin',
    setup_requires=["wheel", "pandas"],
    packages=find_packages(exclude=['docs', 'doc']),
    version=find_version("pytechfin", "__init__.py"),
    license='MIT',
    description='Techfin Python API and Tools',
    long_description=long_description,
    long_description_content_type="text/x-rst",
    author='Jonathan J Nefoussi',
    maintainer='Jonathan J Nefoussi',
    author_email='[email protected]',
    url='https://github.com/jnefoussi/pytechfin',
    keywords=['TOTVS Techfin', 'Carol.ai', 'TOTVS'],
    install_requires=min_requires,
    extras_require=extras_require,
    classifiers=[
        # Chose either "3 - Alpha", "4 - Beta" or "5 - Production/Stable" as the current state of your package
        'Development Status :: 5 - Production/Stable',
        # Define that your audience are developers
        'Intended Audience :: Developers',
        'Topic :: Software Development :: Build Tools',
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python :: 3.6',
        "Operating System :: OS Independent",
    ],
)
from MeioDeTransporte import MeioDeTransporte


class Aereo(MeioDeTransporte):

    def __init__(self, numAsa):
        super().__init__()
        self.__numAsa = numAsa

    # Getters and setters
    # *******************************#
    def get_numAsas(self):
        return self.__numAsa

    def set_numAsas(self, num: int):
        self.__numAsa = num

    def informacoes(self):
        print(f'Número de Asas: {self.__numAsa}')
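# Usage sketch for the class above (assumes the MeioDeTransporte base class,
# not shown in this dump, provides a no-argument initializer):
if __name__ == '__main__':
    aviao = Aereo(numAsa=2)
    aviao.informacoes()  # -> Número de Asas: 2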
import os
import unittest
from unittest import TestCase
from xml.etree import ElementTree as ET

from xam import Addon

try:
    from collections import OrderedDict
except ImportError:
    from collective.ordereddict import OrderedDict


class TestAddon(TestCase):

    def assert_attrs(self, obj, attrs):
        for attr_name, expected_value in attrs.items():
            attr_value = getattr(obj, attr_name)
            self.assertEqual(expected_value, attr_value)
            self.assertTrue(isinstance(attr_value, unicode))

    def assert_dict(self, expected, actual):
        for key, val in actual.items():
            self.assertTrue(isinstance(key, unicode))
            self.assertTrue(isinstance(val, unicode))
        self.assertEqual(expected, actual)

    def test_parse(self):
        addon = Addon.from_filename(os.path.join(os.path.dirname(__file__), 'data', 'addon.xml'))
        expected = {
            # attr_name: expected_value
            'version': u'1.2.1',
            'id': u'plugin.video.academicearth',
            'name': u'Academic Earth',
            'provider': u'Jonathan Beluch (jbel)',
        }
        self.assert_attrs(addon, expected)
        self.assert_dict({
            u'xbmc.python': u'2.0',
            u'script.module.beautifulsoup': u'3.0.8',
            u'script.module.xbmcswift': u'0.2.0',
            u'plugin.video.youtube': u'2.9.1',
        }, addon.dependencies)
        self.assertEqual(addon.languages, ['en', 'fr'])
        self.assertNotEqual(None, addon.metadata)
        self.assertEqual('all', addon.platform)
        self.assertEqual(OrderedDict(
            [(None, 'Watch lectures from Academic Earth (http://academicearth.org)')]
        ), addon.summaries)
        self.assertEqual('Watch lectures from Academic Earth (http://academicearth.org)',
                         addon.summary())
        #self.assertEqual('Watch lectures from Academic Earth (http://academicearth.org)',
        #                 addon.summary('en'))
        self.assertEqual(OrderedDict(
            [(None, 'Browse online courses and lectures from the world\'s top scholars.')]
        ), addon.descriptions)
        self.assertEqual('Browse online courses and lectures from the world\'s top scholars.',
                         addon.description())

    def test_setters(self):
        xml = ET.parse(os.path.join(os.path.dirname(__file__), 'data', 'addon.xml')).getroot()
        addon = Addon(xml)
        self.assertEqual('1.2.1', addon.version)
        addon.version = '1.2.2'
        self.assertEqual('1.2.2', addon.version)

    def test_to_dict(self):
        addon = Addon.from_filename(os.path.join(os.path.dirname(__file__), 'data', 'addon.xml'))
        actual = addon.to_dict()
        with open(os.path.join(os.path.dirname(__file__), 'data', 'addon.xml')) as inp:
            xml = inp.read()
        expected = {
            'id': u'plugin.video.academicearth',
            'name': u'Academic Earth',
            'version': u'1.2.1',
            'provider': u'Jonathan Beluch (jbel)',
            'dependencies': {
                'xbmc.python': '2.0',
                'script.module.beautifulsoup': '3.0.8',
                'script.module.xbmcswift': '0.2.0',
                'plugin.video.youtube': '2.9.1',
            },
            'summaries': {None: u"Watch lectures from Academic Earth (http://academicearth.org)"},
            'descriptions': {None: u"Browse online courses and lectures from the world's top scholars."},
            'platform': 'all',
            '_xml': xml,
        }
        for key, val in expected.items():
            if not key.startswith('_'):
                self.assertEqual(val, actual[key])


LANG_XML_TMP = '''
<addon id="plugin.video.academicearth"
       name="Academic Earth"
       provider-name="Jonathan Beluch (jbel)"
       version="1.2.1">
  <extension point="xbmc.addon.metadata">
    %s
  </extension>
</addon>
'''


class TestLangTags(TestCase):

    def test_no_lang_tag(self):
        xmlstr = LANG_XML_TMP % ''
        addon = Addon(ET.fromstring(xmlstr))
        self.assertEqual(addon.languages, [])

    def test_self_close_lang_tag(self):
        xmlstr = LANG_XML_TMP % '<language/>'
        addon = Addon(ET.fromstring(xmlstr))
        self.assertEqual(addon.languages, [])

    def test_empty_lang_tag(self):
        xmlstr = LANG_XML_TMP % '<language></language>'
        addon = Addon(ET.fromstring(xmlstr))
        self.assertEqual(addon.languages, [])

    def test_data_lang_tag(self):
        xmlstr = LANG_XML_TMP % '<language>en</language>'
        addon = Addon(ET.fromstring(xmlstr))
        self.assertEqual(addon.languages, ['en'])

        xmlstr = LANG_XML_TMP % '<language>en fr</language>'
        addon = Addon(ET.fromstring(xmlstr))
        self.assertEqual(addon.languages, ['en', 'fr'])


if __name__ == '__main__':
    unittest.main()
python
import modi
import time

"""
Example script for the usage of dial module

Make sure you connect 1 dial module and 1 speaker module
to your network module
"""

if __name__ == "__main__":
    bundle = modi.MODI()
    dial = bundle.dials[0]
    speak = bundle.speakers[0]

    while True:
        speak.tune = 800, dial.degree
        time.sleep(0.02)
python
# DS3231 library for micropython
# tested on ESP8266
#
# Author: Sebastian Maerker
# License: mit
#
# only 24h mode is supported
#
# features:
#   - set time
#   - read time
#   - set alarms

import machine
from math import floor

i2cAddr = 0x68  # change I2C Address here if necessary


class DS3231:
    def __init__(self, i2cClockPin, i2cDataPin):
        # create RTC instance with I2C Pins
        self.sclPin = machine.Pin(i2cClockPin, pull=machine.Pin.PULL_UP, mode=machine.Pin.OPEN_DRAIN)
        self.sdaPin = machine.Pin(i2cDataPin, pull=machine.Pin.PULL_UP, mode=machine.Pin.OPEN_DRAIN)
        self.i2cVar = machine.I2C(-1, scl=self.sclPin, sda=self.sdaPin)
        self.i2cAddr = i2cAddr

    # get times functions -------------------------------------------------------------------------------------------------------

    def getYear(self):
        return decodeToDec(self.i2cVar.readfrom_mem(self.i2cAddr, 0x06, 1))

    def getMonth(self):
        temp = self.i2cVar.readfrom_mem(self.i2cAddr, 0x05, 1)
        return decodeToDec(convertToByteType(temp[0] & 0x7F))

    def getDay(self):
        # 0 - 31
        return decodeToDec(self.i2cVar.readfrom_mem(self.i2cAddr, 0x04, 1))

    def getDayOfWeek(self):
        # 1 - 7
        return decodeToDec(self.i2cVar.readfrom_mem(self.i2cAddr, 0x03, 1))

    def getHour(self):
        temp = self.i2cVar.readfrom_mem(self.i2cAddr, 0x02, 1)
        return decodeToDec(convertToByteType(temp[0] & 0x3F))

    def getMinutes(self):
        return decodeToDec(self.i2cVar.readfrom_mem(self.i2cAddr, 0x01, 1))

    def getSeconds(self):
        return decodeToDec(self.i2cVar.readfrom_mem(self.i2cAddr, 0x00, 1))

    def getDateTime(self):
        # returns whole date and time as list
        # (last two digits of year, month, day, day of week, hour, minutes, seconds)
        dateTime = [0, 0, 0, 0, 0, 0, 0]
        dateTime[0] = self.getYear()
        dateTime[1] = self.getMonth()
        dateTime[2] = self.getDay()
        dateTime[3] = self.getDayOfWeek()
        dateTime[4] = self.getHour()
        dateTime[5] = self.getMinutes()
        dateTime[6] = self.getSeconds()
        return dateTime

    # set times functions -------------------------------------------------------------------------------------------------------

    def setYear(self, year):
        # only last two digits (last two digits are used if longer)
        if(year > 99):
            thousands = floor(year / 100)
            year = year - (thousands * 100)
        self.i2cVar.writeto_mem(self.i2cAddr, 0x06, convertToByteType(encodeToByte(year)))

    def setMonth(self, month):
        self.i2cVar.writeto_mem(self.i2cAddr, 0x05, convertToByteType(encodeToByte(month) | 0))

    def setDay(self, day):
        # 0 - 31
        self.i2cVar.writeto_mem(self.i2cAddr, 0x04, convertToByteType(encodeToByte(day)))

    def setDayOfWeek(self, dayOfWeek):
        # 1 - 7
        self.i2cVar.writeto_mem(self.i2cAddr, 0x03, convertToByteType(encodeToByte(dayOfWeek)))

    def setHour(self, hour):
        self.i2cVar.writeto_mem(self.i2cAddr, 0x02, convertToByteType(encodeToByte(hour) & 0x3F))

    def setMinutes(self, minutes):
        self.i2cVar.writeto_mem(self.i2cAddr, 0x01, convertToByteType(encodeToByte(minutes)))

    def setSeconds(self, seconds):
        self.i2cVar.writeto_mem(self.i2cAddr, 0x00, convertToByteType(encodeToByte(seconds)))

    def setDateTime(self, year, month, day, dayOfWeek, hour, minutes, seconds):
        # set all the date and times (year is last two digits of year)
        self.setYear(year)
        self.setMonth(month)
        self.setDay(day)
        self.setDayOfWeek(dayOfWeek)
        self.setHour(hour)
        self.setMinutes(minutes)
        self.setSeconds(seconds)

    # get alarm functions ------------------------------------------------------------------------------------------------------

    def getAlarm1(self):
        # returns list as:
        # dayOfWeek or day (depending on setup in setAlarm), hour, minutes, seconds, type of alarm
        alarmTime = [0, 0, 0, 0, ""]
        alarmTime[0] = self.i2cVar.readfrom_mem(self.i2cAddr, 0x0A, 1)[0]
        alarmTime[1] = self.i2cVar.readfrom_mem(self.i2cAddr, 0x09, 1)[0]
        alarmTime[2] = self.i2cVar.readfrom_mem(self.i2cAddr, 0x08, 1)[0]
        alarmTime[3] = self.i2cVar.readfrom_mem(self.i2cAddr, 0x07, 1)[0]
        alarmTime[4] = decodeAlarmType(alarmTime)
        alarmTime = decodeAlarmTime(alarmTime)
        return alarmTime

    def getAlarm2(self):
        # returns list as:
        # dayOfWeek or day (depending on setup in setAlarm), hour, minutes, type of alarm
        alarmTime = [0, 0, 0, ""]
        alarmTime[0] = self.i2cVar.readfrom_mem(self.i2cAddr, 0x0D, 1)[0]
        alarmTime[1] = self.i2cVar.readfrom_mem(self.i2cAddr, 0x0C, 1)[0]
        alarmTime[2] = self.i2cVar.readfrom_mem(self.i2cAddr, 0x0B, 1)[0]
        alarmTime[3] = decodeAlarmType(alarmTime)
        alarmTime = decodeAlarmTime(alarmTime)
        return alarmTime

    def alarmTriggert(self, alarmNumber):
        # check if alarm triggered and reset alarm flag
        statusBits = self.i2cVar.readfrom_mem(self.i2cAddr, 0x0F, 1)[0]
        if(statusBits & alarmNumber):
            self.resetAlarmFlag(alarmNumber)  # was `self.resetAlarm(...)`, a method that does not exist
            return True
        else:
            return False

    # set alarm functions -------------------------------------------------------------------------------------------------------

    def setAlarm1(self, day, hour, minutes, seconds=0, alarmType="everyDay"):
        # alarm Types are:
        #   "everySecond" - alarm every second
        #   "everyMinute" - alarm when seconds match
        #   "everyHour"   - alarm when minutes and seconds match
        #   "everyDay"    - alarm when hours, minutes and seconds match ! default !
        #   "everyWeek"   - alarm when day of week, hours, minutes and seconds match
        #   "everyMonth"  - alarm when day of month, hours, minutes and seconds match
        alarmTime = encodeDateTime(day, hour, minutes, seconds, alarmType)
        self.i2cVar.writeto_mem(self.i2cAddr, 0x07, convertToByteType(alarmTime[3]))
        self.i2cVar.writeto_mem(self.i2cAddr, 0x08, convertToByteType(alarmTime[2]))
        self.i2cVar.writeto_mem(self.i2cAddr, 0x09, convertToByteType(alarmTime[1]))
        self.i2cVar.writeto_mem(self.i2cAddr, 0x0A, convertToByteType(alarmTime[0]))

    def setAlarm2(self, day, hour, minutes, alarmType="everyDay"):
        # alarm Types are:
        #   "everyMinute" - alarm every minute (at 00 seconds)
        #   "everyHour"   - alarm when minutes match
        #   "everyDay"    - alarm when hours and minutes match ! default !
        #   "everyWeek"   - alarm when day of week, hours and minutes match
        #   "everyMonth"  - alarm when day of month, hours and minutes match
        seconds = 0
        alarmTime = encodeDateTime(day, hour, minutes, seconds, alarmType)
        self.i2cVar.writeto_mem(self.i2cAddr, 0x0B, convertToByteType(alarmTime[2]))
        self.i2cVar.writeto_mem(self.i2cAddr, 0x0C, convertToByteType(alarmTime[1]))
        self.i2cVar.writeto_mem(self.i2cAddr, 0x0D, convertToByteType(alarmTime[0]))

    def turnOnAlarmIR(self, alarmNumber):
        # set alarm interrupt. AlarmNumber 1 or 2
        # when turned on, interrupt pin on DS3231 is "False" when alarm has been triggered
        controlRegister = self.i2cVar.readfrom_mem(self.i2cAddr, 0x0E, 1)[0]
        setByte = 0x04
        setByte = setByte + alarmNumber
        setByte = controlRegister | setByte
        self.i2cVar.writeto_mem(self.i2cAddr, 0x0E, convertToByteType(setByte))

    def turnOffAlarmIR(self, alarmNumber):
        # turn off alarm interrupt. AlarmNumber 1 or 2
        # only initiation of interrupt is turned off,
        # alarm flag is still set when alarm conditions meet (i don't get it either)
        controlRegister = self.i2cVar.readfrom_mem(self.i2cAddr, 0x0E, 1)[0]
        setByte = 0xFF
        setByte = setByte - alarmNumber
        setByte = controlRegister & setByte
        self.i2cVar.writeto_mem(self.i2cAddr, 0x0E, convertToByteType(setByte))

    def resetAlarmFlag(self, alarmNumber):
        statusBits = self.i2cVar.readfrom_mem(self.i2cAddr, 0x0F, 1)[0]
        self.i2cVar.writeto_mem(self.i2cAddr, 0x0F, convertToByteType(statusBits & (0xFF - alarmNumber)))


def convertToByteType(number):
    return bytes([number])


def decodeToDec(byte):
    return ((byte[0] >> 4) * 10) + (byte[0] & 0x0F)


def encodeToByte(dec):
    tens = floor(dec / 10)
    ones = dec - tens * 10
    return (tens << 4) + ones


def decodeAlarmType(alarmTime):
    if(len(alarmTime) > 4):
        m1Bit = (alarmTime[3] & 0x80) >> 7
    else:
        m1Bit = False
    m2Bit = (alarmTime[2] & 0x80) >> 7
    m3Bit = (alarmTime[1] & 0x80) >> 7
    m4Bit = (alarmTime[0] & 0x80) >> 7
    dayBit = (alarmTime[0] & 0x40) >> 6

    if(m1Bit and m2Bit and m3Bit and m4Bit):
        return "everySecond"
    elif(not m1Bit and m2Bit and m3Bit and m4Bit):
        return "everyMinute"
    elif(not m1Bit and not m2Bit and m3Bit and m4Bit):
        return "everyHour"
    elif(not m1Bit and not m2Bit and not m3Bit and m4Bit):
        return "everyDay"
    elif(not dayBit and not m1Bit and not m2Bit and not m3Bit and not m4Bit):
        return "everyMonth"
    elif(dayBit and not m1Bit and not m2Bit and not m3Bit and not m4Bit):
        return "everyWeek"
    else:
        return "noValidAlarmType"


def decodeAlarmTime(alarmTime):
    alarmTime[0] = decodeToDec(convertToByteType(alarmTime[0] & 0x3F))
    alarmTime[1] = decodeToDec(convertToByteType(alarmTime[1] & 0x3F))
    alarmTime[2] = decodeToDec(convertToByteType(alarmTime[2] & 0x7F))
    if(len(alarmTime) > 4):
        alarmTime[3] = decodeToDec(convertToByteType(alarmTime[3] & 0x7F))
    return alarmTime


def encodeAlarmType(alarmType):
    if(alarmType == "everySecond"):
        return 15   # 0b01111
    elif(alarmType == "everyMinute"):
        return 14   # 0b01110
    elif(alarmType == "everyHour"):
        return 12   # 0b01100
    elif(alarmType == "everyDay"):
        return 8    # 0b01000
    elif(alarmType == "everyMonth"):
        return 0    # 0b00000
    elif(alarmType == "everyWeek"):
        return 16   # 0b10000
    else:
        raise ValueError("""Not a supported alarmType. Options are:
            'everySecond' (only Alarm 1), 'everyMinute', 'everyHour',
            'everyDay', 'everyMonth', 'everyWeek'""")


def encodeDateTime(day, hour, minutes, seconds, alarmType):
    alarmBits = encodeAlarmType(alarmType)
    alarmTime = [0, 0, 0, 0]
    alarmTime[0] = (encodeToByte(day) & 0x3F) | ((alarmBits & 0x10) << 2) | ((alarmBits & 0x08) << 4)
    alarmTime[1] = (encodeToByte(hour) & 0x3F) | ((alarmBits & 0x04) << 5)
    alarmTime[2] = (encodeToByte(minutes) & 0x7F) | ((alarmBits & 0x02) << 6)
    alarmTime[3] = (encodeToByte(seconds) & 0x7F) | ((alarmBits & 0x01) << 7)
    return alarmTime
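
# Usage sketch (not part of the original library): reads the clock and arms a
# daily alarm. The pin numbers (SCL on GPIO5, SDA on GPIO4) are assumptions for
# a typical ESP8266 board; this only runs with a DS3231 actually attached.
rtc = DS3231(i2cClockPin=5, i2cDataPin=4)
rtc.setDateTime(21, 7, 14, 3, 12, 30, 0)    # '21-07-14, Wednesday, 12:30:00
print(rtc.getDateTime())                    # [21, 7, 14, 3, 12, 30, ~0]
rtc.setAlarm1(day=14, hour=6, minutes=45)   # default "everyDay": 06:45:00
rtc.turnOnAlarmIR(1)
if rtc.alarmTriggert(1):
    print("alarm 1 fired")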
python
# -*- coding: utf-8 -*-
"""
module.name
~~~~~~~~~~~~~~~

Preamble...
"""
from __future__ import absolute_import, print_function, unicode_literals

# TEST SETTINGS
TEST_RUNNER = 'django.test.runner.DiscoverRunner'

# Django replaces this, but it still wants it. *shrugs*
DATABASES = {
    'default': {
        'ENGINE': 'django.contrib.gis.db.backends.spatialite',
        'NAME': ':memory:',
    }
}

INSTALLED_APPS = (
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.admin',
    'django.contrib.sessions',
    'django.contrib.gis',
    'django_geopostcodes',
)

MIDDLEWARE_CLASSES = {}

NOSE_ARGS = [
    '--logging-clear-handlers',
    # Coverage - turn on with NOSE_WITH_COVERAGE=1
    '--cover-html',
    '--cover-package=django_geopostcodes',
    '--cover-erase',
    '--with-fixture-bundling',
    # Nose Progressive
    '--with-progressive',
]

SECRET_KEY = '53cr3773rc3553cr3773rc3553cr3773rc3553cr3773rc35'
python
""" Copyright 2017 Dell Inc. or its subsidiaries. All Rights Reserved. Module to hold helper classes and functions to determine run-time test IP information. Currently, """ import flogging import ipaddress import netifaces import socket import fit_common logs = flogging.get_loggers() class TestHostInterfacer(object): _cached = None @classmethod def get_testhost_ip(cls): if cls._cached is None: cls._cached = cls() logs.info('The IP address of %s has been selected as the most likely testhost IP address reachable from the DUT', cls._cached.__alleged_testhost_ip) return cls._cached.__alleged_testhost_ip def __init__(self): self.__alleged_testhost_ip = None s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) ip = fit_common.fitargs()['rackhd_host'] monip = fit_common.fitcfg()["rackhd-config"]["apiServerAddress"] monip_obj = ipaddress.ip_address(monip) logs.irl.debug('Trying to determine testhost IP address. Hitting rackhd_host value %s first', ip) s.connect((ip, 0)) logs.debug(' ip used to generate connection to %s was %s: ', ip, s.getsockname()[0]) alleged_testhost_ip_str = s.getsockname()[0] # python2/3 flake handling. The 'unicode' keyword is gone from p3. However, although # our code is p2, hound uses p3. We can cover both by using the -type- of a unicode string! ucode_type = type(u'unicode_string_to_type') alleged_testhost_ip = ipaddress.ip_address(ucode_type(alleged_testhost_ip_str)) if not alleged_testhost_ip.is_loopback: # A non-loopback address is about the best guess we can get. Use it. logs.irl.debug(' ip used to generate connection to %s is non-loopback. Using %s', ip, alleged_testhost_ip_str) self.__alleged_testhost_ip = alleged_testhost_ip_str return # Localhost. Great. We are either running on the DUT or are on a test-host. # In either case, grabbing pretty much any ip interface that isn't a loop back # should do the trick. docker_net = [] mono_net = [] eform_net = [] vbox_net = [] veth_net = [] extras_net = [] int_list = netifaces.interfaces() for interface in int_list: logs.irl.debug(' checking interface %s', interface) ifaddrs = netifaces.ifaddresses(interface) if netifaces.AF_INET not in ifaddrs: logs.irl.debug(' -- no ifaddrs on it, skipping') else: for net in ifaddrs[netifaces.AF_INET]: logs.irl.debug(' checking %s on %s', net, interface) addr = net['addr'] mask = net['netmask'] inet_form = u'{}/{}'.format(addr, mask) this_iface = ipaddress.ip_interface(inet_form) this_iface.on_name = interface dispo = None if this_iface.is_loopback: dispo = 'loopback-skip' elif monip_obj in this_iface.network: # really the last choice, all things considered! 
dispo = 'added to control-network-list' mono_net.append(this_iface) elif 'docker' in interface: dispo = 'added to docker list' docker_net.append(this_iface) elif interface.startswith('vbox'): dispo = 'added to vbox list' vbox_net.append(this_iface) elif interface.startswith('veth'): dispo = 'added to veth list' veth_net.append(this_iface) elif interface.startswith('eth') or interface.startswith('en'): dispo = 'added to en/eth list' eform_net.append(this_iface) else: logs.irl.debug('unknown interface type-ish %s seen', interface) dispo = 'added to extras list' extras_net.append(this_iface) logs.irl.debug(' -> %s', dispo) ordered_list = [] ordered_list.extend(eform_net) ordered_list.extend(docker_net) ordered_list.extend(vbox_net) ordered_list.extend(veth_net) ordered_list.extend(extras_net) ordered_list.extend(mono_net) logs.irl.debug(' Final list of possible addresses: %s', ordered_list) # note: we could go and ssh over and ping back to check these. For now, just # grab the first. if len(ordered_list) == 0: logs.warning('could not find the test-host ip address and fell back on localhost') self.__alleged_testhost_ip = '127.0.1.1' return picked = ordered_list[0] logs.irl.debug('picked %s on %s', picked.ip, picked.on_name) self.__alleged_testhost_ip = str(picked.ip) def get_testhost_ip(): return TestHostInterfacer.get_testhost_ip()
python
import sqlite3 as lite
import datetime
import json
from time import *


class Database:
    con = None
    cur = None

    def __init__(self, dbname):
        self.con = lite.connect(dbname + ".db")
        self.cur = self.con.cursor()

    def createIfNotExists(self):
        self.cur.execute("CREATE TABLE if not exists DrinksLog(ID Integer primary key, drink TEXT, date timestamp)")
        self.cur.execute("CREATE TABLE if not exists IngredientsLog(ID Integer primary key, ingredient TEXT,"
                         "ml integer, date timestamp)")
        self.cur.execute("""CREATE TABLE if not exists Ingredients (
                             Code varchar(50) not null primary key,
                             Name varchar(100) not null,
                             IsAlcoholic integer default 0 not null);""")
        self.cur.execute("""create unique index if not exists Ingredients_Code_uindex on Ingredients (Code);""")
        self.cur.execute("""CREATE TABLE if not exists Servos (
                             ServoNr integer not null constraint Servos_pk primary key,
                             Code varchar(50) not null);""")
        self.cur.execute("""create unique index if not exists Servos_ID_uindex on Servos (ServoNr);""")
        self.con.commit()

    def setDefaultValues(self):
        self.cur.execute("""INSERT INTO "Ingredients" ("Code", "Name", "IsAlcoholic") VALUES ('gin', 'Gin', 1);""")
        self.cur.execute("""INSERT INTO "Ingredients" ("Code", "Name", "IsAlcoholic") VALUES ('rum', 'Rum', 1);""")
        self.cur.execute("""INSERT INTO "Ingredients" ("Code", "Name", "IsAlcoholic") VALUES ('vodka', 'Vodka', 1);""")
        self.cur.execute("""INSERT INTO "Ingredients" ("Code", "Name", "IsAlcoholic") VALUES ('tequila', 'Tequila', 1);""")
        self.cur.execute("""INSERT INTO "Ingredients" ("Code", "Name") VALUES ('tonic', 'Tonic Water');""")
        self.cur.execute("""INSERT INTO "Ingredients" ("Code", "Name") VALUES ('coke', 'Cola');""")
        self.cur.execute("""INSERT INTO "Ingredients" ("Code", "Name") VALUES ('oj', 'Orange Juice');""")
        self.cur.execute("""INSERT INTO "Ingredients" ("Code", "Name") VALUES ('gren', 'Grenadine');""")
        self.cur.execute("""INSERT INTO "Ingredients" ("Code", "Name", "IsAlcoholic") VALUES ('mmix', 'Margarita Mix', 1);""")
        self.cur.execute("""INSERT INTO "Ingredients" ("Code", "Name") VALUES ('mate', 'Mate');""")
        self.cur.execute("""INSERT INTO "Ingredients" ("Code", "Name") VALUES ('pine', 'Pineapple Juice');""")
        self.cur.execute("""INSERT INTO "Ingredients" ("Code", "Name") VALUES ('raspberry', 'Raspberry');""")
        self.cur.execute("""INSERT INTO "Ingredients" ("Code", "Name") VALUES ('gga', 'Ginger Ale');""")
        self.cur.execute("""INSERT INTO "Ingredients" ("Code", "Name") VALUES ('cocos', 'Cocos');""")
        self.cur.execute("""INSERT INTO "Ingredients" ("Code", "Name") VALUES ('mango', 'Mango Juice');""")
        self.cur.execute("""INSERT INTO "Servos" ("ServoNr", "Code") VALUES (1, 'oj');""")
        self.cur.execute("""INSERT INTO "Servos" ("ServoNr", "Code") VALUES (2, 'tequila');""")
        self.cur.execute("""INSERT INTO "Servos" ("ServoNr", "Code") VALUES (3, 'gren');""")
        self.cur.execute("""INSERT INTO "Servos" ("ServoNr", "Code") VALUES (4, 'vodka');""")
        self.cur.execute("""INSERT INTO "Servos" ("ServoNr", "Code") VALUES (5, 'mmix');""")
        self.cur.execute("""INSERT INTO "Servos" ("ServoNr", "Code") VALUES (6, 'rum');""")
        self.cur.execute("""INSERT INTO "Servos" ("ServoNr", "Code") VALUES (7, 'coke');""")
        self.cur.execute("""INSERT INTO "Servos" ("ServoNr", "Code") VALUES (8, 'gin');""")
        self.cur.execute("""INSERT INTO "Servos" ("ServoNr", "Code") VALUES (9, 'tonic');""")
        self.cur.execute("""INSERT INTO "Servos" ("ServoNr", "Code") VALUES (10, 'mate');""")
        self.cur.execute("""INSERT INTO "Servos" ("ServoNr", "Code") VALUES (11, 'rum');""")
        self.cur.execute("""INSERT INTO "Servos" ("ServoNr", "Code") VALUES (12, 'pine');""")
        self.con.commit()

    def get_Servos(self):
        self.cur.execute("SELECT ServoNr,Code FROM Servos")
        items = self.cur.fetchall()
        return items

    def get_Servos_asJson(self):
        return json.dumps(self.get_Servos())

    def get_AllIngredients(self):
        self.cur.execute("SELECT Code, Name FROM Ingredients")
        items = self.cur.fetchall()
        return items

    def get_AllIngredients_asJson(self):
        return json.dumps(self.get_AllIngredients())

    def countUpDrink(self, drink):
        self.cur.execute("INSERT INTO DrinksLog (drink, date) VALUES (?, ?)",
                         (drink, datetime.datetime.now()))
        self.con.commit()

    def countUpIngredient(self, ingredient, ml):
        self.cur.execute("INSERT INTO IngredientsLog (ingredient, ml, date) VALUES (?, ?, ?)",
                         (ingredient, ml, datetime.datetime.now()))
        self.con.commit()

    def __del__(self):
        self.con.commit()
        self.con.close()


# when called directly, read out database and generate a log
if __name__ == "__main__":
    db = Database("h9k")
    db.cur.execute("SELECT * FROM DrinksLog WHERE date > '2018-12-11' ORDER BY date ASC")
    # db.cur.execute("SELECT * FROM DrinksLog ORDER BY date ASC")
    res = db.cur.fetchall()
    # print("%d entries" % len(res))
    for l in res:
        number, name, tstampstr = l
        tstamp = mktime(strptime(tstampstr.split(".")[0], "%Y-%m-%d %H:%M:%S"))
        tstamp += (14 * 24 * 3600 + 10 * 3600 + 8 * 60 + 28)
        print("%30s: %s" % (strftime("%a %Y-%m-%d %H:%M:%S", localtime(tstamp)), name))
python
import os
import uuid
import time

from aim.engine.aim_repo import AimRepo


def init(overwrite=False):
    # Init repo if doesn't exist and return repo instance
    repo = AimRepo.get_working_repo()
    if not repo:
        repo = AimRepo(os.getcwd())
        repo.init()

    # Check if repo index is empty or not
    # Reset index or commit according to `overwrite` argument
    if not repo.is_index_empty():
        if overwrite:
            repo.reset_index()
        else:
            repo.commit(str(uuid.uuid1()), int(time.time()))
python
from datetime import date

from django import forms
from django.core.exceptions import ValidationError

from petstagram.common.helps import BootstrapFormMixin, DisabledFieldsFormMixin
from petstagram.main.models import Pet


class CreatePetForm(BootstrapFormMixin, forms.ModelForm):
    def __init__(self, user, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.user = user
        self._init_bootstrap_form_controls()

    def save(self, commit=True):
        # commit=False does not persist to the database;
        # it just returns the object to be created
        pet = super().save(commit=False)
        pet.user = self.user
        if commit:
            pet.save()
        return pet

    class Meta:
        model = Pet
        fields = ('name', 'type', 'data_of_birth')
        widgets = {
            'name': forms.TextInput(
                attrs={
                    'placeholder': 'Enter pet name',
                }
            ),
        }


class EditPetForm(BootstrapFormMixin, forms.ModelForm):
    MIN_DATE = date(1920, 1, 1)
    MAX_DATE = date.today()

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self._init_bootstrap_form_controls()

    def clean_data_of_birth(self):
        data_of_birth = self.cleaned_data['data_of_birth']
        if data_of_birth < self.MIN_DATE or self.MAX_DATE < data_of_birth:
            raise ValidationError(f'Date of birth must be between {self.MIN_DATE} and {self.MAX_DATE}')
        return data_of_birth

    class Meta:
        model = Pet
        exclude = ('user_profile',)
        widgets = {
            'data_of_birth': forms.DateInput,
        }


class DeletePetForm(BootstrapFormMixin, DisabledFieldsFormMixin, forms.ModelForm):
    # if we want a field to stay editable, we leave it out of
    # disabled_fields = ('the fields we want to lock')
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self._init_bootstrap_form_controls()
        self._init_disabled_fields()

    def save(self, commit=True):
        self.instance.delete()
        return self.instance

    class Meta:
        model = Pet
        exclude = ('user_profile',)
python
# grid relative
from .environment_manager import EnvironmentManager
from .group_manager import GroupManager
from .user_manager import UserManager
python
# Copyright 2017 BBVA
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# import logging
#
# try:
#     from ujson import load
# except ImportError:
#     from json import load
#
# from .model import *
# from ...helpers import *
#
# from apitest import postman_parser
#
# log = logging.getLogger('apitest')
#
#
# def launch_apitest_postman_analyze_in_console(shared_config: ApitestPostmanAnalyzeModel, **kwargs):
#     """Launch in console mode"""
#
#     # Load config
#     config = ApitestPostmanAnalyzeModel(**shared_config, **kwargs)
#
#     # Check if config is valid
#     if not config.is_valid:
#         for prop, msg in config.validation_errors:
#             log.critical("[!] '%s' property %s" % (prop, msg))
#         return
#
#     log.setLevel(config.verbosity)
#
#     try:
#         log.console("[*] Analyzing parser file: '%s'" % config.file_path)
#
#         # Get and load data
#         with open(config.file_path, "r") as f:
#             json_info = load(f)
#
#         loaded_file = postman_parser(json_info)
#
#         if loaded_file.is_valid:
#             log.console("[*] File format is OK")
#             log.console("[*] Summary:")
#             log.console("    - Total collections: %s" % len(loaded_file.collections))
#             log.console("    - Total end-points: %s" % sum(len(x.end_points) for x in loaded_file.collections))
#
#             if config.verbosity >= 2:
#                 for col in loaded_file.collections:
#                     log.console("    > {name:{align}} - {endpoint:>5} endpoints".format(name=col.name,
#                                                                                        align=20,
#                                                                                        endpoint=len(col.end_points)))
#         else:
#             log.console("[!] File format is WRONG")
#
#             for tag, error in loaded_file.validation_errors:
#                 log.console("    - {}: {}".format(tag, error))
#
#     except KeyboardInterrupt:
#         log.console("[*] CTRL+C caught. Exiting...")
#     except Exception as e:
#         log.critical("[!] Unhandled exception: %s" % str(e))
#
#         log.exception("[!] Unhandled exception: %s" % e, stack_info=True)
#     finally:
#         log.debug("[*] Shutdown...")
#
#
# __all__ = ("launch_apitest_postman_analyze_in_console",)
python
from yunorm.db import models
from yunorm.db import field

CE_DB = {
    'host': '10.x.x.x',
    'port': 3306,
    'user': 'root',
    'password': '123456',
    'database': 'ce',
    'charset': 'utf8mb4',
    'pool_size': 10,
}


class Feed(models.Model):
    url = field.CharField()
    name = field.CharField()
    descp = field.CharField()
    zan_num = field.IntegerField()
    like_num = field.IntegerField()
    create_time = field.DateTimeField()

    class meta:
        db_config = CE_DB
        db_table = 'feed'
python
from .. import db


class Email(db.Model):
    """ Email Model for storing contact emails """
    __tablename__ = 'email'

    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    email = db.Column(db.String(100), unique=True)
    contact_id = db.Column(db.Integer, db.ForeignKey('contact.id'))
    contact = db.relationship('Contact', back_populates='email')

    def __repr__(self):
        return f'<Email {self.email}, contact_id {self.contact_id}>'
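
# Hypothetical counterpart, not part of this file: a minimal sketch of the
# Contact model implied by back_populates='email' above. The 'name' column is
# an assumption for illustration only.
class Contact(db.Model):
    __tablename__ = 'contact'

    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    name = db.Column(db.String(100))
    # mirrors Email.contact; one contact can own many email rows
    email = db.relationship('Email', back_populates='contact')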
python
# Generated by Django 3.2.8 on 2021-11-09 18:34

from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('blog', '0003_auto_20211109_1805'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='reader',
            name='book_title',
        ),
        migrations.RemoveField(
            model_name='reader',
            name='user_id',
        ),
        migrations.AddField(
            model_name='reader',
            name='books',
            field=models.ManyToManyField(related_name='readers', to='blog.Book'),
        ),
        migrations.AddField(
            model_name='reader',
            name='user',
            field=models.OneToOneField(default=1, on_delete=django.db.models.deletion.CASCADE, to='auth.user'),
            preserve_default=False,
        ),
    ]
python
from flask_wtf import FlaskForm
from wtforms import StringField, TextAreaField, SubmitField, SelectField, HiddenField
from wtforms.validators import DataRequired, Length, Required, Email


class QuestionForm(FlaskForm):
    """Question form."""
    products = [
        ('learn-ultra', 'Blackboard Learn Ultra'),
        ('learn-original', 'Blackboard Learn Original'),
        ('bb-data', 'Blackboard Data'),
        ('bb-ally', 'Blackboard Ally'),
        ('bb-collab', 'Blackboard Collaborate'),
        ('bb-analytics', 'Blackboard Analytics'),
        ('bb-classroom', 'Blackboard Classroom'),
        ('bb-mobile', 'Blackboard Mobile Apps'),
        ('bb-wcm', 'Blackboard Web Community Manager'),
        ('bb-mass', 'Blackboard Mass Communications'),
        ('bb-connect', 'Blackboard Connect'),
        ('bb-other', 'Other')
    ]
    gname = StringField('Given Name', [
        DataRequired()])
    fname = StringField('Family Name', [
        DataRequired()])
    email = StringField('Email', [
        Email(message=('Not a valid email address.')),
        DataRequired()])
    institution = StringField('Institution', [
        DataRequired()])
    product = SelectField('Product',
                          choices=products)
    question = TextAreaField('Question', [
        DataRequired(),
        Length(min=4, message=('Your message is too short.'))])
    submit = SubmitField('Submit')
python
"""Common constants used in Agtor.""" # volume ML_to_mm = 100.0 mm_to_ML = 100.0 # distance km_to_ha = 100.0 ha_to_km = 100.0 # time SEC_IN_DAY = 86400.0 # amount MILLION = 1e6 ML = 1e6 # Litres in a megaliter
python
import random

from pylons.i18n import set_lang
import sqlalchemy.exc

import ckan.logic
import ckan.lib.maintain as maintain
from ckan.lib.search import SearchError
from ckan.lib.base import *
from ckan.lib.helpers import url_for

CACHE_PARAMETER = '__cache'


class HomeController(BaseController):
    repo = model.repo

    def __before__(self, action, **env):
        try:
            BaseController.__before__(self, action, **env)
            context = {'model': model, 'user': c.user or c.author}
            ckan.logic.check_access('site_read', context)
        except ckan.logic.NotAuthorized:
            abort(401, _('Not authorized to see this page'))
        except (sqlalchemy.exc.ProgrammingError,
                sqlalchemy.exc.OperationalError), e:
            # postgres and sqlite errors for missing tables
            msg = str(e)
            if ('relation' in msg and 'does not exist' in msg) or \
                    ('no such table' in msg):
                # table missing, major database problem
                abort(503, _('This site is currently off-line. Database '
                             'is not initialised.'))
                # TODO: send an email to the admin person (#1285)
            else:
                raise

    def index(self):
        try:
            # package search
            context = {'model': model, 'session': model.Session,
                       'user': c.user or c.author}
            data_dict = {
                'q': '*:*',
                'facet.field': g.facets,
                'rows': 0,
                'start': 0,
                'fq': 'capacity:"public"'
            }
            query = ckan.logic.get_action('package_search')(
                context, data_dict)
            c.package_count = query['count']
            c.facets = query['facets']
            maintain.deprecate_context_item(
                'facets',
                'Use `c.search_facets` instead.')
            c.search_facets = query['search_facets']

            data_dict = {'order_by': 'packages', 'all_fields': 1}
            # only give the terms to group dictize that are returned in the
            # facets as full results take a lot longer
            if 'groups' in c.search_facets:
                data_dict['groups'] = [
                    item['name'] for item in
                    c.search_facets['groups']['items']
                ]
            c.groups = ckan.logic.get_action('group_list')(context, data_dict)
        except SearchError, se:
            c.package_count = 0
            c.groups = []

        if c.userobj is not None:
            msg = None
            url = url_for(controller='user', action='edit')
            is_google_id = \
                c.userobj.name.startswith(
                    'https://www.google.com/accounts/o8/id')
            if not c.userobj.email and (is_google_id and
                                        not c.userobj.fullname):
                msg = _('Please <a href="{link}">update your profile</a>'
                        ' and add your email address and your full name. '
                        '{site} uses your email address'
                        ' if you need to reset your password.'.format(
                            link=url, site=g.site_title))
            elif not c.userobj.email:
                msg = _('Please <a href="%s">update your profile</a>'
                        ' and add your email address. ') % url + \
                    _('%s uses your email address'
                      ' if you need to reset your password.') \
                    % g.site_title
            elif is_google_id and not c.userobj.fullname:
                msg = _('Please <a href="%s">update your profile</a>'
                        ' and add your full name.') % (url)
            if msg:
                h.flash_notice(msg, allow_html=True)

        c.recently_changed_packages_activity_stream = \
            ckan.logic.action.get.recently_changed_packages_activity_list_html(
                context, {})

        return render('home/index.html', cache_force=True)

    def license(self):
        return render('home/license.html')

    def about(self):
        return render('home/about.html')

    def cache(self, id):
        '''Manual way to clear the caches'''
        if id == 'clear':
            wui_caches = ['stats']
            for cache_name in wui_caches:
                cache_ = cache.get_cache(cache_name, type='dbm')
                cache_.clear()
            return 'Cleared caches: %s' % ', '.join(wui_caches)

    def cors_options(self, url=None):
        # just return 200 OK and empty data
        return ''
python
##########################################################################
#
# Copyright 2012 Jose Fonseca
# All Rights Reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
##########################################################################


from d3dcommon import *
from d3d10 import *


D3D10_SHADER = Flags(UINT, [
    "D3D10_SHADER_DEBUG",
    "D3D10_SHADER_SKIP_VALIDATION",
    "D3D10_SHADER_SKIP_OPTIMIZATION",
    "D3D10_SHADER_PACK_MATRIX_ROW_MAJOR",
    "D3D10_SHADER_PACK_MATRIX_COLUMN_MAJOR",
    "D3D10_SHADER_PARTIAL_PRECISION",
    "D3D10_SHADER_FORCE_VS_SOFTWARE_NO_OPT",
    "D3D10_SHADER_FORCE_PS_SOFTWARE_NO_OPT",
    "D3D10_SHADER_NO_PRESHADER",
    "D3D10_SHADER_AVOID_FLOW_CONTROL",
    "D3D10_SHADER_PREFER_FLOW_CONTROL",
    "D3D10_SHADER_ENABLE_STRICTNESS",
    "D3D10_SHADER_ENABLE_BACKWARDS_COMPATIBILITY",
    "D3D10_SHADER_IEEE_STRICTNESS",
    "D3D10_SHADER_WARNINGS_ARE_ERRORS",
    "D3D10_SHADER_OPTIMIZATION_LEVEL0",
    "D3D10_SHADER_OPTIMIZATION_LEVEL1",
    "D3D10_SHADER_OPTIMIZATION_LEVEL2",
    "D3D10_SHADER_OPTIMIZATION_LEVEL3",
])

D3D10_SHADER_DESC = Struct("D3D10_SHADER_DESC", [
    (UINT, "Version"),
    (LPCSTR, "Creator"),
    (UINT, "Flags"),
    (UINT, "ConstantBuffers"),
    (UINT, "BoundResources"),
    (UINT, "InputParameters"),
    (UINT, "OutputParameters"),
    (UINT, "InstructionCount"),
    (UINT, "TempRegisterCount"),
    (UINT, "TempArrayCount"),
    (UINT, "DefCount"),
    (UINT, "DclCount"),
    (UINT, "TextureNormalInstructions"),
    (UINT, "TextureLoadInstructions"),
    (UINT, "TextureCompInstructions"),
    (UINT, "TextureBiasInstructions"),
    (UINT, "TextureGradientInstructions"),
    (UINT, "FloatInstructionCount"),
    (UINT, "IntInstructionCount"),
    (UINT, "UintInstructionCount"),
    (UINT, "StaticFlowControlCount"),
    (UINT, "DynamicFlowControlCount"),
    (UINT, "MacroInstructionCount"),
    (UINT, "ArrayInstructionCount"),
    (UINT, "CutInstructionCount"),
    (UINT, "EmitInstructionCount"),
    (D3D10_PRIMITIVE_TOPOLOGY, "GSOutputTopology"),
    (UINT, "GSMaxOutputVertexCount"),
])

D3D10_SHADER_BUFFER_DESC = Struct("D3D10_SHADER_BUFFER_DESC", [
    (LPCSTR, "Name"),
    (D3D10_CBUFFER_TYPE, "Type"),
    (UINT, "Variables"),
    (UINT, "Size"),
    (UINT, "uFlags"),
])

D3D10_SHADER_VARIABLE_DESC = Struct("D3D10_SHADER_VARIABLE_DESC", [
    (LPCSTR, "Name"),
    (UINT, "StartOffset"),
    (UINT, "Size"),
    (UINT, "uFlags"),
    (LPVOID, "DefaultValue"),
])

D3D10_SHADER_TYPE_DESC = Struct("D3D10_SHADER_TYPE_DESC", [
    (D3D10_SHADER_VARIABLE_CLASS, "Class"),
    (D3D10_SHADER_VARIABLE_TYPE, "Type"),
    (UINT, "Rows"),
    (UINT, "Columns"),
    (UINT, "Elements"),
    (UINT, "Members"),
    (UINT, "Offset"),
])

D3D10_SHADER_INPUT_BIND_DESC = Struct("D3D10_SHADER_INPUT_BIND_DESC", [
    (LPCSTR, "Name"),
    (D3D10_SHADER_INPUT_TYPE, "Type"),
    (UINT, "BindPoint"),
    (UINT, "BindCount"),
    (UINT, "uFlags"),
    (D3D10_RESOURCE_RETURN_TYPE, "ReturnType"),
    (D3D10_SRV_DIMENSION, "Dimension"),
    (UINT, "NumSamples"),
])

D3D10_SIGNATURE_PARAMETER_DESC = Struct("D3D10_SIGNATURE_PARAMETER_DESC", [
    (LPCSTR, "SemanticName"),
    (UINT, "SemanticIndex"),
    (UINT, "Register"),
    (D3D10_NAME, "SystemValueType"),
    (D3D10_REGISTER_COMPONENT_TYPE, "ComponentType"),
    (BYTE, "Mask"),
    (BYTE, "ReadWriteMask"),
])

ID3D10ShaderReflectionType = Interface("ID3D10ShaderReflectionType")
ID3D10ShaderReflectionType.methods += [
    StdMethod(HRESULT, "GetDesc", [(Pointer(D3D10_SHADER_TYPE_DESC), "pDesc")]),
    StdMethod(ObjPointer(ID3D10ShaderReflectionType), "GetMemberTypeByIndex", [(UINT, "Index")]),
    StdMethod(ObjPointer(ID3D10ShaderReflectionType), "GetMemberTypeByName", [(LPCSTR, "Name")]),
    StdMethod(LPCSTR, "GetMemberTypeName", [(UINT, "Index")]),
]

ID3D10ShaderReflectionVariable = Interface("ID3D10ShaderReflectionVariable")
ID3D10ShaderReflectionVariable.methods += [
    StdMethod(HRESULT, "GetDesc", [Out(Pointer(D3D10_SHADER_VARIABLE_DESC), "pDesc")]),
    StdMethod(ObjPointer(ID3D10ShaderReflectionType), "GetType", []),
]

ID3D10ShaderReflectionConstantBuffer = Interface("ID3D10ShaderReflectionConstantBuffer")
ID3D10ShaderReflectionConstantBuffer.methods += [
    StdMethod(HRESULT, "GetDesc", [Out(Pointer(D3D10_SHADER_BUFFER_DESC), "pDesc")]),
    StdMethod(ObjPointer(ID3D10ShaderReflectionVariable), "GetVariableByIndex", [(UINT, "Index")]),
    StdMethod(ObjPointer(ID3D10ShaderReflectionVariable), "GetVariableByName", [(LPCSTR, "Name")]),
]

ID3D10ShaderReflection = Interface("ID3D10ShaderReflection", IUnknown)
ID3D10ShaderReflection.methods += [
    StdMethod(HRESULT, "GetDesc", [Out(Pointer(D3D10_SHADER_DESC), "pDesc")]),
    StdMethod(ObjPointer(ID3D10ShaderReflectionConstantBuffer), "GetConstantBufferByIndex", [(UINT, "Index")]),
    StdMethod(ObjPointer(ID3D10ShaderReflectionConstantBuffer), "GetConstantBufferByName", [(LPCSTR, "Name")]),
    StdMethod(HRESULT, "GetResourceBindingDesc", [(UINT, "ResourceIndex"), Out(Pointer(D3D10_SHADER_INPUT_BIND_DESC), "pDesc")]),
    StdMethod(HRESULT, "GetInputParameterDesc", [(UINT, "ParameterIndex"), Out(Pointer(D3D10_SIGNATURE_PARAMETER_DESC), "pDesc")]),
    StdMethod(HRESULT, "GetOutputParameterDesc", [(UINT, "ParameterIndex"), Out(Pointer(D3D10_SIGNATURE_PARAMETER_DESC), "pDesc")]),
]

d3d10.addFunctions([
    StdFunction(HRESULT, "D3D10CompileShader", [(LPCSTR, "pSrcData"), (SIZE_T, "SrcDataSize"), (LPCSTR, "pFileName"), (Pointer(Const(D3D10_SHADER_MACRO)), "pDefines"), (LPD3D10INCLUDE, "pInclude"), (LPCSTR, "pFunctionName"), (LPCSTR, "pProfile"), (UINT, "Flags"), Out(Pointer(ObjPointer(ID3D10Blob)), "ppShader"), Out(Pointer(ObjPointer(ID3D10Blob)), "ppErrorMsgs")]),
    StdFunction(HRESULT, "D3D10DisassembleShader", [(OpaquePointer(Const(Void)), "pShader"), (SIZE_T, "BytecodeLength"), (BOOL, "EnableColorCode"), (LPCSTR, "pComments"), Out(Pointer(ObjPointer(ID3D10Blob)), "ppDisassembly")]),
    StdFunction(LPCSTR, "D3D10GetPixelShaderProfile", [(ObjPointer(ID3D10Device), "pDevice")]),
    StdFunction(LPCSTR, "D3D10GetVertexShaderProfile", [(ObjPointer(ID3D10Device), "pDevice")]),
    StdFunction(LPCSTR, "D3D10GetGeometryShaderProfile", [(ObjPointer(ID3D10Device), "pDevice")]),
    StdFunction(HRESULT, "D3D10ReflectShader", [(OpaquePointer(Const(Void)), "pShaderBytecode"), (SIZE_T, "BytecodeLength"), Out(Pointer(ObjPointer(ID3D10ShaderReflection)), "ppReflector")]),
    StdFunction(HRESULT, "D3D10PreprocessShader", [(LPCSTR, "pSrcData"), (SIZE_T, "SrcDataSize"), (LPCSTR, "pFileName"), (Pointer(Const(D3D10_SHADER_MACRO)), "pDefines"), (LPD3D10INCLUDE, "pInclude"), Out(Pointer(ObjPointer(ID3D10Blob)), "ppShaderText"), Out(Pointer(ObjPointer(ID3D10Blob)), "ppErrorMsgs")]),
    StdFunction(HRESULT, "D3D10GetInputSignatureBlob", [(OpaquePointer(Const(Void)), "pShaderBytecode"), (SIZE_T, "BytecodeLength"), Out(Pointer(ObjPointer(ID3D10Blob)), "ppSignatureBlob")]),
    StdFunction(HRESULT, "D3D10GetOutputSignatureBlob", [(OpaquePointer(Const(Void)), "pShaderBytecode"), (SIZE_T, "BytecodeLength"), Out(Pointer(ObjPointer(ID3D10Blob)), "ppSignatureBlob")]),
    StdFunction(HRESULT, "D3D10GetInputAndOutputSignatureBlob", [(OpaquePointer(Const(Void)), "pShaderBytecode"), (SIZE_T, "BytecodeLength"), Out(Pointer(ObjPointer(ID3D10Blob)), "ppSignatureBlob")]),
    StdFunction(HRESULT, "D3D10GetShaderDebugInfo", [(OpaquePointer(Const(Void)), "pShaderBytecode"), (SIZE_T, "BytecodeLength"), Out(Pointer(ObjPointer(ID3D10Blob)), "ppDebugInfo")]),
])
python
# -*- coding: utf-8 -*-
import json
import shutil
import sys
from copy import deepcopy
from pathlib import Path

import pytest
import requests

from micropy import config, main, project


@pytest.fixture
def mock_requests(mocker, requests_mock, test_archive):
    mock_source = {
        "name": "Micropy Stubs",
        "location": "https://codeload.github.com/BradenM/micropy-stubs",
        "source": "https://raw.githubusercontent.com/bradenm/micropy-stubs/source.json",
        "path": "legacy.tar.gz/pkg/",
        "packages": [
            {
                "name": "micropython",
                "type": "firmware",
                "sha256sum": "7ff2cce0237268cd52164b77b6c2df6be6249a67ee285edc122960af869b8ed2"
            },
        ]
    }
    requests_mock.get(
        "https://raw.githubusercontent.com/BradenM/micropy-stubs/master/source.json",
        json=mock_source)
    requests_mock.get(
        "https://codeload.github.com/BradenM/micropy-stubs/legacy.tar.gz/pkg/micropython",
        content=test_archive)


@pytest.mark.skip(reason="Tests need some serious cleanup before something like this could work.")
@pytest.mark.usefixtures("mock_requests")
class TestCreateProject:
    mp = None

    expect_mp_data = staticmethod(lambda name: {
        'name': 'NewProject',
        'stubs': {
            name: '1.2.0'
        },
        'packages': {},
        'dev-packages': {
            'micropy-cli': '*'
        },
        'config': {
            'vscode': True,
            'pylint': True
        }
    })

    expect_vsc_data = staticmethod(lambda name: [
        str(Path(f".micropy/{name}/frozen")),
        str(Path(".micropy/fware_test_stub/frozen")),
        str(Path(f".micropy/{name}/stubs")),
        str(Path(".micropy/NewProject"))
    ])

    def build_project(self, mpy, path):
        proj_path = path / 'highlevel_new_project'
        if proj_path.exists():
            shutil.rmtree(proj_path, ignore_errors=True)
        proj = project.Project(proj_path)
        proj_stub = list(mpy.stubs)[0]
        proj.add(project.modules.StubsModule, mpy.stubs, stubs=[proj_stub])
        proj.add(project.modules.PackagesModule, 'requirements.txt')
        proj.add(project.modules.DevPackagesModule, 'dev-requirements.txt')
        proj.add(project.modules.TemplatesModule, ('vscode', 'pylint'))
        return (proj, mpy, proj_stub)

    def check_mp_data(self, path, utils, name='esp32', expect=None):
        expect_data = expect or self.expect_mp_data(name)
        micropy_file = path
        assert micropy_file.exists()
        mp_data = json.loads(micropy_file.read_text())
        assert utils.dict_equal(mp_data, expect_data)

    def check_vscode(self, path, name='esp32', expect=None):
        vsc_path = path / '.vscode' / 'settings.json'
        assert vsc_path.exists()
        with vsc_path.open() as f:
            lines = [l.strip() for l in f.readlines() if l]
            valid = [l for l in lines if "//" not in l[:2]]
            vsc_data = json.loads("\n".join(valid))
        expect_data = expect or self.expect_vsc_data(name)
        assert vsc_data['python.analysis.typeshedPaths'] == expect_data

    def test_setup_stubs(self, mock_micropy, get_stub_paths, shared_datadir):
        mpy = mock_micropy
        stub_path = (shared_datadir / 'esp32_test_stub')
        mpy.stubs.add(stub_path)

    def test_create_project(self, micropy_stubs, tmp_path, utils):
        proj, mpy, proj_stub = self.build_project(micropy_stubs(), tmp_path)
        proj.create()
        self.check_mp_data(proj.info_path, utils, name=proj_stub.path.name)
        self.check_vscode(proj.path, name=proj_stub.path.name)

    def test_add_package(self, mock_pkg, micropy_stubs, tmp_path, utils):
        proj, mpy, proj_stub = self.build_project(micropy_stubs(), tmp_path)
        proj.create()
        proj.add_package("newpackage")
        expect_data = deepcopy(self.expect_mp_data(proj_stub.path.name))
        expect_data['packages']['newpackage'] = '*'
        self.check_mp_data(proj.info_path, utils, expect=expect_data)

    @pytest.mark.parametrize('local_pkg', ['src/lib/coolpackage', '/tmp/absolute/package'])
    def test_add_local_package(self, tmp_path, local_pkg, micropy_stubs, utils):
        proj, mpy, proj_stub = self.build_project(micropy_stubs(), tmp_path)
        proj.create()
        local_package = Path(local_pkg)
        if not local_package.is_absolute():
            local_package = (proj.path / Path(local_pkg))
        local_package.mkdir(parents=True, exist_ok=True)
        (local_package / '__init__.py').touch()
        local_path = utils.str_path(local_pkg)
        proj.add_package(f"-e {local_path}")
        # check micropy.json
        expect_data = deepcopy(self.expect_mp_data(proj_stub.path.name))
        expect_data['packages'][local_package.name] = f'-e {local_path}'
        self.check_mp_data(proj.info_path, utils, expect=expect_data)
        # check vscode settings
        expect_vscode = deepcopy(self.expect_vsc_data(proj_stub.path.name))
        expect_vscode.append(local_path)
        self.check_vscode(proj.path, expect=expect_vscode)
        shutil.rmtree(proj.path)
python
import os


def get_records(base_url, http_get, data_record, target,
                from_='-1min',
                until_=None,
                http_connect_timeout_s_=0.1,
                http_read_timeout_s_=1.0):
    url = _graphite_url_for_target(base_url, target, from_=from_, until_=until_)
    if 'true' == os.environ.get('VERBOSE_URL'):  # default off
        print('url: {0}'.format(url))
    resp = http_get(
        url,
        verify=True,
        timeout=(http_connect_timeout_s_, http_read_timeout_s_),
    )
    if 'true' == os.environ.get('VERBOSE_RESP'):  # default off
        print('resp: {0}'.format(resp))
    resp.raise_for_status()
    records = []
    for line in resp.text.split('\n'):
        if line:
            record = data_record(line)
            records.append(record)
    return records


def _graphite_url_for_target(base, target, from_='-1min', until_=None):
    url = '{0}/render/?target={1}&rawData=true&noNullPoints=true&from={2}'.format(
        base,
        target,
        from_
    )
    if until_:
        url += '&until={0}'.format(until_)
    return url
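
# Usage sketch (not part of the original module): pulls one minute of points,
# using `requests.get` as the injected `http_get` and a trivial parser as the
# `data_record` factory. The server URL and metric target are placeholders.
import requests

def _parse_line(line):
    # Graphite rawData lines look like "target,start,end,step|v1,v2,..."
    meta, _, values = line.partition('|')
    return {'meta': meta, 'values': values.split(',')}

records = get_records(
    'http://graphite.example.com',
    requests.get,
    _parse_line,
    'carbon.agents.*.metricsReceived',
)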
python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import models, migrations


class Migration(migrations.Migration):

    dependencies = [
        ('entity', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='EntityActivationEvent',
            fields=[
                ('id', models.AutoField(serialize=False, auto_created=True, verbose_name='ID', primary_key=True)),
                ('time', models.DateTimeField(db_index=True, help_text='The time of the activation / deactivation')),
                ('was_activated', models.BooleanField(help_text='True if the entity was activated, false otherwise', default=None)),
                ('entity', models.ForeignKey(help_text='The entity that was activated / deactivated', to='entity.Entity')),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='EntityRelationshipActivationEvent',
            fields=[
                ('id', models.AutoField(serialize=False, auto_created=True, verbose_name='ID', primary_key=True)),
                ('time', models.DateTimeField(db_index=True, help_text='The time of the activation / deactivation')),
                ('was_activated', models.BooleanField(help_text='True if the entity was activated, false otherwise', default=None)),
                ('sub_entity', models.ForeignKey(to='entity.Entity', related_name='+', help_text='The sub entity in the relationship that was activated / deactivated')),
                ('super_entity', models.ForeignKey(to='entity.Entity', related_name='+', help_text='The super entity in the relationship that was activated / deactivated')),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='EntityHistory',
            fields=[
            ],
            options={
                'proxy': True,
            },
            bases=('entity.entity',),
        ),
    ]
python
# -*- coding: utf-8 -*-
# @Time: 2020/7/2 11:50
# @Author: GraceKoo
# @File: interview_31.py
# @Desc: https://leetcode-cn.com/problems/1nzheng-shu-zhong-1chu-xian-de-ci-shu-lcof/


class Solution:
    def countDigitOne(self, n: int) -> int:
        # concatenate "n, n-1, ..., 1" into one string and count the '1's
        s = ""
        while n:
            s += str(n)
            n -= 1
        return s.count("1")


so = Solution()
print(so.countDigitOne(12))
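
# Alternative sketch (not part of the original solution): the string
# concatenation above is O(n) in numbers visited; the classic per-digit
# counting method gets the same answer in O(log10 n).
class SolutionFast:
    def countDigitOne(self, n: int) -> int:
        count, factor = 0, 1
        while factor <= n:
            higher, cur = divmod(n // factor, 10)
            lower = n % factor
            if cur == 0:
                count += higher * factor
            elif cur == 1:
                count += higher * factor + lower + 1
            else:
                count += (higher + 1) * factor
            factor *= 10
        return count


assert SolutionFast().countDigitOne(12) == so.countDigitOne(12) == 5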
python
import time
from datetime import datetime, timedelta
import mysql.connector
from openpyxl import load_workbook
from decimal import Decimal
import config


################################################################################################################
# PROCEDURES:
# STEP 1: get all 'new' offline meter files
# STEP 2: for each new files, iterate all rows and read cell's value and store data to energy data list
# STEP 3: insert or update energy data to table offline meter hourly in energy database
# STEP 4: update file status to 'done' or 'error'
################################################################################################################


def calculate_hourly(logger):
    while True:  # outer loop to reconnect server if there is a connection error
        ########################################################################################################
        # STEP 1: get all 'new' offline meter files
        ########################################################################################################
        cnx = None
        cursor = None
        try:
            cnx = mysql.connector.connect(**config.myems_historical_db)
            cursor = cnx.cursor()
        except Exception as e:
            logger.error("Error in step 1.1 of offline meter.calculate_hourly " + str(e))
            if cursor:
                cursor.close()
            if cnx:
                cnx.close()
            # sleep several minutes and continue the outer loop to reconnect the database
            print("Could not connect the MyEMS Historical Database, and go to sleep 60 seconds...")
            time.sleep(60)
            continue

        print("Connected to MyEMS Historical Database")
        print("Getting all new offline meter files")
        try:
            query = (" SELECT id, file_name, file_object "
                     " FROM tbl_offline_meter_files "
                     " WHERE status = 'new' "
                     " ORDER BY id ")
            cursor.execute(query, )
            rows_files = cursor.fetchall()
        except Exception as e:
            logger.error("Error in step 1.2 of offline meter.calculate_hourly " + str(e))
            time.sleep(60)
            continue
        finally:
            if cursor:
                cursor.close()
            if cnx:
                cnx.close()

        excel_file_list = list()
        if rows_files is not None and len(rows_files) > 0:
            for row_file in rows_files:
                excel_file_list.append({"id": row_file[0],
                                        "name": row_file[1],
                                        "file_object": row_file[2]})
        else:
            print("there isn't any new files found, and go to sleep 60 seconds...")
            time.sleep(60)
            continue

        ########################################################################################################
        # STEP 2: for each new files, dump file object to local file and then load workbook from the local file
        ########################################################################################################
        for excel_file in excel_file_list:
            print("read data from offline meter file" + excel_file['name'])
            is_valid_file = True
            fw = None
            try:
                fw = open("myems-normalization.blob", 'wb')
                fw.write(excel_file['file_object'])
                fw.close()
            except Exception as e:
                logger.error("Error in step 2.1 of offline meter.calculate_hourly " + str(e))
                if fw:
                    fw.close()
                # mark as invalid file
                is_valid_file = False

            fr = None
            wb = None
            try:
                fr = open("myems-normalization.blob", 'rb')
                wb = load_workbook(fr, data_only=True)
                fr.close()
            except Exception as e:
                logger.error("Error in step 2.2 of offline meter.calculate_hourly " + str(e))
                if fr:
                    fr.close()
                # mark as invalid file
                is_valid_file = False

            energy_data_list = list()
            # grab the active worksheet
            if is_valid_file:
                ws = wb.active

                # get timezone offset in minutes, this value will be returned to client
                timezone_offset = int(config.utc_offset[1:3]) * 60 + int(config.utc_offset[4:6])
                if config.utc_offset[0] == '-':
                    timezone_offset = -timezone_offset

                for row in ws.iter_rows(min_row=3, max_row=1024, min_col=1, max_col=34):
                    offline_meter_data = dict()
                    offline_meter_data['offline_meter_id'] = None
                    offline_meter_data['offline_meter_name'] = None
                    offline_meter_data['data'] = dict()
                    col_num = 0
                    for cell in row:
                        col_num += 1
                        print(cell.value)
                        if col_num == 1:
                            # get offline meter ID
                            if cell.value is not None:
                                offline_meter_data['offline_meter_id'] = cell.value
                            else:
                                break
                        elif col_num == 2:
                            # get offline meter name
                            if cell.value is None:
                                break
                            else:
                                offline_meter_data['offline_meter_name'] = cell.value
                        elif col_num > 3:
                            # get date of the cell
                            try:
                                start_datetime_local = datetime(year=ws['A2'].value,
                                                                month=ws['B2'].value,
                                                                day=col_num - 3)
                            except ValueError:
                                # invalid date and go to next cell in this row until reach max_col
                                continue
                            start_datetime_utc = start_datetime_local - timedelta(minutes=timezone_offset)
                            if cell.value is None:
                                # if the cell is empty then stop at that day
                                break
                            else:
                                offline_meter_data['data'][start_datetime_utc] = Decimal(cell.value)

                    if len(offline_meter_data['data']) > 0:
                        print("offline_meter_data:" + str(offline_meter_data))
                        energy_data_list.append(offline_meter_data)

            ####################################################################################################
            # STEP 3: insert or update energy data to table offline meter hourly in energy database
            ####################################################################################################
            print("to valid offline meter id in excel file...")
            if len(energy_data_list) == 0:
                print("Could not find any offline meters in the file...")
                print("and go to process the next file...")
                is_valid_file = False
            else:
                try:
                    cnx = mysql.connector.connect(**config.myems_system_db)
                    cursor = cnx.cursor()
                except Exception as e:
                    logger.error("Error in step 3.1 of offlinemeter.calculate_hourly " + str(e))
                    if cursor:
                        cursor.close()
                    if cnx:
                        cnx.close()
                    time.sleep(60)
                    continue

                try:
                    cursor.execute(" SELECT id, name, hourly_low_limit, hourly_high_limit"
                                   " FROM tbl_offline_meters ")
                    rows_offline_meters = cursor.fetchall()
                except Exception as e:
                    logger.error("Error in step 3.2 of offlinemeter.calculate_hourly " + str(e))
                    time.sleep(60)
                    continue
                finally:
                    if cursor:
                        cursor.close()
                    if cnx:
                        cnx.close()

                if rows_offline_meters is None or len(rows_offline_meters) == 0:
                    print("Could not find any offline meters in the MyEMS System Database...")
                    time.sleep(60)
                    continue
                else:
                    offline_meter_id_set = set()
                    for row_offline_meter in rows_offline_meters:
                        # valid offline meter id in excel file
                        offline_meter_id_set.add(row_offline_meter[0])

                    for energy_data_item in energy_data_list:
                        if energy_data_item['offline_meter_id'] not in offline_meter_id_set:
                            is_valid_file = False
                            break
                        for row_offline_meter in rows_offline_meters:
                            if row_offline_meter[0] == energy_data_item['offline_meter_id']:
                                for key in energy_data_item['data']:
                                    if row_offline_meter[2] > (energy_data_item['data'][key] / 24):
                                        is_valid_file = False
                                        break
                                    elif row_offline_meter[3] < (energy_data_item['data'][key] / 24):
                                        is_valid_file = False
                                        break
                                break

                if is_valid_file:
                    ################################################################################################
                    # delete possibly exists offline meter hourly data in myems energy database,
                    # and then insert new offline meter hourly data
                    ################################################################################################
                    try:
                        cnx = mysql.connector.connect(**config.myems_energy_db)
                        cursor = cnx.cursor()
                    except Exception as e:
                        logger.error("Error in step 3.2 of offlinemeter.calculate_hourly " + str(e))
                        if cursor:
                            cursor.close()
                        if cnx:
                            cnx.close()
                        time.sleep(60)
                        continue

                    try:
                        for energy_data_item in energy_data_list:
                            offline_meter_id = energy_data_item['offline_meter_id']
                            print(energy_data_item['data'].items())
                            for start_datetime_utc, daily_value in energy_data_item['data'].items():
                                end_datetime_utc = start_datetime_utc + timedelta(hours=24)
                                actual_value = \
                                    daily_value / (Decimal(24) * Decimal(60) / Decimal(config.minutes_to_count))
                                cursor.execute(" DELETE FROM tbl_offline_meter_hourly "
                                               " WHERE offline_meter_id = %s "
                                               "       AND start_datetime_utc >= %s "
                                               "       AND start_datetime_utc < %s ",
                                               (offline_meter_id,
                                                start_datetime_utc.isoformat()[0:19],
                                                end_datetime_utc.isoformat()[0:19]))
                                cnx.commit()
                                # todo: check with hourly low limit and hourly high limit
                                add_values = (" INSERT INTO tbl_offline_meter_hourly "
                                              " (offline_meter_id, start_datetime_utc, actual_value) "
                                              " VALUES  ")
                                while start_datetime_utc < end_datetime_utc:
                                    add_values += " (" + str(offline_meter_id) + ","
                                    add_values += "'" + start_datetime_utc.isoformat()[0:19] + "',"
                                    add_values += str(actual_value) + "), "
                                    start_datetime_utc += timedelta(minutes=config.minutes_to_count)
                                print("add_values:" + add_values)
                                # trim ", " at the end of string and then execute
                                cursor.execute(add_values[:-2])
                                cnx.commit()
                    except Exception as e:
                        logger.error("Error in step 3.3 of offlinemeter.calculate_hourly " + str(e))
                        time.sleep(60)
                        continue
                    finally:
                        if cursor:
                            cursor.close()
                        if cnx:
                            cnx.close()

            ####################################################################################################
            # STEP 4: update file status to 'done' or 'error'
            ####################################################################################################
            print("to update offline meter file status to done...")
            try:
                cnx = mysql.connector.connect(**config.myems_historical_db)
                cursor = cnx.cursor()
            except Exception as e:
                logger.error("Error in step 4.1 of offlinemeter.calculate_hourly " + str(e))
                if cursor:
                    cursor.close()
                if cnx:
                    cnx.close()
                time.sleep(60)
                continue

            try:
                update_row = (" UPDATE tbl_offline_meter_files "
                              " SET status = %s "
                              " WHERE id = %s ")
                cursor.execute(update_row, ('done' if is_valid_file else 'error', excel_file['id'],))
                cnx.commit()
            except Exception as e:
                logger.error("Error in step 4.2 of offlinemeter.calculate_hourly " + str(e))
                time.sleep(60)
                continue
            finally:
                if cursor:
                    cursor.close()
                if cnx:
                    cnx.close()

        # end of for excel_file in excel_file_list
        print("go to sleep ...")
        time.sleep(300)
        print("wake from sleep, and go to work...")
    # end of outer while
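
# Hypothetical sketch of the `config` module the worker above imports, showing
# only the attributes it actually reads. Every value here is a placeholder;
# real deployments supply their own credentials and settings.
myems_historical_db = {'host': '127.0.0.1', 'port': 3306, 'user': 'root',
                       'password': 'secret', 'database': 'myems_historical_db'}
myems_system_db = {'host': '127.0.0.1', 'port': 3306, 'user': 'root',
                   'password': 'secret', 'database': 'myems_system_db'}
myems_energy_db = {'host': '127.0.0.1', 'port': 3306, 'user': 'root',
                   'password': 'secret', 'database': 'myems_energy_db'}
utc_offset = '+08:00'   # parsed above as sign, hours, minutes
minutes_to_count = 60   # granularity of tbl_offline_meter_hourly rows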
python
# -*- coding: utf-8 -*-

description = 'ZEA-2 counter card setup'
group = 'optional'

tango_base = 'tango://phys.dns.frm2:10000/dns/'

devices = dict(
    timer = device('nicos_mlz.jcns.devices.fpga_new.FPGATimerChannel',
        description = 'Acquisition time',
        tangodevice = tango_base + 'count/timer',
    ),
    mon1 = device('nicos.devices.entangle.CounterChannel',
        description = 'Beam monitor counter',
        tangodevice = tango_base + 'count/mon1',
        type = 'monitor',
    ),
    chopctr = device('nicos.devices.entangle.CounterChannel',
        description = 'Chopper zero signal counter',
        tangodevice = tango_base + 'count/chopper',
        type = 'other',
    ),
)

extended = dict(
    representative = 'timer',
)
python
import numpy as np

import coveval.core.losses as losses


def test_normal_scaled():
    """
    Asserts that the normalised loss is the same for different `(y_true, y_pred)`
    where the ratio `(y_true-y_pred)/y_pred` is constant.
    """
    # using default values
    ns = losses.normal_scaled()

    v1 = ns.compute_pointwise(25, 50) - ns.compute_pointwise(50, 50)
    v2 = ns.compute_pointwise(150, 100) - ns.compute_pointwise(100, 100)
    assert round(v1, 7) == round(v2, 7)
    assert round(v1, 7) == round(-np.log(ns.rel_value), 7)

    v1 = ns.compute_pointwise(55, 50) - ns.compute_pointwise(50, 50)
    v2 = ns.compute_pointwise(110, 100) - ns.compute_pointwise(100, 100)
    assert round(v1, 7) == round(v2, 7)

    # using custom values
    ns = losses.normal_scaled(delta_pc=0.1, rel_value=0.75)

    v1 = ns.compute_pointwise(45, 50) - ns.compute_pointwise(50, 50)
    v2 = ns.compute_pointwise(110, 100) - ns.compute_pointwise(100, 100)
    assert round(v1, 7) == round(v2, 7)
    assert round(v1, 7) == round(-np.log(ns.rel_value), 7)
    assert ns.rel_value == 0.75

    v1 = ns.compute_pointwise(100, 50) - ns.compute_pointwise(50, 50)
    v2 = ns.compute_pointwise(200, 100) - ns.compute_pointwise(100, 100)
    assert round(v1, 7) == round(v2, 7)
python
import pygame

screen_x_max = 240
screen_y_max = 320

# colors
RED = pygame.Color(255, 0, 0)
GREEN = pygame.Color(0, 255, 0)
BLUE = pygame.Color(0, 0, 255)
WHITE = pygame.Color(255, 255, 255)
BLACK = pygame.Color(0, 0, 0)
GRAY = pygame.Color(39, 37, 37)
LIGHT_GRAY = pygame.Color(130, 100, 100)

# path to pifidelity
pifi_dir = '/home/pi/pifidelity/'

# path to music
music_directories = ["/mnt/Banca/Music"]
music_db_file = pifi_dir + 'music.db'

# paths to bmls
bml_directories = [pifi_dir + 'bmls', '/mnt/Banca/bmls']

# paths to icons
icons_dir = pifi_dir + 'icons/'
vol_up_icon = icons_dir + 'isometric_vol_up.png'
vol_dn_icon = icons_dir + 'isometric_vol_dn.png'
mute_icon = icons_dir + 'isometric_mute.png'
next_icon = icons_dir + 'isometric_skip.png'
prev_icon = icons_dir + 'isometric_rewind.png'
select_icon = icons_dir + 'isometric_play.png'
python
from .base_public import *

DATABASES = {"default": {"ENGINE": "django.db.backends.sqlite3", "NAME": ":memory:"}}

SITE_URL = "http://test.com"
python
from project.appliances.fridge import Fridge
from project.appliances.stove import Stove
from project.appliances.tv import TV
from project.rooms.room import Room


class OldCouple(Room):
    def __init__(self, family_name: str, pension_one: float, pension_two: float):
        super().__init__(family_name, (pension_one + pension_two), 2)
        self.room_cost = 15
        tv = TV()
        fridge = Fridge()
        stove = Stove()
        # one TV, fridge and stove per occupant (two of each)
        self.appliances = [tv, tv, fridge, fridge, stove, stove]
        self.expenses = sum([a.get_monthly_expense() for a in self.appliances])
python
""" wxyz top-level automation this should be executed from within an environment created from the .github/locks/conda.*.lock appropriate for your platform. See CONTRIBUTING.md. """ import json import os # pylint: disable=expression-not-assigned,W0511,too-many-lines import shutil import subprocess import time from configparser import ConfigParser from hashlib import sha256 try: import ipywidgets except ImportError: pass from doit import create_after from doit.tools import PythonInteractiveAction, config_changed from _scripts import _paths as P from _scripts import _util as U from _scripts._lock import iter_matrix, make_lock_task DOIT_CONFIG = { "backend": "sqlite3", "verbosity": 2, "par_type": "thread", "default_tasks": ["binder"], "reporter": U.Reporter, } def task_release(): """run all tasks, except re-locking and docs""" return dict( file_dep=[ *sum( [ [P.OK / f"lint_{group}_1_pylint", P.OK / f"lint_{group}_1_flake8"] for group in P.LINT_GROUPS ], [], ), P.SHA256SUMS, P.OK / "integrity", P.OK / "nbtest", P.OK / "robot", ], targets=[P.OK / "release"], actions=[ U.okit("release", remove=True), lambda: print("OK to release"), U.okit("release"), ], ) if not P.RUNNING_IN_CI: @create_after("docs") def task_all(): """like release, but also builds docs (no locks)""" return dict( file_dep=[P.SHA256SUMS, P.OK / "release"], task_dep=["spell", "checklinks"], actions=[lambda: print("OK to docs")], ) if not (P.TESTING_IN_CI or P.BUILDING_IN_CI): def task_lock(): """lock conda envs so they don't need to be solved in CI This should be run semi-frequently (e.g. after merge to master). Requires `conda-lock` CLI to be available TODO: this should be more deriveable directly from a file tree structure that matches a github actions schema """ base_envs = [P.ENV.base, *P.ENV.WXYZ] test_envs = [*base_envs, P.ENV.utest, P.ENV.atest, P.ENV.lint] binder_args = None for task_args in iter_matrix(P.CI_TEST_MATRIX): if "linux-64" in task_args: binder_args = task_args matrix_envs = list(test_envs) if "win-64" in task_args: matrix_envs += [P.ENV.tpot, P.ENV.win, P.ENV.win_tpot] else: matrix_envs += [P.ENV.tpot, P.ENV.unix, P.ENV.unix_tpot] yield make_lock_task("test", matrix_envs, P.CI_TEST_MATRIX, *task_args) for conda_platform in P.ALL_CONDA_PLATFORMS: yield make_lock_task("lock", [P.ENV.lock], {}, conda_platform, "3.8") yield make_lock_task( "binder", [*base_envs, P.ENV.tpot, P.ENV.unix_tpot, P.ENV.binder], {}, *binder_args, ) yield make_lock_task( "docs", [*test_envs, P.ENV.lint, P.ENV.tpot, P.ENV.unix_tpot, P.ENV.docs], {}, *binder_args, ) if not P.TESTING_IN_CI: def task_setup_ts(): """set up typescript environment""" dep_types = ["devDependencies", "dependencies", "peerDependencies"] return dict( uptodate=[ config_changed( { pkg["name"]: {dep: pkg.get(dep) for dep in dep_types} for pkg in P.TS_PACKAGE_CONTENT.values() } ) ], file_dep=[P.ROOT_PACKAGE], targets=[P.YARN_INTEGRITY, P.YARN_LOCK], actions=[ ["jlpm", "--prefer-offline", "--ignore-optional"], ["jlpm", "lerna", "bootstrap"], ], ) if P.RUNNING_IN_CI: def task_setup_py_ci(): """CI: setup python packages from wheels""" return dict( file_dep=[*P.WHEELS.values()], targets=[P.OK / "setup_py", P.OK / "setup_lab"], actions=[ U.okit("setup_py", remove=True), U.okit("setup_lab", remove=True), [ *P.PIP, "install", "--no-deps", "--ignore-installed", *P.WHEELS.values(), ], [*P.PIP, "freeze"], [*P.PIP, "check"], U.okit("setup_py"), ["jupyter", "labextension", "list"], U.okit("setup_lab"), ], ) else: def _make_ext_data_files(ext): """ensure a single extension's 
data_files are set up properly""" wxyz_name = ext.parent.name py_pkg = ext.parent.parent.parent.parent package_json = ext / "package.json" package_data = P.TS_PACKAGE_CONTENT[package_json] setup_py = py_pkg / "setup.py" manifest_in = py_pkg / "MANIFEST.in" install_json = ext.parent / "install.json" yield dict( name=f"{wxyz_name}:setup.py", uptodate=[config_changed(P.PY_SETUP_TEXT)], file_dep=[package_json], targets=[setup_py], actions=[ lambda: [ setup_py.write_text( P.PY_SETUP_TEMPLATE.render(wxyz_name=wxyz_name, **package_data) ), None, ][-1], ["isort", setup_py], ["black", setup_py], ], ) yield dict( name=f"{wxyz_name}:manifest.in", uptodate=[config_changed(P.MANIFEST_TEXT)], file_dep=[package_json], targets=[manifest_in], actions=[ lambda: [ manifest_in.write_text( P.MANIFEST_TEMPLATE.render(wxyz_name=wxyz_name, **package_data) ), None, ][-1] ], ) yield dict( name=f"{wxyz_name}:install.json", uptodate=[config_changed(P.INSTALL_JSON_TEXT)], file_dep=[package_json], targets=[install_json], actions=[ lambda: [ install_json.write_text( P.INSTALL_JSON_TEMPLATE.render( wxyz_name=wxyz_name, **package_data ) ), None, ][-1] ], ) if not P.RUNNING_IN_BINDER: def task_data_files(): """ensure data_files are set up properly""" for ext in P.WXYZ_LAB_EXTENSIONS: yield from _make_ext_data_files(ext) def task_setup_py_dev(): """ensure local packages are installed and editable""" def write_reqs_txt(): """write out a requirements file so everything can be installed in one go""" P.BUILD.exists() or P.BUILD.mkdir() P.PY_DEV_REQS.write_text( "\n".join([f"-e {p.parent.relative_to(P.ROOT)}" for p in P.PY_SETUP]) ) yield dict( name="reqs_txt", targets=[P.PY_DEV_REQS], file_dep=[*P.ALL_SETUP_CFG, *P.PY_SETUP], actions=[write_reqs_txt], ) yield dict( name="pip", file_dep=[ P.PY_DEV_REQS, *[ p.parent / "labextension" / "package.json" for p in P.WXYZ_LAB_EXTENSIONS ], ], targets=[P.OK / "setup_py"], actions=[ U.okit("setup_py", remove=True), [ *P.PIP, "install", "--no-deps", "--ignore-installed", "-r", P.PY_DEV_REQS, ], [*P.PIP, "freeze"], [*P.PIP, "check"], U.okit("setup_py"), ], ) yield dict( name="lab", file_dep=[P.PY_DEV_REQS, P.OK / "setup_py"], targets=[P.OK / "setup_lab"], actions=[ U.okit("setup_lab", remove=True), *[(_make_develop, [p.parent]) for p in P.WXYZ_LAB_EXTENSIONS], ["jupyter", "labextension", "list"], U.okit("setup_lab"), ], ) def _make_develop(path): args = [ *P.PYM, "_scripts._hacked_labextension", "develop", "--debug", "--overwrite", f"wxyz.{path.name}", ] # py_path = path.parent.parent.parent # raise Exception(args) return subprocess.call(args) == 0 def _make_linters(label, files): prev = [P.OK / "setup_py"] next_prev = [] for i, cmd_group in enumerate(P.PY_LINT_CMDS): for linter, cmd in cmd_group.items(): ok = f"lint_{label}_{i}_{linter}" next_prev += [P.OK / ok] yield dict( name=f"{label}:{linter}", file_dep=[*files, *prev] if prev else [*files, P.OK / "setup_py"], actions=[ U.okit(ok, remove=True), *(cmd(files) if callable(cmd) else [cmd + files]), U.okit(ok), ], targets=[P.OK / ok], ) prev = next_prev next_prev = [] if not (P.TESTING_IN_CI or P.BUILDING_IN_CI): def task_lint(): """detect and (hopefully) correct code style/formatting""" for label, files in P.LINT_GROUPS.items(): for linter in _make_linters(label, files): yield linter yield dict( name="prettier:core", uptodate=[config_changed(P.README.read_text(encoding="utf-8"))], file_dep=[P.YARN_INTEGRITY, P.YARN_LOCK], actions=[["jlpm", "prettier", "--write", "--list-different", P.README]], targets=[P.README], ) yield dict( 
name="prettier:rest", file_dep=[P.YARN_INTEGRITY, P.YARN_LOCK, *P.ALL_PRETTIER], targets=[P.OK / "prettier"], actions=[ U.okit("prettier", remove=True), ["jlpm", "lint:prettier"], U.okit("prettier"), ], ) yield dict( name="eslint", file_dep=[ P.YARN_INTEGRITY, P.YARN_LOCK, P.OK / "prettier", *sum([[*p.rglob("*.ts")] for p in P.TS_SRC], []), ], targets=[P.OK / "eslint"], actions=[ U.okit("eslint", remove=True), ["jlpm", "lint:eslint"], U.okit("eslint"), ], ) yield dict( name="robot", file_dep=[*P.ALL_ROBOT, *P.ATEST_PY], targets=[P.OK / "robot_lint"], actions=[ U.okit("robot_dry_run", remove=True), [*P.PYM, "robot.tidy", "--inplace", *P.ALL_ROBOT], [*ATEST, "--dryrun"], U.okit("robot_lint"), ], ) def _make_schema(source, targets): schema = P.SCHEMA / f"{source.stem}.schema.json" yield dict( name=schema.name, file_dep=[source, P.YARN_INTEGRITY], actions=[ lambda: [P.SCHEMA.mkdir(parents=True, exist_ok=True), None][-1], [ P.JLPM, "--silent", "ts-json-schema-generator", "--path", source, "--out", schema, ], ], targets=[schema], ) for target in targets: yield dict( name=target.name, file_dep=[schema, P.SCRIPTS / "_ts2w.py", P.YARN_INTEGRITY], actions=[[*P.PYM, "_scripts._ts2w", schema, target]], targets=[target], ) if not P.RUNNING_IN_CI: def task_schema(): """update code files from schema""" for source, targets in P.SCHEMA_WIDGETS.items(): for task in _make_schema(source, targets): yield task def _make_pydist(setup_py): """build python release artifacts""" pkg = setup_py.parent src = [*(pkg / "src/wxyz").glob("*")][0] file_dep = [ setup_py, pkg / "setup.cfg", pkg / "MANIFEST.in", pkg / "README.md", src / "js" / P.LICENSE_NAME, *sorted((pkg / "src").rglob("*.py")), ] if src.name != "notebooks": file_dep += [src / "labextension/package.json"] def _action(output): """build a single task so we can run in the cwd""" args = [P.PY, "setup.py", output, "--dist-dir", P.DIST] return lambda: U.call(args, cwd=pkg) == 0 yield dict( name=pkg.name, doc=f"build {pkg.name} distributions", file_dep=file_dep, actions=[ lambda: [ shutil.rmtree(pkg / sub, ignore_errors=True) for sub in ["build", f"{pkg.name}.egg-info"] ] and None, _action("sdist"), _action("bdist_wheel"), ], targets=[P.WHEELS[pkg.name], P.SDISTS[pkg.name]], ) if not P.TESTING_IN_CI: def task_dist(): """make pypi distributions""" for pys in P.PY_SETUP: yield _make_pydist(pys) def task_hash_dist(): """make a hash bundle of the dist artifacts""" def _run_hash(): # mimic sha256sum CLI if P.SHA256SUMS.exists(): P.SHA256SUMS.unlink() lines = [] for p in P.HASH_DEPS: if p.parent != P.DIST: tgt = P.DIST / p.name if tgt.exists(): tgt.unlink() shutil.copy2(p, tgt) lines += [" ".join([sha256(p.read_bytes()).hexdigest(), p.name])] output = "\n".join(lines) print(output) P.SHA256SUMS.write_text(output) return dict(actions=[_run_hash], file_dep=P.HASH_DEPS, targets=[P.SHA256SUMS]) def _make_lab_ext_build(ext): target = ext.parent / "labextension" / "package.json" yield dict( name=f"""ext:{ext.parent.name}""".replace("/", "_"), file_dep=[ ext / "lib" / ".tsbuildinfo", ext / "README.md", ext / "LICENSE.txt", *ext.rglob("style/*.css"), ext / "package.json", ], actions=[ lambda: subprocess.call([*P.LAB_EXT, "build", "."], cwd=str(ext)) == 0 ], targets=[target], ) if not P.TESTING_IN_CI: def task_ts(): """build typescript components""" file_dep = [P.YARN_LOCK, *P.TS_PACKAGE, *P.ALL_TS] if not P.BUILDING_IN_CI: file_dep += [P.OK / "prettier", P.OK / "eslint"] yield dict( name="tsc", file_dep=file_dep, targets=P.TS_ALL_BUILD, actions=[["jlpm", "build:ts"]], ) yield 
dict( name="pack", file_dep=[ P.TS_META_BUILD, *P.TS_READMES, *P.TS_LICENSES, ], actions=[["jlpm", "build:tgz"]], targets=[*P.TS_TARBALLS], ) for ext in P.WXYZ_LAB_EXTENSIONS: for task in _make_lab_ext_build(ext): yield task if not P.BUILDING_IN_CI: def task_nbtest(): """smoke test all notebooks with nbconvert""" env = dict(os.environ) env.update(WXYZ_WIDGET_LOG_OUT=str(P.WIDGET_LOG_OUT)) return dict( file_dep=[*P.ALL_SRC_PY, *P.ALL_IPYNB, P.OK / "setup_py"], targets=[P.OK / "nbtest"], actions=[ lambda: [P.WIDGET_LOG_OUT.exists() or P.WIDGET_LOG_OUT.mkdir(), None][ -1 ], U.okit("nbtest", True), lambda: U.call( [ *P.PYM, "pytest", "-vv", "-n", "auto", "-o", f"junit_suite_name=nbtest_{P.OS}_{P.PY_VER}", *os.environ.get("WXYZ_PYTEST_ARGS", "").split(" "), ], cwd=P.PY_SRC / "wxyz_notebooks", env=env, ) == 0, U.okit("nbtest"), ], ) def _make_py_readme(setup_py): pkg = setup_py.parent setup_cfg = pkg / "setup.cfg" readme = pkg / "README.md" def _write(): parser = ConfigParser() parser.read(setup_cfg) context = {s: dict(parser[s]) for s in parser.sections()} for package_json in P.TS_PACKAGE_CONTENT.values(): lab = package_json.get("jupyterlab") if lab is None: continue if pkg.name == lab["discovery"]["server"]["base"]["name"]: context["js_pkg"] = package_json break readme.write_text( "\n\n".join( [ P.PY_README_TMPL.render(**context), "---", P.README.read_text(encoding="utf-8"), ] ).strip() ) return dict( name=f"readme:py:{pkg.name}", uptodate=[config_changed(P.PY_README_TXT)], actions=[ _write, ["jlpm", "--silent", "prettier", "--write", "--list-different", readme], ], file_dep=[P.README, setup_cfg], targets=[readme], ) def _make_ts_readme(package_json): pkg = package_json.parent readme = pkg / "README.md" license_ = pkg / P.LICENSE_NAME def _write(): license_.write_text(P.LICENSE.read_text(encoding="utf-8")) context = json.loads(package_json.read_text(encoding="utf-8")) readme.write_text( "\n\n".join( [ P.TS_README_TMPL.render(**context), "---", P.README.read_text(encoding="utf-8"), ] ).strip() ) return dict( name=f"readme:ts:{pkg.parent.name}", uptodate=[config_changed(P.TS_README_TXT)], actions=[ _write, ["jlpm", "prettier", "--write", "--list-different", readme], ], file_dep=[P.README, package_json], targets=[readme, license_], ) def _make_py_rst(setup_py): pkg = setup_py.parent.name name = pkg.replace("wxyz_", "") out = P.DOCS / "widgets" target = out / f"""{name}.rst""" module = pkg.replace("_", ".", 1) def _write(): if not out.exists(): out.mkdir() target.write_text( P.PY_RST_TEMPLATE.render( name=name, module=module, stars="*" * len(module), exclude_members=", ".join(dir(ipywidgets.DOMWidget)), ) ) return dict( name=f"rst:{setup_py.parent.name}", actions=[_write], targets=[target], uptodate=[config_changed(P.PY_RST_TEMPLATE_TXT)], file_dep=[*(setup_py.parent / "src").rglob("*.py"), P.OK / "setup_py"], ) def _make_widget_index(file_dep): target = P.DOCS / "widgets.ipynb" def _write(): nb_json = json.loads(target.read_text(encoding="utf-8")) toc = None for cell in nb_json["cells"]: if cell["cell_type"] == "markdown": for line in cell["source"]: if "<!-- BEGIN MODULEGEN" in line: toc = cell toc["source"] = [ "<!-- BEGIN MODULEGEN -->\n", """```{toctree}\n""", """:maxdepth: 3\n""", *[ "widgets/{}\n".format(d.stem.replace("wxyz_", "")) for d in file_dep if d.suffix == ".rst" ], "```\n", "<!-- END MODULEGEN -->\n", ] target.write_text(json.dumps(nb_json, indent=2), encoding="utf-8") return dict( name="ipynb:modindex", actions=[_write], targets=[target], file_dep=file_dep ) def 
_make_dot(setup_py): pkg = setup_py.parent.name name = pkg.replace("wxyz_", "") out = P.DOCS / "widgets" / "dot" module = pkg.replace("_", ".", 1) target = out / f"classes_{name}.dot" py_files = [*setup_py.parent.rglob("*.py")] def _make(): if not out.exists(): out.mkdir() modules = [module] if "notebooks" not in name: modules += [f"{module}.base"] proc = subprocess.Popen( [*P.PYREVERSE, "-p", name, *modules], cwd=out, stdout=subprocess.PIPE, stderr=subprocess.PIPE, ) pstdout, pstderr = proc.communicate() if proc.returncode != 0: raise RuntimeError( "\n".join( [ "stdout:\n", pstdout.decode("utf-8"), "\nstderr:\n", pstderr.decode("utf-8"), "-----", f"ERROR {proc.returncode}", ] ) ) ugly_packages = out / f"packages_{name}.dot" if ugly_packages.exists(): ugly_packages.unlink() dot_txt = target.read_text(encoding="utf-8") for py_file in py_files: replace_name = f"wxyz.{name}" if py_file.stem == "base": replace_name += ".base" dot_txt = dot_txt.replace(str(py_file), replace_name) dot_lines = dot_txt.splitlines() target.write_text( "\n".join( [ dot_lines[0], """ graph [fontname = "sans-serif"]; node [fontname = "sans-serif"]; edge [fontname = "sans-serif"]; """, *dot_lines[1:], ] ) ) return dict( name=f"dot:{name}", actions=[_make], uptodate=[config_changed({"args": P.PYREVERSE})], file_dep=[*py_files, P.OK / "setup_py"], targets=[target], ) if not (P.TESTING_IN_CI or P.BUILDING_IN_CI): def task_docs(): """make the docs right""" widget_index_deps = [] for setup_py in P.PY_SETUP: yield _make_py_readme(setup_py) task = _make_py_rst(setup_py) yield task widget_index_deps += task["targets"] yield _make_widget_index(widget_index_deps) for package_json in P.TS_PACKAGE: if package_json.parent.parent.name == "notebooks": continue yield _make_ts_readme(package_json) yield dict( name="favicon", actions=[[*P.PYM, "_scripts._favicon"]], file_dep=[P.DOCS_LOGO], targets=[P.DOCS_FAVICON], ) if shutil.which("sphinx-build"): yield dict( name="sphinx", doc="build the HTML site", actions=[["sphinx-build", "-j8", "-b", "html", "docs", "build/docs"]], file_dep=[ *P.ALL_SETUP_CFG, *P.ALL_SRC_PY, *P.DOCS_DOT, *P.DOCS_IPYNB, *P.DOCS_STATIC.rglob("*"), *P.DOCS_TEMPLATES, *P.PY_DOCS_RST, P.DOCS_CONF_PY, P.OK / "setup_py", ], targets=[P.DOCS_BUILDINFO], ) def _make_spell(path): rel = path.relative_to(P.DOCS_OUT) spell_key = "spell_" + str(rel.as_posix()).replace("/", "_").replace(".", "/") args = ["hunspell", "-d", P.SPELL_LANGS, "-p", P.DICTIONARY, "-l", "-H", path] def _spell(): misspelled = [ line.strip() for line in subprocess.check_output(args).decode("utf-8").splitlines() if line.strip() ] if misspelled: print(">> misspelled words in ", path) print("\n".join(sorted(set(misspelled)))) return False return True return dict( name=spell_key, file_dep=[path, P.DICTIONARY, P.README], actions=[U.okit(spell_key, remove=True), _spell, U.okit(spell_key)], targets=[P.OK / spell_key], ) if not (P.TESTING_IN_CI or P.BUILDING_IN_CI) and shutil.which("hunspell"): @create_after("docs") def task_spell(): """check spelling of built HTML site""" if shutil.which("hunspell"): for path in P.ALL_SPELL_DOCS(): yield _make_spell(path) if not (P.TESTING_IN_CI or P.BUILDING_IN_CI) and shutil.which("pytest-check-links"): @create_after("docs") def task_checklinks(): """check whether links in built docs are valid""" key = "check_links" args = [ "pytest-check-links", "-o", "junit_suite_name=checklinks", "--check-anchors", "--check-links-cache", "--check-links-cache-name=build/check_links/cache", # a few days seems reasonable 
f"--check-links-cache-expire-after={60 * 60 * 24 * 3}", # might be able to relax this, eventually "-k", "not (master or carousel)", ] return dict( uptodate=[config_changed(dict(args=args))], actions=[ U.okit(key, remove=True), lambda: (P.BUILD / "check_links/cache").mkdir( parents=True, exist_ok=True ), [ *args, P.DOCS_OUT, ], U.okit(key), ], file_dep=[*P.ALL_SPELL_DOCS()], targets=[P.OK / key], ) if not P.RUNNING_IN_CI: def _make_lab(watch=False): def _lab(): if watch: print(">>> Starting typescript watcher...", flush=True) ts = subprocess.Popen(["jlpm", "watch"]) ext_watchers = [ subprocess.Popen([*P.LAB_EXT, "watch", "."], cwd=str(p)) for p in P.WXYZ_LAB_EXTENSIONS ] print(">>> Waiting a bit to JupyterLab...", flush=True) time.sleep(3) print(">>> Starting JupyterLab...", flush=True) lab = subprocess.Popen( [*P.JPY, "lab", "--no-browser", "--debug"], stdin=subprocess.PIPE, ) try: print(">>> Waiting for JupyterLab to exit (Ctrl+C)...", flush=True) lab.wait() except KeyboardInterrupt: print( f""">>> {"Watch" if watch else "Run"} canceled by user!""", flush=True, ) finally: print(">>> Stopping watchers...", flush=True) if watch: [x.terminate() for x in ext_watchers] ts.terminate() lab.terminate() lab.communicate(b"y\n") if watch: ts.wait() lab.wait() [x.wait() for x in ext_watchers] print( ">>> Stopped watchers! maybe check process monitor...", flush=True, ) return True return _lab def task_lab(): """start JupyterLab, no funny stuff (Note: Single Ctrl+C stops)""" yield dict( name="serve", uptodate=[lambda: False], file_dep=[P.OK / "setup_lab"], actions=[PythonInteractiveAction(_make_lab())], ) def task_watch(): """watch typescript sources, launch JupyterLab, rebuilding as files change""" yield dict( name="lab", uptodate=[lambda: False], file_dep=[P.OK / "setup_lab"], actions=[PythonInteractiveAction(_make_lab(watch=True))], ) def _docs(): p = None try: p = subprocess.Popen( [ "sphinx-autobuild", "-a", "-j8", "--re-ignore", r"'*\.ipynb_checkpoints*'", P.DOCS, P.DOCS_OUT, ] ) p.wait() finally: p.terminate() p.wait() if shutil.which("sphinx-autobuild"): yield dict( name="docs", doc="serve docs, watch (some) sources, livereload (when it can)", uptodate=[lambda: False], file_dep=[P.DOCS_BUILDINFO], actions=[PythonInteractiveAction(_docs)], ) if not (P.TESTING_IN_CI or P.BUILDING_IN_CI): def task_binder(): """get to a working interactive state""" return dict( file_dep=[P.OK / "setup_lab", P.OK / "setup_py"], actions=[lambda: print("OK")], ) ATEST = [P.PY, "-m", "_scripts._atest"] if not P.BUILDING_IN_CI: def task_robot(): """test in browser with robot framework""" file_dep = [ *P.ALL_ROBOT, *P.ALL_SRC_PY, *P.ATEST_PY, *P.ALL_TS, *P.ALL_IPYNB, P.SCRIPTS / "_atest.py", P.OK / "setup_lab", ] if not P.RUNNING_IN_CI: file_dep += [P.OK / "robot_lint"] return dict( file_dep=sorted(file_dep), actions=[U.okit("robot", remove=True), [*ATEST], U.okit("robot")], targets=[P.OK / "robot"], ) if not (P.BUILDING_IN_CI or P.TESTING_IN_CI): def task_integrity(): """check various sources of version and documentation issues""" return dict( file_dep=[ *P.ALL_SRC_PY, *P.ALL_MD, *P.ALL_SETUP_CFG, P.POSTBUILD, P.SCRIPTS / "_integrity.py", ], actions=[ U.okit("integrity", remove=True), [*P.PYM, "_scripts._integrity"], U.okit("integrity"), ], targets=[P.OK / "integrity"], )
python
class Cell:
    def __init__(self):
        '''
        Initializes all cells as 'Dead'.
        Can set the state with accompanying functions.
        '''
        self.status = 'Dead'

    def set_dead(self):
        '''
        Sets *this* cell as dead.
        '''
        self.status = 'Dead'

    def set_alive(self):
        '''
        Sets *this* cell as alive.
        '''
        self.status = 'Alive'

    def is_alive(self):
        '''
        Helper function for getting cell state.
        '''
        return self.status == 'Alive'

    def get_character(self):
        '''
        Get the character used to print on the board.
        Depends on if the cell is alive or not.
        '''
        return '#' if self.is_alive() else '.'
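# Quick sanity check of the API above (illustrative only):
#   c = Cell()
#   c.set_alive()
#   assert c.get_character() == '#'
#   c.set_dead()
#   assert c.get_character() == '.'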
python
class NesteggException(Exception):
    pass


def first(it):
    try:
        return next(it)
    except StopIteration:
        return None
python
from typing import List, Optional

import torch
from torch import Tensor

from tha2.nn.backbone.poser_encoder_decoder_00 import PoserEncoderDecoder00Args, PoserEncoderDecoder00
from tha2.nn.util import apply_color_change, apply_grid_change, apply_rgb_change
from tha2.nn.batch_module.batch_input_module import BatchInputModule, BatchInputModuleFactory
from tha2.nn.base.nonlinearity_factory import ReLUFactory
from tha2.nn.base.normalization import InstanceNorm2dFactory
from tha2.nn.base.util import BlockArgs


class EyebrowMorphingCombiner00Args(PoserEncoderDecoder00Args):
    def __init__(self,
                 image_size: int = 128,
                 image_channels: int = 4,
                 num_pose_params: int = 12,
                 start_channels: int = 64,
                 bottleneck_image_size=16,
                 num_bottleneck_blocks=6,
                 max_channels: int = 512,
                 block_args: Optional[BlockArgs] = None):
        super().__init__(
            image_size,
            2 * image_channels,
            image_channels,
            num_pose_params,
            start_channels,
            bottleneck_image_size,
            num_bottleneck_blocks,
            max_channels,
            block_args)


class EyebrowMorphingCombiner00(BatchInputModule):
    def __init__(self, args: EyebrowMorphingCombiner00Args):
        super().__init__()
        self.args = args
        self.body = PoserEncoderDecoder00(args)
        self.morphed_eyebrow_layer_grid_change = self.args.create_grid_change_block()
        self.morphed_eyebrow_layer_alpha = self.args.create_alpha_block()
        self.morphed_eyebrow_layer_color_change = self.args.create_color_change_block()
        self.combine_alpha = self.args.create_alpha_block()

    def forward(self, background_layer: Tensor, eyebrow_layer: Tensor, pose: Tensor) -> List[Tensor]:
        combined_image = torch.cat([background_layer, eyebrow_layer], dim=1)
        feature = self.body(combined_image, pose)[0]

        morphed_eyebrow_layer_grid_change = self.morphed_eyebrow_layer_grid_change(feature)
        morphed_eyebrow_layer_alpha = self.morphed_eyebrow_layer_alpha(feature)
        morphed_eyebrow_layer_color_change = self.morphed_eyebrow_layer_color_change(feature)
        warped_eyebrow_layer = apply_grid_change(morphed_eyebrow_layer_grid_change, eyebrow_layer)
        morphed_eyebrow_layer = apply_color_change(
            morphed_eyebrow_layer_alpha, morphed_eyebrow_layer_color_change, warped_eyebrow_layer)

        combine_alpha = self.combine_alpha(feature)
        eyebrow_image = apply_rgb_change(combine_alpha, morphed_eyebrow_layer, background_layer)
        eyebrow_image_no_combine_alpha = apply_rgb_change(
            (morphed_eyebrow_layer[:, 3:4, :, :] + 1.0) / 2.0, morphed_eyebrow_layer, background_layer)

        return [
            eyebrow_image,  # 0
            combine_alpha,  # 1
            eyebrow_image_no_combine_alpha,  # 2
            morphed_eyebrow_layer,  # 3
            morphed_eyebrow_layer_alpha,  # 4
            morphed_eyebrow_layer_color_change,  # 5
            warped_eyebrow_layer,  # 6
            morphed_eyebrow_layer_grid_change,  # 7
        ]

    EYEBROW_IMAGE_INDEX = 0
    COMBINE_ALPHA_INDEX = 1
    EYEBROW_IMAGE_NO_COMBINE_ALPHA_INDEX = 2
    MORPHED_EYEBROW_LAYER_INDEX = 3
    MORPHED_EYEBROW_LAYER_ALPHA_INDEX = 4
    MORPHED_EYEBROW_LAYER_COLOR_CHANGE_INDEX = 5
    WARPED_EYEBROW_LAYER_INDEX = 6
    MORPHED_EYEBROW_LAYER_GRID_CHANGE_INDEX = 7
    OUTPUT_LENGTH = 8

    def forward_from_batch(self, batch: List[Tensor]):
        return self.forward(batch[0], batch[1], batch[2])


class EyebrowMorphingCombiner00Factory(BatchInputModuleFactory):
    def __init__(self, args: EyebrowMorphingCombiner00Args):
        super().__init__()
        self.args = args

    def create(self) -> BatchInputModule:
        return EyebrowMorphingCombiner00(self.args)


if __name__ == "__main__":
    cuda = torch.device('cuda')
    args = EyebrowMorphingCombiner00Args(
        image_size=128,
        image_channels=4,
        num_pose_params=12,
        start_channels=64,
        bottleneck_image_size=16,
        num_bottleneck_blocks=3,
        block_args=BlockArgs(
            initialization_method='xavier',
            use_spectral_norm=False,
            normalization_layer_factory=InstanceNorm2dFactory(),
            nonlinearity_factory=ReLUFactory(inplace=True)))
    face_morpher = EyebrowMorphingCombiner00(args).to(cuda)
    background_layer = torch.randn(8, 4, 128, 128, device=cuda)
    eyebrow_layer = torch.randn(8, 4, 128, 128, device=cuda)
    pose = torch.randn(8, 12, device=cuda)
    outputs = face_morpher.forward(background_layer, eyebrow_layer, pose)
    for i in range(len(outputs)):
        print(i, outputs[i].shape)
python
# Ask for the user's full name and check whether it contains "silva".
a = str(input('Enter your full name: ')).strip().lower()
print('Does your name contain silva? {}'.format('silva' in a))
python
import math
from typing import List


class Solution:
    def threeSumClosest(self, nums: List[int], target: int) -> int:
        # sort the array first
        nums.sort()
        triplet, min_diff = 0, math.inf
        for i in range(len(nums) - 3 + 1):
            # skip the same elements to avoid duplicate pairs
            if i > 0 and nums[i] == nums[i - 1]:
                continue
            lt, rt = i + 1, len(nums) - 1
            while lt < rt:
                _sum = nums[i] + nums[lt] + nums[rt]
                # return immediately if we found the target
                if _sum == target:
                    return _sum
                # check if this sum is closer to the target than the best so far
                if abs(_sum - target) < min_diff:
                    min_diff = abs(_sum - target)
                    triplet = _sum
                if _sum > target:
                    rt -= 1
                if _sum < target:
                    lt += 1
        return triplet
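# Illustrative call (LeetCode 16's classic example): for nums=[-1, 2, 1, -4]
# and target=1 the closest achievable sum is -1 + 2 + 1 = 2.
#   Solution().threeSumClosest([-1, 2, 1, -4], 1)  # -> 2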
python
""" ================ DBus wire format ================ This module de/serialize objects from/to dbus wire format. The spec for this code can be found here: - https://dbus.freedesktop.org/doc/dbus-specification.html - https://github.com/GNOME/glib/blob/master/gio/gdbusmessage.c But if you are like me that prefer some samples here they are. Our example is a complete DBus Message: *yyyyuua(yv)*``tsogybnqiuxd`` Header: ------- DBus specs define message header as **yyyyuua(yv)** or ``BYTE, BYTE, BYTE, BYTE, UINT32, UINT32, ARRAY of STRUCT of (BYTE,VARIANT)``:: BYTE \x6c BYTE \x04 BYTE \x01 BYTE \x01 UINT32 \x60\x00\x00\x00 UINT32 \x40\x00\x00\x00 ARRAY SIZE \x72\x00\x00\x00 STRUCT BYTE \x01 VARIANT SIGNATURE \x01\x6f\x00 SIZE \x10\x00\x00\x00 VAL \x2f\x61\x61\x61\x61\x61\x61\x61 \x2f\x61\x61\x61\x61\x61\x61\x61 \x00 #### \x00\x00\x00\x00\x00\x00\x00 STRUCT BYTE \x03 VARIANT SIGNATURE \x01\x73\x00 SIZE \x12\x00\x00\x00 VAL \x63\x63\x63\x63\x63\x63\x63\x63 \x63\x63\x63\x63\x63\x63\x63\x63 \x63\x63\x00 #### \x00\x00\x00\x00\x00 STRUCT BYTE \x08 VARIANT SIGNATURE \x01\x67\x00 SIZE \x0c VAL \x74\x73\x6f \x67\x79\x62\x6e\x71\x69\x75\x78 \x64\x00 #### \x00\x00\x00\x00\x00\x00 STRUCT BYTE \x02 VARIANT SIGNATURE \x01\x73\x00 SIZE \x11\x00\x00\x00 VAL \x62\x62\x62\x62\x62\x62\x62\x62 \x62\x2e\x62\x62\x62\x62\x62\x62 \x62\x00 #### \x00\x00\x00\x00\x00\x00 - Our first byte define endianess ``\\x6c`` ('l', little-endian); - The second byte is message type ``\\x04X`` (4, SIGNAL); - Third byte ``\\x01`` (1, NO_REPLY_EXPECTED) are our header flags; - Other byte for ``\\x01`` for protocol version; - A UINT64 ``\\x60\\x00\\x00\\x00`` (240) with size of body in bytes; - Another UINT64 ``\\x40\\x00\\x00\\x00`` message unique serial number; - And last part ARRAY of STRUCT of (BYTE,VARIANT) message type fields: - ``\x72\x00\x00\x00`` UINT32 array size in bytes; - Struct with byte, variant: - ``\x01`` byte define header information field; - Variant: - Variant signature: -``\x01`` signature size -``0x6f\x00`` signature val (`s`, string) - Variant content: - ``\x10\x00\x00\x00`` byte size of string; - ``\x2f\x61\x61...`` String value Message body: ------------- To be simple I defined our message body is defined as ``tsogybnqiuxd``: UINT64 \xff\xff\xff\xff \xff\xff\xff\xff STRING SIZE \x10\x00\x00\x00 VAL \x74\x68\x69\x73 \x20\x69\x73\x20 \x61\x20\x73\x74 \x72\x69\x6e\x67 \x00 #### \x00\x00\x00 PATH SIZE \x0f\x00\x00\x00 VAL \x2f\x74\x68\x69 \x73\x2f\x69\x73 \x2f\x61\x2f\x70 \x61\x74\x68\x00 SIGN SIZE \x03 VAL \x73\x61\x64 \x00 BYTE \x2a #### \x00\x00 BOOL \x01\x00\x00\x00 INT16 \xd6\xff UINT16 \x60\xea INT32 \xd4\xff\xff\xff UINT32 \xa0\x86\x01\x00 INT64 \xff\xff\xff\xff \xff\xff\xff\xff DOUB \x00\x00\x00\x00 \x00\x40\x45\x40 PADDING: -------- As you can see above #### is alingment 'hack' to meet dbus requirements. There are 3 types of padding rules, ``container``, ``header``, ``body`` - Container: - Strings are aligned as multiple of 4; - Struct are aligned as multiple of 8; - Variant are aligned as multiple of 1; - Array aligned as multiple o content type. - Last object of array has no padding. - Header: - "The length of the header must be a multiple of 8". - Body: - Any value on body is aligned gloabally to message size at that point. - IE. 
see #### after BYTE and before BOOL, glib implementation is: - before put value see if current size meets the next value align; - put \x00 to fix it; - put value bytes; - https://dbus.freedesktop.org/doc/dbus-specification.html#idm601 OUTPUT: ------- Glue all things and our message will be sent like this:: \x6c\x04\x01\x01\x60\x00\x00\x00 \x40\x00\x00\x00\x72\x00\x00\x00 \x08\x01\x67\x00\x0c\x74\x73\x6f \x67\x79\x62\x6e\x71\x69\x75\x78 \x64\x00\x00\x00\x00\x00\x00\x00 \x01\x01\x6f\x00\x10\x00\x00\x00 \x2f\x61\x61\x61\x61\x61\x61\x61 \x2f\x61\x61\x61\x61\x61\x61\x61 \x00\x00\x00\x00\x00\x00\x00\x00 \x03\x01\x73\x00\x12\x00\x00\x00 \x63\x63\x63\x63\x63\x63\x63\x63 \x63\x63\x63\x63\x63\x63\x63\x63 \x63\x63\x00\x00\x00\x00\x00\x00 \x02\x01\x73\x00\x11\x00\x00\x00 \x62\x62\x62\x62\x62\x62\x62\x62 \x62\x2e\x62\x62\x62\x62\x62\x62 \x62\x00\x00\x00\x00\x00\x00\x00 \xff\xff\xff\xff\xff\xff\xff\xff \x10\x00\x00\x00\x74\x68\x69\x73 \x20\x69\x73\x20\x61\x20\x73\x74 \x72\x69\x6e\x67\x00\x00\x00\x00 \x0f\x00\x00\x00\x2f\x74\x68\x69 \x73\x2f\x69\x73\x2f\x61\x2f\x70 \x61\x74\x68\x00\x03\x73\x61\x64 \x00\x2a\x00\x00\x01\x00\x00\x00 \xd6\xff\x60\xea\xd4\xff\xff\xff \xa0\x86\x01\x00\xff\xff\xff\xff \xff\xff\xff\xff\x00\x00\x00\x00 \x00\x40\x45\x40 """ from struct import pack from collections import defaultdict from .signature import break_signature NULL = b'\x00' EMPTY = b'' PATH = b'o' STRING = b's' SIGNATURE = b'g' ARRAY = b'a' STRUCT = b'(' DICT = b'{' BYTE = b'y' UINT32 = b'u' CONTAINER = b'{(avsgo' TRANSLATION = { b'y': b'b', b'b': b'I', b'n': b'h', b'q': b'H', b'i': b'i', b'u': b'I', b'x': b'q', b't': b'Q', b'd': b'd', b'h': b'I' } ALIGN = { b'y': 1, b'b': 4, b'n': 2, b'q': 2, b'i': 4, b'u': 4, b'x': 8, b't': 8, b'd': 8, b'h': 4, b's': 4, b'o': 4, b'g': 1, b'v': 1, b'a': 4, b'(': 8, b'{': 8 } LITLE_END = b'l' BIG_END = b'B' LITLE_END_FMT = b'<' BIG_END_FMT = b'>' _BIG_END = b'>B' endian = lambda k: BIG_END if k[0] in _BIG_END else LITLE_END _ENDIANESS = {LITLE_END: LITLE_END_FMT, BIG_END: BIG_END_FMT} ENDIANESS = defaultdict(lambda: LITLE_END, _ENDIANESS) def pad(encoded_len, window=4): if encoded_len and encoded_len % window: if encoded_len < window: return NULL * (window - encoded_len) else: return NULL * (encoded_len % window) return EMPTY def has_next(it): try: return next(it) except StopIteration: return None def join(val): return EMPTY.join(val) def serialize_msg(header, *body): header_buf = join(header.encode_dbus()) size = len(header_buf) body_it = serialize_body(size, header.signature, header.endianness, *body) body_buf = join(body_it) body_size = serialize_len(len(body_buf), endianess=header.endianness) yield join([header_buf[0:3], body_size, header_buf[7:]]) yield pad(size, 8) yield body_buf def serialize_body(header_size, signature, endianess=LITLE_END, *body): size = header_size signature_it = break_signature(signature) for arg in body: sig = next(signature_it) for b in serialize(sig, endianess, arg): yield pad(size, ALIGN[sig[0]]) yield b size += len(b) def serialize_str(val, signature=STRING, endianess=LITLE_END): type_of_len = BYTE if signature in SIGNATURE else UINT32 b_val = val.encode(encoding='UTF-8') l_b_val = len(b_val) yield serialize_len(l_b_val, type_of_len, endianess) yield b_val + NULL # null-terminated string yield pad(l_b_val + 1) if signature in (STRING, PATH) else EMPTY def serialize_var(val, signature, endianess=LITLE_END): for b in serialize_str(signature, SIGNATURE, endianess): yield b for b in serialize(signature, endianess, val): yield b def serialize_struct(val, 
signature, endianess=LITLE_END): signature_it = break_signature(signature) for v in val: size = 0 sig = next(signature_it) for b in serialize(sig, endianess, v): yield b size += len(b) yield pad(size, 8) def serialize_dict(val, signature, endianess=LITLE_END): for _key, _val in val.items(): size = 0 for b in serialize(signature[0], endianess, _key): yield b size += len(b) for b in serialize(signature[1], endianess, _val): yield b size += len(b) yield pad(size, 8) def serialize_list(val, signature, endianess=LITLE_END): sig = bytes([signature[0]]) # empty if not val: yield serialize_len(0, endianess=endianess) # simple type elif sig not in CONTAINER: yield serialize_len(len(val) * ALIGN[sig], endianess=endianess) yield pad(ALIGN[UINT32], ALIGN[sig]) for v in val: for b in serialize(sig, endianess, v): yield b # complex else: buf = [] buf_size = 0 it = iter(val) v = has_next(it) while v: _next = has_next(it) for item_buf in serialize(signature, endianess, v): if _next or len(item_buf.strip(NULL)): buf_size += len(item_buf) buf.append(item_buf) v = _next yield serialize_len(buf_size, endianess=endianess) for b in buf: yield b def serialize_len(size, signature=UINT32, endianess=LITLE_END): return pack(ENDIANESS[endianess] + TRANSLATION[signature], size) def serialize(signature, endianess, *args): if not args: yield EMPTY signature_it = break_signature(signature) for arg in args: if hasattr(arg, 'encode_dbus'): for encoded in arg.encode_dbus(endianess): yield encoded else: sig = next(signature_it) fmt = TRANSLATION.get(sig) if fmt: end = ENDIANESS[endianess] yield pack(end + fmt, arg) elif sig in (STRING, PATH, SIGNATURE): for encoded in serialize_str(arg, sig, endianess): yield encoded elif sig.startswith(ARRAY): for encoded in serialize_list(arg, sig[1:], endianess): yield encoded elif sig.startswith(STRUCT): for encoded in serialize_struct(arg, sig[1:-1], endianess): yield encoded elif sig.startswith(DICT): for encoded in serialize_dict(arg, sig[1:-1], endianess): yield encoded def deserialize(signature, endianess=LITLE_END): raise SerializeExeption('Deserialize method not implemented') class SerializeExeption(Exception): pass
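# A minimal usage sketch of the serializer above (hedged: `break_signature`
# comes from .signature and is assumed here to yield one single-byte type
# code per argument). Serializing a UINT32 and a STRING in little-endian
# wire order:
#   wire = join(serialize(b'us', LITLE_END, 42, 'a string'))
# `serialize` is a generator of byte chunks; `join` concatenates them into
# the final wire-format payload.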
python
from collections import deque

working_bees = deque([int(el) for el in input().split()])
nectar_to_collect = [int(el) for el in input().split()]
honey_process = deque(input().split())

total_honey_collect = 0


def get_honey_value(bee, honey, symbol):
    if symbol == "+":
        result = bee + honey
    elif symbol == "-":
        result = bee - honey
    elif symbol == "*":
        result = bee * honey
    elif symbol == "/":
        if honey == 0:
            return 0
        result = bee / honey
    return abs(result)


while working_bees and nectar_to_collect:
    bee = working_bees[0]
    nectar = nectar_to_collect[-1]
    if bee <= nectar:
        symbol = honey_process[0]
        honey_collect = get_honey_value(bee, nectar, symbol)
        total_honey_collect += honey_collect
        working_bees.popleft()
        nectar_to_collect.pop()
        honey_process.popleft()
    else:
        nectar_to_collect.pop()

print(f"Total honey made: {total_honey_collect}")
if working_bees:
    print(f"Bees left: {', '.join(str(b) for b in working_bees)}")
if nectar_to_collect:
    print(f"Nectar left: {', '.join(str(n) for n in nectar_to_collect)}")
python
"""Module contains http hmac request, supports HTTP persistent connection.""" import httphmac import requests class HttpRequest(httphmac.Request): """Class to represent HTTP keep-alive hmac Request.""" _session = None def __init__(self): """Initialize HTTP Request object with requests.Session.""" super().__init__() self.session = self._get_session() def _get_session(self): """Generate new session object. :return: requests.Session """ if not HttpRequest._session: HttpRequest._session = requests.Session() return HttpRequest._session def do(self): """"Executes the request represented by this object. The requests library will be used for this purpose. Use requests.Session object for reuse TCP connection. Returns an instance of requests.Response. """ data = None if self.body is not None and self.body != b'': data = self.body return self.session.request(self.method, str(self.url), data=data, headers=self.header)
python
# @lc app=leetcode id=174 lang=python3
#
# [174] Dungeon Game
#
# https://leetcode.com/problems/dungeon-game/description/
#
# algorithms
# Hard (33.61%)
# Likes:    2439
# Dislikes: 50
# Total Accepted:    128.5K
# Total Submissions: 381.5K
# Testcase Example:  '[[-2,-3,3],[-5,-10,1],[10,30,-5]]'
#
# The demons had captured the princess and imprisoned her in the bottom-right
# corner of a dungeon. The dungeon consists of m x n rooms laid out in a 2D
# grid. Our valiant knight was initially positioned in the top-left room and
# must fight his way through dungeon to rescue the princess.
#
# The knight has an initial health point represented by a positive integer. If
# at any point his health point drops to 0 or below, he dies immediately.
#
# Some of the rooms are guarded by demons (represented by negative integers),
# so the knight loses health upon entering these rooms; other rooms are either
# empty (represented as 0) or contain magic orbs that increase the knight's
# health (represented by positive integers).
#
# To reach the princess as quickly as possible, the knight decides to move only
# rightward or downward in each step.
#
# Return the knight's minimum initial health so that he can rescue the
# princess.
#
# Note that any room can contain threats or power-ups, even the first room the
# knight enters and the bottom-right room where the princess is imprisoned.
#
#
# Example 1:
#
# Input: dungeon = [[-2,-3,3],[-5,-10,1],[10,30,-5]]
# Output: 7
# Explanation: The initial health of the knight must be at least 7 if he
# follows the optimal path: RIGHT -> RIGHT -> DOWN -> DOWN.
#
#
# Example 2:
#
# Input: dungeon = [[0]]
# Output: 1
#
#
# Constraints:
#
# m == dungeon.length
# n == dungeon[i].length
# 1 <= m, n <= 200
# -1000 <= dungeon[i][j] <= 1000
#
# @lc tags=binary-search;dynamic-programming

# @lc imports=start
from imports import *
# @lc imports=end

# @lc idea=start
#
# Dungeon game: a 2D grid, walked from the top-left to the bottom-right cell,
# where each cell adds to or subtracts from the knight's health. Find the
# minimum initial health required.
# Solve it directly with dynamic programming, filling the grid backwards from
# the bottom-right corner.
#
# @lc idea=end

# @lc group=
# @lc rank=

# @lc code=start
class Solution:
    def calculateMinimumHP(self, dungeon: List[List[int]]) -> int:
        rows = len(dungeon)
        cols = len(dungeon[0])

        # Seed the last column and the last row: those cells only need to
        # account for the single successor cell below or to the right.
        for i in reversed(range(rows - 1)):
            j = -1
            dungeon[i][j] += min(dungeon[i + 1][j], 0)
        for j in reversed(range(cols - 1)):
            i = -1
            dungeon[i][j] += min(dungeon[i][j + 1], 0)

        # Every other cell takes the better of its two successors.
        for i in reversed(range(rows - 1)):
            for j in reversed(range(cols - 1)):
                dungeon[i][j] += min(max(dungeon[i + 1][j], dungeon[i][j + 1]), 0)

        return max(0, -dungeon[0][0]) + 1

    pass
# @lc code=end

# @lc main=start
if __name__ == '__main__':
    print('Example 1:')
    print('Input : ')
    print('dungeon = [[-2,-3,3],[-5,-10,1],[10,30,-5]]')
    print('Exception :')
    print('7')
    print('Output :')
    print(str(Solution().calculateMinimumHP([[-2, -3, 3], [-5, -10, 1],
                                             [10, 30, -5]])))
    print()

    print('Example 2:')
    print('Input : ')
    print('dungeon = [[0]]')
    print('Exception :')
    print('1')
    print('Output :')
    print(str(Solution().calculateMinimumHP([[0]])))
    print()

    pass
# @lc main=end
python
import csv
import numpy as np
import tensorflow as tf
import cv2
import os
#import keras
#print(keras.__version__)
#print(tf.__version__)
from keras.models import Sequential
from keras.layers.core import Dense, Activation, Flatten, Dropout
from keras.layers import Conv2D
from keras.utils import to_categorical
from keras.layers.pooling import MaxPooling2D
from keras.preprocessing.image import load_img, img_to_array
from keras.callbacks import EarlyStopping
from keras.optimizers import SGD
from PIL import Image
import skimage.transform
#print(skimage.__version__)
#from copy import deepcopy
from sklearn.utils import shuffle
from sklearn import metrics
import matplotlib.pyplot as plt

fieldnames = ["image_name", "label"]
path = "/home/student/Desktop/CarND-Capstone-master/imgs/traffic_lights/"

image_names = []
labels = []
with open(path + "labels_final.csv") as f:
    reader = csv.reader(f)
    next(reader)
    for row in reader:
        image_names.append(row[0])
        lab = int(row[1])
        if lab > 2:
            labels.append(3)
        else:
            labels.append(lab)
print("imagenames read")

# this loop deletes old training images
for (dirpath, dirnamens, filenames) in os.walk(path + "imgs/"):
    for f in filenames:
        if (f[:-4] not in image_names):
            print(f[:-4])
            os.remove(path + "imgs/" + f)

image_names, labels = shuffle(image_names, labels)
print(set(labels))

#labels = 5*labels
#labels = 3*labels
labelsonehot = to_categorical(labels)
#labelsonehot = to_categorical(labels[0:100])

images = []
for img in image_names:
#for img in image_names[0:100]:
    #print(path+"imgs/"+img+".jpg")
    #i = cv2.imread(path+"imgs/"+img+".jpg", cv2.IMREAD_COLOR)
    #print(i.shape)
    #i = np.array(i / 255.0 - 0.5)
    #images.append(i)
    p = path + "imgs/" + img + ".jpg"
    #i = load_img(p, grayscale=False, color_mode="rgb", target_size=(60,80), interpolation="nearest")
    i = load_img(p, grayscale=False, target_size=(60, 80))
    i = img_to_array(i)
    #print(np.shape(i))
    i = np.array(i / 255.0)
    #print(np.shape(i))
    images.append(i)
    #print(np.max(i))
    #rot1 = skimage.transform.rotate(i, angle=-10, resize=False)
    #images.append(rot1)
    #rot2 = skimage.transform.rotate(i, angle=10, resize=False)
    #images.append(rot2)
    #rot3 = skimage.transform.rotate(i, angle=-5, resize=False)
    #images.append(rot3)
    #rot4 = skimage.transform.rotate(i, angle=5, resize=False)
    #images.append(rot4)
    #print(np.shape(rot1))
    #print()

#cv2.imwrite("/home/student/Desktop/CarND-Capstone-master/imgs/traffic_lights/first.jpg", images[0])
for k in range(10):
    plt.imshow(images[k])
    plt.savefig("/home/student/Desktop/CarND-Capstone-master/imgs/traffic_lights/" + str(k) + "_train.jpg")
print("images read")

#X_train = np.array([img for img in images])
X_train = np.array(images)
y_train = np.array(labelsonehot)
#print(y_train)
print(np.shape(X_train))
print(np.shape(y_train))

model = Sequential()
#model.add(Conv2D(32, kernel_size=(3,3), activation='relu', input_shape=(600, 800, 3)))
model.add(Conv2D(32, kernel_size=(3, 3), activation='relu', input_shape=(60, 80, 3)))
model.add(MaxPooling2D(pool_size=(2, 2)))
#model.add(Dropout(0.5))
model.add(Dropout(0.75))
model.add(Activation('relu'))
model.add(Flatten())
model.add(Dense(128))
model.add(Activation('relu'))
model.add(Dense(64))
model.add(Activation('relu'))
model.add(Dense(32))
model.add(Activation('relu'))
model.add(Dense(16))
model.add(Activation('relu'))
model.add(Dense(4))
model.add(Activation('softmax'))
print("model constructed")

#cw = 10.
#class_weight = {0:cw, 1:cw, 2:cw, 3:0.5}
cw = 10.
class_weight = {0: cw, 1: 2., 2: 8., 3: 0.5}
#cw = 8.
#class_weight = {0:cw, 1:cw, 2:cw, 3:1.}
print("cw: " + str(cw))

cb = EarlyStopping(monitor='val_acc', patience=3, restore_best_weights=True, verbose=1)
#opt = SGD(lr=0.01)
#opt = SGD(lr=0.03)
model.compile('adam', 'categorical_crossentropy', ['accuracy'])
#model.compile(opt, 'categorical_crossentropy', ['accuracy'])
history = model.fit(X_train, y_train, epochs=20, validation_split=0.125, verbose=2,
                    class_weight=class_weight, callbacks=[cb])
#history = model.fit(X_train, y_train, epochs=10, validation_split=0.15, verbose=2, class_weight=class_weight)
print("model fitted")

model.save("/home/student/Desktop/CarND-Capstone-master/ros/src/tl_detector/light_classification/clf.h5")
print("model saved")

acc = model.evaluate(X_train, y_train, verbose=1)
print("model evaluated")
print(acc)

y_pred = model.predict(X_train)
matrix = metrics.confusion_matrix(y_train.argmax(axis=1), y_pred.argmax(axis=1))
print("confusion matrix generated")
print(matrix)
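# A minimal inference sketch (hypothetical follow-up; the saved path and the
# 60x80 input size come from the training script above):
#   from keras.models import load_model
#   clf = load_model("/home/student/Desktop/CarND-Capstone-master/ros/src/"
#                    "tl_detector/light_classification/clf.h5")
#   probs = clf.predict(np.expand_dims(images[0], axis=0))  # shape (1, 4)
#   predicted_class = int(np.argmax(probs))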
python
import pickle
import gzip
import threading


def dump(object, filename, protocol=0, compresslevel=1, use_async=False):
    """Saves a compressed object to disk
    """
    def run():
        file = gzip.GzipFile(filename, 'wb', compresslevel=compresslevel)
        pickle_dump = pickle.dumps(object, protocol=protocol)
        file.write(pickle_dump)
        file.close()

    # `async` is a reserved word since Python 3.7, so the flag is named
    # `use_async`; when set, the write happens on a background thread.
    if use_async:
        threading.Thread(target=run).start()
    else:
        run()


def load(filename):
    """Loads a compressed object from disk
    """
    file = gzip.GzipFile(filename, 'rb')
    buffer = b''
    while True:
        data = file.read()
        if data == b'':
            break
        buffer += data
    object = pickle.loads(buffer)
    file.close()
    return object
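# Round-trip sketch (the path is illustrative):
#   dump({'answer': 42}, '/tmp/obj.pkl.gz', protocol=2, use_async=False)
#   restored = load('/tmp/obj.pkl.gz')
#   assert restored == {'answer': 42}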
python
# Lint as: python3 # Copyright 2019 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== r"""PointPillars implementation. [1] PointPillars. https://arxiv.org/abs/1812.05784 """ import enum import functools from lingvo import compat as tf from lingvo.core import base_layer from lingvo.core import layers from lingvo.core import optimizer from lingvo.core import py_utils from lingvo.tasks.car import builder_lib from lingvo.tasks.car import detection_3d_lib from lingvo.tasks.car import geometry from lingvo.tasks.car import point_detector import numpy as np def SparseToDense(grid_shape, locations, feats): """Converts a sparse representation back to the dense grid. Args: grid_shape: (nx, ny, nz). The shape of the grid. locations: [b, p, 3]. Locations of the pillars. feats: [b, p, fdims]. Extracted features for pillars. Returns: grid_feats of shape [b, nx, ny, nz * fdims]. """ nx, ny, nz = grid_shape b, p, _ = py_utils.GetShape(locations, 3) feats = py_utils.HasShape(feats, [b, p, -1]) _, _, fdims = py_utils.GetShape(feats, 3) indices = tf.concat( [tf.tile(tf.range(b)[:, tf.newaxis, tf.newaxis], [1, p, 1]), locations], axis=2) grid = tf.scatter_nd(indices, feats, [b, nx, ny, nz, fdims]) return tf.reshape(grid, [b, nx, ny, nz * fdims]) class PointsToGridFeaturizer(base_layer.BaseLayer): """Layer for processing points to grid outputs.""" @classmethod def Params(cls, num_laser_features, num_output_features=64): p = super().Params() p.Define('num_laser_features', num_laser_features, 'The number of (non-xyz) laser features of the input.') builder = Builder() total_num_laser_features = 9 + num_laser_features p.Define( 'featurizer', builder.Featurizer('feat', [total_num_laser_features, num_output_features]), 'Point cloud feature extractor.') return p def __init__(self, params): super().__init__(params) p = self.params self.CreateChild('featurizer', p.featurizer) def FProp(self, theta, input_batch): # pyformat: disable """Compute features for the pillars and convert them back to a dense grid. Args: theta: A `.NestedMap` object containing variable values of this task. input_batch: A `.NestedMap` object containing input tensors. Following keys are required: - grid_num_points: Integer tensor with shape [batch size, nx, ny, nz], where nx, ny, nz corresponds to the grid sizes (i.e., number of voxels in each axis dimension). - pillar_points: Float tensor with shape [batch size, num_pillars, num_points_per_pillar, 3 + num_laser_features] - pillar_centers: Float tensor with shape [batch size, num_pillars, num_points_per_pillar, 3] - pillar_locations: Float tensor with shape [batch size, num_pillars, 3] Returns: The dense features with shape [b, nx, ny, nz * fdims]. """ # pyformat: enable p = self.params bs, nx, ny, nz = py_utils.GetShape(input_batch.grid_num_points, 4) # Process points to concatenate a set of fixed features (e.g., # add means, centers, normalize points to means). 
num_features = 3 + p.num_laser_features pillar_points = py_utils.HasShape(input_batch.pillar_points, [bs, -1, -1, num_features]) _, npillars, npoints, _ = py_utils.GetShape(pillar_points, 4) pillar_xyz = pillar_points[..., :3] # Compute number of points per pillar and prepare for broadcasting. pillar_num_points = tf.gather_nd( input_batch.grid_num_points, input_batch.pillar_locations, batch_dims=1) pillar_num_points = pillar_num_points[..., tf.newaxis, tf.newaxis] # Compute mean by computing sum and dividing by number of points. Clip the # denominator by 1.0 to gracefully handle empty pillars. pillar_sum = tf.reduce_sum(pillar_xyz, axis=2, keepdims=True) pillar_means = pillar_sum / tf.maximum( tf.cast(pillar_num_points, tf.float32), 1.0) pillar_feats = pillar_points[..., 3:] pillar_centers = py_utils.HasShape(input_batch.pillar_centers, [bs, -1, 1, 3]) pillar_concat = tf.concat( axis=3, values=[ pillar_xyz - pillar_means, pillar_feats, tf.tile(pillar_means, [1, 1, npoints, 1]), tf.tile(pillar_centers, [1, 1, npoints, 1]) ]) # Featurize pillars. pillar_features = self.featurizer.FProp(theta.featurizer, pillar_concat) # Convert back to the dense grid. pillar_locations = py_utils.HasShape(input_batch.pillar_locations, [bs, npillars, 3]) dense_features = SparseToDense( grid_shape=(nx, ny, nz), locations=pillar_locations, feats=pillar_features) return dense_features # pyformat: disable class Builder(builder_lib.ModelBuilderBase): """Builder for the Pillars model.""" def __init__(self): super().__init__() self.conv_init_method = builder_lib.KaimingUniformFanInRelu self.linear_params_init = py_utils.WeightInit.KaimingUniformFanInRelu() self.bn_params_init = py_utils.WeightInit.UniformPositive() def Featurizer(self, name, dims): return self._Seq( name, self._MLP('mlp', dims), self._Max('max')) def _Deconv(self, name, filter_shape, stride): return layers.DeconvLayer.Params().Set( name=name, filter_shape=filter_shape, filter_stride=(stride, stride)) def _Block(self, name, stride, repeats, idims, odims, activation=None): """[1]. Sec 2.2.""" return self._Seq( name, self._Conv('c3x3', (3, 3, idims, odims), stride, activation=activation), self._Rep( 'rep', repeats, self._Conv('c3x3', (3, 3, odims, odims), activation=activation)), self._Fetch('final')) def _TopDown(self, name, strides=(2, 2, 2), channel_multiplier=1, activation=None): """[1]. Sec 2.2.""" if len(strides) != 3: raise ValueError('`strides` expected to be list/tuple of len 3.') return self._Seq( name, self._Block('b0', strides[0], 3, channel_multiplier * 64, channel_multiplier * 64, activation), self._Block('b1', strides[1], 5, channel_multiplier * 64, channel_multiplier * 128, activation), self._Block('b2', strides[2], 5, channel_multiplier * 128, channel_multiplier * 256, activation)) def _Upsample(self, name, stride, idims, odims, activation=None): """[1]. Sec 2.2.""" # Match the kernel size to the stride in order to ensure that the output # activation map has no holes and to minimize any checkerboard artifacts. # TODO(shlens): Consider replacing this in the future with a bilinear # interpolation followed by a 3x3 convolution. 
kernel = stride return self._Seq( name, self._Deconv('deconv', (kernel, kernel, odims, idims), stride), self._BN('bn', odims), self._Activation('activation', activation)) def Contract(self, down_strides=(2, 2, 2), channel_multiplier=1, activation=None): """Contracting part of [1] Sec 2.2.""" return self._Branch( 'branch', self._TopDown('topdown', strides=down_strides, channel_multiplier=channel_multiplier, activation=activation), ['b1.final', 'b0.final']) def Expand(self, odims, channel_multiplier=1, activation=None): """Expanding part of [1] Sec 2.2.""" # Note that the resulting output will be 3*odims return self._Concat( 'concat', self._Seq( 'b2', self._ArgIdx('idx', [0]), self._Upsample('ups', 4, channel_multiplier * 256, odims, activation)), self._Seq( 'b1', self._ArgIdx('idx', [1]), self._Upsample('ups', 2, channel_multiplier * 128, odims, activation)), self._Seq( 'b0', self._ArgIdx('idx', [2]), self._Upsample('ups', 1, channel_multiplier * 64, odims, activation))) def Backbone(self, odims, down_strides=(2, 2, 2), channel_multiplier=1, activation=None): """[1]. Sec 2.2.""" # We assume (H, W) are multiple of 8. So that we can concat # multiple-scale feature maps together after upsample. return self._Seq( 'backbone', self.Contract(down_strides, channel_multiplier=channel_multiplier, activation=activation), self.Expand(odims, channel_multiplier=channel_multiplier, activation=activation)) def Detector(self, name, idims, odims, conv_init_method=None, bias_params_init=None): # Implemented according to VoxelNet # https://arxiv.org/pdf/1711.06396.pdf # May add more Conv2D layers before predictor for better performance. return self._Seq( name, self._ConvPlain('predict', (3, 3, idims, odims), conv_init_method=conv_init_method), self._Bias('predict_bias', odims, bias_params_init)) # pyformat: enable class LossNormType(enum.Enum): NO_NORM = 0 NORM_BY_NUM_POSITIVES = 1 class ModelV1(point_detector.PointDetectorBase): """PointPillars model. Base class implements common Decoder functions, though they can be overridden if desired. """ NUM_OUTPUT_CHANNELS = 128 @classmethod def Params(cls, grid_size_z=1, num_anchors=2, num_classes=1, num_laser_features=1): p = super().Params(num_classes=num_classes) p.Define('grid_size_z', grid_size_z, 'The grid size along the z-axis.') p.Define('num_anchors', num_anchors, 'The number of anchor boxes.') p.Define('num_laser_features', num_laser_features, 'The number of (non-xyz) laser features of the input.') p.Define('input_featurizer', PointsToGridFeaturizer.Params(num_laser_features), 'Point cloud feature extractor.') builder = Builder() p.Define('backbone', builder.Backbone(cls.NUM_OUTPUT_CHANNELS), 'Dense features pyramid.') # Backbone() concatenates 3 different scales of features. idims = 3 * cls.NUM_OUTPUT_CHANNELS # 7: predicted (dx, dy, dz, dw, dl, dh, dt). class_odims = grid_size_z * num_anchors * num_classes reg_odims = grid_size_z * num_anchors * 7 rot_odims = grid_size_z * num_anchors * 2 # Although theoretically a single conv layer can generate both the # regression and classification logits, we try to implement the paper # faithfully, which uses two different layers. 
    p.Define('class_detector', builder.Detector('class', idims, class_odims),
             'Dense features to class logits.')
    p.Define('regression_detector', builder.Detector('reg', idims, reg_odims),
             'Dense features to regression logits.')
    p.Define('direction_classifier', builder.Detector('dir', idims, rot_odims),
             'Dense features to rotation direction classifier.')
    # We disable the direction classifier by default since it has
    # weird discontinuous optimization objectives around the threshold
    # and it doesn't improve mAP.
    p.Define(
        'direction_classifier_weight', 0.0,
        'If > 0, adds a direction classifier to the model and adds '
        'to the total loss with this weight.')
    p.Define(
        'direction_aware_rot_loss', False, 'If True, changes the heading loss '
        'from sin(theta_delta) to WrapAngleRad(theta_delta), which makes the '
        'model produce headings between [-pi to pi].')
    p.Define(
        'squash_rotation_predictions', False,
        'Apply tanh squashing to rotation predictions to ensure outputs '
        'are between (-pi, pi).')
    p.Define('focal_loss_alpha', 0.25, 'The alpha parameter in focal loss '
             '(see paper eq. 4).')
    p.Define('focal_loss_gamma', 2.0, 'The gamma parameter in focal loss '
             '(see paper eq. 4).')
    p.Define(
        'localization_loss_weight', 2.0,
        'Localization loss weight factor between localization and '
        'class loss contributions.')
    p.Define(
        'classification_loss_weight', 1.0,
        'Classification loss weight factor between localization and '
        'class loss contributions.')
    p.Define(
        'location_loss_weight', 1.0,
        'Weight multiplier for contribution of location loss '
        'to full localization/regression loss')
    p.Define(
        'dimension_loss_weight', 1.0,
        'Weight multiplier for contribution of dimension loss '
        'to full localization/regression loss')
    p.Define(
        'rotation_loss_weight', 1.0,
        'Weight multiplier for contribution of rotation loss '
        'to full localization/regression loss')
    p.Define('loss_norm_type', LossNormType.NORM_BY_NUM_POSITIVES,
             'Normalization function for class and regularization weights.')
    p.Define('oracle_location', False,
             'If true, the model predicts the ground truth for location.')
    p.Define('oracle_dimension', False,
             'If true, the model predicts the ground truth for dimension.')
    p.Define('oracle_rotation', False,
             'If true, the model predicts the ground truth for rotation.')

    tp = p.train
    tp.learning_rate = 0.001
    tp.optimizer = optimizer.Momentum.Params().Set(alpha=0.9)
    return p

  def __init__(self, params):
    super().__init__(params)
    p = self.params
    self._utils = detection_3d_lib.Utils3D()

    self.CreateChild('input_featurizer', p.input_featurizer)
    self.CreateChild('backbone', p.backbone)
    self.CreateChild('class_detector', p.class_detector)
    self.CreateChild('regression_detector', p.regression_detector)
    if p.direction_classifier_weight > 0.0:
      self.CreateChild('direction_classifier', p.direction_classifier)

  def ComputePredictions(self, theta, input_batch):
    """Computes predictions for `input_batch`.

    Args:
      theta: A `.NestedMap` object containing variable values of this task.
      input_batch: A `.NestedMap` object containing input tensors to this
        tower.

    Returns:
      A `.NestedMap` contains
        logits - [b, nx, ny, nz, na, 7 + num_classes]
    """
    p = self.params
    input_batch.Transform(lambda x: (x.shape, x.shape.num_elements())).VLog(
        0, 'input_batch shapes: ')

    # Make pillars representation from input_batch.
    dense_features = self.input_featurizer.FProp(theta.input_featurizer,
                                                 input_batch)

    # Backbone
    tf.logging.vlog(1, 'dense_features.shape = %s', dense_features.shape)
    act = self.backbone.FProp(theta.backbone, dense_features)
    tf.logging.vlog(1, 'act.shape = %s', act.shape)

    # Convert the output of the backbone into class logits and regression
    # residuals using two different layers.
    class_detection = self.class_detector.FProp(theta.class_detector, act)
    reg_detection = self.regression_detector.FProp(theta.regression_detector,
                                                   act)
    bs, nx, ny, _ = py_utils.GetShape(class_detection, 4)
    predicted_classification_logits = tf.reshape(
        class_detection,
        [bs, nx, ny, p.grid_size_z, p.num_anchors, p.num_classes])
    predicted_residuals = tf.reshape(
        reg_detection, [bs, nx, ny, p.grid_size_z, p.num_anchors, 7])

    if p.squash_rotation_predictions:
      predicted_rotations = predicted_residuals[..., 6:]
      predicted_rotations = np.pi * tf.tanh(predicted_rotations)
      predicted_residuals = tf.concat(
          [predicted_residuals[..., :6], predicted_rotations], axis=-1)

    if p.oracle_location or p.oracle_dimension or p.oracle_rotation:
      gt_residuals = py_utils.HasShape(
          input_batch.anchor_localization_residuals,
          [bs, nx, ny, p.grid_size_z, p.num_anchors, 7])

      # Replace the predicted components with the ground truth if needed.
      if p.oracle_location:
        location = gt_residuals[..., 0:3]
      else:
        location = predicted_residuals[..., 0:3]

      if p.oracle_dimension:
        dimension = gt_residuals[..., 3:6]
      else:
        dimension = predicted_residuals[..., 3:6]

      if p.oracle_rotation:
        rotation = gt_residuals[..., 6:]
      else:
        rotation = predicted_residuals[..., 6:]
      predicted_residuals = tf.concat([location, dimension, rotation], axis=-1)

    ret = py_utils.NestedMap({
        'residuals': predicted_residuals,
        'classification_logits': predicted_classification_logits,
    })

    if p.direction_classifier_weight > 0.0:
      predicted_dir = self.direction_classifier.FProp(
          theta.direction_classifier, act)
      predicted_dir = tf.reshape(
          predicted_dir, [bs, nx, ny, p.grid_size_z, p.num_anchors, 2])
      ret.predicted_dir = predicted_dir

    return ret

  def _ComputeClassificationLoss(self, predictions, input_batch,
                                 class_weights):
    """Compute classification loss for the given predictions.

    Args:
      predictions: The output of `ComputePredictions`, contains: logits - [b,
        nx, ny, nz, na, 7 + num_classes]. na is the number of anchor boxes per
        cell. [..., :7] are (dx, dy, dz, dw, dl, dh, dt).
      input_batch: The input batch from which we access the ground truth.
      class_weights: Per-class weights to use in loss computation.

    Returns:
      Classification loss.
    """
    p = self.params
    predicted_class_logits = py_utils.HasShape(
        predictions.classification_logits,
        [-1, -1, -1, -1, p.num_anchors, p.num_classes])
    bs, nx, ny, nz, na, _ = py_utils.GetShape(predicted_class_logits, 6)
    assigned_gt_labels = py_utils.HasShape(input_batch.assigned_gt_labels,
                                           [bs, nx, ny, nz, na])
    class_loss = py_utils.SigmoidCrossEntropyFocalLoss(
        logits=predicted_class_logits,
        labels=tf.one_hot(assigned_gt_labels, p.num_classes),
        alpha=p.focal_loss_alpha,
        gamma=p.focal_loss_gamma)
    class_loss *= class_weights[..., tf.newaxis]
    class_loss_sum = tf.reduce_sum(class_loss)
    return class_loss_sum

  def ComputeLoss(self, theta, predictions, input_batch):
    """Computes loss and other metrics for the given predictions.

    Args:
      theta: A `.NestedMap` object containing variable values of this task.
      predictions: The output of `ComputePredictions`, contains: logits - [b,
        nx, ny, nz, na, 7 + num_classes]. na is the number of anchor boxes per
        cell. [..., :7] are (dx, dy, dz, dw, dl, dh, dt).
      input_batch: The input batch from which we access the ground truth.

    Returns:
      Two dicts defined as BaseTask.ComputeLoss.
    """
    p = self.params
    predicted_residuals = py_utils.HasShape(predictions.residuals,
                                            [-1, -1, -1, -1, p.num_anchors, 7])
    predicted_class_logits = py_utils.HasShape(
        predictions.classification_logits,
        [-1, -1, -1, -1, p.num_anchors, p.num_classes])
    bs, nx, ny, nz, na, _ = py_utils.GetShape(predicted_class_logits, 6)

    # Compute class and regression weights.
    class_weights = input_batch.assigned_cls_mask
    class_weights = py_utils.HasShape(class_weights, [bs, nx, ny, nz, na])
    reg_weights = input_batch.assigned_reg_mask
    reg_weights = py_utils.HasShape(reg_weights, [bs, nx, ny, nz, na])
    reg_weights = tf.expand_dims(reg_weights, -1)

    if p.loss_norm_type == LossNormType.NORM_BY_NUM_POSITIVES:
      # Compute number of positive anchors per example.
      foreground_mask = py_utils.HasShape(input_batch.assigned_reg_mask,
                                          [bs, nx, ny, nz, na])
      # Sum to get the number of foreground anchors for each example.
      loss_normalization = tf.reduce_sum(foreground_mask, axis=[1, 2, 3, 4])
      loss_normalization = tf.maximum(loss_normalization,
                                      tf.ones_like(loss_normalization))
      # Reshape for broadcasting.
      loss_normalization = tf.reshape(loss_normalization, [bs, 1, 1, 1, 1, 1])

      class_weights /= loss_normalization
      reg_weights /= loss_normalization

    # Classification loss.
    class_loss_sum = self._ComputeClassificationLoss(predictions, input_batch,
                                                     class_weights)

    # Regression loss.
    anchor_localization_residuals = py_utils.HasShape(
        input_batch.anchor_localization_residuals, [bs, nx, ny, nz, na, 7])

    # Location and dimensions loss.
    reg_loc_and_dims_loss = self._utils.ScaledHuberLoss(
        predictions=py_utils.HasShape(predicted_residuals[..., :6],
                                      [bs, nx, ny, nz, na, 6]),
        labels=anchor_localization_residuals[..., :6],
        delta=1 / (3.**2))

    # Rotation loss is computed on a transform on rot_delta. For a direction
    # aware loss, we simply wrap the angles to -pi to pi; for a loss that is
    # symmetric to direction (i.e., rotating by pi), we use a sin transform.
    rot_delta_transform = tf.sin
    if p.direction_aware_rot_loss:
      rot_delta_transform = functools.partial(
          geometry.WrapAngleRad, min_val=-np.pi, max_val=np.pi)

    rot_delta = (
        predicted_residuals[..., 6:] - anchor_localization_residuals[..., 6:])
    reg_rot_loss = self._utils.ScaledHuberLoss(
        predictions=rot_delta_transform(rot_delta),
        labels=tf.zeros_like(rot_delta),
        delta=1 / (3.**2))

    # Direction loss
    if p.direction_classifier_weight > 0.0:
      # The target rotations are in the assigned_gt_bbox tensor,
      # which already has assigned a gt bounding box to every anchor.
      rot_target = input_batch.assigned_gt_bbox[..., 6]
      # If rotation is > 0, the class is 1, else it is 0.
      rot_dir = tf.cast(rot_target > 0., tf.int32)

      # Compute one-hot labels as a target.
      rot_dir_onehot = tf.one_hot(rot_dir, 2)

      # Manually handle loss reduction.
      dir_loss = tf.losses.softmax_cross_entropy(
          onehot_labels=rot_dir_onehot,
          logits=predictions.predicted_dir,
          weights=tf.squeeze(reg_weights, axis=-1),
          reduction=tf.losses.Reduction.NONE)
      # Reduce across all dimensions (we'll divide by the batch size below).
      dir_loss_sum = tf.reduce_sum(dir_loss)
    else:
      dir_loss_sum = 0.0

    # Compute loss contribution from location and dimension separately.
    reg_loc_loss = reg_loc_and_dims_loss[..., :3] * reg_weights
    reg_loc_loss_sum = tf.reduce_sum(reg_loc_loss)

    reg_dim_loss = reg_loc_and_dims_loss[..., 3:6] * reg_weights
    reg_dim_loss_sum = tf.reduce_sum(reg_dim_loss)

    # Compute rotation loss contribution.
    reg_rot_loss *= reg_weights
    reg_rot_loss_sum = tf.reduce_sum(reg_rot_loss)

    # Num. predictions.
    # TODO(zhifengc): Consider other normalization factors. E.g., # of bboxes.
    preds = tf.cast(bs, class_loss_sum.dtype)

    # Normalize all of the components by batch size.
    reg_loc_loss = reg_loc_loss_sum / preds
    reg_dim_loss = reg_dim_loss_sum / preds
    reg_rot_loss = reg_rot_loss_sum / preds
    class_loss = class_loss_sum / preds
    dir_loss = dir_loss_sum / preds

    # Compute total localization regression loss.
    reg_loss = (
        p.location_loss_weight * reg_loc_loss +
        p.dimension_loss_weight * reg_dim_loss +
        p.rotation_loss_weight * reg_rot_loss)

    # Apply weights to normalized class losses.
    loss = (
        class_loss * p.classification_loss_weight +
        reg_loss * p.localization_loss_weight +
        dir_loss * p.direction_classifier_weight)

    metrics_dict = {
        'loss': (loss, preds),
        'loss/class': (class_loss, preds),
        'loss/reg': (reg_loss, preds),
        'loss/reg/rot': (reg_rot_loss, preds),
        'loss/reg/loc': (reg_loc_loss, preds),
        'loss/reg/dim': (reg_dim_loss, preds),
        'loss/dir': (dir_loss, preds),
    }

    # Calculate dimension errors. Note: `self._utils` is the Utils3D instance
    # created in __init__; the same helper is used in _BBoxesAndLogits below.
    min_angle_rad = -np.pi if p.direction_aware_rot_loss else 0
    gt_bboxes = self._utils.ResidualsToBBoxes(
        input_batch.anchor_bboxes,
        anchor_localization_residuals,
        min_angle_rad=min_angle_rad,
        max_angle_rad=np.pi)
    predicted_bboxes = self._utils.ResidualsToBBoxes(
        input_batch.anchor_bboxes,
        predicted_residuals,
        min_angle_rad=min_angle_rad,
        max_angle_rad=np.pi)
    dimension_errors_dict = self._BBoxDimensionErrors(gt_bboxes,
                                                      predicted_bboxes,
                                                      reg_weights)
    metrics_dict.update(dimension_errors_dict)

    per_example_dict = {
        'residuals': predicted_residuals,
        'classification_logits': predicted_class_logits,
    }
    return metrics_dict, per_example_dict

  def _BBoxesAndLogits(self, input_batch, predictions):
    """Decode an input batch, computing predicted bboxes from residuals."""
    p = self.params

    # Decode residuals.
    min_angle_rad = -np.pi if p.direction_aware_rot_loss else 0
    predicted_bboxes = self._utils.ResidualsToBBoxes(
        input_batch.anchor_bboxes,
        predictions.residuals,
        min_angle_rad=min_angle_rad,
        max_angle_rad=np.pi)

    # predicted_bboxes is a [batch, nx, ny, nz, na, 7] Tensor.
    batch_size, nx, ny, nz, na, _ = py_utils.GetShape(predicted_bboxes, 6)
    num_boxes = nx * ny * nz * na

    # Reshape to [batch_size, num_boxes, 7].
    predicted_bboxes = tf.reshape(predicted_bboxes,
                                  [batch_size, num_boxes, 7])

    classification_logits = tf.reshape(predictions.classification_logits,
                                       [batch_size, num_boxes, -1])

    return py_utils.NestedMap({
        'predicted_bboxes': predicted_bboxes,
        'classification_logits': classification_logits
    })
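
# --- Minimal sketch (not from the original module) ---
# The featurizer above averages pillar coordinates by dividing a per-pillar
# sum by a point count clipped at 1.0, so empty (fully padded) pillars yield
# zeros instead of NaNs. The toy tensors below are hypothetical and exist
# only to illustrate that trick in isolation.
import tensorflow as tf

points = tf.constant([[[1., 2., 3.], [3., 4., 5.]],    # pillar with 2 points
                      [[0., 0., 0.], [0., 0., 0.]]])   # empty, zero-padded pillar
num_points = tf.constant([2., 0.])[:, tf.newaxis]      # true point count per pillar
# Clipping the denominator turns 0/0 (empty pillar) into 0/1 = 0.
means = tf.reduce_sum(points, axis=1) / tf.maximum(num_points, 1.0)
# means == [[2., 3., 4.], [0., 0., 0.]]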
python
"""Controller for ingest and parsing of character files""" import logging import re from configparser import ConfigParser from pathlib import Path class CharfileIngest: HEADER_PATTERN = r"\bLocation\sName\sID\sCount\sSlots\b" ROW_PATTERN = r"^.*?\s.*?\s[0-9]*?\s[0-9]*?\s[0-9]*?$" def __init__(self, config: ConfigParser) -> None: self.log = logging.getLogger(__name__) self.config = config self.filepath = Path(config["CHARACTERS"]["file_path"]).absolute() self._charfile: dict[str, str] = {"filename": "", "content": ""} def process_webform(self, webform_content: str) -> dict[str, str]: """Returns filename:content on success, empty dict on failure""" filename = self.extract_filename(webform_content) content = self.extract_content(webform_content) charfile = {"filename": filename, "content": content} self._charfile = charfile return self._charfile.copy() if filename and content else {"error": "Invalid"} def extract_filename(self, webform_content: str) -> str: """Extract filename from webform, returns empty string on failure""" result = re.search(r'filename="(.*?)"', webform_content) return self._rpl_spaces(result.group(1)) if result is not None else "" def extract_content(self, webform_content: str) -> str: """Extract file body from webform, returns empty string on failure""" headers = re.findall(self.HEADER_PATTERN, webform_content) rows: list[str] = [] for line in webform_content.split("\n"): if re.match(self.ROW_PATTERN, line): rows.append(line) if not headers or not rows: return "" rows.insert(0, headers[0]) return "\n".join(rows) def save_to_file(self) -> bool: """Saves loaded charfile(s) to disk""" try: with open(self.filepath / self._charfile["filename"], "w") as outfile: outfile.write(self._charfile["content"]) except OSError as err: self.log.error("Failed to save '%s' : %s", self._charfile["filename"], err) return False return True @staticmethod def _rpl_spaces(string: str) -> str: """Replaces spaces with underscores""" string = re.sub(r"\s", "_", string.strip()) return re.sub(r"_-_", "-", string)
python
#!/usr/bin/env python

# Copyright 2021 Google LLC All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Google Analytics Admin API sample application which prints the iOS app
data stream details.

See https://developers.google.com/analytics/devguides/config/admin/v1/rest/v1alpha/properties.iosAppDataStreams/get
for more information.
"""
# [START analyticsadmin_properties_ios_app_data_streams_get]
from google.analytics.admin import AnalyticsAdminServiceClient


def run_sample():
    """Runs the sample."""
    # TODO(developer): Replace this variable with your Google Analytics 4
    #  property ID (e.g. "123456") before running the sample.
    property_id = "YOUR-GA4-PROPERTY-ID"

    # TODO(developer): Replace this variable with your iOS app data stream ID
    #  (e.g. "123456") before running the sample.
    stream_id = "YOUR-IOS-APP-DATA-STREAM-ID"

    get_ios_app_data_stream(property_id, stream_id)


def get_ios_app_data_stream(property_id, stream_id):
    """Retrieves the details for the iOS app data stream."""
    client = AnalyticsAdminServiceClient()
    ios_app_data_stream = client.get_ios_app_data_stream(
        name=f"properties/{property_id}/iosAppDataStreams/{stream_id}"
    )

    print("Result:")
    print_ios_app_data_stream(ios_app_data_stream)


def print_ios_app_data_stream(ios_app_data_stream):
    """Prints the iOS app data stream details."""
    print(f"Resource name: {ios_app_data_stream.name}")
    print(f"Display name: {ios_app_data_stream.display_name}")
    print(f"Firebase app ID: {ios_app_data_stream.firebase_app_id}")
    print(f"Bundle ID: {ios_app_data_stream.bundle_id}")
    print(f"Create time: {ios_app_data_stream.create_time}")
    print(f"Update time: {ios_app_data_stream.update_time}")


# [END analyticsadmin_properties_ios_app_data_streams_get]

if __name__ == "__main__":
    run_sample()
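
# --- Hedged sketch (not part of the original sample) ---
# The call above raises if the stream does not exist; a caller may want to
# handle that case explicitly. `google.api_core.exceptions.NotFound` is the
# exception generated GAPIC clients surface for HTTP 404s. The wrapper name
# below is hypothetical.
from google.api_core.exceptions import NotFound


def get_ios_app_data_stream_safe(property_id, stream_id):
    """Like get_ios_app_data_stream, but reports a missing stream gracefully."""
    try:
        get_ios_app_data_stream(property_id, stream_id)
    except NotFound:
        print(f"No iOS app data stream {stream_id!r} under property {property_id!r}.")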
python
import os

import motor.motor_asyncio as motor
import pymongo
import pytest


# We can either be on the host or in the docker-compose network
def pytest_addoption(parser):
    parser.addoption(
        "--in-docker-compose",
        action="store",
        default="",
        help="Assume inside a docker network",
    )


@pytest.fixture(scope="session")
def in_docker_compose(request):
    """
    Gets command line argument `--in-docker-compose`
    """
    return request.config.getoption("--in-docker-compose")


@pytest.fixture(scope="session")
def docker_compose_files(in_docker_compose, pytestconfig):
    """
    This fixture provides support for `cloudbuild`.

    By passing the command line argument `--in-docker-compose=cloudbuild`,
    uses `docker-compose.cloudbuild.yml`.
    """
    dc_type = f".{in_docker_compose}" if in_docker_compose else ""
    dc_file = f"docker-compose{dc_type}.yml"
    return [os.path.join(os.path.dirname(__file__), dc_file)]


def make_url(host: str, port: int) -> str:
    return f"mongodb://{host}:{port}/"


def wait_for_db(host: str, port: int) -> bool:
    """Health check: ping the server synchronously.

    Merely constructing an AsyncIOMotorClient never touches the network, so
    it cannot tell whether the database is up; instead we issue a real `ping`
    with a short timeout via pymongo (which motor is built on).
    """
    url = make_url(host=host, port=port)
    try:
        client = pymongo.MongoClient(url, serverSelectionTimeoutMS=1000)
        client.admin.command("ping")
        return True
    except Exception:
        return False


@pytest.fixture(scope="function")
def db_mongodb(in_docker_compose, docker_services):
    """
    Provides the `db_mongodb` fixture, which gives you a `motor` test
    database instance for MongoDB::

        @pytest.fixture
        def db_with_schema(db_mongodb):
            fill_database(db_mongodb)
            return db_mongodb
    """
    docker_services.start("db")
    if in_docker_compose:
        port = 27017
        # Ugly, but lovely-pytest-docker throws unnecessary exceptions
        # otherwise.
        docker_services.wait_until_responsive(
            timeout=30.0, pause=0.1, check=lambda: wait_for_db("db", port)
        )
    else:
        port = docker_services.wait_for_service("db", 27017, check_server=wait_for_db)

    host = "db" if in_docker_compose else "localhost"
    url = make_url(host=host, port=port)
    client = motor.AsyncIOMotorClient(url)

    # Yield a test database, then drop it on teardown. Note that motor's
    # drop_database() is a coroutine and would silently do nothing if called
    # without awaiting, so we use a short-lived synchronous client instead.
    yield client["test"]
    pymongo.MongoClient(url).drop_database("test")
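
# --- Hedged usage sketch (not from the original conftest) ---
# An example test consuming the `db_mongodb` fixture above. It assumes the
# project runs async tests via pytest-asyncio (hence the marker); the
# collection name is hypothetical.
import pytest


@pytest.mark.asyncio
async def test_insert_roundtrip(db_mongodb):
    result = await db_mongodb["items"].insert_one({"name": "widget"})
    doc = await db_mongodb["items"].find_one({"_id": result.inserted_id})
    assert doc["name"] == "widget"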
python