file_name
large_stringlengths 4
140
| prefix
large_stringlengths 0
39k
| suffix
large_stringlengths 0
36.1k
| middle
large_stringlengths 0
29.4k
| fim_type
large_stringclasses 4
values |
---|---|---|---|---|
udp-multicast.rs
|
use std::{env, str};
use std::net::{UdpSocket, Ipv4Addr};
fn main() {
let mcast_group: Ipv4Addr = "239.0.0.1".parse().unwrap();
let port: u16 = 6000;
let any = "0.0.0.0".parse().unwrap();
let mut buffer = [0u8; 1600];
if env::args().count() > 1
|
else {
let socket = UdpSocket::bind((any, 0)).expect("Could not bind socket");
socket.send_to("Hello world!".as_bytes(), &(mcast_group, port)).expect("Failed to write data");
}
}
|
{
let socket = UdpSocket::bind((any, port)).expect("Could not bind client socket");
socket.join_multicast_v4(&mcast_group, &any).expect("Could not join multicast group");
socket.recv_from(&mut buffer).expect("Failed to write to server");
print!("{}", str::from_utf8(&buffer).expect("Could not write buffer as string"));
}
|
conditional_block
|
udp-multicast.rs
|
use std::{env, str};
use std::net::{UdpSocket, Ipv4Addr};
fn main()
|
{
let mcast_group: Ipv4Addr = "239.0.0.1".parse().unwrap();
let port: u16 = 6000;
let any = "0.0.0.0".parse().unwrap();
let mut buffer = [0u8; 1600];
if env::args().count() > 1 {
let socket = UdpSocket::bind((any, port)).expect("Could not bind client socket");
socket.join_multicast_v4(&mcast_group, &any).expect("Could not join multicast group");
socket.recv_from(&mut buffer).expect("Failed to write to server");
print!("{}", str::from_utf8(&buffer).expect("Could not write buffer as string"));
} else {
let socket = UdpSocket::bind((any, 0)).expect("Could not bind socket");
socket.send_to("Hello world!".as_bytes(), &(mcast_group, port)).expect("Failed to write data");
}
}
|
identifier_body
|
|
bgp.py
|
# Copyright (C) 2014 Nippon Telegraph and Telephone Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Utilities related to bgp data types and models.
"""
import logging
import socket
from ryu.lib.packet.bgp import (
BGPUpdate,
RF_IPv4_UC,
RF_IPv6_UC,
RF_IPv4_VPN,
RF_IPv6_VPN,
RF_L2_EVPN,
RF_RTC_UC,
RouteTargetMembershipNLRI,
BGP_ATTR_TYPE_MULTI_EXIT_DISC,
BGPPathAttributeMultiExitDisc,
BGPPathAttributeMpUnreachNLRI,
BGPPathAttributeAs4Path,
BGPPathAttributeAs4Aggregator,
BGPPathAttributeUnknown,
BGP_ATTR_FLAG_OPTIONAL,
BGP_ATTR_FLAG_TRANSITIVE,
)
from ryu.services.protocols.bgp.info_base.rtc import RtcPath
from ryu.services.protocols.bgp.info_base.ipv4 import Ipv4Path
from ryu.services.protocols.bgp.info_base.ipv6 import Ipv6Path
from ryu.services.protocols.bgp.info_base.vpnv4 import Vpnv4Path
from ryu.services.protocols.bgp.info_base.vpnv6 import Vpnv6Path
from ryu.services.protocols.bgp.info_base.evpn import EvpnPath
LOG = logging.getLogger('utils.bgp')
# RouteFmaily to path sub-class mapping.
_ROUTE_FAMILY_TO_PATH_MAP = {RF_IPv4_UC: Ipv4Path,
RF_IPv6_UC: Ipv6Path,
RF_IPv4_VPN: Vpnv4Path,
RF_IPv6_VPN: Vpnv6Path,
RF_L2_EVPN: EvpnPath,
RF_RTC_UC: RtcPath}
def create_path(src_peer, nlri, **kwargs):
route_family = nlri.ROUTE_FAMILY
assert route_family in _ROUTE_FAMILY_TO_PATH_MAP.keys()
path_cls = _ROUTE_FAMILY_TO_PATH_MAP.get(route_family)
return path_cls(src_peer, nlri, src_peer.version_num, **kwargs)
def clone_path_and_update_med_for_target_neighbor(path, med):
assert path and med
route_family = path.route_family
if route_family not in _ROUTE_FAMILY_TO_PATH_MAP.keys():
raise ValueError('Clone is not supported for address-family %s' %
route_family)
path_cls = _ROUTE_FAMILY_TO_PATH_MAP.get(route_family)
pattrs = path.pathattr_map
pattrs[BGP_ATTR_TYPE_MULTI_EXIT_DISC] = BGPPathAttributeMultiExitDisc(med)
return path_cls(
path.source, path.nlri, path.source_version_num,
pattrs=pattrs, nexthop=path.nexthop,
is_withdraw=path.is_withdraw,
med_set_by_target_neighbor=True
)
def clone_rtcpath_update_rt_as(path, new_rt_as):
"""Clones given RT NLRI `path`, and updates it with new RT_NLRI AS.
Parameters:
- `path`: (Path) RT_NLRI path
- `new_rt_as`: AS value of cloned paths' RT_NLRI
"""
assert path and new_rt_as
if not path or path.route_family != RF_RTC_UC:
raise ValueError('Expected RT_NLRI path')
old_nlri = path.nlri
new_rt_nlri = RouteTargetMembershipNLRI(new_rt_as, old_nlri.route_target)
return RtcPath(path.source, new_rt_nlri, path.source_version_num,
pattrs=path.pathattr_map, nexthop=path.nexthop,
|
def from_inet_ptoi(bgp_id):
"""Convert an IPv4 address string format to a four byte long.
"""
four_byte_id = None
try:
packed_byte = socket.inet_pton(socket.AF_INET, bgp_id)
four_byte_id = int(packed_byte.encode('hex'), 16)
except ValueError:
LOG.debug('Invalid bgp id given for conversion to integer value %s',
bgp_id)
return four_byte_id
def get_unknown_opttrans_attr(path):
"""Utility method that gives a `dict` of unknown and unsupported optional
transitive path attributes of `path`.
Returns dict: <key> - attribute type code, <value> - unknown path-attr.
"""
path_attrs = path.pathattr_map
unknown_opt_tran_attrs = {}
for _, attr in path_attrs.items():
if (isinstance(attr, BGPPathAttributeUnknown) and
attr.flags & (BGP_ATTR_FLAG_OPTIONAL |
BGP_ATTR_FLAG_TRANSITIVE)) or \
isinstance(attr, BGPPathAttributeAs4Path) or \
isinstance(attr, BGPPathAttributeAs4Aggregator):
unknown_opt_tran_attrs[attr.type] = attr
return unknown_opt_tran_attrs
def create_end_of_rib_update():
"""Construct end-of-rib (EOR) Update instance."""
mpunreach_attr = BGPPathAttributeMpUnreachNLRI(RF_IPv4_VPN.afi,
RF_IPv4_VPN.safi,
[])
eor = BGPUpdate(path_attributes=[mpunreach_attr])
return eor
# Bgp update message instance that can used as End of RIB marker.
UPDATE_EOR = create_end_of_rib_update()
|
is_withdraw=path.is_withdraw)
|
random_line_split
|
bgp.py
|
# Copyright (C) 2014 Nippon Telegraph and Telephone Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Utilities related to bgp data types and models.
"""
import logging
import socket
from ryu.lib.packet.bgp import (
BGPUpdate,
RF_IPv4_UC,
RF_IPv6_UC,
RF_IPv4_VPN,
RF_IPv6_VPN,
RF_L2_EVPN,
RF_RTC_UC,
RouteTargetMembershipNLRI,
BGP_ATTR_TYPE_MULTI_EXIT_DISC,
BGPPathAttributeMultiExitDisc,
BGPPathAttributeMpUnreachNLRI,
BGPPathAttributeAs4Path,
BGPPathAttributeAs4Aggregator,
BGPPathAttributeUnknown,
BGP_ATTR_FLAG_OPTIONAL,
BGP_ATTR_FLAG_TRANSITIVE,
)
from ryu.services.protocols.bgp.info_base.rtc import RtcPath
from ryu.services.protocols.bgp.info_base.ipv4 import Ipv4Path
from ryu.services.protocols.bgp.info_base.ipv6 import Ipv6Path
from ryu.services.protocols.bgp.info_base.vpnv4 import Vpnv4Path
from ryu.services.protocols.bgp.info_base.vpnv6 import Vpnv6Path
from ryu.services.protocols.bgp.info_base.evpn import EvpnPath
LOG = logging.getLogger('utils.bgp')
# RouteFmaily to path sub-class mapping.
_ROUTE_FAMILY_TO_PATH_MAP = {RF_IPv4_UC: Ipv4Path,
RF_IPv6_UC: Ipv6Path,
RF_IPv4_VPN: Vpnv4Path,
RF_IPv6_VPN: Vpnv6Path,
RF_L2_EVPN: EvpnPath,
RF_RTC_UC: RtcPath}
def create_path(src_peer, nlri, **kwargs):
route_family = nlri.ROUTE_FAMILY
assert route_family in _ROUTE_FAMILY_TO_PATH_MAP.keys()
path_cls = _ROUTE_FAMILY_TO_PATH_MAP.get(route_family)
return path_cls(src_peer, nlri, src_peer.version_num, **kwargs)
def clone_path_and_update_med_for_target_neighbor(path, med):
assert path and med
route_family = path.route_family
if route_family not in _ROUTE_FAMILY_TO_PATH_MAP.keys():
raise ValueError('Clone is not supported for address-family %s' %
route_family)
path_cls = _ROUTE_FAMILY_TO_PATH_MAP.get(route_family)
pattrs = path.pathattr_map
pattrs[BGP_ATTR_TYPE_MULTI_EXIT_DISC] = BGPPathAttributeMultiExitDisc(med)
return path_cls(
path.source, path.nlri, path.source_version_num,
pattrs=pattrs, nexthop=path.nexthop,
is_withdraw=path.is_withdraw,
med_set_by_target_neighbor=True
)
def clone_rtcpath_update_rt_as(path, new_rt_as):
"""Clones given RT NLRI `path`, and updates it with new RT_NLRI AS.
Parameters:
- `path`: (Path) RT_NLRI path
- `new_rt_as`: AS value of cloned paths' RT_NLRI
"""
assert path and new_rt_as
if not path or path.route_family != RF_RTC_UC:
raise ValueError('Expected RT_NLRI path')
old_nlri = path.nlri
new_rt_nlri = RouteTargetMembershipNLRI(new_rt_as, old_nlri.route_target)
return RtcPath(path.source, new_rt_nlri, path.source_version_num,
pattrs=path.pathattr_map, nexthop=path.nexthop,
is_withdraw=path.is_withdraw)
def from_inet_ptoi(bgp_id):
"""Convert an IPv4 address string format to a four byte long.
"""
four_byte_id = None
try:
packed_byte = socket.inet_pton(socket.AF_INET, bgp_id)
four_byte_id = int(packed_byte.encode('hex'), 16)
except ValueError:
LOG.debug('Invalid bgp id given for conversion to integer value %s',
bgp_id)
return four_byte_id
def get_unknown_opttrans_attr(path):
"""Utility method that gives a `dict` of unknown and unsupported optional
transitive path attributes of `path`.
Returns dict: <key> - attribute type code, <value> - unknown path-attr.
"""
path_attrs = path.pathattr_map
unknown_opt_tran_attrs = {}
for _, attr in path_attrs.items():
if (isinstance(attr, BGPPathAttributeUnknown) and
attr.flags & (BGP_ATTR_FLAG_OPTIONAL |
BGP_ATTR_FLAG_TRANSITIVE)) or \
isinstance(attr, BGPPathAttributeAs4Path) or \
isinstance(attr, BGPPathAttributeAs4Aggregator):
unknown_opt_tran_attrs[attr.type] = attr
return unknown_opt_tran_attrs
def
|
():
"""Construct end-of-rib (EOR) Update instance."""
mpunreach_attr = BGPPathAttributeMpUnreachNLRI(RF_IPv4_VPN.afi,
RF_IPv4_VPN.safi,
[])
eor = BGPUpdate(path_attributes=[mpunreach_attr])
return eor
# Bgp update message instance that can used as End of RIB marker.
UPDATE_EOR = create_end_of_rib_update()
|
create_end_of_rib_update
|
identifier_name
|
bgp.py
|
# Copyright (C) 2014 Nippon Telegraph and Telephone Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Utilities related to bgp data types and models.
"""
import logging
import socket
from ryu.lib.packet.bgp import (
BGPUpdate,
RF_IPv4_UC,
RF_IPv6_UC,
RF_IPv4_VPN,
RF_IPv6_VPN,
RF_L2_EVPN,
RF_RTC_UC,
RouteTargetMembershipNLRI,
BGP_ATTR_TYPE_MULTI_EXIT_DISC,
BGPPathAttributeMultiExitDisc,
BGPPathAttributeMpUnreachNLRI,
BGPPathAttributeAs4Path,
BGPPathAttributeAs4Aggregator,
BGPPathAttributeUnknown,
BGP_ATTR_FLAG_OPTIONAL,
BGP_ATTR_FLAG_TRANSITIVE,
)
from ryu.services.protocols.bgp.info_base.rtc import RtcPath
from ryu.services.protocols.bgp.info_base.ipv4 import Ipv4Path
from ryu.services.protocols.bgp.info_base.ipv6 import Ipv6Path
from ryu.services.protocols.bgp.info_base.vpnv4 import Vpnv4Path
from ryu.services.protocols.bgp.info_base.vpnv6 import Vpnv6Path
from ryu.services.protocols.bgp.info_base.evpn import EvpnPath
LOG = logging.getLogger('utils.bgp')
# RouteFmaily to path sub-class mapping.
_ROUTE_FAMILY_TO_PATH_MAP = {RF_IPv4_UC: Ipv4Path,
RF_IPv6_UC: Ipv6Path,
RF_IPv4_VPN: Vpnv4Path,
RF_IPv6_VPN: Vpnv6Path,
RF_L2_EVPN: EvpnPath,
RF_RTC_UC: RtcPath}
def create_path(src_peer, nlri, **kwargs):
route_family = nlri.ROUTE_FAMILY
assert route_family in _ROUTE_FAMILY_TO_PATH_MAP.keys()
path_cls = _ROUTE_FAMILY_TO_PATH_MAP.get(route_family)
return path_cls(src_peer, nlri, src_peer.version_num, **kwargs)
def clone_path_and_update_med_for_target_neighbor(path, med):
|
def clone_rtcpath_update_rt_as(path, new_rt_as):
"""Clones given RT NLRI `path`, and updates it with new RT_NLRI AS.
Parameters:
- `path`: (Path) RT_NLRI path
- `new_rt_as`: AS value of cloned paths' RT_NLRI
"""
assert path and new_rt_as
if not path or path.route_family != RF_RTC_UC:
raise ValueError('Expected RT_NLRI path')
old_nlri = path.nlri
new_rt_nlri = RouteTargetMembershipNLRI(new_rt_as, old_nlri.route_target)
return RtcPath(path.source, new_rt_nlri, path.source_version_num,
pattrs=path.pathattr_map, nexthop=path.nexthop,
is_withdraw=path.is_withdraw)
def from_inet_ptoi(bgp_id):
"""Convert an IPv4 address string format to a four byte long.
"""
four_byte_id = None
try:
packed_byte = socket.inet_pton(socket.AF_INET, bgp_id)
four_byte_id = int(packed_byte.encode('hex'), 16)
except ValueError:
LOG.debug('Invalid bgp id given for conversion to integer value %s',
bgp_id)
return four_byte_id
def get_unknown_opttrans_attr(path):
"""Utility method that gives a `dict` of unknown and unsupported optional
transitive path attributes of `path`.
Returns dict: <key> - attribute type code, <value> - unknown path-attr.
"""
path_attrs = path.pathattr_map
unknown_opt_tran_attrs = {}
for _, attr in path_attrs.items():
if (isinstance(attr, BGPPathAttributeUnknown) and
attr.flags & (BGP_ATTR_FLAG_OPTIONAL |
BGP_ATTR_FLAG_TRANSITIVE)) or \
isinstance(attr, BGPPathAttributeAs4Path) or \
isinstance(attr, BGPPathAttributeAs4Aggregator):
unknown_opt_tran_attrs[attr.type] = attr
return unknown_opt_tran_attrs
def create_end_of_rib_update():
"""Construct end-of-rib (EOR) Update instance."""
mpunreach_attr = BGPPathAttributeMpUnreachNLRI(RF_IPv4_VPN.afi,
RF_IPv4_VPN.safi,
[])
eor = BGPUpdate(path_attributes=[mpunreach_attr])
return eor
# Bgp update message instance that can used as End of RIB marker.
UPDATE_EOR = create_end_of_rib_update()
|
assert path and med
route_family = path.route_family
if route_family not in _ROUTE_FAMILY_TO_PATH_MAP.keys():
raise ValueError('Clone is not supported for address-family %s' %
route_family)
path_cls = _ROUTE_FAMILY_TO_PATH_MAP.get(route_family)
pattrs = path.pathattr_map
pattrs[BGP_ATTR_TYPE_MULTI_EXIT_DISC] = BGPPathAttributeMultiExitDisc(med)
return path_cls(
path.source, path.nlri, path.source_version_num,
pattrs=pattrs, nexthop=path.nexthop,
is_withdraw=path.is_withdraw,
med_set_by_target_neighbor=True
)
|
identifier_body
|
bgp.py
|
# Copyright (C) 2014 Nippon Telegraph and Telephone Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Utilities related to bgp data types and models.
"""
import logging
import socket
from ryu.lib.packet.bgp import (
BGPUpdate,
RF_IPv4_UC,
RF_IPv6_UC,
RF_IPv4_VPN,
RF_IPv6_VPN,
RF_L2_EVPN,
RF_RTC_UC,
RouteTargetMembershipNLRI,
BGP_ATTR_TYPE_MULTI_EXIT_DISC,
BGPPathAttributeMultiExitDisc,
BGPPathAttributeMpUnreachNLRI,
BGPPathAttributeAs4Path,
BGPPathAttributeAs4Aggregator,
BGPPathAttributeUnknown,
BGP_ATTR_FLAG_OPTIONAL,
BGP_ATTR_FLAG_TRANSITIVE,
)
from ryu.services.protocols.bgp.info_base.rtc import RtcPath
from ryu.services.protocols.bgp.info_base.ipv4 import Ipv4Path
from ryu.services.protocols.bgp.info_base.ipv6 import Ipv6Path
from ryu.services.protocols.bgp.info_base.vpnv4 import Vpnv4Path
from ryu.services.protocols.bgp.info_base.vpnv6 import Vpnv6Path
from ryu.services.protocols.bgp.info_base.evpn import EvpnPath
LOG = logging.getLogger('utils.bgp')
# RouteFmaily to path sub-class mapping.
_ROUTE_FAMILY_TO_PATH_MAP = {RF_IPv4_UC: Ipv4Path,
RF_IPv6_UC: Ipv6Path,
RF_IPv4_VPN: Vpnv4Path,
RF_IPv6_VPN: Vpnv6Path,
RF_L2_EVPN: EvpnPath,
RF_RTC_UC: RtcPath}
def create_path(src_peer, nlri, **kwargs):
route_family = nlri.ROUTE_FAMILY
assert route_family in _ROUTE_FAMILY_TO_PATH_MAP.keys()
path_cls = _ROUTE_FAMILY_TO_PATH_MAP.get(route_family)
return path_cls(src_peer, nlri, src_peer.version_num, **kwargs)
def clone_path_and_update_med_for_target_neighbor(path, med):
assert path and med
route_family = path.route_family
if route_family not in _ROUTE_FAMILY_TO_PATH_MAP.keys():
raise ValueError('Clone is not supported for address-family %s' %
route_family)
path_cls = _ROUTE_FAMILY_TO_PATH_MAP.get(route_family)
pattrs = path.pathattr_map
pattrs[BGP_ATTR_TYPE_MULTI_EXIT_DISC] = BGPPathAttributeMultiExitDisc(med)
return path_cls(
path.source, path.nlri, path.source_version_num,
pattrs=pattrs, nexthop=path.nexthop,
is_withdraw=path.is_withdraw,
med_set_by_target_neighbor=True
)
def clone_rtcpath_update_rt_as(path, new_rt_as):
"""Clones given RT NLRI `path`, and updates it with new RT_NLRI AS.
Parameters:
- `path`: (Path) RT_NLRI path
- `new_rt_as`: AS value of cloned paths' RT_NLRI
"""
assert path and new_rt_as
if not path or path.route_family != RF_RTC_UC:
|
old_nlri = path.nlri
new_rt_nlri = RouteTargetMembershipNLRI(new_rt_as, old_nlri.route_target)
return RtcPath(path.source, new_rt_nlri, path.source_version_num,
pattrs=path.pathattr_map, nexthop=path.nexthop,
is_withdraw=path.is_withdraw)
def from_inet_ptoi(bgp_id):
"""Convert an IPv4 address string format to a four byte long.
"""
four_byte_id = None
try:
packed_byte = socket.inet_pton(socket.AF_INET, bgp_id)
four_byte_id = int(packed_byte.encode('hex'), 16)
except ValueError:
LOG.debug('Invalid bgp id given for conversion to integer value %s',
bgp_id)
return four_byte_id
def get_unknown_opttrans_attr(path):
"""Utility method that gives a `dict` of unknown and unsupported optional
transitive path attributes of `path`.
Returns dict: <key> - attribute type code, <value> - unknown path-attr.
"""
path_attrs = path.pathattr_map
unknown_opt_tran_attrs = {}
for _, attr in path_attrs.items():
if (isinstance(attr, BGPPathAttributeUnknown) and
attr.flags & (BGP_ATTR_FLAG_OPTIONAL |
BGP_ATTR_FLAG_TRANSITIVE)) or \
isinstance(attr, BGPPathAttributeAs4Path) or \
isinstance(attr, BGPPathAttributeAs4Aggregator):
unknown_opt_tran_attrs[attr.type] = attr
return unknown_opt_tran_attrs
def create_end_of_rib_update():
"""Construct end-of-rib (EOR) Update instance."""
mpunreach_attr = BGPPathAttributeMpUnreachNLRI(RF_IPv4_VPN.afi,
RF_IPv4_VPN.safi,
[])
eor = BGPUpdate(path_attributes=[mpunreach_attr])
return eor
# Bgp update message instance that can used as End of RIB marker.
UPDATE_EOR = create_end_of_rib_update()
|
raise ValueError('Expected RT_NLRI path')
|
conditional_block
|
mod.rs
|
//! Messaging primitives for discovering devices and services.
use std::io;
#[cfg(windows)]
use std::net;
use std::net::SocketAddr;
use net::connector::UdpConnector;
use net::IpVersionMode;
mod notify;
mod search;
mod ssdp;
pub mod listen;
pub mod multicast;
pub use message::multicast::Multicast;
pub use message::search::{SearchRequest, SearchResponse, SearchListener};
pub use message::notify::{NotifyMessage, NotifyListener};
pub use message::listen::Listen;
#[cfg(not(windows))]
use ifaces;
/// Multicast Socket Information
pub const UPNP_MULTICAST_IPV4_ADDR: &'static str = "239.255.255.250";
pub const UPNP_MULTICAST_IPV6_LINK_LOCAL_ADDR: &'static str = "FF02::C";
pub const UPNP_MULTICAST_PORT: u16 = 1900;
/// Default TTL For Multicast
pub const UPNP_MULTICAST_TTL: u32 = 2;
/// Enumerates different types of SSDP messages.
#[derive(Copy, Clone, Hash, Eq, PartialEq, Debug)]
pub enum MessageType {
/// A notify message.
Notify,
/// A search message.
Search,
/// A response to a search message.
Response,
}
#[derive(Clone)]
pub struct Config {
pub ipv4_addr: String,
pub ipv6_addr: String,
pub port: u16,
pub ttl: u32,
pub mode: IpVersionMode,
}
impl Config {
pub fn new() -> Self {
Default::default()
}
pub fn set_ipv4_addr<S: Into<String>>(mut self, value: S) -> Self {
self.ipv4_addr = value.into();
self
}
pub fn set_ipv6_addr<S: Into<String>>(mut self, value: S) -> Self {
self.ipv6_addr = value.into();
self
}
pub fn set_port(mut self, value: u16) -> Self {
self.port = value;
self
}
pub fn set_ttl(mut self, value: u32) -> Self {
self.ttl = value;
self
}
pub fn set_mode(mut self, value: IpVersionMode) -> Self {
self.mode = value;
self
}
}
impl Default for Config {
fn default() -> Self {
Config {
ipv4_addr: UPNP_MULTICAST_IPV4_ADDR.to_string(),
ipv6_addr: UPNP_MULTICAST_IPV6_LINK_LOCAL_ADDR.to_string(),
port: UPNP_MULTICAST_PORT,
ttl: UPNP_MULTICAST_TTL,
mode: IpVersionMode::Any,
}
}
}
/// Generate `UdpConnector` objects for all local `IPv4` interfaces.
fn all_local_connectors(multicast_ttl: Option<u32>, filter: &IpVersionMode) -> io::Result<Vec<UdpConnector>> {
trace!("Fetching all local connectors");
map_local(|&addr| match (filter, addr) {
(&IpVersionMode::V4Only, SocketAddr::V4(n)) |
(&IpVersionMode::Any, SocketAddr::V4(n)) => {
Ok(Some(try!(UdpConnector::new((*n.ip(), 0), multicast_ttl))))
}
(&IpVersionMode::V6Only, SocketAddr::V6(n)) |
(&IpVersionMode::Any, SocketAddr::V6(n)) => Ok(Some(try!(UdpConnector::new(n, multicast_ttl)))),
_ => Ok(None),
})
}
/// Invoke the closure for every local address found on the system
///
/// This method filters out _loopback_ and _global_ addresses.
fn map_local<F, R>(mut f: F) -> io::Result<Vec<R>>
where F: FnMut(&SocketAddr) -> io::Result<Option<R>>
{
let addrs_iter = try!(get_local_addrs());
let mut obj_list = Vec::with_capacity(addrs_iter.len());
for addr in addrs_iter {
trace!("Found {}", addr);
match addr {
SocketAddr::V4(n) if !n.ip().is_loopback() => {
if let Some(x) = try!(f(&addr))
|
}
// Filter all loopback and global IPv6 addresses
SocketAddr::V6(n) if !n.ip().is_loopback() && !n.ip().is_global() => {
if let Some(x) = try!(f(&addr)) {
obj_list.push(x);
}
}
_ => (),
}
}
Ok(obj_list)
}
/// Generate a list of some object R constructed from all local `Ipv4Addr` objects.
///
/// If any of the `SocketAddr`'s fail to resolve, this function will not return an error.
#[cfg(windows)]
fn get_local_addrs() -> io::Result<Vec<SocketAddr>> {
let host_iter = try!(net::lookup_host(""));
Ok(host_iter.collect())
}
/// Generate a list of some object R constructed from all local `Ipv4Addr` objects.
///
/// If any of the `SocketAddr`'s fail to resolve, this function will not return an error.
#[cfg(not(windows))]
fn get_local_addrs() -> io::Result<Vec<SocketAddr>> {
let iface_iter = try!(ifaces::Interface::get_all()).into_iter();
Ok(iface_iter.filter(|iface| iface.kind != ifaces::Kind::Packet)
.filter_map(|iface| iface.addr)
.collect())
}
|
{
obj_list.push(x);
}
|
conditional_block
|
mod.rs
|
//! Messaging primitives for discovering devices and services.
use std::io;
#[cfg(windows)]
use std::net;
use std::net::SocketAddr;
use net::connector::UdpConnector;
use net::IpVersionMode;
mod notify;
mod search;
mod ssdp;
pub mod listen;
pub mod multicast;
pub use message::multicast::Multicast;
pub use message::search::{SearchRequest, SearchResponse, SearchListener};
pub use message::notify::{NotifyMessage, NotifyListener};
pub use message::listen::Listen;
#[cfg(not(windows))]
use ifaces;
/// Multicast Socket Information
pub const UPNP_MULTICAST_IPV4_ADDR: &'static str = "239.255.255.250";
pub const UPNP_MULTICAST_IPV6_LINK_LOCAL_ADDR: &'static str = "FF02::C";
pub const UPNP_MULTICAST_PORT: u16 = 1900;
/// Default TTL For Multicast
pub const UPNP_MULTICAST_TTL: u32 = 2;
/// Enumerates different types of SSDP messages.
#[derive(Copy, Clone, Hash, Eq, PartialEq, Debug)]
pub enum MessageType {
/// A notify message.
Notify,
/// A search message.
Search,
/// A response to a search message.
Response,
}
#[derive(Clone)]
pub struct Config {
pub ipv4_addr: String,
pub ipv6_addr: String,
pub port: u16,
pub ttl: u32,
pub mode: IpVersionMode,
}
impl Config {
pub fn new() -> Self {
Default::default()
}
pub fn set_ipv4_addr<S: Into<String>>(mut self, value: S) -> Self {
self.ipv4_addr = value.into();
self
}
pub fn set_ipv6_addr<S: Into<String>>(mut self, value: S) -> Self {
self.ipv6_addr = value.into();
self
}
pub fn set_port(mut self, value: u16) -> Self {
self.port = value;
self
|
self.ttl = value;
self
}
pub fn set_mode(mut self, value: IpVersionMode) -> Self {
self.mode = value;
self
}
}
impl Default for Config {
fn default() -> Self {
Config {
ipv4_addr: UPNP_MULTICAST_IPV4_ADDR.to_string(),
ipv6_addr: UPNP_MULTICAST_IPV6_LINK_LOCAL_ADDR.to_string(),
port: UPNP_MULTICAST_PORT,
ttl: UPNP_MULTICAST_TTL,
mode: IpVersionMode::Any,
}
}
}
/// Generate `UdpConnector` objects for all local `IPv4` interfaces.
fn all_local_connectors(multicast_ttl: Option<u32>, filter: &IpVersionMode) -> io::Result<Vec<UdpConnector>> {
trace!("Fetching all local connectors");
map_local(|&addr| match (filter, addr) {
(&IpVersionMode::V4Only, SocketAddr::V4(n)) |
(&IpVersionMode::Any, SocketAddr::V4(n)) => {
Ok(Some(try!(UdpConnector::new((*n.ip(), 0), multicast_ttl))))
}
(&IpVersionMode::V6Only, SocketAddr::V6(n)) |
(&IpVersionMode::Any, SocketAddr::V6(n)) => Ok(Some(try!(UdpConnector::new(n, multicast_ttl)))),
_ => Ok(None),
})
}
/// Invoke the closure for every local address found on the system
///
/// This method filters out _loopback_ and _global_ addresses.
fn map_local<F, R>(mut f: F) -> io::Result<Vec<R>>
where F: FnMut(&SocketAddr) -> io::Result<Option<R>>
{
let addrs_iter = try!(get_local_addrs());
let mut obj_list = Vec::with_capacity(addrs_iter.len());
for addr in addrs_iter {
trace!("Found {}", addr);
match addr {
SocketAddr::V4(n) if !n.ip().is_loopback() => {
if let Some(x) = try!(f(&addr)) {
obj_list.push(x);
}
}
// Filter all loopback and global IPv6 addresses
SocketAddr::V6(n) if !n.ip().is_loopback() && !n.ip().is_global() => {
if let Some(x) = try!(f(&addr)) {
obj_list.push(x);
}
}
_ => (),
}
}
Ok(obj_list)
}
/// Generate a list of some object R constructed from all local `Ipv4Addr` objects.
///
/// If any of the `SocketAddr`'s fail to resolve, this function will not return an error.
#[cfg(windows)]
fn get_local_addrs() -> io::Result<Vec<SocketAddr>> {
let host_iter = try!(net::lookup_host(""));
Ok(host_iter.collect())
}
/// Generate a list of some object R constructed from all local `Ipv4Addr` objects.
///
/// If any of the `SocketAddr`'s fail to resolve, this function will not return an error.
#[cfg(not(windows))]
fn get_local_addrs() -> io::Result<Vec<SocketAddr>> {
let iface_iter = try!(ifaces::Interface::get_all()).into_iter();
Ok(iface_iter.filter(|iface| iface.kind != ifaces::Kind::Packet)
.filter_map(|iface| iface.addr)
.collect())
}
|
}
pub fn set_ttl(mut self, value: u32) -> Self {
|
random_line_split
|
mod.rs
|
//! Messaging primitives for discovering devices and services.
use std::io;
#[cfg(windows)]
use std::net;
use std::net::SocketAddr;
use net::connector::UdpConnector;
use net::IpVersionMode;
mod notify;
mod search;
mod ssdp;
pub mod listen;
pub mod multicast;
pub use message::multicast::Multicast;
pub use message::search::{SearchRequest, SearchResponse, SearchListener};
pub use message::notify::{NotifyMessage, NotifyListener};
pub use message::listen::Listen;
#[cfg(not(windows))]
use ifaces;
/// Multicast Socket Information
pub const UPNP_MULTICAST_IPV4_ADDR: &'static str = "239.255.255.250";
pub const UPNP_MULTICAST_IPV6_LINK_LOCAL_ADDR: &'static str = "FF02::C";
pub const UPNP_MULTICAST_PORT: u16 = 1900;
/// Default TTL For Multicast
pub const UPNP_MULTICAST_TTL: u32 = 2;
/// Enumerates different types of SSDP messages.
#[derive(Copy, Clone, Hash, Eq, PartialEq, Debug)]
pub enum MessageType {
/// A notify message.
Notify,
/// A search message.
Search,
/// A response to a search message.
Response,
}
#[derive(Clone)]
pub struct Config {
pub ipv4_addr: String,
pub ipv6_addr: String,
pub port: u16,
pub ttl: u32,
pub mode: IpVersionMode,
}
impl Config {
pub fn new() -> Self {
Default::default()
}
pub fn set_ipv4_addr<S: Into<String>>(mut self, value: S) -> Self
|
pub fn set_ipv6_addr<S: Into<String>>(mut self, value: S) -> Self {
self.ipv6_addr = value.into();
self
}
pub fn set_port(mut self, value: u16) -> Self {
self.port = value;
self
}
pub fn set_ttl(mut self, value: u32) -> Self {
self.ttl = value;
self
}
pub fn set_mode(mut self, value: IpVersionMode) -> Self {
self.mode = value;
self
}
}
impl Default for Config {
fn default() -> Self {
Config {
ipv4_addr: UPNP_MULTICAST_IPV4_ADDR.to_string(),
ipv6_addr: UPNP_MULTICAST_IPV6_LINK_LOCAL_ADDR.to_string(),
port: UPNP_MULTICAST_PORT,
ttl: UPNP_MULTICAST_TTL,
mode: IpVersionMode::Any,
}
}
}
/// Generate `UdpConnector` objects for all local `IPv4` interfaces.
fn all_local_connectors(multicast_ttl: Option<u32>, filter: &IpVersionMode) -> io::Result<Vec<UdpConnector>> {
trace!("Fetching all local connectors");
map_local(|&addr| match (filter, addr) {
(&IpVersionMode::V4Only, SocketAddr::V4(n)) |
(&IpVersionMode::Any, SocketAddr::V4(n)) => {
Ok(Some(try!(UdpConnector::new((*n.ip(), 0), multicast_ttl))))
}
(&IpVersionMode::V6Only, SocketAddr::V6(n)) |
(&IpVersionMode::Any, SocketAddr::V6(n)) => Ok(Some(try!(UdpConnector::new(n, multicast_ttl)))),
_ => Ok(None),
})
}
/// Invoke the closure for every local address found on the system
///
/// This method filters out _loopback_ and _global_ addresses.
fn map_local<F, R>(mut f: F) -> io::Result<Vec<R>>
where F: FnMut(&SocketAddr) -> io::Result<Option<R>>
{
let addrs_iter = try!(get_local_addrs());
let mut obj_list = Vec::with_capacity(addrs_iter.len());
for addr in addrs_iter {
trace!("Found {}", addr);
match addr {
SocketAddr::V4(n) if !n.ip().is_loopback() => {
if let Some(x) = try!(f(&addr)) {
obj_list.push(x);
}
}
// Filter all loopback and global IPv6 addresses
SocketAddr::V6(n) if !n.ip().is_loopback() && !n.ip().is_global() => {
if let Some(x) = try!(f(&addr)) {
obj_list.push(x);
}
}
_ => (),
}
}
Ok(obj_list)
}
/// Generate a list of some object R constructed from all local `Ipv4Addr` objects.
///
/// If any of the `SocketAddr`'s fail to resolve, this function will not return an error.
#[cfg(windows)]
fn get_local_addrs() -> io::Result<Vec<SocketAddr>> {
let host_iter = try!(net::lookup_host(""));
Ok(host_iter.collect())
}
/// Generate a list of some object R constructed from all local `Ipv4Addr` objects.
///
/// If any of the `SocketAddr`'s fail to resolve, this function will not return an error.
#[cfg(not(windows))]
fn get_local_addrs() -> io::Result<Vec<SocketAddr>> {
let iface_iter = try!(ifaces::Interface::get_all()).into_iter();
Ok(iface_iter.filter(|iface| iface.kind != ifaces::Kind::Packet)
.filter_map(|iface| iface.addr)
.collect())
}
|
{
self.ipv4_addr = value.into();
self
}
|
identifier_body
|
mod.rs
|
//! Messaging primitives for discovering devices and services.
use std::io;
#[cfg(windows)]
use std::net;
use std::net::SocketAddr;
use net::connector::UdpConnector;
use net::IpVersionMode;
mod notify;
mod search;
mod ssdp;
pub mod listen;
pub mod multicast;
pub use message::multicast::Multicast;
pub use message::search::{SearchRequest, SearchResponse, SearchListener};
pub use message::notify::{NotifyMessage, NotifyListener};
pub use message::listen::Listen;
#[cfg(not(windows))]
use ifaces;
/// Multicast Socket Information
pub const UPNP_MULTICAST_IPV4_ADDR: &'static str = "239.255.255.250";
pub const UPNP_MULTICAST_IPV6_LINK_LOCAL_ADDR: &'static str = "FF02::C";
pub const UPNP_MULTICAST_PORT: u16 = 1900;
/// Default TTL For Multicast
pub const UPNP_MULTICAST_TTL: u32 = 2;
/// Enumerates different types of SSDP messages.
#[derive(Copy, Clone, Hash, Eq, PartialEq, Debug)]
pub enum MessageType {
/// A notify message.
Notify,
/// A search message.
Search,
/// A response to a search message.
Response,
}
#[derive(Clone)]
pub struct Config {
pub ipv4_addr: String,
pub ipv6_addr: String,
pub port: u16,
pub ttl: u32,
pub mode: IpVersionMode,
}
impl Config {
pub fn new() -> Self {
Default::default()
}
pub fn set_ipv4_addr<S: Into<String>>(mut self, value: S) -> Self {
self.ipv4_addr = value.into();
self
}
pub fn set_ipv6_addr<S: Into<String>>(mut self, value: S) -> Self {
self.ipv6_addr = value.into();
self
}
pub fn set_port(mut self, value: u16) -> Self {
self.port = value;
self
}
pub fn set_ttl(mut self, value: u32) -> Self {
self.ttl = value;
self
}
pub fn set_mode(mut self, value: IpVersionMode) -> Self {
self.mode = value;
self
}
}
impl Default for Config {
fn default() -> Self {
Config {
ipv4_addr: UPNP_MULTICAST_IPV4_ADDR.to_string(),
ipv6_addr: UPNP_MULTICAST_IPV6_LINK_LOCAL_ADDR.to_string(),
port: UPNP_MULTICAST_PORT,
ttl: UPNP_MULTICAST_TTL,
mode: IpVersionMode::Any,
}
}
}
/// Generate `UdpConnector` objects for all local `IPv4` interfaces.
fn all_local_connectors(multicast_ttl: Option<u32>, filter: &IpVersionMode) -> io::Result<Vec<UdpConnector>> {
trace!("Fetching all local connectors");
map_local(|&addr| match (filter, addr) {
(&IpVersionMode::V4Only, SocketAddr::V4(n)) |
(&IpVersionMode::Any, SocketAddr::V4(n)) => {
Ok(Some(try!(UdpConnector::new((*n.ip(), 0), multicast_ttl))))
}
(&IpVersionMode::V6Only, SocketAddr::V6(n)) |
(&IpVersionMode::Any, SocketAddr::V6(n)) => Ok(Some(try!(UdpConnector::new(n, multicast_ttl)))),
_ => Ok(None),
})
}
/// Invoke the closure for every local address found on the system
///
/// This method filters out _loopback_ and _global_ addresses.
fn map_local<F, R>(mut f: F) -> io::Result<Vec<R>>
where F: FnMut(&SocketAddr) -> io::Result<Option<R>>
{
let addrs_iter = try!(get_local_addrs());
let mut obj_list = Vec::with_capacity(addrs_iter.len());
for addr in addrs_iter {
trace!("Found {}", addr);
match addr {
SocketAddr::V4(n) if !n.ip().is_loopback() => {
if let Some(x) = try!(f(&addr)) {
obj_list.push(x);
}
}
// Filter all loopback and global IPv6 addresses
SocketAddr::V6(n) if !n.ip().is_loopback() && !n.ip().is_global() => {
if let Some(x) = try!(f(&addr)) {
obj_list.push(x);
}
}
_ => (),
}
}
Ok(obj_list)
}
/// Generate a list of some object R constructed from all local `Ipv4Addr` objects.
///
/// If any of the `SocketAddr`'s fail to resolve, this function will not return an error.
#[cfg(windows)]
fn get_local_addrs() -> io::Result<Vec<SocketAddr>> {
let host_iter = try!(net::lookup_host(""));
Ok(host_iter.collect())
}
/// Generate a list of some object R constructed from all local `Ipv4Addr` objects.
///
/// If any of the `SocketAddr`'s fail to resolve, this function will not return an error.
#[cfg(not(windows))]
fn
|
() -> io::Result<Vec<SocketAddr>> {
let iface_iter = try!(ifaces::Interface::get_all()).into_iter();
Ok(iface_iter.filter(|iface| iface.kind != ifaces::Kind::Packet)
.filter_map(|iface| iface.addr)
.collect())
}
|
get_local_addrs
|
identifier_name
|
speech-recognition.js
|
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
return c > 3 && r && Object.defineProperty(target, key, r), r;
};
import { Plugin, Cordova } from './plugin';
/**
* @beta
* @name SpeechRecognition
* @description
* This plugin does speech recognition using cloud services
*
* @usage
* ```
* import { SpeechRecognition } from 'ionic-native';
*
* // Check feature available
* SpeechRecognition.isRecognitionAvailable()
* .then((available: boolean) => console.log(available))
*
* // Start the recognition process
* SpeechRecognition.startListening(options)
* .subscribe(
* (matches: Array<string>) => console.log(matches),
* (onerror) => console.log('error:', onerror)
* )
*
* // Stop the recognition process (iOS only)
* SpeechRecognition.stopListening()
*
* // Get the list of supported languages
* SpeechRecognition.getSupportedLanguages()
* .then(
* (languages: Array<string>) => console.log(languages),
* (error) => console.log(error)
* )
*
* // Check permission
* SpeechRecognition.hasPermission()
* .then((hasPermission: boolean) => console.log(hasPermission))
*
|
* // Request permissions
* SpeechRecognition.requestPermission()
* .then(
* () => console.log('Granted'),
* () => console.log('Denied')
* )
*
* ```
*/
export var SpeechRecognition = (function () {
function SpeechRecognition() {
}
/**
* Check feature available
* @return {Promise<boolean>}
*/
SpeechRecognition.isRecognitionAvailable = function () {
return;
};
/**
* Start the recognition process
* @return {Promise< Array<string> >} list of recognized terms
*/
SpeechRecognition.startListening = function (options) {
return;
};
/**
* Stop the recognition process
*/
SpeechRecognition.stopListening = function () {
return;
};
/**
* Get the list of supported languages
* @return {Promise< Array<string> >} list of languages
*/
SpeechRecognition.getSupportedLanguages = function () {
return;
};
/**
* Check permission
* @return {Promise<boolean>} has permission
*/
SpeechRecognition.hasPermission = function () {
return;
};
/**
* Request permissions
* @return {Promise<void>}
*/
SpeechRecognition.requestPermission = function () {
return;
};
__decorate([
Cordova()
], SpeechRecognition, "isRecognitionAvailable", null);
__decorate([
Cordova({
callbackOrder: 'reverse',
observable: true,
})
], SpeechRecognition, "startListening", null);
__decorate([
Cordova({
platforms: ['iOS']
})
], SpeechRecognition, "stopListening", null);
__decorate([
Cordova()
], SpeechRecognition, "getSupportedLanguages", null);
__decorate([
Cordova()
], SpeechRecognition, "hasPermission", null);
__decorate([
Cordova()
], SpeechRecognition, "requestPermission", null);
SpeechRecognition = __decorate([
Plugin({
pluginName: 'SpeechRecognition',
plugin: 'cordova-plugin-speechrecognition',
pluginRef: 'plugins.speechRecognition',
repo: 'https://github.com/pbakondy/cordova-plugin-speechrecognition',
platforms: ['Android', 'iOS']
})
], SpeechRecognition);
return SpeechRecognition;
}());
//# sourceMappingURL=speech-recognition.js.map
|
random_line_split
|
|
speech-recognition.js
|
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
return c > 3 && r && Object.defineProperty(target, key, r), r;
};
import { Plugin, Cordova } from './plugin';
/**
* @beta
* @name SpeechRecognition
* @description
* This plugin does speech recognition using cloud services
*
* @usage
* ```
* import { SpeechRecognition } from 'ionic-native';
*
* // Check feature available
* SpeechRecognition.isRecognitionAvailable()
* .then((available: boolean) => console.log(available))
*
* // Start the recognition process
* SpeechRecognition.startListening(options)
* .subscribe(
* (matches: Array<string>) => console.log(matches),
* (onerror) => console.log('error:', onerror)
* )
*
* // Stop the recognition process (iOS only)
* SpeechRecognition.stopListening()
*
* // Get the list of supported languages
* SpeechRecognition.getSupportedLanguages()
* .then(
* (languages: Array<string>) => console.log(languages),
* (error) => console.log(error)
* )
*
* // Check permission
* SpeechRecognition.hasPermission()
* .then((hasPermission: boolean) => console.log(hasPermission))
*
* // Request permissions
* SpeechRecognition.requestPermission()
* .then(
* () => console.log('Granted'),
* () => console.log('Denied')
* )
*
* ```
*/
export var SpeechRecognition = (function () {
function
|
() {
}
/**
* Check feature available
* @return {Promise<boolean>}
*/
SpeechRecognition.isRecognitionAvailable = function () {
return;
};
/**
* Start the recognition process
* @return {Promise< Array<string> >} list of recognized terms
*/
SpeechRecognition.startListening = function (options) {
return;
};
/**
* Stop the recognition process
*/
SpeechRecognition.stopListening = function () {
return;
};
/**
* Get the list of supported languages
* @return {Promise< Array<string> >} list of languages
*/
SpeechRecognition.getSupportedLanguages = function () {
return;
};
/**
* Check permission
* @return {Promise<boolean>} has permission
*/
SpeechRecognition.hasPermission = function () {
return;
};
/**
* Request permissions
* @return {Promise<void>}
*/
SpeechRecognition.requestPermission = function () {
return;
};
__decorate([
Cordova()
], SpeechRecognition, "isRecognitionAvailable", null);
__decorate([
Cordova({
callbackOrder: 'reverse',
observable: true,
})
], SpeechRecognition, "startListening", null);
__decorate([
Cordova({
platforms: ['iOS']
})
], SpeechRecognition, "stopListening", null);
__decorate([
Cordova()
], SpeechRecognition, "getSupportedLanguages", null);
__decorate([
Cordova()
], SpeechRecognition, "hasPermission", null);
__decorate([
Cordova()
], SpeechRecognition, "requestPermission", null);
SpeechRecognition = __decorate([
Plugin({
pluginName: 'SpeechRecognition',
plugin: 'cordova-plugin-speechrecognition',
pluginRef: 'plugins.speechRecognition',
repo: 'https://github.com/pbakondy/cordova-plugin-speechrecognition',
platforms: ['Android', 'iOS']
})
], SpeechRecognition);
return SpeechRecognition;
}());
//# sourceMappingURL=speech-recognition.js.map
|
SpeechRecognition
|
identifier_name
|
speech-recognition.js
|
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
return c > 3 && r && Object.defineProperty(target, key, r), r;
};
import { Plugin, Cordova } from './plugin';
/**
* @beta
* @name SpeechRecognition
* @description
* This plugin does speech recognition using cloud services
*
* @usage
* ```
* import { SpeechRecognition } from 'ionic-native';
*
* // Check feature available
* SpeechRecognition.isRecognitionAvailable()
* .then((available: boolean) => console.log(available))
*
* // Start the recognition process
* SpeechRecognition.startListening(options)
* .subscribe(
* (matches: Array<string>) => console.log(matches),
* (onerror) => console.log('error:', onerror)
* )
*
* // Stop the recognition process (iOS only)
* SpeechRecognition.stopListening()
*
* // Get the list of supported languages
* SpeechRecognition.getSupportedLanguages()
* .then(
* (languages: Array<string>) => console.log(languages),
* (error) => console.log(error)
* )
*
* // Check permission
* SpeechRecognition.hasPermission()
* .then((hasPermission: boolean) => console.log(hasPermission))
*
* // Request permissions
* SpeechRecognition.requestPermission()
* .then(
* () => console.log('Granted'),
* () => console.log('Denied')
* )
*
* ```
*/
export var SpeechRecognition = (function () {
function SpeechRecognition()
|
/**
* Check feature available
* @return {Promise<boolean>}
*/
SpeechRecognition.isRecognitionAvailable = function () {
return;
};
/**
* Start the recognition process
* @return {Promise< Array<string> >} list of recognized terms
*/
SpeechRecognition.startListening = function (options) {
return;
};
/**
* Stop the recognition process
*/
SpeechRecognition.stopListening = function () {
return;
};
/**
* Get the list of supported languages
* @return {Promise< Array<string> >} list of languages
*/
SpeechRecognition.getSupportedLanguages = function () {
return;
};
/**
* Check permission
* @return {Promise<boolean>} has permission
*/
SpeechRecognition.hasPermission = function () {
return;
};
/**
* Request permissions
* @return {Promise<void>}
*/
SpeechRecognition.requestPermission = function () {
return;
};
__decorate([
Cordova()
], SpeechRecognition, "isRecognitionAvailable", null);
__decorate([
Cordova({
callbackOrder: 'reverse',
observable: true,
})
], SpeechRecognition, "startListening", null);
__decorate([
Cordova({
platforms: ['iOS']
})
], SpeechRecognition, "stopListening", null);
__decorate([
Cordova()
], SpeechRecognition, "getSupportedLanguages", null);
__decorate([
Cordova()
], SpeechRecognition, "hasPermission", null);
__decorate([
Cordova()
], SpeechRecognition, "requestPermission", null);
SpeechRecognition = __decorate([
Plugin({
pluginName: 'SpeechRecognition',
plugin: 'cordova-plugin-speechrecognition',
pluginRef: 'plugins.speechRecognition',
repo: 'https://github.com/pbakondy/cordova-plugin-speechrecognition',
platforms: ['Android', 'iOS']
})
], SpeechRecognition);
return SpeechRecognition;
}());
//# sourceMappingURL=speech-recognition.js.map
|
{
}
|
identifier_body
|
operation_display.py
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class OperationDisplay(Model):
|
"""The object that represents the operation.
:param provider: Service provider: Microsoft.ResourceProvider
:type provider: str
:param resource: Resource on which the operation is performed: Profile,
endpoint, etc.
:type resource: str
:param operation: Operation type: Read, write, delete, etc.
:type operation: str
:param description: Description of operation
:type description: str
"""
_attribute_map = {
'provider': {'key': 'Provider', 'type': 'str'},
'resource': {'key': 'Resource', 'type': 'str'},
'operation': {'key': 'Operation', 'type': 'str'},
'description': {'key': 'Description', 'type': 'str'},
}
def __init__(self, provider=None, resource=None, operation=None, description=None):
super(OperationDisplay, self).__init__()
self.provider = provider
self.resource = resource
self.operation = operation
self.description = description
|
identifier_body
|
|
operation_display.py
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class
|
(Model):
"""The object that represents the operation.
:param provider: Service provider: Microsoft.ResourceProvider
:type provider: str
:param resource: Resource on which the operation is performed: Profile,
endpoint, etc.
:type resource: str
:param operation: Operation type: Read, write, delete, etc.
:type operation: str
:param description: Description of operation
:type description: str
"""
_attribute_map = {
'provider': {'key': 'Provider', 'type': 'str'},
'resource': {'key': 'Resource', 'type': 'str'},
'operation': {'key': 'Operation', 'type': 'str'},
'description': {'key': 'Description', 'type': 'str'},
}
def __init__(self, provider=None, resource=None, operation=None, description=None):
super(OperationDisplay, self).__init__()
self.provider = provider
self.resource = resource
self.operation = operation
self.description = description
|
OperationDisplay
|
identifier_name
|
operation_display.py
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
|
:param provider: Service provider: Microsoft.ResourceProvider
:type provider: str
:param resource: Resource on which the operation is performed: Profile,
endpoint, etc.
:type resource: str
:param operation: Operation type: Read, write, delete, etc.
:type operation: str
:param description: Description of operation
:type description: str
"""
_attribute_map = {
'provider': {'key': 'Provider', 'type': 'str'},
'resource': {'key': 'Resource', 'type': 'str'},
'operation': {'key': 'Operation', 'type': 'str'},
'description': {'key': 'Description', 'type': 'str'},
}
def __init__(self, provider=None, resource=None, operation=None, description=None):
super(OperationDisplay, self).__init__()
self.provider = provider
self.resource = resource
self.operation = operation
self.description = description
|
class OperationDisplay(Model):
"""The object that represents the operation.
|
random_line_split
|
win.rs
|
use std::fs;
use std::path::Path;
use app_dirs::{self, AppDataType};
use util::*;
const CG_MN_DLL_DL: &str = "https://mobasuite.com/downloads/macos/cg.dll";
const CG_GL_DLL_DL: &str = "https://mobasuite.com/downloads/macos/cgGL.dll";
const CG_D9_DLL_DL: &str = "https://mobasuite.com/downloads/macos/cgD3D9.dll";
#[cfg_attr(rustfmt, rustfmt_skip)]
const CG_MN_DLL_DL_HASH: [u8; 48] = [
0x2d, 0x27, 0x17, 0x03, 0x95, 0x97, 0xde, 0x0b, 0xf4, 0x88, 0x14, 0xad, 0xee, 0x90, 0xa2, 0xb8,
0xac, 0xfd, 0x9d, 0xab, 0x29, 0xf3, 0x7a, 0x64, 0xbf, 0x94, 0x8f, 0xb5, 0x5f, 0xcf, 0x9c, 0xa7,
0x8f, 0xb0, 0x5f, 0x92, 0x22, 0x27, 0x31, 0x65, 0xe2, 0x3c, 0x5c, 0xa2, 0xab, 0x87, 0x4d, 0x21,
];
#[cfg_attr(rustfmt, rustfmt_skip)]
const CG_GL_DLL_DL_HASH: [u8; 48] = [
0xbc, 0x81, 0x45, 0xc4, 0x7d, 0x3c, 0xa6, 0x96, 0x5c, 0xe5, 0x19, 0x2e, 0x2a, 0xd7, 0xe6, 0xe7,
0x26, 0x26, 0xdd, 0x8c, 0x3b, 0xe9, 0x6a, 0xa9, 0x30, 0x75, 0x69, 0x36, 0x1f, 0x30, 0x34, 0x5b,
0x7b, 0x11, 0x24, 0xfb, 0x1d, 0x09, 0x2c, 0x0a, 0xdd, 0xb3, 0x82, 0x0b, 0x53, 0xa3, 0x8a, 0x78,
];
#[cfg_attr(rustfmt, rustfmt_skip)]
const CG_D9_DLL_DL_HASH: [u8; 48] = [
0xeb, 0x58, 0x44, 0x85, 0x9a, 0x39, 0xd6, 0x85, 0x3c, 0x1f, 0x14, 0x9c, 0xe0, 0x51, 0x16, 0x79,
0x1d, 0x2a, 0x45, 0x7a, 0x7f, 0x98, 0x41, 0xed, 0x07, 0xec, 0xdc, 0x1a, 0xc7, 0xc5, 0xad, 0xcb,
0x34, 0xd6, 0x30, 0x50, 0xbe, 0xe5, 0xad, 0xa5, 0x8e, 0xbd, 0x25, 0xb5, 0x02, 0xe7, 0x28, 0x24,
];
pub fn install() -> Result<()> {
info!("Backing up Nvidia Cg…");
backup_cg().chain_err(|| "Failed to backup Cg")?;
let cg_dir = app_dirs::get_app_dir(AppDataType::UserCache, &APP_INFO, "Cg")?;
if !cg_dir.exists() {
info!("Downloading Nvidia Cg…");
let result = download_cg(&cg_dir);
if result.is_err() {
fs::remove_dir_all(&cg_dir)?;
}
result?;
} else {
info!("Nvidia Cg is already cached!")
}
info!("Updating Nvidia Cg…\n");
update_cg(&cg_dir).chain_err(|| "Failed to update Cg")?;
Ok(())
}
pub fn remove() -> Result<()> {
let cg_backup_path = app_dirs::get_app_dir(AppDataType::UserData, &APP_INFO, "Backups/Cg")?;
if !cg_backup_path.exists() {
return Err("No Cg backup found!".into());
}
info!("Restoring Nvidia Cg…");
update_cg(&cg_backup_path)?;
fs::remove_dir_all(&cg_backup_path)?;
info!("Removing Nvidia Cg backup…");
let cg_cache_path = app_dirs::get_app_dir(AppDataType::UserCache, &APP_INFO, "Cg")?;
if cg_cache_path.exists() {
info!("Removing Nvidia Cg download cache…");
fs::remove_dir_all(cg_cache_path)?;
}
Ok(())
}
fn download_cg(cg_dir: &Path) -> Result<()> {
fs::create_dir(&cg_dir)?;
download(
&cg_dir.join("Cg.dll"),
CG_MN_DLL_DL,
Some(&CG_MN_DLL_DL_HASH),
)?;
download(
&cg_dir.join("CgGL.dll"),
CG_GL_DLL_DL,
Some(&CG_GL_DLL_DL_HASH),
)?;
download(
&cg_dir.join("cgD3D9.dll"),
CG_D9_DLL_DL,
Some(&CG_D9_DLL_DL_HASH),
)?;
Ok(())
}
#[test]
fn download_cg_
|
tempdir::TempDir;
let target = TempDir::new("lolupdater-cg-target").unwrap();
download_cg(&target.path().join("cg")).unwrap();
}
fn backup_cg() -> Result<()> {
let cg_backup = app_dirs::get_app_dir(AppDataType::UserData, &APP_INFO, "Backups/Cg")?;
if cg_backup.exists() {
info!("Skipping NVIDIA Cg backup! (Already exists)");
} else {
fs::create_dir(&cg_backup)?;
update_file(
&LOLP_GC_PATH.with(|k| k.clone()).join("Cg.dll"),
&cg_backup.join("Cg.dll"),
)?;
update_file(
&LOLP_GC_PATH.with(|k| k.clone()).join("CgGL.dll"),
&cg_backup.join("CgGL.dll"),
)?;
update_file(
&LOLP_GC_PATH.with(|k| k.clone()).join("cgD3D9.dll"),
&cg_backup.join("CgD3D9.dll"),
)?;
}
Ok(())
}
fn update_cg(cg_dir: &Path) -> Result<()> {
update_file(
&cg_dir.join("Cg.dll"),
&LOLP_GC_PATH.with(|k| k.clone()).join("Cg.dll"),
)?;
update_file(
&cg_dir.join("CgGL.dll"),
&LOLP_GC_PATH.with(|k| k.clone()).join("CgGL.dll"),
)?;
update_file(
&cg_dir.join("cgD3D9.dll"),
&LOLP_GC_PATH.with(|k| k.clone()).join("cgD3D9.dll"),
)?;
update_file(
&cg_dir.join("Cg.dll"),
&LOLSLN_GC_PATH.with(|k| k.clone()).join("Cg.dll"),
)?;
update_file(
&cg_dir.join("CgGL.dll"),
&LOLSLN_GC_PATH.with(|k| k.clone()).join("CgGL.dll"),
)?;
update_file(
&cg_dir.join("cgD3D9.dll"),
&LOLSLN_GC_PATH.with(|k| k.clone()).join("cgD3D9.dll"),
)?;
Ok(())
}
|
works() {
use
|
identifier_name
|
win.rs
|
use std::fs;
use std::path::Path;
use app_dirs::{self, AppDataType};
use util::*;
const CG_MN_DLL_DL: &str = "https://mobasuite.com/downloads/macos/cg.dll";
const CG_GL_DLL_DL: &str = "https://mobasuite.com/downloads/macos/cgGL.dll";
const CG_D9_DLL_DL: &str = "https://mobasuite.com/downloads/macos/cgD3D9.dll";
#[cfg_attr(rustfmt, rustfmt_skip)]
const CG_MN_DLL_DL_HASH: [u8; 48] = [
0x2d, 0x27, 0x17, 0x03, 0x95, 0x97, 0xde, 0x0b, 0xf4, 0x88, 0x14, 0xad, 0xee, 0x90, 0xa2, 0xb8,
0xac, 0xfd, 0x9d, 0xab, 0x29, 0xf3, 0x7a, 0x64, 0xbf, 0x94, 0x8f, 0xb5, 0x5f, 0xcf, 0x9c, 0xa7,
0x8f, 0xb0, 0x5f, 0x92, 0x22, 0x27, 0x31, 0x65, 0xe2, 0x3c, 0x5c, 0xa2, 0xab, 0x87, 0x4d, 0x21,
];
#[cfg_attr(rustfmt, rustfmt_skip)]
const CG_GL_DLL_DL_HASH: [u8; 48] = [
0xbc, 0x81, 0x45, 0xc4, 0x7d, 0x3c, 0xa6, 0x96, 0x5c, 0xe5, 0x19, 0x2e, 0x2a, 0xd7, 0xe6, 0xe7,
0x26, 0x26, 0xdd, 0x8c, 0x3b, 0xe9, 0x6a, 0xa9, 0x30, 0x75, 0x69, 0x36, 0x1f, 0x30, 0x34, 0x5b,
0x7b, 0x11, 0x24, 0xfb, 0x1d, 0x09, 0x2c, 0x0a, 0xdd, 0xb3, 0x82, 0x0b, 0x53, 0xa3, 0x8a, 0x78,
];
#[cfg_attr(rustfmt, rustfmt_skip)]
const CG_D9_DLL_DL_HASH: [u8; 48] = [
0xeb, 0x58, 0x44, 0x85, 0x9a, 0x39, 0xd6, 0x85, 0x3c, 0x1f, 0x14, 0x9c, 0xe0, 0x51, 0x16, 0x79,
0x1d, 0x2a, 0x45, 0x7a, 0x7f, 0x98, 0x41, 0xed, 0x07, 0xec, 0xdc, 0x1a, 0xc7, 0xc5, 0xad, 0xcb,
0x34, 0xd6, 0x30, 0x50, 0xbe, 0xe5, 0xad, 0xa5, 0x8e, 0xbd, 0x25, 0xb5, 0x02, 0xe7, 0x28, 0x24,
];
pub fn install() -> Result<()> {
info!("Backing up Nvidia Cg…");
backup_cg().chain_err(|| "Failed to backup Cg")?;
let cg_dir = app_dirs::get_app_dir(AppDataType::UserCache, &APP_INFO, "Cg")?;
if !cg_dir.exists() {
info!("Downloading Nvidia Cg…");
let result = download_cg(&cg_dir);
if result.is_err() {
fs::remove_dir_all(&cg_dir)?;
}
result?;
} else {
info!("Nvidia Cg is already cached!")
}
info!("Updating Nvidia Cg…\n");
update_cg(&cg_dir).chain_err(|| "Failed to update Cg")?;
Ok(())
}
pub fn remove() -> Result<()> {
let cg_backup_path = app_dirs::get_app_dir(AppDataType::UserData, &APP_INFO, "Backups/Cg")?;
if !cg_backup_path.exists() {
|
nfo!("Restoring Nvidia Cg…");
update_cg(&cg_backup_path)?;
fs::remove_dir_all(&cg_backup_path)?;
info!("Removing Nvidia Cg backup…");
let cg_cache_path = app_dirs::get_app_dir(AppDataType::UserCache, &APP_INFO, "Cg")?;
if cg_cache_path.exists() {
info!("Removing Nvidia Cg download cache…");
fs::remove_dir_all(cg_cache_path)?;
}
Ok(())
}
fn download_cg(cg_dir: &Path) -> Result<()> {
fs::create_dir(&cg_dir)?;
download(
&cg_dir.join("Cg.dll"),
CG_MN_DLL_DL,
Some(&CG_MN_DLL_DL_HASH),
)?;
download(
&cg_dir.join("CgGL.dll"),
CG_GL_DLL_DL,
Some(&CG_GL_DLL_DL_HASH),
)?;
download(
&cg_dir.join("cgD3D9.dll"),
CG_D9_DLL_DL,
Some(&CG_D9_DLL_DL_HASH),
)?;
Ok(())
}
#[test]
fn download_cg_works() {
use tempdir::TempDir;
let target = TempDir::new("lolupdater-cg-target").unwrap();
download_cg(&target.path().join("cg")).unwrap();
}
fn backup_cg() -> Result<()> {
let cg_backup = app_dirs::get_app_dir(AppDataType::UserData, &APP_INFO, "Backups/Cg")?;
if cg_backup.exists() {
info!("Skipping NVIDIA Cg backup! (Already exists)");
} else {
fs::create_dir(&cg_backup)?;
update_file(
&LOLP_GC_PATH.with(|k| k.clone()).join("Cg.dll"),
&cg_backup.join("Cg.dll"),
)?;
update_file(
&LOLP_GC_PATH.with(|k| k.clone()).join("CgGL.dll"),
&cg_backup.join("CgGL.dll"),
)?;
update_file(
&LOLP_GC_PATH.with(|k| k.clone()).join("cgD3D9.dll"),
&cg_backup.join("CgD3D9.dll"),
)?;
}
Ok(())
}
fn update_cg(cg_dir: &Path) -> Result<()> {
update_file(
&cg_dir.join("Cg.dll"),
&LOLP_GC_PATH.with(|k| k.clone()).join("Cg.dll"),
)?;
update_file(
&cg_dir.join("CgGL.dll"),
&LOLP_GC_PATH.with(|k| k.clone()).join("CgGL.dll"),
)?;
update_file(
&cg_dir.join("cgD3D9.dll"),
&LOLP_GC_PATH.with(|k| k.clone()).join("cgD3D9.dll"),
)?;
update_file(
&cg_dir.join("Cg.dll"),
&LOLSLN_GC_PATH.with(|k| k.clone()).join("Cg.dll"),
)?;
update_file(
&cg_dir.join("CgGL.dll"),
&LOLSLN_GC_PATH.with(|k| k.clone()).join("CgGL.dll"),
)?;
update_file(
&cg_dir.join("cgD3D9.dll"),
&LOLSLN_GC_PATH.with(|k| k.clone()).join("cgD3D9.dll"),
)?;
Ok(())
}
|
return Err("No Cg backup found!".into());
}
i
|
conditional_block
|
win.rs
|
use std::fs;
use std::path::Path;
use app_dirs::{self, AppDataType};
use util::*;
const CG_MN_DLL_DL: &str = "https://mobasuite.com/downloads/macos/cg.dll";
const CG_GL_DLL_DL: &str = "https://mobasuite.com/downloads/macos/cgGL.dll";
const CG_D9_DLL_DL: &str = "https://mobasuite.com/downloads/macos/cgD3D9.dll";
#[cfg_attr(rustfmt, rustfmt_skip)]
const CG_MN_DLL_DL_HASH: [u8; 48] = [
0x2d, 0x27, 0x17, 0x03, 0x95, 0x97, 0xde, 0x0b, 0xf4, 0x88, 0x14, 0xad, 0xee, 0x90, 0xa2, 0xb8,
0xac, 0xfd, 0x9d, 0xab, 0x29, 0xf3, 0x7a, 0x64, 0xbf, 0x94, 0x8f, 0xb5, 0x5f, 0xcf, 0x9c, 0xa7,
0x8f, 0xb0, 0x5f, 0x92, 0x22, 0x27, 0x31, 0x65, 0xe2, 0x3c, 0x5c, 0xa2, 0xab, 0x87, 0x4d, 0x21,
];
#[cfg_attr(rustfmt, rustfmt_skip)]
const CG_GL_DLL_DL_HASH: [u8; 48] = [
0xbc, 0x81, 0x45, 0xc4, 0x7d, 0x3c, 0xa6, 0x96, 0x5c, 0xe5, 0x19, 0x2e, 0x2a, 0xd7, 0xe6, 0xe7,
0x26, 0x26, 0xdd, 0x8c, 0x3b, 0xe9, 0x6a, 0xa9, 0x30, 0x75, 0x69, 0x36, 0x1f, 0x30, 0x34, 0x5b,
0x7b, 0x11, 0x24, 0xfb, 0x1d, 0x09, 0x2c, 0x0a, 0xdd, 0xb3, 0x82, 0x0b, 0x53, 0xa3, 0x8a, 0x78,
];
#[cfg_attr(rustfmt, rustfmt_skip)]
const CG_D9_DLL_DL_HASH: [u8; 48] = [
0xeb, 0x58, 0x44, 0x85, 0x9a, 0x39, 0xd6, 0x85, 0x3c, 0x1f, 0x14, 0x9c, 0xe0, 0x51, 0x16, 0x79,
0x1d, 0x2a, 0x45, 0x7a, 0x7f, 0x98, 0x41, 0xed, 0x07, 0xec, 0xdc, 0x1a, 0xc7, 0xc5, 0xad, 0xcb,
0x34, 0xd6, 0x30, 0x50, 0xbe, 0xe5, 0xad, 0xa5, 0x8e, 0xbd, 0x25, 0xb5, 0x02, 0xe7, 0x28, 0x24,
];
pub fn install() -> Result<()> {
info!("Backing up Nvidia Cg…");
backup_cg().chain_err(|| "Failed to backup Cg")?;
let cg_dir = app_dirs::get_app_dir(AppDataType::UserCache, &APP_INFO, "Cg")?;
if !cg_dir.exists() {
info!("Downloading Nvidia Cg…");
let result = download_cg(&cg_dir);
if result.is_err() {
fs::remove_dir_all(&cg_dir)?;
}
result?;
} else {
info!("Nvidia Cg is already cached!")
}
info!("Updating Nvidia Cg…\n");
update_cg(&cg_dir).chain_err(|| "Failed to update Cg")?;
Ok(())
}
pub fn remove() -> Result<()> {
let cg_backup_path = app_dirs::get_app_dir(AppDataType::UserData, &APP_INFO, "Backups/Cg")?;
if !cg_backup_path.exists() {
return Err("No Cg backup found!".into());
}
info!("Restoring Nvidia Cg…");
update_cg(&cg_backup_path)?;
fs::remove_dir_all(&cg_backup_path)?;
info!("Removing Nvidia Cg backup…");
let cg_cache_path = app_dirs::get_app_dir(AppDataType::UserCache, &APP_INFO, "Cg")?;
if cg_cache_path.exists() {
info!("Removing Nvidia Cg download cache…");
fs::remove_dir_all(cg_cache_path)?;
}
Ok(())
}
fn download_cg(cg_dir: &Path) -> Result<()> {
fs::create_dir(&cg_dir)?;
download(
&cg_dir.join("Cg.dll"),
CG_MN_DLL_DL,
Some(&CG_MN_DLL_DL_HASH),
)?;
download(
&cg_dir.join("CgGL.dll"),
CG_GL_DLL_DL,
Some(&CG_GL_DLL_DL_HASH),
)?;
download(
&cg_dir.join("cgD3D9.dll"),
CG_D9_DLL_DL,
Some(&CG_D9_DLL_DL_HASH),
)?;
Ok(())
}
#[test]
fn download_cg_works() {
use tempdir::TempDir;
let target = TempDir::new("lolupdater-cg-target").unwrap();
download_cg(&target.path().join("cg")).unwrap();
}
fn backup_cg() -> Result<()> {
let cg_backup = app_dirs::get_app_dir(AppDataType::UserData, &APP_INFO, "Backups/Cg")?;
if cg_backup.exists() {
info!("Skipping NVIDIA Cg backup! (Already exists)");
} else {
fs::create_dir(&cg_backup)?;
update_file(
&LOLP_GC_PATH.with(|k| k.clone()).join("Cg.dll"),
&cg_backup.join("Cg.dll"),
)?;
update_file(
&LOLP_GC_PATH.with(|k| k.clone()).join("CgGL.dll"),
&cg_backup.join("CgGL.dll"),
)?;
update_file(
&LOLP_GC_PATH.with(|k| k.clone()).join("cgD3D9.dll"),
&cg_backup.join("CgD3D9.dll"),
)?;
}
Ok(())
}
fn update_cg(cg_dir: &Path) -> Result<()> {
update_file(
&cg_dir.join("Cg.dll"),
&LOLP_GC_PATH.with(|k| k.clone()).join("Cg.dll"),
)?;
update_file(
&cg_dir.join("CgGL.dll"),
&LOLP_GC_PATH.with(|k| k.clone()).join("CgGL.dll"),
)?;
update_file(
&cg_dir.join("cgD3D9.dll"),
&LOLP_GC_PATH.with(|k| k.clone()).join("cgD3D9.dll"),
)?;
update_file(
&cg_dir.join("Cg.dll"),
&LOLSLN_GC_PATH.with(|k| k.clone()).join("Cg.dll"),
)?;
update_file(
&cg_dir.join("CgGL.dll"),
&LOLSLN_GC_PATH.with(|k| k.clone()).join("CgGL.dll"),
)?;
update_file(
&cg_dir.join("cgD3D9.dll"),
|
&LOLSLN_GC_PATH.with(|k| k.clone()).join("cgD3D9.dll"),
)?;
Ok(())
}
|
random_line_split
|
|
win.rs
|
use std::fs;
use std::path::Path;
use app_dirs::{self, AppDataType};
use util::*;
const CG_MN_DLL_DL: &str = "https://mobasuite.com/downloads/macos/cg.dll";
const CG_GL_DLL_DL: &str = "https://mobasuite.com/downloads/macos/cgGL.dll";
const CG_D9_DLL_DL: &str = "https://mobasuite.com/downloads/macos/cgD3D9.dll";
#[cfg_attr(rustfmt, rustfmt_skip)]
const CG_MN_DLL_DL_HASH: [u8; 48] = [
0x2d, 0x27, 0x17, 0x03, 0x95, 0x97, 0xde, 0x0b, 0xf4, 0x88, 0x14, 0xad, 0xee, 0x90, 0xa2, 0xb8,
0xac, 0xfd, 0x9d, 0xab, 0x29, 0xf3, 0x7a, 0x64, 0xbf, 0x94, 0x8f, 0xb5, 0x5f, 0xcf, 0x9c, 0xa7,
0x8f, 0xb0, 0x5f, 0x92, 0x22, 0x27, 0x31, 0x65, 0xe2, 0x3c, 0x5c, 0xa2, 0xab, 0x87, 0x4d, 0x21,
];
#[cfg_attr(rustfmt, rustfmt_skip)]
const CG_GL_DLL_DL_HASH: [u8; 48] = [
0xbc, 0x81, 0x45, 0xc4, 0x7d, 0x3c, 0xa6, 0x96, 0x5c, 0xe5, 0x19, 0x2e, 0x2a, 0xd7, 0xe6, 0xe7,
0x26, 0x26, 0xdd, 0x8c, 0x3b, 0xe9, 0x6a, 0xa9, 0x30, 0x75, 0x69, 0x36, 0x1f, 0x30, 0x34, 0x5b,
0x7b, 0x11, 0x24, 0xfb, 0x1d, 0x09, 0x2c, 0x0a, 0xdd, 0xb3, 0x82, 0x0b, 0x53, 0xa3, 0x8a, 0x78,
];
#[cfg_attr(rustfmt, rustfmt_skip)]
const CG_D9_DLL_DL_HASH: [u8; 48] = [
0xeb, 0x58, 0x44, 0x85, 0x9a, 0x39, 0xd6, 0x85, 0x3c, 0x1f, 0x14, 0x9c, 0xe0, 0x51, 0x16, 0x79,
0x1d, 0x2a, 0x45, 0x7a, 0x7f, 0x98, 0x41, 0xed, 0x07, 0xec, 0xdc, 0x1a, 0xc7, 0xc5, 0xad, 0xcb,
0x34, 0xd6, 0x30, 0x50, 0xbe, 0xe5, 0xad, 0xa5, 0x8e, 0xbd, 0x25, 0xb5, 0x02, 0xe7, 0x28, 0x24,
];
pub fn install() -> Result<()> {
info!("Backing up Nvidia Cg…");
backup_cg().chain_err(|| "Failed to backup Cg")?;
let cg_dir = app_dirs::get_app_dir(AppDataType::UserCache, &APP_INFO, "Cg")?;
if !cg_dir.exists() {
info!("Downloading Nvidia Cg…");
let result = download_cg(&cg_dir);
if result.is_err() {
fs::remove_dir_all(&cg_dir)?;
}
result?;
} else {
info!("Nvidia Cg is already cached!")
}
info!("Updating Nvidia Cg…\n");
update_cg(&cg_dir).chain_err(|| "Failed to update Cg")?;
Ok(())
}
pub fn remove() -> Result<()> {
let cg_backup_path = app_dirs::get_app_dir(AppDataType::UserData, &APP_INFO, "Backups/Cg")?;
if !cg_backup_path.exists() {
return Err("No Cg backup found!".into());
}
info!("Restoring Nvidia Cg…");
update_cg(&cg_backup_path)?;
fs::remove_dir_all(&cg_backup_path)?;
info!("Removing Nvidia Cg backup…");
let cg_cache_path = app_dirs::get_app_dir(AppDataType::UserCache, &APP_INFO, "Cg")?;
if cg_cache_path.exists() {
info!("Removing Nvidia Cg download cache…");
fs::remove_dir_all(cg_cache_path)?;
}
Ok(())
}
fn download_cg(cg_dir: &Path) -> Result<()> {
fs::create_dir(&cg_dir)?;
download(
&cg_dir.join("Cg.dll"),
CG_MN_DLL_DL,
Some(&CG_MN_DLL_DL_HASH),
)?;
download(
&cg_dir.join("CgGL.dll"),
CG_GL_DLL_DL,
Some(&CG_GL_DLL_DL_HASH),
)?;
download(
&cg_dir.join("cgD3D9.dll"),
CG_D9_DLL_DL,
Some(&CG_D9_DLL_DL_HASH),
)?;
Ok(())
}
#[test]
fn download_cg_works() {
use tempdir::TempDir;
let target = TempDir::new("lolupdater-cg-target").unwrap();
download_cg(&target.path().join("cg")).unwrap();
}
fn backup_cg() -> Result<()> {
let cg
|
cg(cg_dir: &Path) -> Result<()> {
update_file(
&cg_dir.join("Cg.dll"),
&LOLP_GC_PATH.with(|k| k.clone()).join("Cg.dll"),
)?;
update_file(
&cg_dir.join("CgGL.dll"),
&LOLP_GC_PATH.with(|k| k.clone()).join("CgGL.dll"),
)?;
update_file(
&cg_dir.join("cgD3D9.dll"),
&LOLP_GC_PATH.with(|k| k.clone()).join("cgD3D9.dll"),
)?;
update_file(
&cg_dir.join("Cg.dll"),
&LOLSLN_GC_PATH.with(|k| k.clone()).join("Cg.dll"),
)?;
update_file(
&cg_dir.join("CgGL.dll"),
&LOLSLN_GC_PATH.with(|k| k.clone()).join("CgGL.dll"),
)?;
update_file(
&cg_dir.join("cgD3D9.dll"),
&LOLSLN_GC_PATH.with(|k| k.clone()).join("cgD3D9.dll"),
)?;
Ok(())
}
|
_backup = app_dirs::get_app_dir(AppDataType::UserData, &APP_INFO, "Backups/Cg")?;
if cg_backup.exists() {
info!("Skipping NVIDIA Cg backup! (Already exists)");
} else {
fs::create_dir(&cg_backup)?;
update_file(
&LOLP_GC_PATH.with(|k| k.clone()).join("Cg.dll"),
&cg_backup.join("Cg.dll"),
)?;
update_file(
&LOLP_GC_PATH.with(|k| k.clone()).join("CgGL.dll"),
&cg_backup.join("CgGL.dll"),
)?;
update_file(
&LOLP_GC_PATH.with(|k| k.clone()).join("cgD3D9.dll"),
&cg_backup.join("CgD3D9.dll"),
)?;
}
Ok(())
}
fn update_
|
identifier_body
|
0006_auto_20190926_1218.py
|
# Generated by Django 2.2.5 on 2019-09-26 12:18
from django.db import migrations, models
import weblate.utils.backup
class Migration(migrations.Migration):
|
dependencies = [("wladmin", "0005_auto_20190926_1332")]
operations = [
migrations.AddField(
model_name="backupservice",
name="paperkey",
field=models.TextField(default=""),
preserve_default=False,
),
migrations.AddField(
model_name="backupservice",
name="passphrase",
field=models.CharField(
default=weblate.utils.backup.make_password, max_length=100
),
),
migrations.AlterField(
model_name="backuplog",
name="event",
field=models.CharField(
choices=[
("backup", "Backup performed"),
("prune", "Deleted the oldest backups"),
("init", "Repository initialization"),
],
max_length=100,
),
),
migrations.AlterField(
model_name="backupservice",
name="repository",
field=models.CharField(
default="", max_length=500, verbose_name="Backup repository"
),
),
]
|
identifier_body
|
|
0006_auto_20190926_1218.py
|
# Generated by Django 2.2.5 on 2019-09-26 12:18
from django.db import migrations, models
import weblate.utils.backup
class Migration(migrations.Migration):
dependencies = [("wladmin", "0005_auto_20190926_1332")]
operations = [
migrations.AddField(
|
model_name="backupservice",
name="paperkey",
field=models.TextField(default=""),
preserve_default=False,
),
migrations.AddField(
model_name="backupservice",
name="passphrase",
field=models.CharField(
default=weblate.utils.backup.make_password, max_length=100
),
),
migrations.AlterField(
model_name="backuplog",
name="event",
field=models.CharField(
choices=[
("backup", "Backup performed"),
("prune", "Deleted the oldest backups"),
("init", "Repository initialization"),
],
max_length=100,
),
),
migrations.AlterField(
model_name="backupservice",
name="repository",
field=models.CharField(
default="", max_length=500, verbose_name="Backup repository"
),
),
]
|
random_line_split
|
|
0006_auto_20190926_1218.py
|
# Generated by Django 2.2.5 on 2019-09-26 12:18
from django.db import migrations, models
import weblate.utils.backup
class
|
(migrations.Migration):
dependencies = [("wladmin", "0005_auto_20190926_1332")]
operations = [
migrations.AddField(
model_name="backupservice",
name="paperkey",
field=models.TextField(default=""),
preserve_default=False,
),
migrations.AddField(
model_name="backupservice",
name="passphrase",
field=models.CharField(
default=weblate.utils.backup.make_password, max_length=100
),
),
migrations.AlterField(
model_name="backuplog",
name="event",
field=models.CharField(
choices=[
("backup", "Backup performed"),
("prune", "Deleted the oldest backups"),
("init", "Repository initialization"),
],
max_length=100,
),
),
migrations.AlterField(
model_name="backupservice",
name="repository",
field=models.CharField(
default="", max_length=500, verbose_name="Backup repository"
),
),
]
|
Migration
|
identifier_name
|
guess-the-number.rs
|
use std::string::String;
use std::fs::File;
use std::io::{Read, BufRead};
static mut TRIES_LEFT: i32 = 5;
static mut RAND_INT: i32 = -1;
static mut GUESS: i32 = -1;
fn main() {
unsafe {
// Open /dev/random
let mut devrandom = File::open("/dev/random").unwrap();
// Create a 1 byte large buffer
let mut randombyte: [u8; 1] = [0];
// Read exactly 1 byte from /dev/random1 byte wide buffer
devrandom.read_exact(&mut randombyte).unwrap();
// Clamp it to 0-100 with modulo
RAND_INT = (randombyte[0] as i32) % 100;
// Get a handle to STDIN
let stdin = std::io::stdin();
let mut handle = stdin.lock();
// Create string to hold STDIN input
let mut input = String::new();
loop {
if TRIES_LEFT == 0 {
println!("Sorry, dude, but you lost. Better luck next time.");
println!("The number you wanted was {}", RAND_INT);
break;
}
println!("Make a guess: ");
input.truncate(0); // clear any previous input
handle.read_line(&mut input).unwrap();
GUESS = match input.trim().parse() {
Ok(integer) => integer,
Err(_) => {
println!("That's no integer, buddy.");
continue;
}
};
if GUESS < 0 || GUESS > 100 {
println!("I can't believe you've done this! That's not in 0-100");
continue;
}
// If we have a valid guess now, it counts as a try
|
TRIES_LEFT -= 1;
if GUESS == RAND_INT {
println!("🎉 YOU WIN 🎉");
break;
} else if GUESS > RAND_INT {
println!("Too high, guy.");
} else {
println!("Too low, bro.");
}
}
}
}
|
random_line_split
|
|
guess-the-number.rs
|
use std::string::String;
use std::fs::File;
use std::io::{Read, BufRead};
static mut TRIES_LEFT: i32 = 5;
static mut RAND_INT: i32 = -1;
static mut GUESS: i32 = -1;
fn main()
|
{
unsafe {
// Open /dev/random
let mut devrandom = File::open("/dev/random").unwrap();
// Create a 1 byte large buffer
let mut randombyte: [u8; 1] = [0];
// Read exactly 1 byte from /dev/random1 byte wide buffer
devrandom.read_exact(&mut randombyte).unwrap();
// Clamp it to 0-100 with modulo
RAND_INT = (randombyte[0] as i32) % 100;
// Get a handle to STDIN
let stdin = std::io::stdin();
let mut handle = stdin.lock();
// Create string to hold STDIN input
let mut input = String::new();
loop {
if TRIES_LEFT == 0 {
println!("Sorry, dude, but you lost. Better luck next time.");
println!("The number you wanted was {}", RAND_INT);
break;
}
println!("Make a guess: ");
input.truncate(0); // clear any previous input
handle.read_line(&mut input).unwrap();
GUESS = match input.trim().parse() {
Ok(integer) => integer,
Err(_) => {
println!("That's no integer, buddy.");
continue;
}
};
if GUESS < 0 || GUESS > 100 {
println!("I can't believe you've done this! That's not in 0-100");
continue;
}
// If we have a valid guess now, it counts as a try
TRIES_LEFT -= 1;
if GUESS == RAND_INT {
println!("🎉 YOU WIN 🎉");
break;
} else if GUESS > RAND_INT {
println!("Too high, guy.");
} else {
println!("Too low, bro.");
}
}
}
}
|
identifier_body
|
|
guess-the-number.rs
|
use std::string::String;
use std::fs::File;
use std::io::{Read, BufRead};
static mut TRIES_LEFT: i32 = 5;
static mut RAND_INT: i32 = -1;
static mut GUESS: i32 = -1;
fn
|
() {
unsafe {
// Open /dev/random
let mut devrandom = File::open("/dev/random").unwrap();
// Create a 1 byte large buffer
let mut randombyte: [u8; 1] = [0];
// Read exactly 1 byte from /dev/random1 byte wide buffer
devrandom.read_exact(&mut randombyte).unwrap();
// Clamp it to 0-100 with modulo
RAND_INT = (randombyte[0] as i32) % 100;
// Get a handle to STDIN
let stdin = std::io::stdin();
let mut handle = stdin.lock();
// Create string to hold STDIN input
let mut input = String::new();
loop {
if TRIES_LEFT == 0 {
println!("Sorry, dude, but you lost. Better luck next time.");
println!("The number you wanted was {}", RAND_INT);
break;
}
println!("Make a guess: ");
input.truncate(0); // clear any previous input
handle.read_line(&mut input).unwrap();
GUESS = match input.trim().parse() {
Ok(integer) => integer,
Err(_) => {
println!("That's no integer, buddy.");
continue;
}
};
if GUESS < 0 || GUESS > 100 {
println!("I can't believe you've done this! That's not in 0-100");
continue;
}
// If we have a valid guess now, it counts as a try
TRIES_LEFT -= 1;
if GUESS == RAND_INT {
println!("🎉 YOU WIN 🎉");
break;
} else if GUESS > RAND_INT {
println!("Too high, guy.");
} else {
println!("Too low, bro.");
}
}
}
}
|
main
|
identifier_name
|
AnnotationNote.tsx
|
import { createElement } from 'react'
import omit from 'lodash/omit'
import { useSpring, animated } from '@react-spring/web'
import { useTheme, useMotionConfig } from '@nivo/core'
import { NoteSvg } from './types'
export const AnnotationNote = <Datum,>({
datum,
x,
y,
note,
}: {
datum: Datum
x: number
y: number
note: NoteSvg<Datum>
|
const animatedProps = useSpring({
x,
y,
config: springConfig,
immediate: !animate,
})
if (typeof note === 'function') {
return createElement(note, { x, y, datum })
}
return (
<>
{theme.annotations.text.outlineWidth > 0 && (
<animated.text
x={animatedProps.x}
y={animatedProps.y}
style={{
...theme.annotations.text,
strokeLinejoin: 'round',
strokeWidth: theme.annotations.text.outlineWidth * 2,
stroke: theme.annotations.text.outlineColor,
}}
>
{note}
</animated.text>
)}
<animated.text
x={animatedProps.x}
y={animatedProps.y}
style={omit(theme.annotations.text, ['outlineWidth', 'outlineColor'])}
>
{note}
</animated.text>
</>
)
}
|
}) => {
const theme = useTheme()
const { animate, config: springConfig } = useMotionConfig()
|
random_line_split
|
AnnotationNote.tsx
|
import { createElement } from 'react'
import omit from 'lodash/omit'
import { useSpring, animated } from '@react-spring/web'
import { useTheme, useMotionConfig } from '@nivo/core'
import { NoteSvg } from './types'
export const AnnotationNote = <Datum,>({
datum,
x,
y,
note,
}: {
datum: Datum
x: number
y: number
note: NoteSvg<Datum>
}) => {
const theme = useTheme()
const { animate, config: springConfig } = useMotionConfig()
const animatedProps = useSpring({
x,
y,
config: springConfig,
immediate: !animate,
})
if (typeof note === 'function')
|
return (
<>
{theme.annotations.text.outlineWidth > 0 && (
<animated.text
x={animatedProps.x}
y={animatedProps.y}
style={{
...theme.annotations.text,
strokeLinejoin: 'round',
strokeWidth: theme.annotations.text.outlineWidth * 2,
stroke: theme.annotations.text.outlineColor,
}}
>
{note}
</animated.text>
)}
<animated.text
x={animatedProps.x}
y={animatedProps.y}
style={omit(theme.annotations.text, ['outlineWidth', 'outlineColor'])}
>
{note}
</animated.text>
</>
)
}
|
{
return createElement(note, { x, y, datum })
}
|
conditional_block
|
synapsecollection.py
|
# -*- coding: utf-8 -*-
#
# synapsecollection.py
#
# This file is part of NEST.
#
# Copyright (C) 2004 The NEST Initiative
#
# NEST is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# NEST is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with NEST. If not, see <http://www.gnu.org/licenses/>.
"""
Example script to show some of the possibilities of the SynapseCollection class. We
connect neurons, and get the SynapseCollection with a GetConnections call. To get
a better understanding of the connections, we plot the weights between the
source and targets.
"""
import nest
import matplotlib.pyplot as plt
import numpy as np
def makeMatrix(sources, targets, weights):
"""
Returns a matrix with the weights between the source and target node_ids.
"""
aa = np.zeros((max(sources)+1, max(targets)+1))
for src, trg, wght in zip(sources, targets, weights):
|
return aa
def plotMatrix(srcs, tgts, weights, title, pos):
"""
Plots weight matrix.
"""
plt.subplot(pos)
plt.matshow(makeMatrix(srcs, tgts, weights), fignum=False)
plt.xlim([min(tgts)-0.5, max(tgts)+0.5])
plt.xlabel('target')
plt.ylim([max(srcs)+0.5, min(srcs)-0.5])
plt.ylabel('source')
plt.title(title)
plt.colorbar(fraction=0.046, pad=0.04)
"""
Start with a simple, one_to_one example.
We create the neurons, connect them, and get the connections. From this we can
get the connected sources, targets, and weights. The corresponding matrix will
be the identity matrix, as we have a one_to_one connection.
"""
nest.ResetKernel()
nrns = nest.Create('iaf_psc_alpha', 10)
nest.Connect(nrns, nrns, 'one_to_one')
conns = nest.GetConnections(nrns, nrns) # This returns a SynapseCollection
# We can get desired information of the SynapseCollection with simple get() call.
g = conns.get(['source', 'target', 'weight'])
srcs = g['source']
tgts = g['target']
weights = g['weight']
# Plot the matrix consisting of the weights between the sources and targets
plt.figure(figsize=(12, 10))
plotMatrix(srcs, tgts, weights, 'Uniform weight', 121)
"""
Add some weights to the connections, and plot the updated weight matrix.
"""
# We can set data of the connections with a simple set() call.
w = [{'weight': x*1.0} for x in range(1, 11)]
conns.set(w)
weights = conns.weight
plotMatrix(srcs, tgts, weights, 'Set weight', 122)
"""
We can also plot an all_to_all connection, with uniformly distributed weights,
and different number of sources and targets.
"""
nest.ResetKernel()
pre = nest.Create('iaf_psc_alpha', 10)
post = nest.Create('iaf_psc_delta', 5)
nest.Connect(pre, post,
syn_spec={'weight':
{'distribution': 'uniform', 'low': 0.5, 'high': 4.5}})
# Get a SynapseCollection with all connections
conns = nest.GetConnections()
srcs = conns.source
tgts = conns.target
weights = conns.weight
plt.figure(figsize=(12, 10))
plotMatrix(srcs, tgts, weights, 'All to all connection', 111)
"""
Lastly, we'll do an exmple that is a bit more complex. We connect different
neurons with different rules, synapse models and weight distributions, and get
different SynapseCollections by calling GetConnections with different inputs.
"""
nest.ResetKernel()
nrns = nest.Create('iaf_psc_alpha', 15)
nest.Connect(nrns[:5], nrns[:5],
'one_to_one',
{'synapse_model': 'stdp_synapse',
'weight': {'distribution': 'normal', 'mu': 5.0, 'sigma': 2.0}})
nest.Connect(nrns[:10], nrns[5:12],
{'rule': 'pairwise_bernoulli', 'p': 0.4},
{'weight': 4.0})
nest.Connect(nrns[5:10], nrns[:5],
{'rule': 'fixed_total_number', 'N': 5},
{'weight': 3.0})
nest.Connect(nrns[10:], nrns[:12],
'all_to_all',
{'synapse_model': 'stdp_synapse',
'weight': {'distribution': 'uniform', 'low': 1., 'high': 5.}})
nest.Connect(nrns, nrns[12:],
{'rule': 'fixed_indegree', 'indegree': 3})
# First get a SynapseCollection consisting of all the connections
conns = nest.GetConnections()
srcs = conns.source
tgts = conns.target
weights = conns.weight
plt.figure(figsize=(14, 12))
plotMatrix(list(srcs), list(tgts), weights, 'All connections', 221)
# Get SynapseCollection consisting of a subset of connections
conns = nest.GetConnections(nrns[:10], nrns[:10])
g = conns.get(['source', 'target', 'weight'])
srcs = g['source']
tgts = g['target']
weights = g['weight']
plotMatrix(srcs, tgts, weights, 'Connections of the first ten neurons', 222)
# Get SynapseCollection consisting of just the stdp_synapses
conns = nest.GetConnections(synapse_model='stdp_synapse')
g = conns.get(['source', 'target', 'weight'])
srcs = g['source']
tgts = g['target']
weights = g['weight']
plotMatrix(srcs, tgts, weights, 'Connections with stdp_synapse', 223)
# Get SynapseCollection consisting of the fixed_total_number connections, but set
# weight before plotting
conns = nest.GetConnections(nrns[5:10], nrns[:5])
w = [{'weight': x*1.0} for x in range(1, 6)]
conns.set(w)
g = conns.get(['source', 'target', 'weight'])
srcs = g['source']
tgts = g['target']
weights = g['weight']
plotMatrix(srcs, tgts, weights, 'fixed_total_number, set weight', 224)
plt.show()
|
aa[src, trg] += wght
|
conditional_block
|
synapsecollection.py
|
# -*- coding: utf-8 -*-
#
# synapsecollection.py
#
# This file is part of NEST.
#
# Copyright (C) 2004 The NEST Initiative
#
# NEST is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# NEST is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with NEST. If not, see <http://www.gnu.org/licenses/>.
"""
Example script to show some of the possibilities of the SynapseCollection class. We
connect neurons, and get the SynapseCollection with a GetConnections call. To get
a better understanding of the connections, we plot the weights between the
source and targets.
"""
import nest
import matplotlib.pyplot as plt
import numpy as np
def makeMatrix(sources, targets, weights):
|
def plotMatrix(srcs, tgts, weights, title, pos):
"""
Plots weight matrix.
"""
plt.subplot(pos)
plt.matshow(makeMatrix(srcs, tgts, weights), fignum=False)
plt.xlim([min(tgts)-0.5, max(tgts)+0.5])
plt.xlabel('target')
plt.ylim([max(srcs)+0.5, min(srcs)-0.5])
plt.ylabel('source')
plt.title(title)
plt.colorbar(fraction=0.046, pad=0.04)
"""
Start with a simple, one_to_one example.
We create the neurons, connect them, and get the connections. From this we can
get the connected sources, targets, and weights. The corresponding matrix will
be the identity matrix, as we have a one_to_one connection.
"""
nest.ResetKernel()
nrns = nest.Create('iaf_psc_alpha', 10)
nest.Connect(nrns, nrns, 'one_to_one')
conns = nest.GetConnections(nrns, nrns) # This returns a SynapseCollection
# We can get desired information of the SynapseCollection with simple get() call.
g = conns.get(['source', 'target', 'weight'])
srcs = g['source']
tgts = g['target']
weights = g['weight']
# Plot the matrix consisting of the weights between the sources and targets
plt.figure(figsize=(12, 10))
plotMatrix(srcs, tgts, weights, 'Uniform weight', 121)
"""
Add some weights to the connections, and plot the updated weight matrix.
"""
# We can set data of the connections with a simple set() call.
w = [{'weight': x*1.0} for x in range(1, 11)]
conns.set(w)
weights = conns.weight
plotMatrix(srcs, tgts, weights, 'Set weight', 122)
"""
We can also plot an all_to_all connection, with uniformly distributed weights,
and different number of sources and targets.
"""
nest.ResetKernel()
pre = nest.Create('iaf_psc_alpha', 10)
post = nest.Create('iaf_psc_delta', 5)
nest.Connect(pre, post,
syn_spec={'weight':
{'distribution': 'uniform', 'low': 0.5, 'high': 4.5}})
# Get a SynapseCollection with all connections
conns = nest.GetConnections()
srcs = conns.source
tgts = conns.target
weights = conns.weight
plt.figure(figsize=(12, 10))
plotMatrix(srcs, tgts, weights, 'All to all connection', 111)
"""
Lastly, we'll do an exmple that is a bit more complex. We connect different
neurons with different rules, synapse models and weight distributions, and get
different SynapseCollections by calling GetConnections with different inputs.
"""
nest.ResetKernel()
nrns = nest.Create('iaf_psc_alpha', 15)
nest.Connect(nrns[:5], nrns[:5],
'one_to_one',
{'synapse_model': 'stdp_synapse',
'weight': {'distribution': 'normal', 'mu': 5.0, 'sigma': 2.0}})
nest.Connect(nrns[:10], nrns[5:12],
{'rule': 'pairwise_bernoulli', 'p': 0.4},
{'weight': 4.0})
nest.Connect(nrns[5:10], nrns[:5],
{'rule': 'fixed_total_number', 'N': 5},
{'weight': 3.0})
nest.Connect(nrns[10:], nrns[:12],
'all_to_all',
{'synapse_model': 'stdp_synapse',
'weight': {'distribution': 'uniform', 'low': 1., 'high': 5.}})
nest.Connect(nrns, nrns[12:],
{'rule': 'fixed_indegree', 'indegree': 3})
# First get a SynapseCollection consisting of all the connections
conns = nest.GetConnections()
srcs = conns.source
tgts = conns.target
weights = conns.weight
plt.figure(figsize=(14, 12))
plotMatrix(list(srcs), list(tgts), weights, 'All connections', 221)
# Get SynapseCollection consisting of a subset of connections
conns = nest.GetConnections(nrns[:10], nrns[:10])
g = conns.get(['source', 'target', 'weight'])
srcs = g['source']
tgts = g['target']
weights = g['weight']
plotMatrix(srcs, tgts, weights, 'Connections of the first ten neurons', 222)
# Get SynapseCollection consisting of just the stdp_synapses
conns = nest.GetConnections(synapse_model='stdp_synapse')
g = conns.get(['source', 'target', 'weight'])
srcs = g['source']
tgts = g['target']
weights = g['weight']
plotMatrix(srcs, tgts, weights, 'Connections with stdp_synapse', 223)
# Get SynapseCollection consisting of the fixed_total_number connections, but set
# weight before plotting
conns = nest.GetConnections(nrns[5:10], nrns[:5])
w = [{'weight': x*1.0} for x in range(1, 6)]
conns.set(w)
g = conns.get(['source', 'target', 'weight'])
srcs = g['source']
tgts = g['target']
weights = g['weight']
plotMatrix(srcs, tgts, weights, 'fixed_total_number, set weight', 224)
plt.show()
|
"""
Returns a matrix with the weights between the source and target node_ids.
"""
aa = np.zeros((max(sources)+1, max(targets)+1))
for src, trg, wght in zip(sources, targets, weights):
aa[src, trg] += wght
return aa
|
identifier_body
|
synapsecollection.py
|
# -*- coding: utf-8 -*-
#
# synapsecollection.py
#
# This file is part of NEST.
#
# Copyright (C) 2004 The NEST Initiative
#
# NEST is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# NEST is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with NEST. If not, see <http://www.gnu.org/licenses/>.
"""
Example script to show some of the possibilities of the SynapseCollection class. We
connect neurons, and get the SynapseCollection with a GetConnections call. To get
a better understanding of the connections, we plot the weights between the
source and targets.
"""
import nest
import matplotlib.pyplot as plt
import numpy as np
def makeMatrix(sources, targets, weights):
"""
Returns a matrix with the weights between the source and target node_ids.
"""
aa = np.zeros((max(sources)+1, max(targets)+1))
for src, trg, wght in zip(sources, targets, weights):
aa[src, trg] += wght
return aa
def plotMatrix(srcs, tgts, weights, title, pos):
"""
Plots weight matrix.
"""
plt.subplot(pos)
plt.matshow(makeMatrix(srcs, tgts, weights), fignum=False)
plt.xlim([min(tgts)-0.5, max(tgts)+0.5])
plt.xlabel('target')
plt.ylim([max(srcs)+0.5, min(srcs)-0.5])
plt.ylabel('source')
plt.title(title)
plt.colorbar(fraction=0.046, pad=0.04)
"""
Start with a simple, one_to_one example.
We create the neurons, connect them, and get the connections. From this we can
get the connected sources, targets, and weights. The corresponding matrix will
be the identity matrix, as we have a one_to_one connection.
"""
nest.ResetKernel()
nrns = nest.Create('iaf_psc_alpha', 10)
nest.Connect(nrns, nrns, 'one_to_one')
conns = nest.GetConnections(nrns, nrns) # This returns a SynapseCollection
# We can get desired information of the SynapseCollection with simple get() call.
g = conns.get(['source', 'target', 'weight'])
srcs = g['source']
tgts = g['target']
weights = g['weight']
# Plot the matrix consisting of the weights between the sources and targets
plt.figure(figsize=(12, 10))
plotMatrix(srcs, tgts, weights, 'Uniform weight', 121)
"""
Add some weights to the connections, and plot the updated weight matrix.
"""
# We can set data of the connections with a simple set() call.
w = [{'weight': x*1.0} for x in range(1, 11)]
conns.set(w)
weights = conns.weight
plotMatrix(srcs, tgts, weights, 'Set weight', 122)
"""
We can also plot an all_to_all connection, with uniformly distributed weights,
and different number of sources and targets.
"""
nest.ResetKernel()
pre = nest.Create('iaf_psc_alpha', 10)
post = nest.Create('iaf_psc_delta', 5)
nest.Connect(pre, post,
syn_spec={'weight':
{'distribution': 'uniform', 'low': 0.5, 'high': 4.5}})
# Get a SynapseCollection with all connections
conns = nest.GetConnections()
srcs = conns.source
tgts = conns.target
weights = conns.weight
plt.figure(figsize=(12, 10))
plotMatrix(srcs, tgts, weights, 'All to all connection', 111)
"""
Lastly, we'll do an exmple that is a bit more complex. We connect different
neurons with different rules, synapse models and weight distributions, and get
different SynapseCollections by calling GetConnections with different inputs.
"""
nest.ResetKernel()
nrns = nest.Create('iaf_psc_alpha', 15)
nest.Connect(nrns[:5], nrns[:5],
'one_to_one',
{'synapse_model': 'stdp_synapse',
'weight': {'distribution': 'normal', 'mu': 5.0, 'sigma': 2.0}})
nest.Connect(nrns[:10], nrns[5:12],
{'rule': 'pairwise_bernoulli', 'p': 0.4},
{'weight': 4.0})
|
nest.Connect(nrns[5:10], nrns[:5],
{'rule': 'fixed_total_number', 'N': 5},
{'weight': 3.0})
nest.Connect(nrns[10:], nrns[:12],
'all_to_all',
{'synapse_model': 'stdp_synapse',
'weight': {'distribution': 'uniform', 'low': 1., 'high': 5.}})
nest.Connect(nrns, nrns[12:],
{'rule': 'fixed_indegree', 'indegree': 3})
# First get a SynapseCollection consisting of all the connections
conns = nest.GetConnections()
srcs = conns.source
tgts = conns.target
weights = conns.weight
plt.figure(figsize=(14, 12))
plotMatrix(list(srcs), list(tgts), weights, 'All connections', 221)
# Get SynapseCollection consisting of a subset of connections
conns = nest.GetConnections(nrns[:10], nrns[:10])
g = conns.get(['source', 'target', 'weight'])
srcs = g['source']
tgts = g['target']
weights = g['weight']
plotMatrix(srcs, tgts, weights, 'Connections of the first ten neurons', 222)
# Get SynapseCollection consisting of just the stdp_synapses
conns = nest.GetConnections(synapse_model='stdp_synapse')
g = conns.get(['source', 'target', 'weight'])
srcs = g['source']
tgts = g['target']
weights = g['weight']
plotMatrix(srcs, tgts, weights, 'Connections with stdp_synapse', 223)
# Get SynapseCollection consisting of the fixed_total_number connections, but set
# weight before plotting
conns = nest.GetConnections(nrns[5:10], nrns[:5])
w = [{'weight': x*1.0} for x in range(1, 6)]
conns.set(w)
g = conns.get(['source', 'target', 'weight'])
srcs = g['source']
tgts = g['target']
weights = g['weight']
plotMatrix(srcs, tgts, weights, 'fixed_total_number, set weight', 224)
plt.show()
|
random_line_split
|
|
synapsecollection.py
|
# -*- coding: utf-8 -*-
#
# synapsecollection.py
#
# This file is part of NEST.
#
# Copyright (C) 2004 The NEST Initiative
#
# NEST is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# NEST is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with NEST. If not, see <http://www.gnu.org/licenses/>.
"""
Example script to show some of the possibilities of the SynapseCollection class. We
connect neurons, and get the SynapseCollection with a GetConnections call. To get
a better understanding of the connections, we plot the weights between the
source and targets.
"""
import nest
import matplotlib.pyplot as plt
import numpy as np
def
|
(sources, targets, weights):
"""
Returns a matrix with the weights between the source and target node_ids.
"""
aa = np.zeros((max(sources)+1, max(targets)+1))
for src, trg, wght in zip(sources, targets, weights):
aa[src, trg] += wght
return aa
def plotMatrix(srcs, tgts, weights, title, pos):
"""
Plots weight matrix.
"""
plt.subplot(pos)
plt.matshow(makeMatrix(srcs, tgts, weights), fignum=False)
plt.xlim([min(tgts)-0.5, max(tgts)+0.5])
plt.xlabel('target')
plt.ylim([max(srcs)+0.5, min(srcs)-0.5])
plt.ylabel('source')
plt.title(title)
plt.colorbar(fraction=0.046, pad=0.04)
"""
Start with a simple, one_to_one example.
We create the neurons, connect them, and get the connections. From this we can
get the connected sources, targets, and weights. The corresponding matrix will
be the identity matrix, as we have a one_to_one connection.
"""
nest.ResetKernel()
nrns = nest.Create('iaf_psc_alpha', 10)
nest.Connect(nrns, nrns, 'one_to_one')
conns = nest.GetConnections(nrns, nrns) # This returns a SynapseCollection
# We can get desired information of the SynapseCollection with simple get() call.
g = conns.get(['source', 'target', 'weight'])
srcs = g['source']
tgts = g['target']
weights = g['weight']
# Plot the matrix consisting of the weights between the sources and targets
plt.figure(figsize=(12, 10))
plotMatrix(srcs, tgts, weights, 'Uniform weight', 121)
"""
Add some weights to the connections, and plot the updated weight matrix.
"""
# We can set data of the connections with a simple set() call.
w = [{'weight': x*1.0} for x in range(1, 11)]
conns.set(w)
weights = conns.weight
plotMatrix(srcs, tgts, weights, 'Set weight', 122)
"""
We can also plot an all_to_all connection, with uniformly distributed weights,
and different number of sources and targets.
"""
nest.ResetKernel()
pre = nest.Create('iaf_psc_alpha', 10)
post = nest.Create('iaf_psc_delta', 5)
nest.Connect(pre, post,
syn_spec={'weight':
{'distribution': 'uniform', 'low': 0.5, 'high': 4.5}})
# Get a SynapseCollection with all connections
conns = nest.GetConnections()
srcs = conns.source
tgts = conns.target
weights = conns.weight
plt.figure(figsize=(12, 10))
plotMatrix(srcs, tgts, weights, 'All to all connection', 111)
"""
Lastly, we'll do an exmple that is a bit more complex. We connect different
neurons with different rules, synapse models and weight distributions, and get
different SynapseCollections by calling GetConnections with different inputs.
"""
nest.ResetKernel()
nrns = nest.Create('iaf_psc_alpha', 15)
nest.Connect(nrns[:5], nrns[:5],
'one_to_one',
{'synapse_model': 'stdp_synapse',
'weight': {'distribution': 'normal', 'mu': 5.0, 'sigma': 2.0}})
nest.Connect(nrns[:10], nrns[5:12],
{'rule': 'pairwise_bernoulli', 'p': 0.4},
{'weight': 4.0})
nest.Connect(nrns[5:10], nrns[:5],
{'rule': 'fixed_total_number', 'N': 5},
{'weight': 3.0})
nest.Connect(nrns[10:], nrns[:12],
'all_to_all',
{'synapse_model': 'stdp_synapse',
'weight': {'distribution': 'uniform', 'low': 1., 'high': 5.}})
nest.Connect(nrns, nrns[12:],
{'rule': 'fixed_indegree', 'indegree': 3})
# First get a SynapseCollection consisting of all the connections
conns = nest.GetConnections()
srcs = conns.source
tgts = conns.target
weights = conns.weight
plt.figure(figsize=(14, 12))
plotMatrix(list(srcs), list(tgts), weights, 'All connections', 221)
# Get SynapseCollection consisting of a subset of connections
conns = nest.GetConnections(nrns[:10], nrns[:10])
g = conns.get(['source', 'target', 'weight'])
srcs = g['source']
tgts = g['target']
weights = g['weight']
plotMatrix(srcs, tgts, weights, 'Connections of the first ten neurons', 222)
# Get SynapseCollection consisting of just the stdp_synapses
conns = nest.GetConnections(synapse_model='stdp_synapse')
g = conns.get(['source', 'target', 'weight'])
srcs = g['source']
tgts = g['target']
weights = g['weight']
plotMatrix(srcs, tgts, weights, 'Connections with stdp_synapse', 223)
# Get SynapseCollection consisting of the fixed_total_number connections, but set
# weight before plotting
conns = nest.GetConnections(nrns[5:10], nrns[:5])
w = [{'weight': x*1.0} for x in range(1, 6)]
conns.set(w)
g = conns.get(['source', 'target', 'weight'])
srcs = g['source']
tgts = g['target']
weights = g['weight']
plotMatrix(srcs, tgts, weights, 'fixed_total_number, set weight', 224)
plt.show()
|
makeMatrix
|
identifier_name
|
esprelay.py
|
#!/usr/bin/env python
#
# Simple TCP line-based text chat server. Clients connect and send a
# conversation ID (any string) as the first line, then they are connected
# to other clients who sent the same conversation ID.
#
# Uses a thread pair per connection, so not highly scalable.
from Queue import Queue, Empty
from SocketServer import StreamRequestHandler, ThreadingTCPServer
from collections import defaultdict
from threading import Lock, Thread
from socket import error
conversations = defaultdict(set)
"""Key is a conversation ID string, value is a set of ConnectionHandlers
that use that ID"""
conversations_lock = Lock()
"""Lock held while accessing conversations"""
def print_conversation(conversation_id, handlers):
|
class ConversationConnectionHandler(StreamRequestHandler):
"""Handles TCP connections with the conversation protocol"""
def __init__(self, request, client_address, server):
self._conversation_id = None
self._messages = Queue()
# This is a blocking call, so we have to set our fields before calling
StreamRequestHandler.__init__(self, request, client_address, server)
def setup(self):
print('connect %s:%d' % self.client_address)
StreamRequestHandler.setup(self)
def finish(self):
StreamRequestHandler.finish(self)
# Remove this handler from the conversations dict
print('close %s:%d' % self.client_address)
if self._conversation_id is not None:
with conversations_lock:
handlers = conversations[self._conversation_id]
handlers.remove(self)
if not handlers:
del conversations[self._conversation_id]
print_conversation(self._conversation_id, handlers)
self._conversation_id = None
def handle(self):
# The first line is the conversation ID
line = self.rfile.readline()
if not line:
# Client disconnected or declined or sent an empty line
return
self._conversation_id = line.strip()
# Register this handler instance for the conversation ID
with conversations_lock:
handlers = conversations[self._conversation_id]
handlers.add(self)
print_conversation(self._conversation_id, handlers)
# Spawn another thread to handle writes
Thread(target=self._write_handler).start()
while True:
try:
line = self.rfile.readline()
except error:
# Client disconnected or other socket error
break
if not line:
# Client disconnected
break
# Send the message to each connected client
with conversations_lock:
for handler in conversations[self._conversation_id]:
if handler is not self:
handler._messages.put(line)
def _write_handler(self):
while not self.rfile.closed and not self.wfile.closed:
try:
# Get the next message we should write from the queue.
# A short timeout lets us detect a closed socket (otherwise
# the thread would only discover it on the next attempt
# to write a message).
message = self._messages.get(block=True, timeout=1)
try:
self.wfile.write(message)
self.wfile.flush()
except error:
# The connection probably dropped; end the handler
break
except Empty:
# Queue was empty at timeout; just keep going
pass
class TCPReuseAddrServer(ThreadingTCPServer):
"""Extends ThreadingTCPServer to enable address reuse"""
allow_reuse_address = True
def main():
host = '0.0.0.0'
port = 54321
listen_addr = (host, port)
server = TCPReuseAddrServer(listen_addr, ConversationConnectionHandler)
print('listening on %s:%d' % listen_addr)
server.serve_forever()
if __name__ == '__main__':
main()
|
"""Prints a line about which handlers are attached to a conversation"""
handler_addrs = sorted(['%s:%d' % h.client_address for h in handlers])
handlers_str = '(%s)' % (', '.join(handler_addrs))
print('conversation "%s" -> %s' % (conversation_id, handlers_str))
|
identifier_body
|
esprelay.py
|
#!/usr/bin/env python
#
# Simple TCP line-based text chat server. Clients connect and send a
# conversation ID (any string) as the first line, then they are connected
# to other clients who sent the same conversation ID.
#
# Uses a thread pair per connection, so not highly scalable.
from Queue import Queue, Empty
from SocketServer import StreamRequestHandler, ThreadingTCPServer
from collections import defaultdict
from threading import Lock, Thread
from socket import error
conversations = defaultdict(set)
"""Key is a conversation ID string, value is a set of ConnectionHandlers
that use that ID"""
conversations_lock = Lock()
"""Lock held while accessing conversations"""
def print_conversation(conversation_id, handlers):
"""Prints a line about which handlers are attached to a conversation"""
handler_addrs = sorted(['%s:%d' % h.client_address for h in handlers])
handlers_str = '(%s)' % (', '.join(handler_addrs))
print('conversation "%s" -> %s' % (conversation_id, handlers_str))
class ConversationConnectionHandler(StreamRequestHandler):
"""Handles TCP connections with the conversation protocol"""
def __init__(self, request, client_address, server):
self._conversation_id = None
self._messages = Queue()
# This is a blocking call, so we have to set our fields before calling
StreamRequestHandler.__init__(self, request, client_address, server)
def setup(self):
print('connect %s:%d' % self.client_address)
StreamRequestHandler.setup(self)
def finish(self):
StreamRequestHandler.finish(self)
# Remove this handler from the conversations dict
print('close %s:%d' % self.client_address)
if self._conversation_id is not None:
with conversations_lock:
handlers = conversations[self._conversation_id]
handlers.remove(self)
if not handlers:
del conversations[self._conversation_id]
print_conversation(self._conversation_id, handlers)
self._conversation_id = None
def handle(self):
# The first line is the conversation ID
line = self.rfile.readline()
if not line:
# Client disconnected or declined or sent an empty line
return
self._conversation_id = line.strip()
# Register this handler instance for the conversation ID
with conversations_lock:
handlers = conversations[self._conversation_id]
handlers.add(self)
print_conversation(self._conversation_id, handlers)
|
line = self.rfile.readline()
except error:
# Client disconnected or other socket error
break
if not line:
# Client disconnected
break
# Send the message to each connected client
with conversations_lock:
for handler in conversations[self._conversation_id]:
if handler is not self:
handler._messages.put(line)
def _write_handler(self):
while not self.rfile.closed and not self.wfile.closed:
try:
# Get the next message we should write from the queue.
# A short timeout lets us detect a closed socket (otherwise
# the thread would only discover it on the next attempt
# to write a message).
message = self._messages.get(block=True, timeout=1)
try:
self.wfile.write(message)
self.wfile.flush()
except error:
# The connection probably dropped; end the handler
break
except Empty:
# Queue was empty at timeout; just keep going
pass
class TCPReuseAddrServer(ThreadingTCPServer):
"""Extends ThreadingTCPServer to enable address reuse"""
allow_reuse_address = True
def main():
host = '0.0.0.0'
port = 54321
listen_addr = (host, port)
server = TCPReuseAddrServer(listen_addr, ConversationConnectionHandler)
print('listening on %s:%d' % listen_addr)
server.serve_forever()
if __name__ == '__main__':
main()
|
# Spawn another thread to handle writes
Thread(target=self._write_handler).start()
while True:
try:
|
random_line_split
|
esprelay.py
|
#!/usr/bin/env python
#
# Simple TCP line-based text chat server. Clients connect and send a
# conversation ID (any string) as the first line, then they are connected
# to other clients who sent the same conversation ID.
#
# Uses a thread pair per connection, so not highly scalable.
from Queue import Queue, Empty
from SocketServer import StreamRequestHandler, ThreadingTCPServer
from collections import defaultdict
from threading import Lock, Thread
from socket import error
conversations = defaultdict(set)
"""Key is a conversation ID string, value is a set of ConnectionHandlers
that use that ID"""
conversations_lock = Lock()
"""Lock held while accessing conversations"""
def print_conversation(conversation_id, handlers):
"""Prints a line about which handlers are attached to a conversation"""
handler_addrs = sorted(['%s:%d' % h.client_address for h in handlers])
handlers_str = '(%s)' % (', '.join(handler_addrs))
print('conversation "%s" -> %s' % (conversation_id, handlers_str))
class ConversationConnectionHandler(StreamRequestHandler):
"""Handles TCP connections with the conversation protocol"""
def __init__(self, request, client_address, server):
self._conversation_id = None
self._messages = Queue()
# This is a blocking call, so we have to set our fields before calling
StreamRequestHandler.__init__(self, request, client_address, server)
def setup(self):
print('connect %s:%d' % self.client_address)
StreamRequestHandler.setup(self)
def finish(self):
StreamRequestHandler.finish(self)
# Remove this handler from the conversations dict
print('close %s:%d' % self.client_address)
if self._conversation_id is not None:
with conversations_lock:
handlers = conversations[self._conversation_id]
handlers.remove(self)
if not handlers:
del conversations[self._conversation_id]
print_conversation(self._conversation_id, handlers)
self._conversation_id = None
def handle(self):
# The first line is the conversation ID
line = self.rfile.readline()
if not line:
# Client disconnected or declined or sent an empty line
|
self._conversation_id = line.strip()
# Register this handler instance for the conversation ID
with conversations_lock:
handlers = conversations[self._conversation_id]
handlers.add(self)
print_conversation(self._conversation_id, handlers)
# Spawn another thread to handle writes
Thread(target=self._write_handler).start()
while True:
try:
line = self.rfile.readline()
except error:
# Client disconnected or other socket error
break
if not line:
# Client disconnected
break
# Send the message to each connected client
with conversations_lock:
for handler in conversations[self._conversation_id]:
if handler is not self:
handler._messages.put(line)
def _write_handler(self):
while not self.rfile.closed and not self.wfile.closed:
try:
# Get the next message we should write from the queue.
# A short timeout lets us detect a closed socket (otherwise
# the thread would only discover it on the next attempt
# to write a message).
message = self._messages.get(block=True, timeout=1)
try:
self.wfile.write(message)
self.wfile.flush()
except error:
# The connection probably dropped; end the handler
break
except Empty:
# Queue was empty at timeout; just keep going
pass
class TCPReuseAddrServer(ThreadingTCPServer):
"""Extends ThreadingTCPServer to enable address reuse"""
allow_reuse_address = True
def main():
host = '0.0.0.0'
port = 54321
listen_addr = (host, port)
server = TCPReuseAddrServer(listen_addr, ConversationConnectionHandler)
print('listening on %s:%d' % listen_addr)
server.serve_forever()
if __name__ == '__main__':
main()
|
return
|
conditional_block
|
esprelay.py
|
#!/usr/bin/env python
#
# Simple TCP line-based text chat server. Clients connect and send a
# conversation ID (any string) as the first line, then they are connected
# to other clients who sent the same conversation ID.
#
# Uses a thread pair per connection, so not highly scalable.
from Queue import Queue, Empty
from SocketServer import StreamRequestHandler, ThreadingTCPServer
from collections import defaultdict
from threading import Lock, Thread
from socket import error
conversations = defaultdict(set)
"""Key is a conversation ID string, value is a set of ConnectionHandlers
that use that ID"""
conversations_lock = Lock()
"""Lock held while accessing conversations"""
def print_conversation(conversation_id, handlers):
"""Prints a line about which handlers are attached to a conversation"""
handler_addrs = sorted(['%s:%d' % h.client_address for h in handlers])
handlers_str = '(%s)' % (', '.join(handler_addrs))
print('conversation "%s" -> %s' % (conversation_id, handlers_str))
class ConversationConnectionHandler(StreamRequestHandler):
"""Handles TCP connections with the conversation protocol"""
def
|
(self, request, client_address, server):
self._conversation_id = None
self._messages = Queue()
# This is a blocking call, so we have to set our fields before calling
StreamRequestHandler.__init__(self, request, client_address, server)
def setup(self):
print('connect %s:%d' % self.client_address)
StreamRequestHandler.setup(self)
def finish(self):
StreamRequestHandler.finish(self)
# Remove this handler from the conversations dict
print('close %s:%d' % self.client_address)
if self._conversation_id is not None:
with conversations_lock:
handlers = conversations[self._conversation_id]
handlers.remove(self)
if not handlers:
del conversations[self._conversation_id]
print_conversation(self._conversation_id, handlers)
self._conversation_id = None
def handle(self):
# The first line is the conversation ID
line = self.rfile.readline()
if not line:
# Client disconnected or declined or sent an empty line
return
self._conversation_id = line.strip()
# Register this handler instance for the conversation ID
with conversations_lock:
handlers = conversations[self._conversation_id]
handlers.add(self)
print_conversation(self._conversation_id, handlers)
# Spawn another thread to handle writes
Thread(target=self._write_handler).start()
while True:
try:
line = self.rfile.readline()
except error:
# Client disconnected or other socket error
break
if not line:
# Client disconnected
break
# Send the message to each connected client
with conversations_lock:
for handler in conversations[self._conversation_id]:
if handler is not self:
handler._messages.put(line)
def _write_handler(self):
while not self.rfile.closed and not self.wfile.closed:
try:
# Get the next message we should write from the queue.
# A short timeout lets us detect a closed socket (otherwise
# the thread would only discover it on the next attempt
# to write a message).
message = self._messages.get(block=True, timeout=1)
try:
self.wfile.write(message)
self.wfile.flush()
except error:
# The connection probably dropped; end the handler
break
except Empty:
# Queue was empty at timeout; just keep going
pass
class TCPReuseAddrServer(ThreadingTCPServer):
"""Extends ThreadingTCPServer to enable address reuse"""
allow_reuse_address = True
def main():
host = '0.0.0.0'
port = 54321
listen_addr = (host, port)
server = TCPReuseAddrServer(listen_addr, ConversationConnectionHandler)
print('listening on %s:%d' % listen_addr)
server.serve_forever()
if __name__ == '__main__':
main()
|
__init__
|
identifier_name
|
account.rs
|
// Copyright 2015-2017 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! Single account in the system.
use util::*;
use pod_account::*;
use rlp::*;
use lru_cache::LruCache;
use basic_account::BasicAccount;
use std::cell::{RefCell, Cell};
const STORAGE_CACHE_ITEMS: usize = 8192;
/// Single account in the system.
/// Keeps track of changes to the code and storage.
/// The changes are applied in `commit_storage` and `commit_code`
pub struct Account {
// Balance of the account.
balance: U256,
// Nonce of the account.
nonce: U256,
// Trie-backed storage.
storage_root: H256,
// LRU Cache of the trie-backed storage.
// This is limited to `STORAGE_CACHE_ITEMS` recent queries
storage_cache: RefCell<LruCache<H256, H256>>,
// Modified storage. Accumulates changes to storage made in `set_storage`
// Takes precedence over `storage_cache`.
storage_changes: HashMap<H256, H256>,
// Code hash of the account.
code_hash: H256,
// Size of the accoun code.
code_size: Option<usize>,
// Code cache of the account.
code_cache: Arc<Bytes>,
// Account code new or has been modified.
code_filth: Filth,
// Cached address hash.
address_hash: Cell<Option<H256>>,
}
impl From<BasicAccount> for Account {
fn from(basic: BasicAccount) -> Self {
Account {
balance: basic.balance,
nonce: basic.nonce,
storage_root: basic.storage_root,
storage_cache: Self::empty_storage_cache(),
storage_changes: HashMap::new(),
code_hash: basic.code_hash,
code_size: None,
code_cache: Arc::new(vec![]),
code_filth: Filth::Clean,
address_hash: Cell::new(None),
}
}
}
impl Account {
#[cfg(test)]
/// General constructor.
pub fn new(balance: U256, nonce: U256, storage: HashMap<H256, H256>, code: Bytes) -> Account {
Account {
balance: balance,
nonce: nonce,
storage_root: SHA3_NULL_RLP,
storage_cache: Self::empty_storage_cache(),
storage_changes: storage,
code_hash: code.sha3(),
code_size: Some(code.len()),
code_cache: Arc::new(code),
code_filth: Filth::Dirty,
address_hash: Cell::new(None),
}
}
fn empty_storage_cache() -> RefCell<LruCache<H256, H256>> {
RefCell::new(LruCache::new(STORAGE_CACHE_ITEMS))
}
/// General constructor.
pub fn from_pod(pod: PodAccount) -> Account {
Account {
balance: pod.balance,
nonce: pod.nonce,
storage_root: SHA3_NULL_RLP,
storage_cache: Self::empty_storage_cache(),
storage_changes: pod.storage.into_iter().collect(),
code_hash: pod.code.as_ref().map_or(SHA3_EMPTY, |c| c.sha3()),
code_filth: Filth::Dirty,
code_size: Some(pod.code.as_ref().map_or(0, |c| c.len())),
code_cache: Arc::new(pod.code.map_or_else(|| { warn!("POD account with unknown code is being created! Assuming no code."); vec![] }, |c| c)),
address_hash: Cell::new(None),
}
}
/// Create a new account with the given balance.
pub fn new_basic(balance: U256, nonce: U256) -> Account {
Account {
balance: balance,
nonce: nonce,
storage_root: SHA3_NULL_RLP,
storage_cache: Self::empty_storage_cache(),
storage_changes: HashMap::new(),
code_hash: SHA3_EMPTY,
code_cache: Arc::new(vec![]),
code_size: Some(0),
code_filth: Filth::Clean,
address_hash: Cell::new(None),
}
}
/// Create a new account from RLP.
pub fn from_rlp(rlp: &[u8]) -> Account {
let basic: BasicAccount = ::rlp::decode(rlp);
basic.into()
}
/// Create a new contract account.
/// NOTE: make sure you use `init_code` on this before `commit`ing.
pub fn new_contract(balance: U256, nonce: U256) -> Account {
Account {
balance: balance,
nonce: nonce,
storage_root: SHA3_NULL_RLP,
storage_cache: Self::empty_storage_cache(),
storage_changes: HashMap::new(),
code_hash: SHA3_EMPTY,
code_cache: Arc::new(vec![]),
code_size: None,
code_filth: Filth::Clean,
address_hash: Cell::new(None),
}
}
/// Set this account's code to the given code.
/// NOTE: Account should have been created with `new_contract()`
pub fn init_code(&mut self, code: Bytes) {
self.code_hash = code.sha3();
self.code_cache = Arc::new(code);
self.code_size = Some(self.code_cache.len());
self.code_filth = Filth::Dirty;
}
/// Reset this account's code to the given code.
pub fn reset_code(&mut self, code: Bytes) {
self.init_code(code);
}
/// Set (and cache) the contents of the trie's storage at `key` to `value`.
pub fn set_storage(&mut self, key: H256, value: H256) {
self.storage_changes.insert(key, value);
}
/// Get (and cache) the contents of the trie's storage at `key`.
/// Takes modifed storage into account.
pub fn storage_at(&self, db: &HashDB, key: &H256) -> H256 {
if let Some(value) = self.cached_storage_at(key) {
return value;
}
let db = SecTrieDB::new(db, &self.storage_root)
.expect("Account storage_root initially set to zero (valid) and only altered by SecTrieDBMut. \
SecTrieDBMut would not set it to an invalid state root. Therefore the root is valid and DB creation \
using it will not fail.");
let item: U256 = match db.get_with(key, ::rlp::decode) {
Ok(x) => x.unwrap_or_else(U256::zero),
Err(e) => panic!("Encountered potential DB corruption: {}", e),
};
let value: H256 = item.into();
self.storage_cache.borrow_mut().insert(key.clone(), value.clone());
value
}
/// Get cached storage value if any. Returns `None` if the
/// key is not in the cache.
pub fn cached_storage_at(&self, key: &H256) -> Option<H256> {
if let Some(value) = self.storage_changes.get(key) {
return Some(value.clone())
}
if let Some(value) = self.storage_cache.borrow_mut().get_mut(key) {
return Some(value.clone())
}
None
}
/// return the balance associated with this account.
pub fn balance(&self) -> &U256 { &self.balance }
/// return the nonce associated with this account.
pub fn nonce(&self) -> &U256 { &self.nonce }
/// return the code hash associated with this account.
pub fn code_hash(&self) -> H256 {
self.code_hash.clone()
}
/// return the code hash associated with this account.
pub fn address_hash(&self, address: &Address) -> H256 {
let hash = self.address_hash.get();
hash.unwrap_or_else(|| {
let hash = address.sha3();
self.address_hash.set(Some(hash.clone()));
hash
})
}
/// returns the account's code. If `None` then the code cache isn't available -
/// get someone who knows to call `note_code`.
pub fn code(&self) -> Option<Arc<Bytes>> {
if self.code_hash != SHA3_EMPTY && self.code_cache.is_empty() {
return None;
}
Some(self.code_cache.clone())
}
/// returns the account's code size. If `None` then the code cache or code size cache isn't available -
/// get someone who knows to call `note_code`.
pub fn code_size(&self) -> Option<usize> {
self.code_size.clone()
}
#[cfg(test)]
/// Provide a byte array which hashes to the `code_hash`. returns the hash as a result.
pub fn note_code(&mut self, code: Bytes) -> Result<(), H256> {
let h = code.sha3();
if self.code_hash == h {
self.code_cache = Arc::new(code);
self.code_size = Some(self.code_cache.len());
Ok(())
} else {
Err(h)
}
}
/// Is `code_cache` valid; such that code is going to return Some?
pub fn is_cached(&self) -> bool {
!self.code_cache.is_empty() || (self.code_cache.is_empty() && self.code_hash == SHA3_EMPTY)
}
/// Provide a database to get `code_hash`. Should not be called if it is a contract without code.
pub fn cache_code(&mut self, db: &HashDB) -> Option<Arc<Bytes>> {
// TODO: fill out self.code_cache;
trace!("Account::cache_code: ic={}; self.code_hash={:?}, self.code_cache={}", self.is_cached(), self.code_hash, self.code_cache.pretty());
if self.is_cached() { return Some(self.code_cache.clone()) }
match db.get(&self.code_hash) {
Some(x) => {
self.code_size = Some(x.len());
self.code_cache = Arc::new(x.to_vec());
Some(self.code_cache.clone())
},
_ => {
warn!("Failed reverse get of {}", self.code_hash);
None
},
}
}
/// Provide code to cache. For correctness, should be the correct code for the
/// account.
pub fn cache_given_code(&mut self, code: Arc<Bytes>) {
trace!("Account::cache_given_code: ic={}; self.code_hash={:?}, self.code_cache={}", self.is_cached(), self.code_hash, self.code_cache.pretty());
self.code_size = Some(code.len());
self.code_cache = code;
}
/// Provide a database to get `code_size`. Should not be called if it is a contract without code.
pub fn cache_code_size(&mut self, db: &HashDB) -> bool {
// TODO: fill out self.code_cache;
trace!("Account::cache_code_size: ic={}; self.code_hash={:?}, self.code_cache={}", self.is_cached(), self.code_hash, self.code_cache.pretty());
self.code_size.is_some() ||
if self.code_hash != SHA3_EMPTY {
match db.get(&self.code_hash) {
Some(x) => {
self.code_size = Some(x.len());
true
},
_ => {
warn!("Failed reverse get of {}", self.code_hash);
false
},
}
} else {
false
}
}
/// Determine whether there are any un-`commit()`-ed storage-setting operations.
pub fn storage_is_clean(&self) -> bool { self.storage_changes.is_empty() }
/// Check if account has zero nonce, balance, no code and no storage.
///
/// NOTE: Will panic if `!self.storage_is_clean()`
pub fn is_empty(&self) -> bool {
assert!(self.storage_is_clean(), "Account::is_empty() may only legally be called when storage is clean.");
self.is_null() && self.storage_root == SHA3_NULL_RLP
}
/// Check if account has zero nonce, balance, no code.
pub fn is_null(&self) -> bool {
self.balance.is_zero() &&
self.nonce.is_zero() &&
self.code_hash == SHA3_EMPTY
}
/// Return the storage root associated with this account or None if it has been altered via the overlay.
pub fn storage_root(&self) -> Option<&H256> { if self.storage_is_clean() {Some(&self.storage_root)} else {None} }
/// Return the storage overlay.
pub fn storage_changes(&self) -> &HashMap<H256, H256> { &self.storage_changes }
/// Increment the nonce of the account by one.
pub fn inc_nonce(&mut self) {
self.nonce = self.nonce + U256::from(1u8);
}
/// Increase account balance.
pub fn add_balance(&mut self, x: &U256) {
self.balance = self.balance + *x;
}
/// Decrease account balance.
/// Panics if balance is less than `x`
pub fn sub_balance(&mut self, x: &U256) {
assert!(self.balance >= *x);
self.balance = self.balance - *x;
}
/// Commit the `storage_changes` to the backing DB and update `storage_root`.
pub fn commit_storage(&mut self, trie_factory: &TrieFactory, db: &mut HashDB) {
let mut t = trie_factory.from_existing(db, &mut self.storage_root)
.expect("Account storage_root initially set to zero (valid) and only altered by SecTrieDBMut. \
SecTrieDBMut would not set it to an invalid state root. Therefore the root is valid and DB creation \
using it will not fail.");
for (k, v) in self.storage_changes.drain() {
// cast key and value to trait type,
// so we can call overloaded `to_bytes` method
let res = match v.is_zero() {
true => t.remove(&k),
false => t.insert(&k, &encode(&U256::from(&*v))),
};
if let Err(e) = res {
warn!("Encountered potential DB corruption: {}", e);
}
self.storage_cache.borrow_mut().insert(k, v);
}
}
/// Commit any unsaved code. `code_hash` will always return the hash of the `code_cache` after this.
pub fn commit_code(&mut self, db: &mut HashDB) {
trace!("Commiting code of {:?} - {:?}, {:?}", self, self.code_filth == Filth::Dirty, self.code_cache.is_empty());
match (self.code_filth == Filth::Dirty, self.code_cache.is_empty()) {
(true, true) => {
self.code_size = Some(0);
self.code_filth = Filth::Clean;
},
(true, false) => {
db.emplace(self.code_hash.clone(), DBValue::from_slice(&*self.code_cache));
self.code_size = Some(self.code_cache.len());
self.code_filth = Filth::Clean;
},
(false, _) => {},
}
}
/// Export to RLP.
pub fn rlp(&self) -> Bytes {
let mut stream = RlpStream::new_list(4);
stream.append(&self.nonce);
stream.append(&self.balance);
stream.append(&self.storage_root);
stream.append(&self.code_hash);
stream.out()
}
/// Clone basic account data
pub fn
|
(&self) -> Account {
Account {
balance: self.balance.clone(),
nonce: self.nonce.clone(),
storage_root: self.storage_root.clone(),
storage_cache: Self::empty_storage_cache(),
storage_changes: HashMap::new(),
code_hash: self.code_hash.clone(),
code_size: self.code_size.clone(),
code_cache: self.code_cache.clone(),
code_filth: self.code_filth,
address_hash: self.address_hash.clone(),
}
}
/// Clone account data and dirty storage keys
pub fn clone_dirty(&self) -> Account {
let mut account = self.clone_basic();
account.storage_changes = self.storage_changes.clone();
account.code_cache = self.code_cache.clone();
account
}
/// Clone account data, dirty storage keys and cached storage keys.
pub fn clone_all(&self) -> Account {
let mut account = self.clone_dirty();
account.storage_cache = self.storage_cache.clone();
account
}
/// Replace self with the data from other account merging storage cache.
/// Basic account data and all modifications are overwritten
/// with new values.
pub fn overwrite_with(&mut self, other: Account) {
self.balance = other.balance;
self.nonce = other.nonce;
self.storage_root = other.storage_root;
self.code_hash = other.code_hash;
self.code_filth = other.code_filth;
self.code_cache = other.code_cache;
self.code_size = other.code_size;
self.address_hash = other.address_hash;
let mut cache = self.storage_cache.borrow_mut();
for (k, v) in other.storage_cache.into_inner() {
cache.insert(k.clone() , v.clone()); //TODO: cloning should not be required here
}
self.storage_changes = other.storage_changes;
}
}
// light client storage proof.
impl Account {
/// Prove a storage key's existence or nonexistence in the account's storage
/// trie.
/// `storage_key` is the hash of the desired storage key, meaning
/// this will only work correctly under a secure trie.
/// Returns a merkle proof of the storage trie node with all nodes before `from_level`
/// omitted.
pub fn prove_storage(&self, db: &HashDB, storage_key: H256, from_level: u32) -> Result<Vec<Bytes>, Box<TrieError>> {
use util::trie::{Trie, TrieDB};
use util::trie::recorder::Recorder;
let mut recorder = Recorder::with_depth(from_level);
let trie = TrieDB::new(db, &self.storage_root)?;
let _ = trie.get_with(&storage_key, &mut recorder)?;
Ok(recorder.drain().into_iter().map(|r| r.data).collect())
}
}
impl fmt::Debug for Account {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{:?}", PodAccount::from_account(self))
}
}
#[cfg(test)]
mod tests {
use rlp::{UntrustedRlp, RlpType, View, Compressible};
use util::*;
use super::*;
use account_db::*;
#[test]
fn account_compress() {
let raw = Account::new_basic(2.into(), 4.into()).rlp();
let rlp = UntrustedRlp::new(&raw);
let compact_vec = rlp.compress(RlpType::Snapshot).to_vec();
assert!(raw.len() > compact_vec.len());
let again_raw = UntrustedRlp::new(&compact_vec).decompress(RlpType::Snapshot);
assert_eq!(raw, again_raw.to_vec());
}
#[test]
fn storage_at() {
let mut db = MemoryDB::new();
let mut db = AccountDBMut::new(&mut db, &Address::new());
let rlp = {
let mut a = Account::new_contract(69.into(), 0.into());
a.set_storage(H256::from(&U256::from(0x00u64)), H256::from(&U256::from(0x1234u64)));
a.commit_storage(&Default::default(), &mut db);
a.init_code(vec![]);
a.commit_code(&mut db);
a.rlp()
};
let a = Account::from_rlp(&rlp);
assert_eq!(a.storage_root().unwrap().hex(), "c57e1afb758b07f8d2c8f13a3b6e44fa5ff94ab266facc5a4fd3f062426e50b2");
assert_eq!(a.storage_at(&db.immutable(), &H256::from(&U256::from(0x00u64))), H256::from(&U256::from(0x1234u64)));
assert_eq!(a.storage_at(&db.immutable(), &H256::from(&U256::from(0x01u64))), H256::new());
}
#[test]
fn note_code() {
let mut db = MemoryDB::new();
let mut db = AccountDBMut::new(&mut db, &Address::new());
let rlp = {
let mut a = Account::new_contract(69.into(), 0.into());
a.init_code(vec![0x55, 0x44, 0xffu8]);
a.commit_code(&mut db);
a.rlp()
};
let mut a = Account::from_rlp(&rlp);
assert!(a.cache_code(&db.immutable()).is_some());
let mut a = Account::from_rlp(&rlp);
assert_eq!(a.note_code(vec![0x55, 0x44, 0xffu8]), Ok(()));
}
#[test]
fn commit_storage() {
let mut a = Account::new_contract(69.into(), 0.into());
let mut db = MemoryDB::new();
let mut db = AccountDBMut::new(&mut db, &Address::new());
a.set_storage(0.into(), 0x1234.into());
assert_eq!(a.storage_root(), None);
a.commit_storage(&Default::default(), &mut db);
assert_eq!(a.storage_root().unwrap().hex(), "c57e1afb758b07f8d2c8f13a3b6e44fa5ff94ab266facc5a4fd3f062426e50b2");
}
#[test]
fn commit_remove_commit_storage() {
let mut a = Account::new_contract(69.into(), 0.into());
let mut db = MemoryDB::new();
let mut db = AccountDBMut::new(&mut db, &Address::new());
a.set_storage(0.into(), 0x1234.into());
a.commit_storage(&Default::default(), &mut db);
a.set_storage(1.into(), 0x1234.into());
a.commit_storage(&Default::default(), &mut db);
a.set_storage(1.into(), 0.into());
a.commit_storage(&Default::default(), &mut db);
assert_eq!(a.storage_root().unwrap().hex(), "c57e1afb758b07f8d2c8f13a3b6e44fa5ff94ab266facc5a4fd3f062426e50b2");
}
#[test]
fn commit_code() {
let mut a = Account::new_contract(69.into(), 0.into());
let mut db = MemoryDB::new();
let mut db = AccountDBMut::new(&mut db, &Address::new());
a.init_code(vec![0x55, 0x44, 0xffu8]);
assert_eq!(a.code_filth, Filth::Dirty);
assert_eq!(a.code_size(), Some(3));
a.commit_code(&mut db);
assert_eq!(a.code_hash().hex(), "af231e631776a517ca23125370d542873eca1fb4d613ed9b5d5335a46ae5b7eb");
}
#[test]
fn reset_code() {
let mut a = Account::new_contract(69.into(), 0.into());
let mut db = MemoryDB::new();
let mut db = AccountDBMut::new(&mut db, &Address::new());
a.init_code(vec![0x55, 0x44, 0xffu8]);
assert_eq!(a.code_filth, Filth::Dirty);
a.commit_code(&mut db);
assert_eq!(a.code_filth, Filth::Clean);
assert_eq!(a.code_hash().hex(), "af231e631776a517ca23125370d542873eca1fb4d613ed9b5d5335a46ae5b7eb");
a.reset_code(vec![0x55]);
assert_eq!(a.code_filth, Filth::Dirty);
a.commit_code(&mut db);
assert_eq!(a.code_hash().hex(), "37bf2238b11b68cdc8382cece82651b59d3c3988873b6e0f33d79694aa45f1be");
}
#[test]
fn rlpio() {
let a = Account::new(U256::from(69u8), U256::from(0u8), HashMap::new(), Bytes::new());
let b = Account::from_rlp(&a.rlp());
assert_eq!(a.balance(), b.balance());
assert_eq!(a.nonce(), b.nonce());
assert_eq!(a.code_hash(), b.code_hash());
assert_eq!(a.storage_root(), b.storage_root());
}
#[test]
fn new_account() {
let a = Account::new(U256::from(69u8), U256::from(0u8), HashMap::new(), Bytes::new());
assert_eq!(a.rlp().to_hex(), "f8448045a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470");
assert_eq!(a.balance(), &U256::from(69u8));
assert_eq!(a.nonce(), &U256::from(0u8));
assert_eq!(a.code_hash(), SHA3_EMPTY);
assert_eq!(a.storage_root().unwrap(), &SHA3_NULL_RLP);
}
#[test]
fn create_account() {
let a = Account::new(U256::from(69u8), U256::from(0u8), HashMap::new(), Bytes::new());
assert_eq!(a.rlp().to_hex(), "f8448045a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470");
}
}
|
clone_basic
|
identifier_name
|
account.rs
|
// Copyright 2015-2017 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! Single account in the system.
use util::*;
use pod_account::*;
use rlp::*;
use lru_cache::LruCache;
use basic_account::BasicAccount;
use std::cell::{RefCell, Cell};
const STORAGE_CACHE_ITEMS: usize = 8192;
/// Single account in the system.
/// Keeps track of changes to the code and storage.
/// The changes are applied in `commit_storage` and `commit_code`
pub struct Account {
// Balance of the account.
balance: U256,
// Nonce of the account.
nonce: U256,
// Trie-backed storage.
storage_root: H256,
// LRU Cache of the trie-backed storage.
// This is limited to `STORAGE_CACHE_ITEMS` recent queries
storage_cache: RefCell<LruCache<H256, H256>>,
// Modified storage. Accumulates changes to storage made in `set_storage`
// Takes precedence over `storage_cache`.
storage_changes: HashMap<H256, H256>,
// Code hash of the account.
code_hash: H256,
// Size of the accoun code.
code_size: Option<usize>,
// Code cache of the account.
code_cache: Arc<Bytes>,
// Account code new or has been modified.
code_filth: Filth,
// Cached address hash.
address_hash: Cell<Option<H256>>,
}
impl From<BasicAccount> for Account {
fn from(basic: BasicAccount) -> Self {
Account {
balance: basic.balance,
nonce: basic.nonce,
storage_root: basic.storage_root,
storage_cache: Self::empty_storage_cache(),
storage_changes: HashMap::new(),
code_hash: basic.code_hash,
code_size: None,
code_cache: Arc::new(vec![]),
code_filth: Filth::Clean,
address_hash: Cell::new(None),
}
}
}
impl Account {
#[cfg(test)]
/// General constructor.
pub fn new(balance: U256, nonce: U256, storage: HashMap<H256, H256>, code: Bytes) -> Account {
Account {
balance: balance,
nonce: nonce,
storage_root: SHA3_NULL_RLP,
storage_cache: Self::empty_storage_cache(),
storage_changes: storage,
code_hash: code.sha3(),
code_size: Some(code.len()),
code_cache: Arc::new(code),
code_filth: Filth::Dirty,
address_hash: Cell::new(None),
}
}
fn empty_storage_cache() -> RefCell<LruCache<H256, H256>> {
RefCell::new(LruCache::new(STORAGE_CACHE_ITEMS))
}
/// General constructor.
pub fn from_pod(pod: PodAccount) -> Account {
Account {
balance: pod.balance,
nonce: pod.nonce,
storage_root: SHA3_NULL_RLP,
storage_cache: Self::empty_storage_cache(),
storage_changes: pod.storage.into_iter().collect(),
code_hash: pod.code.as_ref().map_or(SHA3_EMPTY, |c| c.sha3()),
code_filth: Filth::Dirty,
code_size: Some(pod.code.as_ref().map_or(0, |c| c.len())),
code_cache: Arc::new(pod.code.map_or_else(|| { warn!("POD account with unknown code is being created! Assuming no code."); vec![] }, |c| c)),
address_hash: Cell::new(None),
}
}
/// Create a new account with the given balance.
pub fn new_basic(balance: U256, nonce: U256) -> Account {
Account {
balance: balance,
nonce: nonce,
storage_root: SHA3_NULL_RLP,
storage_cache: Self::empty_storage_cache(),
storage_changes: HashMap::new(),
code_hash: SHA3_EMPTY,
code_cache: Arc::new(vec![]),
code_size: Some(0),
code_filth: Filth::Clean,
address_hash: Cell::new(None),
}
}
/// Create a new account from RLP.
pub fn from_rlp(rlp: &[u8]) -> Account {
let basic: BasicAccount = ::rlp::decode(rlp);
basic.into()
}
/// Create a new contract account.
/// NOTE: make sure you use `init_code` on this before `commit`ing.
pub fn new_contract(balance: U256, nonce: U256) -> Account {
Account {
balance: balance,
nonce: nonce,
storage_root: SHA3_NULL_RLP,
storage_cache: Self::empty_storage_cache(),
storage_changes: HashMap::new(),
code_hash: SHA3_EMPTY,
code_cache: Arc::new(vec![]),
code_size: None,
code_filth: Filth::Clean,
address_hash: Cell::new(None),
}
}
/// Set this account's code to the given code.
/// NOTE: Account should have been created with `new_contract()`
pub fn init_code(&mut self, code: Bytes) {
self.code_hash = code.sha3();
self.code_cache = Arc::new(code);
self.code_size = Some(self.code_cache.len());
self.code_filth = Filth::Dirty;
}
/// Reset this account's code to the given code.
pub fn reset_code(&mut self, code: Bytes) {
self.init_code(code);
}
/// Set (and cache) the contents of the trie's storage at `key` to `value`.
pub fn set_storage(&mut self, key: H256, value: H256) {
self.storage_changes.insert(key, value);
}
/// Get (and cache) the contents of the trie's storage at `key`.
/// Takes modifed storage into account.
pub fn storage_at(&self, db: &HashDB, key: &H256) -> H256 {
if let Some(value) = self.cached_storage_at(key) {
return value;
}
let db = SecTrieDB::new(db, &self.storage_root)
.expect("Account storage_root initially set to zero (valid) and only altered by SecTrieDBMut. \
SecTrieDBMut would not set it to an invalid state root. Therefore the root is valid and DB creation \
using it will not fail.");
let item: U256 = match db.get_with(key, ::rlp::decode) {
Ok(x) => x.unwrap_or_else(U256::zero),
Err(e) => panic!("Encountered potential DB corruption: {}", e),
};
let value: H256 = item.into();
self.storage_cache.borrow_mut().insert(key.clone(), value.clone());
value
}
/// Get cached storage value if any. Returns `None` if the
/// key is not in the cache.
pub fn cached_storage_at(&self, key: &H256) -> Option<H256> {
if let Some(value) = self.storage_changes.get(key) {
return Some(value.clone())
}
if let Some(value) = self.storage_cache.borrow_mut().get_mut(key) {
return Some(value.clone())
}
None
}
/// return the balance associated with this account.
pub fn balance(&self) -> &U256 { &self.balance }
/// return the nonce associated with this account.
pub fn nonce(&self) -> &U256 { &self.nonce }
/// return the code hash associated with this account.
pub fn code_hash(&self) -> H256 {
self.code_hash.clone()
}
/// return the code hash associated with this account.
pub fn address_hash(&self, address: &Address) -> H256 {
let hash = self.address_hash.get();
hash.unwrap_or_else(|| {
let hash = address.sha3();
self.address_hash.set(Some(hash.clone()));
hash
})
}
/// returns the account's code. If `None` then the code cache isn't available -
/// get someone who knows to call `note_code`.
pub fn code(&self) -> Option<Arc<Bytes>> {
if self.code_hash != SHA3_EMPTY && self.code_cache.is_empty() {
return None;
}
Some(self.code_cache.clone())
}
/// returns the account's code size. If `None` then the code cache or code size cache isn't available -
/// get someone who knows to call `note_code`.
pub fn code_size(&self) -> Option<usize> {
self.code_size.clone()
}
#[cfg(test)]
/// Provide a byte array which hashes to the `code_hash`. returns the hash as a result.
pub fn note_code(&mut self, code: Bytes) -> Result<(), H256> {
let h = code.sha3();
if self.code_hash == h {
self.code_cache = Arc::new(code);
self.code_size = Some(self.code_cache.len());
Ok(())
} else {
Err(h)
}
}
/// Is `code_cache` valid; such that code is going to return Some?
pub fn is_cached(&self) -> bool {
!self.code_cache.is_empty() || (self.code_cache.is_empty() && self.code_hash == SHA3_EMPTY)
}
/// Provide a database to get `code_hash`. Should not be called if it is a contract without code.
pub fn cache_code(&mut self, db: &HashDB) -> Option<Arc<Bytes>> {
// TODO: fill out self.code_cache;
trace!("Account::cache_code: ic={}; self.code_hash={:?}, self.code_cache={}", self.is_cached(), self.code_hash, self.code_cache.pretty());
if self.is_cached() { return Some(self.code_cache.clone()) }
match db.get(&self.code_hash) {
Some(x) => {
self.code_size = Some(x.len());
self.code_cache = Arc::new(x.to_vec());
Some(self.code_cache.clone())
},
_ => {
warn!("Failed reverse get of {}", self.code_hash);
None
},
}
}
/// Provide code to cache. For correctness, should be the correct code for the
/// account.
pub fn cache_given_code(&mut self, code: Arc<Bytes>) {
trace!("Account::cache_given_code: ic={}; self.code_hash={:?}, self.code_cache={}", self.is_cached(), self.code_hash, self.code_cache.pretty());
self.code_size = Some(code.len());
self.code_cache = code;
}
/// Provide a database to get `code_size`. Should not be called if it is a contract without code.
pub fn cache_code_size(&mut self, db: &HashDB) -> bool {
// TODO: fill out self.code_cache;
trace!("Account::cache_code_size: ic={}; self.code_hash={:?}, self.code_cache={}", self.is_cached(), self.code_hash, self.code_cache.pretty());
self.code_size.is_some() ||
if self.code_hash != SHA3_EMPTY {
match db.get(&self.code_hash) {
Some(x) => {
self.code_size = Some(x.len());
true
},
_ => {
warn!("Failed reverse get of {}", self.code_hash);
false
},
}
} else {
false
}
}
/// Determine whether there are any un-`commit()`-ed storage-setting operations.
pub fn storage_is_clean(&self) -> bool { self.storage_changes.is_empty() }
/// Check if account has zero nonce, balance, no code and no storage.
///
/// NOTE: Will panic if `!self.storage_is_clean()`
pub fn is_empty(&self) -> bool {
assert!(self.storage_is_clean(), "Account::is_empty() may only legally be called when storage is clean.");
self.is_null() && self.storage_root == SHA3_NULL_RLP
}
/// Check if account has zero nonce, balance, no code.
pub fn is_null(&self) -> bool {
self.balance.is_zero() &&
self.nonce.is_zero() &&
self.code_hash == SHA3_EMPTY
}
/// Return the storage root associated with this account or None if it has been altered via the overlay.
pub fn storage_root(&self) -> Option<&H256> { if self.storage_is_clean() {Some(&self.storage_root)} else {None} }
/// Return the storage overlay.
pub fn storage_changes(&self) -> &HashMap<H256, H256> { &self.storage_changes }
/// Increment the nonce of the account by one.
pub fn inc_nonce(&mut self)
|
/// Increase account balance.
pub fn add_balance(&mut self, x: &U256) {
self.balance = self.balance + *x;
}
/// Decrease account balance.
/// Panics if balance is less than `x`
pub fn sub_balance(&mut self, x: &U256) {
assert!(self.balance >= *x);
self.balance = self.balance - *x;
}
/// Commit the `storage_changes` to the backing DB and update `storage_root`.
pub fn commit_storage(&mut self, trie_factory: &TrieFactory, db: &mut HashDB) {
let mut t = trie_factory.from_existing(db, &mut self.storage_root)
.expect("Account storage_root initially set to zero (valid) and only altered by SecTrieDBMut. \
SecTrieDBMut would not set it to an invalid state root. Therefore the root is valid and DB creation \
using it will not fail.");
for (k, v) in self.storage_changes.drain() {
// cast key and value to trait type,
// so we can call overloaded `to_bytes` method
let res = match v.is_zero() {
true => t.remove(&k),
false => t.insert(&k, &encode(&U256::from(&*v))),
};
if let Err(e) = res {
warn!("Encountered potential DB corruption: {}", e);
}
self.storage_cache.borrow_mut().insert(k, v);
}
}
/// Commit any unsaved code. `code_hash` will always return the hash of the `code_cache` after this.
pub fn commit_code(&mut self, db: &mut HashDB) {
trace!("Commiting code of {:?} - {:?}, {:?}", self, self.code_filth == Filth::Dirty, self.code_cache.is_empty());
match (self.code_filth == Filth::Dirty, self.code_cache.is_empty()) {
(true, true) => {
self.code_size = Some(0);
self.code_filth = Filth::Clean;
},
(true, false) => {
db.emplace(self.code_hash.clone(), DBValue::from_slice(&*self.code_cache));
self.code_size = Some(self.code_cache.len());
self.code_filth = Filth::Clean;
},
(false, _) => {},
}
}
/// Export to RLP.
pub fn rlp(&self) -> Bytes {
let mut stream = RlpStream::new_list(4);
stream.append(&self.nonce);
stream.append(&self.balance);
stream.append(&self.storage_root);
stream.append(&self.code_hash);
stream.out()
}
/// Clone basic account data
pub fn clone_basic(&self) -> Account {
Account {
balance: self.balance.clone(),
nonce: self.nonce.clone(),
storage_root: self.storage_root.clone(),
storage_cache: Self::empty_storage_cache(),
storage_changes: HashMap::new(),
code_hash: self.code_hash.clone(),
code_size: self.code_size.clone(),
code_cache: self.code_cache.clone(),
code_filth: self.code_filth,
address_hash: self.address_hash.clone(),
}
}
/// Clone account data and dirty storage keys
pub fn clone_dirty(&self) -> Account {
let mut account = self.clone_basic();
account.storage_changes = self.storage_changes.clone();
account.code_cache = self.code_cache.clone();
account
}
/// Clone account data, dirty storage keys and cached storage keys.
pub fn clone_all(&self) -> Account {
let mut account = self.clone_dirty();
account.storage_cache = self.storage_cache.clone();
account
}
/// Replace self with the data from other account merging storage cache.
/// Basic account data and all modifications are overwritten
/// with new values.
pub fn overwrite_with(&mut self, other: Account) {
self.balance = other.balance;
self.nonce = other.nonce;
self.storage_root = other.storage_root;
self.code_hash = other.code_hash;
self.code_filth = other.code_filth;
self.code_cache = other.code_cache;
self.code_size = other.code_size;
self.address_hash = other.address_hash;
let mut cache = self.storage_cache.borrow_mut();
for (k, v) in other.storage_cache.into_inner() {
cache.insert(k.clone() , v.clone()); //TODO: cloning should not be required here
}
self.storage_changes = other.storage_changes;
}
}
// light client storage proof.
impl Account {
/// Prove a storage key's existence or nonexistence in the account's storage
/// trie.
/// `storage_key` is the hash of the desired storage key, meaning
/// this will only work correctly under a secure trie.
/// Returns a merkle proof of the storage trie node with all nodes before `from_level`
/// omitted.
pub fn prove_storage(&self, db: &HashDB, storage_key: H256, from_level: u32) -> Result<Vec<Bytes>, Box<TrieError>> {
use util::trie::{Trie, TrieDB};
use util::trie::recorder::Recorder;
let mut recorder = Recorder::with_depth(from_level);
let trie = TrieDB::new(db, &self.storage_root)?;
let _ = trie.get_with(&storage_key, &mut recorder)?;
Ok(recorder.drain().into_iter().map(|r| r.data).collect())
}
}
impl fmt::Debug for Account {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{:?}", PodAccount::from_account(self))
}
}
#[cfg(test)]
mod tests {
use rlp::{UntrustedRlp, RlpType, View, Compressible};
use util::*;
use super::*;
use account_db::*;
#[test]
fn account_compress() {
let raw = Account::new_basic(2.into(), 4.into()).rlp();
let rlp = UntrustedRlp::new(&raw);
let compact_vec = rlp.compress(RlpType::Snapshot).to_vec();
assert!(raw.len() > compact_vec.len());
let again_raw = UntrustedRlp::new(&compact_vec).decompress(RlpType::Snapshot);
assert_eq!(raw, again_raw.to_vec());
}
#[test]
fn storage_at() {
let mut db = MemoryDB::new();
let mut db = AccountDBMut::new(&mut db, &Address::new());
let rlp = {
let mut a = Account::new_contract(69.into(), 0.into());
a.set_storage(H256::from(&U256::from(0x00u64)), H256::from(&U256::from(0x1234u64)));
a.commit_storage(&Default::default(), &mut db);
a.init_code(vec![]);
a.commit_code(&mut db);
a.rlp()
};
let a = Account::from_rlp(&rlp);
assert_eq!(a.storage_root().unwrap().hex(), "c57e1afb758b07f8d2c8f13a3b6e44fa5ff94ab266facc5a4fd3f062426e50b2");
assert_eq!(a.storage_at(&db.immutable(), &H256::from(&U256::from(0x00u64))), H256::from(&U256::from(0x1234u64)));
assert_eq!(a.storage_at(&db.immutable(), &H256::from(&U256::from(0x01u64))), H256::new());
}
#[test]
fn note_code() {
let mut db = MemoryDB::new();
let mut db = AccountDBMut::new(&mut db, &Address::new());
let rlp = {
let mut a = Account::new_contract(69.into(), 0.into());
a.init_code(vec![0x55, 0x44, 0xffu8]);
a.commit_code(&mut db);
a.rlp()
};
let mut a = Account::from_rlp(&rlp);
assert!(a.cache_code(&db.immutable()).is_some());
let mut a = Account::from_rlp(&rlp);
assert_eq!(a.note_code(vec![0x55, 0x44, 0xffu8]), Ok(()));
}
#[test]
fn commit_storage() {
let mut a = Account::new_contract(69.into(), 0.into());
let mut db = MemoryDB::new();
let mut db = AccountDBMut::new(&mut db, &Address::new());
a.set_storage(0.into(), 0x1234.into());
assert_eq!(a.storage_root(), None);
a.commit_storage(&Default::default(), &mut db);
assert_eq!(a.storage_root().unwrap().hex(), "c57e1afb758b07f8d2c8f13a3b6e44fa5ff94ab266facc5a4fd3f062426e50b2");
}
#[test]
fn commit_remove_commit_storage() {
let mut a = Account::new_contract(69.into(), 0.into());
let mut db = MemoryDB::new();
let mut db = AccountDBMut::new(&mut db, &Address::new());
a.set_storage(0.into(), 0x1234.into());
a.commit_storage(&Default::default(), &mut db);
a.set_storage(1.into(), 0x1234.into());
a.commit_storage(&Default::default(), &mut db);
a.set_storage(1.into(), 0.into());
a.commit_storage(&Default::default(), &mut db);
assert_eq!(a.storage_root().unwrap().hex(), "c57e1afb758b07f8d2c8f13a3b6e44fa5ff94ab266facc5a4fd3f062426e50b2");
}
#[test]
fn commit_code() {
let mut a = Account::new_contract(69.into(), 0.into());
let mut db = MemoryDB::new();
let mut db = AccountDBMut::new(&mut db, &Address::new());
a.init_code(vec![0x55, 0x44, 0xffu8]);
assert_eq!(a.code_filth, Filth::Dirty);
assert_eq!(a.code_size(), Some(3));
a.commit_code(&mut db);
assert_eq!(a.code_hash().hex(), "af231e631776a517ca23125370d542873eca1fb4d613ed9b5d5335a46ae5b7eb");
}
#[test]
fn reset_code() {
let mut a = Account::new_contract(69.into(), 0.into());
let mut db = MemoryDB::new();
let mut db = AccountDBMut::new(&mut db, &Address::new());
a.init_code(vec![0x55, 0x44, 0xffu8]);
assert_eq!(a.code_filth, Filth::Dirty);
a.commit_code(&mut db);
assert_eq!(a.code_filth, Filth::Clean);
assert_eq!(a.code_hash().hex(), "af231e631776a517ca23125370d542873eca1fb4d613ed9b5d5335a46ae5b7eb");
a.reset_code(vec![0x55]);
assert_eq!(a.code_filth, Filth::Dirty);
a.commit_code(&mut db);
assert_eq!(a.code_hash().hex(), "37bf2238b11b68cdc8382cece82651b59d3c3988873b6e0f33d79694aa45f1be");
}
#[test]
fn rlpio() {
let a = Account::new(U256::from(69u8), U256::from(0u8), HashMap::new(), Bytes::new());
let b = Account::from_rlp(&a.rlp());
assert_eq!(a.balance(), b.balance());
assert_eq!(a.nonce(), b.nonce());
assert_eq!(a.code_hash(), b.code_hash());
assert_eq!(a.storage_root(), b.storage_root());
}
#[test]
fn new_account() {
let a = Account::new(U256::from(69u8), U256::from(0u8), HashMap::new(), Bytes::new());
assert_eq!(a.rlp().to_hex(), "f8448045a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470");
assert_eq!(a.balance(), &U256::from(69u8));
assert_eq!(a.nonce(), &U256::from(0u8));
assert_eq!(a.code_hash(), SHA3_EMPTY);
assert_eq!(a.storage_root().unwrap(), &SHA3_NULL_RLP);
}
#[test]
fn create_account() {
let a = Account::new(U256::from(69u8), U256::from(0u8), HashMap::new(), Bytes::new());
assert_eq!(a.rlp().to_hex(), "f8448045a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470");
}
}
|
{
self.nonce = self.nonce + U256::from(1u8);
}
|
identifier_body
|
account.rs
|
// Copyright 2015-2017 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! Single account in the system.
use util::*;
use pod_account::*;
use rlp::*;
use lru_cache::LruCache;
use basic_account::BasicAccount;
use std::cell::{RefCell, Cell};
const STORAGE_CACHE_ITEMS: usize = 8192;
/// Single account in the system.
/// Keeps track of changes to the code and storage.
/// The changes are applied in `commit_storage` and `commit_code`
pub struct Account {
// Balance of the account.
balance: U256,
// Nonce of the account.
nonce: U256,
// Trie-backed storage.
storage_root: H256,
// LRU Cache of the trie-backed storage.
// This is limited to `STORAGE_CACHE_ITEMS` recent queries
storage_cache: RefCell<LruCache<H256, H256>>,
// Modified storage. Accumulates changes to storage made in `set_storage`
// Takes precedence over `storage_cache`.
storage_changes: HashMap<H256, H256>,
// Code hash of the account.
code_hash: H256,
// Size of the accoun code.
code_size: Option<usize>,
// Code cache of the account.
code_cache: Arc<Bytes>,
// Account code new or has been modified.
code_filth: Filth,
// Cached address hash.
address_hash: Cell<Option<H256>>,
}
impl From<BasicAccount> for Account {
fn from(basic: BasicAccount) -> Self {
Account {
balance: basic.balance,
nonce: basic.nonce,
storage_root: basic.storage_root,
storage_cache: Self::empty_storage_cache(),
storage_changes: HashMap::new(),
code_hash: basic.code_hash,
code_size: None,
code_cache: Arc::new(vec![]),
code_filth: Filth::Clean,
address_hash: Cell::new(None),
}
}
}
impl Account {
#[cfg(test)]
/// General constructor.
pub fn new(balance: U256, nonce: U256, storage: HashMap<H256, H256>, code: Bytes) -> Account {
Account {
balance: balance,
nonce: nonce,
storage_root: SHA3_NULL_RLP,
storage_cache: Self::empty_storage_cache(),
storage_changes: storage,
code_hash: code.sha3(),
code_size: Some(code.len()),
code_cache: Arc::new(code),
code_filth: Filth::Dirty,
address_hash: Cell::new(None),
}
}
fn empty_storage_cache() -> RefCell<LruCache<H256, H256>> {
RefCell::new(LruCache::new(STORAGE_CACHE_ITEMS))
}
/// General constructor.
pub fn from_pod(pod: PodAccount) -> Account {
Account {
balance: pod.balance,
nonce: pod.nonce,
storage_root: SHA3_NULL_RLP,
storage_cache: Self::empty_storage_cache(),
storage_changes: pod.storage.into_iter().collect(),
code_hash: pod.code.as_ref().map_or(SHA3_EMPTY, |c| c.sha3()),
code_filth: Filth::Dirty,
code_size: Some(pod.code.as_ref().map_or(0, |c| c.len())),
code_cache: Arc::new(pod.code.map_or_else(|| { warn!("POD account with unknown code is being created! Assuming no code."); vec![] }, |c| c)),
address_hash: Cell::new(None),
}
}
/// Create a new account with the given balance.
pub fn new_basic(balance: U256, nonce: U256) -> Account {
Account {
balance: balance,
nonce: nonce,
storage_root: SHA3_NULL_RLP,
storage_cache: Self::empty_storage_cache(),
storage_changes: HashMap::new(),
code_hash: SHA3_EMPTY,
code_cache: Arc::new(vec![]),
code_size: Some(0),
code_filth: Filth::Clean,
address_hash: Cell::new(None),
}
}
/// Create a new account from RLP.
pub fn from_rlp(rlp: &[u8]) -> Account {
let basic: BasicAccount = ::rlp::decode(rlp);
basic.into()
}
/// Create a new contract account.
/// NOTE: make sure you use `init_code` on this before `commit`ing.
pub fn new_contract(balance: U256, nonce: U256) -> Account {
Account {
balance: balance,
nonce: nonce,
storage_root: SHA3_NULL_RLP,
storage_cache: Self::empty_storage_cache(),
storage_changes: HashMap::new(),
code_hash: SHA3_EMPTY,
code_cache: Arc::new(vec![]),
code_size: None,
code_filth: Filth::Clean,
address_hash: Cell::new(None),
}
}
/// Set this account's code to the given code.
/// NOTE: Account should have been created with `new_contract()`
pub fn init_code(&mut self, code: Bytes) {
self.code_hash = code.sha3();
self.code_cache = Arc::new(code);
self.code_size = Some(self.code_cache.len());
self.code_filth = Filth::Dirty;
}
/// Reset this account's code to the given code.
pub fn reset_code(&mut self, code: Bytes) {
self.init_code(code);
}
/// Set (and cache) the contents of the trie's storage at `key` to `value`.
pub fn set_storage(&mut self, key: H256, value: H256) {
self.storage_changes.insert(key, value);
}
/// Get (and cache) the contents of the trie's storage at `key`.
/// Takes modifed storage into account.
pub fn storage_at(&self, db: &HashDB, key: &H256) -> H256 {
if let Some(value) = self.cached_storage_at(key) {
return value;
}
let db = SecTrieDB::new(db, &self.storage_root)
.expect("Account storage_root initially set to zero (valid) and only altered by SecTrieDBMut. \
SecTrieDBMut would not set it to an invalid state root. Therefore the root is valid and DB creation \
using it will not fail.");
let item: U256 = match db.get_with(key, ::rlp::decode) {
Ok(x) => x.unwrap_or_else(U256::zero),
Err(e) => panic!("Encountered potential DB corruption: {}", e),
};
let value: H256 = item.into();
self.storage_cache.borrow_mut().insert(key.clone(), value.clone());
value
}
/// Get cached storage value if any. Returns `None` if the
/// key is not in the cache.
pub fn cached_storage_at(&self, key: &H256) -> Option<H256> {
if let Some(value) = self.storage_changes.get(key) {
return Some(value.clone())
}
if let Some(value) = self.storage_cache.borrow_mut().get_mut(key) {
return Some(value.clone())
}
None
}
/// return the balance associated with this account.
pub fn balance(&self) -> &U256 { &self.balance }
/// return the nonce associated with this account.
pub fn nonce(&self) -> &U256 { &self.nonce }
/// return the code hash associated with this account.
pub fn code_hash(&self) -> H256 {
self.code_hash.clone()
}
/// return the code hash associated with this account.
pub fn address_hash(&self, address: &Address) -> H256 {
let hash = self.address_hash.get();
hash.unwrap_or_else(|| {
let hash = address.sha3();
self.address_hash.set(Some(hash.clone()));
hash
})
}
/// returns the account's code. If `None` then the code cache isn't available -
/// get someone who knows to call `note_code`.
pub fn code(&self) -> Option<Arc<Bytes>> {
if self.code_hash != SHA3_EMPTY && self.code_cache.is_empty() {
return None;
}
Some(self.code_cache.clone())
}
/// returns the account's code size. If `None` then the code cache or code size cache isn't available -
/// get someone who knows to call `note_code`.
pub fn code_size(&self) -> Option<usize> {
self.code_size.clone()
}
#[cfg(test)]
/// Provide a byte array which hashes to the `code_hash`. returns the hash as a result.
pub fn note_code(&mut self, code: Bytes) -> Result<(), H256> {
let h = code.sha3();
if self.code_hash == h {
self.code_cache = Arc::new(code);
self.code_size = Some(self.code_cache.len());
Ok(())
} else {
Err(h)
}
}
/// Is `code_cache` valid; such that code is going to return Some?
pub fn is_cached(&self) -> bool {
!self.code_cache.is_empty() || (self.code_cache.is_empty() && self.code_hash == SHA3_EMPTY)
}
/// Provide a database to get `code_hash`. Should not be called if it is a contract without code.
pub fn cache_code(&mut self, db: &HashDB) -> Option<Arc<Bytes>> {
// TODO: fill out self.code_cache;
trace!("Account::cache_code: ic={}; self.code_hash={:?}, self.code_cache={}", self.is_cached(), self.code_hash, self.code_cache.pretty());
if self.is_cached() { return Some(self.code_cache.clone()) }
match db.get(&self.code_hash) {
Some(x) => {
self.code_size = Some(x.len());
self.code_cache = Arc::new(x.to_vec());
Some(self.code_cache.clone())
},
_ => {
warn!("Failed reverse get of {}", self.code_hash);
None
},
}
}
/// Provide code to cache. For correctness, should be the correct code for the
/// account.
pub fn cache_given_code(&mut self, code: Arc<Bytes>) {
trace!("Account::cache_given_code: ic={}; self.code_hash={:?}, self.code_cache={}", self.is_cached(), self.code_hash, self.code_cache.pretty());
self.code_size = Some(code.len());
self.code_cache = code;
}
/// Provide a database to get `code_size`. Should not be called if it is a contract without code.
pub fn cache_code_size(&mut self, db: &HashDB) -> bool {
// TODO: fill out self.code_cache;
trace!("Account::cache_code_size: ic={}; self.code_hash={:?}, self.code_cache={}", self.is_cached(), self.code_hash, self.code_cache.pretty());
self.code_size.is_some() ||
if self.code_hash != SHA3_EMPTY {
match db.get(&self.code_hash) {
Some(x) => {
self.code_size = Some(x.len());
true
},
_ => {
warn!("Failed reverse get of {}", self.code_hash);
false
},
}
} else {
false
}
}
/// Determine whether there are any un-`commit()`-ed storage-setting operations.
pub fn storage_is_clean(&self) -> bool { self.storage_changes.is_empty() }
/// Check if account has zero nonce, balance, no code and no storage.
///
/// NOTE: Will panic if `!self.storage_is_clean()`
pub fn is_empty(&self) -> bool {
assert!(self.storage_is_clean(), "Account::is_empty() may only legally be called when storage is clean.");
self.is_null() && self.storage_root == SHA3_NULL_RLP
}
/// Check if account has zero nonce, balance, no code.
pub fn is_null(&self) -> bool {
self.balance.is_zero() &&
self.nonce.is_zero() &&
self.code_hash == SHA3_EMPTY
}
/// Return the storage root associated with this account or None if it has been altered via the overlay.
pub fn storage_root(&self) -> Option<&H256> { if self.storage_is_clean() {Some(&self.storage_root)} else {None} }
/// Return the storage overlay.
pub fn storage_changes(&self) -> &HashMap<H256, H256> { &self.storage_changes }
/// Increment the nonce of the account by one.
pub fn inc_nonce(&mut self) {
self.nonce = self.nonce + U256::from(1u8);
}
/// Increase account balance.
pub fn add_balance(&mut self, x: &U256) {
self.balance = self.balance + *x;
}
/// Decrease account balance.
/// Panics if balance is less than `x`
pub fn sub_balance(&mut self, x: &U256) {
assert!(self.balance >= *x);
self.balance = self.balance - *x;
}
/// Commit the `storage_changes` to the backing DB and update `storage_root`.
pub fn commit_storage(&mut self, trie_factory: &TrieFactory, db: &mut HashDB) {
let mut t = trie_factory.from_existing(db, &mut self.storage_root)
.expect("Account storage_root initially set to zero (valid) and only altered by SecTrieDBMut. \
SecTrieDBMut would not set it to an invalid state root. Therefore the root is valid and DB creation \
using it will not fail.");
for (k, v) in self.storage_changes.drain() {
// cast key and value to trait type,
// so we can call overloaded `to_bytes` method
let res = match v.is_zero() {
true => t.remove(&k),
false => t.insert(&k, &encode(&U256::from(&*v))),
};
if let Err(e) = res {
warn!("Encountered potential DB corruption: {}", e);
}
self.storage_cache.borrow_mut().insert(k, v);
}
}
/// Commit any unsaved code. `code_hash` will always return the hash of the `code_cache` after this.
pub fn commit_code(&mut self, db: &mut HashDB) {
trace!("Commiting code of {:?} - {:?}, {:?}", self, self.code_filth == Filth::Dirty, self.code_cache.is_empty());
match (self.code_filth == Filth::Dirty, self.code_cache.is_empty()) {
(true, true) => {
self.code_size = Some(0);
self.code_filth = Filth::Clean;
},
(true, false) => {
db.emplace(self.code_hash.clone(), DBValue::from_slice(&*self.code_cache));
self.code_size = Some(self.code_cache.len());
self.code_filth = Filth::Clean;
},
(false, _) => {},
}
}
/// Export to RLP.
pub fn rlp(&self) -> Bytes {
let mut stream = RlpStream::new_list(4);
stream.append(&self.nonce);
stream.append(&self.balance);
stream.append(&self.storage_root);
stream.append(&self.code_hash);
stream.out()
}
/// Clone basic account data
pub fn clone_basic(&self) -> Account {
Account {
balance: self.balance.clone(),
nonce: self.nonce.clone(),
storage_root: self.storage_root.clone(),
storage_cache: Self::empty_storage_cache(),
storage_changes: HashMap::new(),
code_hash: self.code_hash.clone(),
code_size: self.code_size.clone(),
code_cache: self.code_cache.clone(),
code_filth: self.code_filth,
address_hash: self.address_hash.clone(),
}
}
/// Clone account data and dirty storage keys
pub fn clone_dirty(&self) -> Account {
let mut account = self.clone_basic();
account.storage_changes = self.storage_changes.clone();
account.code_cache = self.code_cache.clone();
account
}
/// Clone account data, dirty storage keys and cached storage keys.
pub fn clone_all(&self) -> Account {
let mut account = self.clone_dirty();
account.storage_cache = self.storage_cache.clone();
account
}
/// Replace self with the data from other account merging storage cache.
/// Basic account data and all modifications are overwritten
/// with new values.
pub fn overwrite_with(&mut self, other: Account) {
self.balance = other.balance;
self.nonce = other.nonce;
self.storage_root = other.storage_root;
self.code_hash = other.code_hash;
self.code_filth = other.code_filth;
self.code_cache = other.code_cache;
self.code_size = other.code_size;
self.address_hash = other.address_hash;
let mut cache = self.storage_cache.borrow_mut();
for (k, v) in other.storage_cache.into_inner() {
cache.insert(k.clone() , v.clone()); //TODO: cloning should not be required here
}
self.storage_changes = other.storage_changes;
}
}
// light client storage proof.
impl Account {
/// Prove a storage key's existence or nonexistence in the account's storage
/// trie.
/// `storage_key` is the hash of the desired storage key, meaning
/// this will only work correctly under a secure trie.
/// Returns a merkle proof of the storage trie node with all nodes before `from_level`
/// omitted.
pub fn prove_storage(&self, db: &HashDB, storage_key: H256, from_level: u32) -> Result<Vec<Bytes>, Box<TrieError>> {
use util::trie::{Trie, TrieDB};
use util::trie::recorder::Recorder;
let mut recorder = Recorder::with_depth(from_level);
let trie = TrieDB::new(db, &self.storage_root)?;
let _ = trie.get_with(&storage_key, &mut recorder)?;
Ok(recorder.drain().into_iter().map(|r| r.data).collect())
}
}
impl fmt::Debug for Account {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{:?}", PodAccount::from_account(self))
}
}
#[cfg(test)]
mod tests {
use rlp::{UntrustedRlp, RlpType, View, Compressible};
use util::*;
use super::*;
use account_db::*;
#[test]
fn account_compress() {
let raw = Account::new_basic(2.into(), 4.into()).rlp();
let rlp = UntrustedRlp::new(&raw);
let compact_vec = rlp.compress(RlpType::Snapshot).to_vec();
assert!(raw.len() > compact_vec.len());
let again_raw = UntrustedRlp::new(&compact_vec).decompress(RlpType::Snapshot);
assert_eq!(raw, again_raw.to_vec());
}
#[test]
|
let mut a = Account::new_contract(69.into(), 0.into());
a.set_storage(H256::from(&U256::from(0x00u64)), H256::from(&U256::from(0x1234u64)));
a.commit_storage(&Default::default(), &mut db);
a.init_code(vec![]);
a.commit_code(&mut db);
a.rlp()
};
let a = Account::from_rlp(&rlp);
assert_eq!(a.storage_root().unwrap().hex(), "c57e1afb758b07f8d2c8f13a3b6e44fa5ff94ab266facc5a4fd3f062426e50b2");
assert_eq!(a.storage_at(&db.immutable(), &H256::from(&U256::from(0x00u64))), H256::from(&U256::from(0x1234u64)));
assert_eq!(a.storage_at(&db.immutable(), &H256::from(&U256::from(0x01u64))), H256::new());
}
#[test]
fn note_code() {
let mut db = MemoryDB::new();
let mut db = AccountDBMut::new(&mut db, &Address::new());
let rlp = {
let mut a = Account::new_contract(69.into(), 0.into());
a.init_code(vec![0x55, 0x44, 0xffu8]);
a.commit_code(&mut db);
a.rlp()
};
let mut a = Account::from_rlp(&rlp);
assert!(a.cache_code(&db.immutable()).is_some());
let mut a = Account::from_rlp(&rlp);
assert_eq!(a.note_code(vec![0x55, 0x44, 0xffu8]), Ok(()));
}
#[test]
fn commit_storage() {
let mut a = Account::new_contract(69.into(), 0.into());
let mut db = MemoryDB::new();
let mut db = AccountDBMut::new(&mut db, &Address::new());
a.set_storage(0.into(), 0x1234.into());
assert_eq!(a.storage_root(), None);
a.commit_storage(&Default::default(), &mut db);
assert_eq!(a.storage_root().unwrap().hex(), "c57e1afb758b07f8d2c8f13a3b6e44fa5ff94ab266facc5a4fd3f062426e50b2");
}
#[test]
fn commit_remove_commit_storage() {
let mut a = Account::new_contract(69.into(), 0.into());
let mut db = MemoryDB::new();
let mut db = AccountDBMut::new(&mut db, &Address::new());
a.set_storage(0.into(), 0x1234.into());
a.commit_storage(&Default::default(), &mut db);
a.set_storage(1.into(), 0x1234.into());
a.commit_storage(&Default::default(), &mut db);
a.set_storage(1.into(), 0.into());
a.commit_storage(&Default::default(), &mut db);
assert_eq!(a.storage_root().unwrap().hex(), "c57e1afb758b07f8d2c8f13a3b6e44fa5ff94ab266facc5a4fd3f062426e50b2");
}
#[test]
fn commit_code() {
let mut a = Account::new_contract(69.into(), 0.into());
let mut db = MemoryDB::new();
let mut db = AccountDBMut::new(&mut db, &Address::new());
a.init_code(vec![0x55, 0x44, 0xffu8]);
assert_eq!(a.code_filth, Filth::Dirty);
assert_eq!(a.code_size(), Some(3));
a.commit_code(&mut db);
assert_eq!(a.code_hash().hex(), "af231e631776a517ca23125370d542873eca1fb4d613ed9b5d5335a46ae5b7eb");
}
#[test]
fn reset_code() {
let mut a = Account::new_contract(69.into(), 0.into());
let mut db = MemoryDB::new();
let mut db = AccountDBMut::new(&mut db, &Address::new());
a.init_code(vec![0x55, 0x44, 0xffu8]);
assert_eq!(a.code_filth, Filth::Dirty);
a.commit_code(&mut db);
assert_eq!(a.code_filth, Filth::Clean);
assert_eq!(a.code_hash().hex(), "af231e631776a517ca23125370d542873eca1fb4d613ed9b5d5335a46ae5b7eb");
a.reset_code(vec![0x55]);
assert_eq!(a.code_filth, Filth::Dirty);
a.commit_code(&mut db);
assert_eq!(a.code_hash().hex(), "37bf2238b11b68cdc8382cece82651b59d3c3988873b6e0f33d79694aa45f1be");
}
#[test]
fn rlpio() {
let a = Account::new(U256::from(69u8), U256::from(0u8), HashMap::new(), Bytes::new());
let b = Account::from_rlp(&a.rlp());
assert_eq!(a.balance(), b.balance());
assert_eq!(a.nonce(), b.nonce());
assert_eq!(a.code_hash(), b.code_hash());
assert_eq!(a.storage_root(), b.storage_root());
}
#[test]
fn new_account() {
let a = Account::new(U256::from(69u8), U256::from(0u8), HashMap::new(), Bytes::new());
assert_eq!(a.rlp().to_hex(), "f8448045a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470");
assert_eq!(a.balance(), &U256::from(69u8));
assert_eq!(a.nonce(), &U256::from(0u8));
assert_eq!(a.code_hash(), SHA3_EMPTY);
assert_eq!(a.storage_root().unwrap(), &SHA3_NULL_RLP);
}
#[test]
fn create_account() {
let a = Account::new(U256::from(69u8), U256::from(0u8), HashMap::new(), Bytes::new());
assert_eq!(a.rlp().to_hex(), "f8448045a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470");
}
}
|
fn storage_at() {
let mut db = MemoryDB::new();
let mut db = AccountDBMut::new(&mut db, &Address::new());
let rlp = {
|
random_line_split
|
account.rs
|
// Copyright 2015-2017 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! Single account in the system.
use util::*;
use pod_account::*;
use rlp::*;
use lru_cache::LruCache;
use basic_account::BasicAccount;
use std::cell::{RefCell, Cell};
const STORAGE_CACHE_ITEMS: usize = 8192;
/// Single account in the system.
/// Keeps track of changes to the code and storage.
/// The changes are applied in `commit_storage` and `commit_code`
pub struct Account {
// Balance of the account.
balance: U256,
// Nonce of the account.
nonce: U256,
// Trie-backed storage.
storage_root: H256,
// LRU Cache of the trie-backed storage.
// This is limited to `STORAGE_CACHE_ITEMS` recent queries
storage_cache: RefCell<LruCache<H256, H256>>,
// Modified storage. Accumulates changes to storage made in `set_storage`
// Takes precedence over `storage_cache`.
storage_changes: HashMap<H256, H256>,
// Code hash of the account.
code_hash: H256,
// Size of the accoun code.
code_size: Option<usize>,
// Code cache of the account.
code_cache: Arc<Bytes>,
// Account code new or has been modified.
code_filth: Filth,
// Cached address hash.
address_hash: Cell<Option<H256>>,
}
impl From<BasicAccount> for Account {
fn from(basic: BasicAccount) -> Self {
Account {
balance: basic.balance,
nonce: basic.nonce,
storage_root: basic.storage_root,
storage_cache: Self::empty_storage_cache(),
storage_changes: HashMap::new(),
code_hash: basic.code_hash,
code_size: None,
code_cache: Arc::new(vec![]),
code_filth: Filth::Clean,
address_hash: Cell::new(None),
}
}
}
impl Account {
#[cfg(test)]
/// General constructor.
pub fn new(balance: U256, nonce: U256, storage: HashMap<H256, H256>, code: Bytes) -> Account {
Account {
balance: balance,
nonce: nonce,
storage_root: SHA3_NULL_RLP,
storage_cache: Self::empty_storage_cache(),
storage_changes: storage,
code_hash: code.sha3(),
code_size: Some(code.len()),
code_cache: Arc::new(code),
code_filth: Filth::Dirty,
address_hash: Cell::new(None),
}
}
fn empty_storage_cache() -> RefCell<LruCache<H256, H256>> {
RefCell::new(LruCache::new(STORAGE_CACHE_ITEMS))
}
/// General constructor.
pub fn from_pod(pod: PodAccount) -> Account {
Account {
balance: pod.balance,
nonce: pod.nonce,
storage_root: SHA3_NULL_RLP,
storage_cache: Self::empty_storage_cache(),
storage_changes: pod.storage.into_iter().collect(),
code_hash: pod.code.as_ref().map_or(SHA3_EMPTY, |c| c.sha3()),
code_filth: Filth::Dirty,
code_size: Some(pod.code.as_ref().map_or(0, |c| c.len())),
code_cache: Arc::new(pod.code.map_or_else(|| { warn!("POD account with unknown code is being created! Assuming no code."); vec![] }, |c| c)),
address_hash: Cell::new(None),
}
}
/// Create a new account with the given balance.
pub fn new_basic(balance: U256, nonce: U256) -> Account {
Account {
balance: balance,
nonce: nonce,
storage_root: SHA3_NULL_RLP,
storage_cache: Self::empty_storage_cache(),
storage_changes: HashMap::new(),
code_hash: SHA3_EMPTY,
code_cache: Arc::new(vec![]),
code_size: Some(0),
code_filth: Filth::Clean,
address_hash: Cell::new(None),
}
}
/// Create a new account from RLP.
pub fn from_rlp(rlp: &[u8]) -> Account {
let basic: BasicAccount = ::rlp::decode(rlp);
basic.into()
}
/// Create a new contract account.
/// NOTE: make sure you use `init_code` on this before `commit`ing.
pub fn new_contract(balance: U256, nonce: U256) -> Account {
Account {
balance: balance,
nonce: nonce,
storage_root: SHA3_NULL_RLP,
storage_cache: Self::empty_storage_cache(),
storage_changes: HashMap::new(),
code_hash: SHA3_EMPTY,
code_cache: Arc::new(vec![]),
code_size: None,
code_filth: Filth::Clean,
address_hash: Cell::new(None),
}
}
/// Set this account's code to the given code.
/// NOTE: Account should have been created with `new_contract()`
pub fn init_code(&mut self, code: Bytes) {
self.code_hash = code.sha3();
self.code_cache = Arc::new(code);
self.code_size = Some(self.code_cache.len());
self.code_filth = Filth::Dirty;
}
/// Reset this account's code to the given code.
pub fn reset_code(&mut self, code: Bytes) {
self.init_code(code);
}
/// Set (and cache) the contents of the trie's storage at `key` to `value`.
pub fn set_storage(&mut self, key: H256, value: H256) {
self.storage_changes.insert(key, value);
}
/// Get (and cache) the contents of the trie's storage at `key`.
/// Takes modifed storage into account.
pub fn storage_at(&self, db: &HashDB, key: &H256) -> H256 {
if let Some(value) = self.cached_storage_at(key) {
return value;
}
let db = SecTrieDB::new(db, &self.storage_root)
.expect("Account storage_root initially set to zero (valid) and only altered by SecTrieDBMut. \
SecTrieDBMut would not set it to an invalid state root. Therefore the root is valid and DB creation \
using it will not fail.");
let item: U256 = match db.get_with(key, ::rlp::decode) {
Ok(x) => x.unwrap_or_else(U256::zero),
Err(e) => panic!("Encountered potential DB corruption: {}", e),
};
let value: H256 = item.into();
self.storage_cache.borrow_mut().insert(key.clone(), value.clone());
value
}
/// Get cached storage value if any. Returns `None` if the
/// key is not in the cache.
pub fn cached_storage_at(&self, key: &H256) -> Option<H256> {
if let Some(value) = self.storage_changes.get(key) {
return Some(value.clone())
}
if let Some(value) = self.storage_cache.borrow_mut().get_mut(key) {
return Some(value.clone())
}
None
}
/// return the balance associated with this account.
pub fn balance(&self) -> &U256 { &self.balance }
/// return the nonce associated with this account.
pub fn nonce(&self) -> &U256 { &self.nonce }
/// return the code hash associated with this account.
pub fn code_hash(&self) -> H256 {
self.code_hash.clone()
}
/// return the code hash associated with this account.
pub fn address_hash(&self, address: &Address) -> H256 {
let hash = self.address_hash.get();
hash.unwrap_or_else(|| {
let hash = address.sha3();
self.address_hash.set(Some(hash.clone()));
hash
})
}
/// returns the account's code. If `None` then the code cache isn't available -
/// get someone who knows to call `note_code`.
pub fn code(&self) -> Option<Arc<Bytes>> {
if self.code_hash != SHA3_EMPTY && self.code_cache.is_empty() {
return None;
}
Some(self.code_cache.clone())
}
/// returns the account's code size. If `None` then the code cache or code size cache isn't available -
/// get someone who knows to call `note_code`.
pub fn code_size(&self) -> Option<usize> {
self.code_size.clone()
}
#[cfg(test)]
/// Provide a byte array which hashes to the `code_hash`. returns the hash as a result.
pub fn note_code(&mut self, code: Bytes) -> Result<(), H256> {
let h = code.sha3();
if self.code_hash == h {
self.code_cache = Arc::new(code);
self.code_size = Some(self.code_cache.len());
Ok(())
} else {
Err(h)
}
}
/// Is `code_cache` valid; such that code is going to return Some?
pub fn is_cached(&self) -> bool {
!self.code_cache.is_empty() || (self.code_cache.is_empty() && self.code_hash == SHA3_EMPTY)
}
/// Provide a database to get `code_hash`. Should not be called if it is a contract without code.
pub fn cache_code(&mut self, db: &HashDB) -> Option<Arc<Bytes>> {
// TODO: fill out self.code_cache;
trace!("Account::cache_code: ic={}; self.code_hash={:?}, self.code_cache={}", self.is_cached(), self.code_hash, self.code_cache.pretty());
if self.is_cached() { return Some(self.code_cache.clone()) }
match db.get(&self.code_hash) {
Some(x) =>
|
,
_ => {
warn!("Failed reverse get of {}", self.code_hash);
None
},
}
}
/// Provide code to cache. For correctness, should be the correct code for the
/// account.
pub fn cache_given_code(&mut self, code: Arc<Bytes>) {
trace!("Account::cache_given_code: ic={}; self.code_hash={:?}, self.code_cache={}", self.is_cached(), self.code_hash, self.code_cache.pretty());
self.code_size = Some(code.len());
self.code_cache = code;
}
/// Provide a database to get `code_size`. Should not be called if it is a contract without code.
pub fn cache_code_size(&mut self, db: &HashDB) -> bool {
// TODO: fill out self.code_cache;
trace!("Account::cache_code_size: ic={}; self.code_hash={:?}, self.code_cache={}", self.is_cached(), self.code_hash, self.code_cache.pretty());
self.code_size.is_some() ||
if self.code_hash != SHA3_EMPTY {
match db.get(&self.code_hash) {
Some(x) => {
self.code_size = Some(x.len());
true
},
_ => {
warn!("Failed reverse get of {}", self.code_hash);
false
},
}
} else {
false
}
}
/// Determine whether there are any un-`commit()`-ed storage-setting operations.
pub fn storage_is_clean(&self) -> bool { self.storage_changes.is_empty() }
/// Check if account has zero nonce, balance, no code and no storage.
///
/// NOTE: Will panic if `!self.storage_is_clean()`
pub fn is_empty(&self) -> bool {
assert!(self.storage_is_clean(), "Account::is_empty() may only legally be called when storage is clean.");
self.is_null() && self.storage_root == SHA3_NULL_RLP
}
/// Check if account has zero nonce, balance, no code.
pub fn is_null(&self) -> bool {
self.balance.is_zero() &&
self.nonce.is_zero() &&
self.code_hash == SHA3_EMPTY
}
/// Return the storage root associated with this account or None if it has been altered via the overlay.
pub fn storage_root(&self) -> Option<&H256> { if self.storage_is_clean() {Some(&self.storage_root)} else {None} }
/// Return the storage overlay.
pub fn storage_changes(&self) -> &HashMap<H256, H256> { &self.storage_changes }
/// Increment the nonce of the account by one.
pub fn inc_nonce(&mut self) {
self.nonce = self.nonce + U256::from(1u8);
}
/// Increase account balance.
pub fn add_balance(&mut self, x: &U256) {
self.balance = self.balance + *x;
}
/// Decrease account balance.
/// Panics if balance is less than `x`
pub fn sub_balance(&mut self, x: &U256) {
assert!(self.balance >= *x);
self.balance = self.balance - *x;
}
/// Commit the `storage_changes` to the backing DB and update `storage_root`.
pub fn commit_storage(&mut self, trie_factory: &TrieFactory, db: &mut HashDB) {
let mut t = trie_factory.from_existing(db, &mut self.storage_root)
.expect("Account storage_root initially set to zero (valid) and only altered by SecTrieDBMut. \
SecTrieDBMut would not set it to an invalid state root. Therefore the root is valid and DB creation \
using it will not fail.");
for (k, v) in self.storage_changes.drain() {
// cast key and value to trait type,
// so we can call overloaded `to_bytes` method
let res = match v.is_zero() {
true => t.remove(&k),
false => t.insert(&k, &encode(&U256::from(&*v))),
};
if let Err(e) = res {
warn!("Encountered potential DB corruption: {}", e);
}
self.storage_cache.borrow_mut().insert(k, v);
}
}
/// Commit any unsaved code. `code_hash` will always return the hash of the `code_cache` after this.
pub fn commit_code(&mut self, db: &mut HashDB) {
trace!("Commiting code of {:?} - {:?}, {:?}", self, self.code_filth == Filth::Dirty, self.code_cache.is_empty());
match (self.code_filth == Filth::Dirty, self.code_cache.is_empty()) {
(true, true) => {
self.code_size = Some(0);
self.code_filth = Filth::Clean;
},
(true, false) => {
db.emplace(self.code_hash.clone(), DBValue::from_slice(&*self.code_cache));
self.code_size = Some(self.code_cache.len());
self.code_filth = Filth::Clean;
},
(false, _) => {},
}
}
/// Export to RLP.
pub fn rlp(&self) -> Bytes {
let mut stream = RlpStream::new_list(4);
stream.append(&self.nonce);
stream.append(&self.balance);
stream.append(&self.storage_root);
stream.append(&self.code_hash);
stream.out()
}
/// Clone basic account data
pub fn clone_basic(&self) -> Account {
Account {
balance: self.balance.clone(),
nonce: self.nonce.clone(),
storage_root: self.storage_root.clone(),
storage_cache: Self::empty_storage_cache(),
storage_changes: HashMap::new(),
code_hash: self.code_hash.clone(),
code_size: self.code_size.clone(),
code_cache: self.code_cache.clone(),
code_filth: self.code_filth,
address_hash: self.address_hash.clone(),
}
}
/// Clone account data and dirty storage keys
pub fn clone_dirty(&self) -> Account {
let mut account = self.clone_basic();
account.storage_changes = self.storage_changes.clone();
account.code_cache = self.code_cache.clone();
account
}
/// Clone account data, dirty storage keys and cached storage keys.
pub fn clone_all(&self) -> Account {
let mut account = self.clone_dirty();
account.storage_cache = self.storage_cache.clone();
account
}
/// Replace self with the data from other account merging storage cache.
/// Basic account data and all modifications are overwritten
/// with new values.
pub fn overwrite_with(&mut self, other: Account) {
self.balance = other.balance;
self.nonce = other.nonce;
self.storage_root = other.storage_root;
self.code_hash = other.code_hash;
self.code_filth = other.code_filth;
self.code_cache = other.code_cache;
self.code_size = other.code_size;
self.address_hash = other.address_hash;
let mut cache = self.storage_cache.borrow_mut();
for (k, v) in other.storage_cache.into_inner() {
cache.insert(k.clone() , v.clone()); //TODO: cloning should not be required here
}
self.storage_changes = other.storage_changes;
}
}
// light client storage proof.
impl Account {
/// Prove a storage key's existence or nonexistence in the account's storage
/// trie.
/// `storage_key` is the hash of the desired storage key, meaning
/// this will only work correctly under a secure trie.
/// Returns a merkle proof of the storage trie node with all nodes before `from_level`
/// omitted.
pub fn prove_storage(&self, db: &HashDB, storage_key: H256, from_level: u32) -> Result<Vec<Bytes>, Box<TrieError>> {
use util::trie::{Trie, TrieDB};
use util::trie::recorder::Recorder;
let mut recorder = Recorder::with_depth(from_level);
let trie = TrieDB::new(db, &self.storage_root)?;
let _ = trie.get_with(&storage_key, &mut recorder)?;
Ok(recorder.drain().into_iter().map(|r| r.data).collect())
}
}
impl fmt::Debug for Account {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{:?}", PodAccount::from_account(self))
}
}
#[cfg(test)]
mod tests {
use rlp::{UntrustedRlp, RlpType, View, Compressible};
use util::*;
use super::*;
use account_db::*;
#[test]
fn account_compress() {
let raw = Account::new_basic(2.into(), 4.into()).rlp();
let rlp = UntrustedRlp::new(&raw);
let compact_vec = rlp.compress(RlpType::Snapshot).to_vec();
assert!(raw.len() > compact_vec.len());
let again_raw = UntrustedRlp::new(&compact_vec).decompress(RlpType::Snapshot);
assert_eq!(raw, again_raw.to_vec());
}
#[test]
fn storage_at() {
let mut db = MemoryDB::new();
let mut db = AccountDBMut::new(&mut db, &Address::new());
let rlp = {
let mut a = Account::new_contract(69.into(), 0.into());
a.set_storage(H256::from(&U256::from(0x00u64)), H256::from(&U256::from(0x1234u64)));
a.commit_storage(&Default::default(), &mut db);
a.init_code(vec![]);
a.commit_code(&mut db);
a.rlp()
};
let a = Account::from_rlp(&rlp);
assert_eq!(a.storage_root().unwrap().hex(), "c57e1afb758b07f8d2c8f13a3b6e44fa5ff94ab266facc5a4fd3f062426e50b2");
assert_eq!(a.storage_at(&db.immutable(), &H256::from(&U256::from(0x00u64))), H256::from(&U256::from(0x1234u64)));
assert_eq!(a.storage_at(&db.immutable(), &H256::from(&U256::from(0x01u64))), H256::new());
}
#[test]
fn note_code() {
let mut db = MemoryDB::new();
let mut db = AccountDBMut::new(&mut db, &Address::new());
let rlp = {
let mut a = Account::new_contract(69.into(), 0.into());
a.init_code(vec![0x55, 0x44, 0xffu8]);
a.commit_code(&mut db);
a.rlp()
};
let mut a = Account::from_rlp(&rlp);
assert!(a.cache_code(&db.immutable()).is_some());
let mut a = Account::from_rlp(&rlp);
assert_eq!(a.note_code(vec![0x55, 0x44, 0xffu8]), Ok(()));
}
#[test]
fn commit_storage() {
let mut a = Account::new_contract(69.into(), 0.into());
let mut db = MemoryDB::new();
let mut db = AccountDBMut::new(&mut db, &Address::new());
a.set_storage(0.into(), 0x1234.into());
assert_eq!(a.storage_root(), None);
a.commit_storage(&Default::default(), &mut db);
assert_eq!(a.storage_root().unwrap().hex(), "c57e1afb758b07f8d2c8f13a3b6e44fa5ff94ab266facc5a4fd3f062426e50b2");
}
#[test]
fn commit_remove_commit_storage() {
let mut a = Account::new_contract(69.into(), 0.into());
let mut db = MemoryDB::new();
let mut db = AccountDBMut::new(&mut db, &Address::new());
a.set_storage(0.into(), 0x1234.into());
a.commit_storage(&Default::default(), &mut db);
a.set_storage(1.into(), 0x1234.into());
a.commit_storage(&Default::default(), &mut db);
a.set_storage(1.into(), 0.into());
a.commit_storage(&Default::default(), &mut db);
assert_eq!(a.storage_root().unwrap().hex(), "c57e1afb758b07f8d2c8f13a3b6e44fa5ff94ab266facc5a4fd3f062426e50b2");
}
#[test]
fn commit_code() {
let mut a = Account::new_contract(69.into(), 0.into());
let mut db = MemoryDB::new();
let mut db = AccountDBMut::new(&mut db, &Address::new());
a.init_code(vec![0x55, 0x44, 0xffu8]);
assert_eq!(a.code_filth, Filth::Dirty);
assert_eq!(a.code_size(), Some(3));
a.commit_code(&mut db);
assert_eq!(a.code_hash().hex(), "af231e631776a517ca23125370d542873eca1fb4d613ed9b5d5335a46ae5b7eb");
}
#[test]
fn reset_code() {
let mut a = Account::new_contract(69.into(), 0.into());
let mut db = MemoryDB::new();
let mut db = AccountDBMut::new(&mut db, &Address::new());
a.init_code(vec![0x55, 0x44, 0xffu8]);
assert_eq!(a.code_filth, Filth::Dirty);
a.commit_code(&mut db);
assert_eq!(a.code_filth, Filth::Clean);
assert_eq!(a.code_hash().hex(), "af231e631776a517ca23125370d542873eca1fb4d613ed9b5d5335a46ae5b7eb");
a.reset_code(vec![0x55]);
assert_eq!(a.code_filth, Filth::Dirty);
a.commit_code(&mut db);
assert_eq!(a.code_hash().hex(), "37bf2238b11b68cdc8382cece82651b59d3c3988873b6e0f33d79694aa45f1be");
}
#[test]
fn rlpio() {
let a = Account::new(U256::from(69u8), U256::from(0u8), HashMap::new(), Bytes::new());
let b = Account::from_rlp(&a.rlp());
assert_eq!(a.balance(), b.balance());
assert_eq!(a.nonce(), b.nonce());
assert_eq!(a.code_hash(), b.code_hash());
assert_eq!(a.storage_root(), b.storage_root());
}
#[test]
fn new_account() {
let a = Account::new(U256::from(69u8), U256::from(0u8), HashMap::new(), Bytes::new());
assert_eq!(a.rlp().to_hex(), "f8448045a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470");
assert_eq!(a.balance(), &U256::from(69u8));
assert_eq!(a.nonce(), &U256::from(0u8));
assert_eq!(a.code_hash(), SHA3_EMPTY);
assert_eq!(a.storage_root().unwrap(), &SHA3_NULL_RLP);
}
#[test]
fn create_account() {
let a = Account::new(U256::from(69u8), U256::from(0u8), HashMap::new(), Bytes::new());
assert_eq!(a.rlp().to_hex(), "f8448045a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470");
}
}
|
{
self.code_size = Some(x.len());
self.code_cache = Arc::new(x.to_vec());
Some(self.code_cache.clone())
}
|
conditional_block
|
fsu-moves-and-copies.rs
|
// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Issue 4691: Ensure that functional-struct-updates operates
// correctly and moves rather than copy when appropriate.
#![allow(unknown_features)]
#![feature(box_syntax)]
use std::marker::NoCopy as NP;
struct ncint { np: NP, v: int }
fn ncint(v: int) -> ncint { ncint { np: NP, v: v } }
struct NoFoo { copied: int, nocopy: ncint, }
impl NoFoo {
fn new(x:int,y:int) -> NoFoo { NoFoo { copied: x, nocopy: ncint(y) } }
}
struct MoveFoo { copied: int, moved: Box<int>, }
impl MoveFoo {
fn new(x:int,y:int) -> MoveFoo { MoveFoo { copied: x, moved: box y } }
}
struct DropNoFoo { inner: NoFoo }
impl DropNoFoo {
fn new(x:int,y:int) -> DropNoFoo { DropNoFoo { inner: NoFoo::new(x,y) } }
}
impl Drop for DropNoFoo { fn drop(&mut self) { } }
struct DropMoveFoo { inner: MoveFoo }
impl DropMoveFoo {
fn new(x:int,y:int) -> DropMoveFoo { DropMoveFoo { inner: MoveFoo::new(x,y) } }
}
impl Drop for DropMoveFoo { fn drop(&mut self) { } }
fn test0() {
// just copy implicitly copyable fields from `f`, no moves
// (and thus it is okay that these are Drop; compare against
// compile-fail test: borrowck-struct-update-with-dtor.rs).
// Case 1: Nocopyable
let f = DropNoFoo::new(1, 2);
let b = DropNoFoo { inner: NoFoo { nocopy: ncint(3), ..f.inner }};
let c = DropNoFoo { inner: NoFoo { nocopy: ncint(4), ..f.inner }};
assert_eq!(f.inner.copied, 1);
assert_eq!(f.inner.nocopy.v, 2);
assert_eq!(b.inner.copied, 1);
assert_eq!(b.inner.nocopy.v, 3);
assert_eq!(c.inner.copied, 1);
assert_eq!(c.inner.nocopy.v, 4);
// Case 2: Owned
let f = DropMoveFoo::new(5, 6);
let b = DropMoveFoo { inner: MoveFoo { moved: box 7, ..f.inner }};
let c = DropMoveFoo { inner: MoveFoo { moved: box 8, ..f.inner }};
assert_eq!(f.inner.copied, 5);
assert_eq!(*f.inner.moved, 6);
assert_eq!(b.inner.copied, 5);
assert_eq!(*b.inner.moved, 7);
assert_eq!(c.inner.copied, 5);
assert_eq!(*c.inner.moved, 8);
}
fn test1() {
|
let b = MoveFoo {moved: box 13, ..f};
let c = MoveFoo {copied: 14, ..f};
assert_eq!(b.copied, 11);
assert_eq!(*b.moved, 13);
assert_eq!(c.copied, 14);
assert_eq!(*c.moved, 12);
}
fn test2() {
// move non-copyable field
let f = NoFoo::new(21, 22);
let b = NoFoo {nocopy: ncint(23), ..f};
let c = NoFoo {copied: 24, ..f};
assert_eq!(b.copied, 21);
assert_eq!(b.nocopy.v, 23);
assert_eq!(c.copied, 24);
assert_eq!(c.nocopy.v, 22);
}
pub fn main() {
test0();
test1();
test2();
}
|
// copying move-by-default fields from `f`, so it moves:
let f = MoveFoo::new(11, 12);
|
random_line_split
|
fsu-moves-and-copies.rs
|
// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Issue 4691: Ensure that functional-struct-updates operates
// correctly and moves rather than copy when appropriate.
#![allow(unknown_features)]
#![feature(box_syntax)]
use std::marker::NoCopy as NP;
struct ncint { np: NP, v: int }
fn ncint(v: int) -> ncint { ncint { np: NP, v: v } }
struct NoFoo { copied: int, nocopy: ncint, }
impl NoFoo {
fn new(x:int,y:int) -> NoFoo { NoFoo { copied: x, nocopy: ncint(y) } }
}
struct MoveFoo { copied: int, moved: Box<int>, }
impl MoveFoo {
fn new(x:int,y:int) -> MoveFoo { MoveFoo { copied: x, moved: box y } }
}
struct DropNoFoo { inner: NoFoo }
impl DropNoFoo {
fn new(x:int,y:int) -> DropNoFoo { DropNoFoo { inner: NoFoo::new(x,y) } }
}
impl Drop for DropNoFoo { fn drop(&mut self) { } }
struct DropMoveFoo { inner: MoveFoo }
impl DropMoveFoo {
fn new(x:int,y:int) -> DropMoveFoo
|
}
impl Drop for DropMoveFoo { fn drop(&mut self) { } }
fn test0() {
// just copy implicitly copyable fields from `f`, no moves
// (and thus it is okay that these are Drop; compare against
// compile-fail test: borrowck-struct-update-with-dtor.rs).
// Case 1: Nocopyable
let f = DropNoFoo::new(1, 2);
let b = DropNoFoo { inner: NoFoo { nocopy: ncint(3), ..f.inner }};
let c = DropNoFoo { inner: NoFoo { nocopy: ncint(4), ..f.inner }};
assert_eq!(f.inner.copied, 1);
assert_eq!(f.inner.nocopy.v, 2);
assert_eq!(b.inner.copied, 1);
assert_eq!(b.inner.nocopy.v, 3);
assert_eq!(c.inner.copied, 1);
assert_eq!(c.inner.nocopy.v, 4);
// Case 2: Owned
let f = DropMoveFoo::new(5, 6);
let b = DropMoveFoo { inner: MoveFoo { moved: box 7, ..f.inner }};
let c = DropMoveFoo { inner: MoveFoo { moved: box 8, ..f.inner }};
assert_eq!(f.inner.copied, 5);
assert_eq!(*f.inner.moved, 6);
assert_eq!(b.inner.copied, 5);
assert_eq!(*b.inner.moved, 7);
assert_eq!(c.inner.copied, 5);
assert_eq!(*c.inner.moved, 8);
}
fn test1() {
// copying move-by-default fields from `f`, so it moves:
let f = MoveFoo::new(11, 12);
let b = MoveFoo {moved: box 13, ..f};
let c = MoveFoo {copied: 14, ..f};
assert_eq!(b.copied, 11);
assert_eq!(*b.moved, 13);
assert_eq!(c.copied, 14);
assert_eq!(*c.moved, 12);
}
fn test2() {
// move non-copyable field
let f = NoFoo::new(21, 22);
let b = NoFoo {nocopy: ncint(23), ..f};
let c = NoFoo {copied: 24, ..f};
assert_eq!(b.copied, 21);
assert_eq!(b.nocopy.v, 23);
assert_eq!(c.copied, 24);
assert_eq!(c.nocopy.v, 22);
}
pub fn main() {
test0();
test1();
test2();
}
|
{ DropMoveFoo { inner: MoveFoo::new(x,y) } }
|
identifier_body
|
fsu-moves-and-copies.rs
|
// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Issue 4691: Ensure that functional-struct-updates operates
// correctly and moves rather than copy when appropriate.
#![allow(unknown_features)]
#![feature(box_syntax)]
use std::marker::NoCopy as NP;
struct ncint { np: NP, v: int }
fn ncint(v: int) -> ncint { ncint { np: NP, v: v } }
struct NoFoo { copied: int, nocopy: ncint, }
impl NoFoo {
fn new(x:int,y:int) -> NoFoo { NoFoo { copied: x, nocopy: ncint(y) } }
}
struct MoveFoo { copied: int, moved: Box<int>, }
impl MoveFoo {
fn
|
(x:int,y:int) -> MoveFoo { MoveFoo { copied: x, moved: box y } }
}
struct DropNoFoo { inner: NoFoo }
impl DropNoFoo {
fn new(x:int,y:int) -> DropNoFoo { DropNoFoo { inner: NoFoo::new(x,y) } }
}
impl Drop for DropNoFoo { fn drop(&mut self) { } }
struct DropMoveFoo { inner: MoveFoo }
impl DropMoveFoo {
fn new(x:int,y:int) -> DropMoveFoo { DropMoveFoo { inner: MoveFoo::new(x,y) } }
}
impl Drop for DropMoveFoo { fn drop(&mut self) { } }
fn test0() {
// just copy implicitly copyable fields from `f`, no moves
// (and thus it is okay that these are Drop; compare against
// compile-fail test: borrowck-struct-update-with-dtor.rs).
// Case 1: Nocopyable
let f = DropNoFoo::new(1, 2);
let b = DropNoFoo { inner: NoFoo { nocopy: ncint(3), ..f.inner }};
let c = DropNoFoo { inner: NoFoo { nocopy: ncint(4), ..f.inner }};
assert_eq!(f.inner.copied, 1);
assert_eq!(f.inner.nocopy.v, 2);
assert_eq!(b.inner.copied, 1);
assert_eq!(b.inner.nocopy.v, 3);
assert_eq!(c.inner.copied, 1);
assert_eq!(c.inner.nocopy.v, 4);
// Case 2: Owned
let f = DropMoveFoo::new(5, 6);
let b = DropMoveFoo { inner: MoveFoo { moved: box 7, ..f.inner }};
let c = DropMoveFoo { inner: MoveFoo { moved: box 8, ..f.inner }};
assert_eq!(f.inner.copied, 5);
assert_eq!(*f.inner.moved, 6);
assert_eq!(b.inner.copied, 5);
assert_eq!(*b.inner.moved, 7);
assert_eq!(c.inner.copied, 5);
assert_eq!(*c.inner.moved, 8);
}
fn test1() {
// copying move-by-default fields from `f`, so it moves:
let f = MoveFoo::new(11, 12);
let b = MoveFoo {moved: box 13, ..f};
let c = MoveFoo {copied: 14, ..f};
assert_eq!(b.copied, 11);
assert_eq!(*b.moved, 13);
assert_eq!(c.copied, 14);
assert_eq!(*c.moved, 12);
}
fn test2() {
// move non-copyable field
let f = NoFoo::new(21, 22);
let b = NoFoo {nocopy: ncint(23), ..f};
let c = NoFoo {copied: 24, ..f};
assert_eq!(b.copied, 21);
assert_eq!(b.nocopy.v, 23);
assert_eq!(c.copied, 24);
assert_eq!(c.nocopy.v, 22);
}
pub fn main() {
test0();
test1();
test2();
}
|
new
|
identifier_name
|
transactionPending.js
|
// Copyright 2015, 2016 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
import React, { Component, PropTypes } from 'react';
import { observer } from 'mobx-react';
import { Button, GasPriceEditor } from '~/ui';
import TransactionMainDetails from '../TransactionMainDetails';
import TransactionPendingForm from '../TransactionPendingForm';
import styles from './transactionPending.css';
import * as tUtil from '../util/transaction';
@observer
export default class TransactionPending extends Component {
static contextTypes = {
api: PropTypes.object.isRequired
};
static propTypes = {
className: PropTypes.string,
date: PropTypes.instanceOf(Date).isRequired,
focus: PropTypes.bool,
gasLimit: PropTypes.object,
id: PropTypes.object.isRequired,
isSending: PropTypes.bool.isRequired,
isTest: PropTypes.bool.isRequired,
nonce: PropTypes.number,
onConfirm: PropTypes.func.isRequired,
onReject: PropTypes.func.isRequired,
store: PropTypes.object.isRequired,
transaction: PropTypes.shape({
data: PropTypes.string,
from: PropTypes.string.isRequired,
gas: PropTypes.object.isRequired,
gasPrice: PropTypes.object.isRequired,
to: PropTypes.string,
value: PropTypes.object.isRequired
}).isRequired
};
static defaultProps = {
focus: false
};
gasStore = new GasPriceEditor.Store(this.context.api, {
gas: this.props.transaction.gas.toFixed(),
gasLimit: this.props.gasLimit,
gasPrice: this.props.transaction.gasPrice.toFixed()
});
componentWillMount () {
const { store, transaction } = this.props;
const { from, gas, gasPrice, to, value } = transaction;
const fee = tUtil.getFee(gas, gasPrice); // BigNumber object
const gasPriceEthmDisplay = tUtil.getEthmFromWeiDisplay(gasPrice);
const gasToDisplay = tUtil.getGasDisplay(gas);
const totalValue = tUtil.getTotalValue(fee, value);
this.setState({ gasPriceEthmDisplay, totalValue, gasToDisplay });
this.gasStore.setEthValue(value);
store.fetchBalances([from, to]);
}
render () {
return this.gasStore.isEditing
? this.renderGasEditor()
: this.renderTransaction();
}
renderTransaction () {
const { className, focus, id, isSending, isTest, store, transaction } = this.props;
const { totalValue } = this.state;
const { from, value } = transaction;
const fromBalance = store.balances[from];
return (
<div className={ `${styles.container} ${className}` }>
<TransactionMainDetails
className={ styles.transactionDetails }
from={ from }
fromBalance={ fromBalance }
gasStore={ this.gasStore }
id={ id }
isTest={ isTest }
totalValue={ totalValue }
transaction={ transaction }
value={ value }
/>
<TransactionPendingForm
address={ from }
focus={ focus }
isSending={ isSending }
onConfirm={ this.onConfirm }
onReject={ this.onReject }
/>
</div>
);
}
|
renderGasEditor () {
const { className } = this.props;
return (
<div className={ `${styles.container} ${className}` }>
<GasPriceEditor store={ this.gasStore }>
<Button
label='view transaction'
onClick={ this.toggleGasEditor }
/>
</GasPriceEditor>
</div>
);
}
onConfirm = (data) => {
const { id, transaction } = this.props;
const { password, wallet } = data;
const { gas, gasPrice } = this.gasStore.overrideTransaction(transaction);
this.props.onConfirm({
gas,
gasPrice,
id,
password,
wallet
});
}
onReject = () => {
this.props.onReject(this.props.id);
}
toggleGasEditor = () => {
this.gasStore.setEditing(false);
}
}
|
random_line_split
|
|
transactionPending.js
|
// Copyright 2015, 2016 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
import React, { Component, PropTypes } from 'react';
import { observer } from 'mobx-react';
import { Button, GasPriceEditor } from '~/ui';
import TransactionMainDetails from '../TransactionMainDetails';
import TransactionPendingForm from '../TransactionPendingForm';
import styles from './transactionPending.css';
import * as tUtil from '../util/transaction';
@observer
export default class TransactionPending extends Component {
static contextTypes = {
api: PropTypes.object.isRequired
};
static propTypes = {
className: PropTypes.string,
date: PropTypes.instanceOf(Date).isRequired,
focus: PropTypes.bool,
gasLimit: PropTypes.object,
id: PropTypes.object.isRequired,
isSending: PropTypes.bool.isRequired,
isTest: PropTypes.bool.isRequired,
nonce: PropTypes.number,
onConfirm: PropTypes.func.isRequired,
onReject: PropTypes.func.isRequired,
store: PropTypes.object.isRequired,
transaction: PropTypes.shape({
data: PropTypes.string,
from: PropTypes.string.isRequired,
gas: PropTypes.object.isRequired,
gasPrice: PropTypes.object.isRequired,
to: PropTypes.string,
value: PropTypes.object.isRequired
}).isRequired
};
static defaultProps = {
focus: false
};
gasStore = new GasPriceEditor.Store(this.context.api, {
gas: this.props.transaction.gas.toFixed(),
gasLimit: this.props.gasLimit,
gasPrice: this.props.transaction.gasPrice.toFixed()
});
componentWillMount () {
const { store, transaction } = this.props;
const { from, gas, gasPrice, to, value } = transaction;
const fee = tUtil.getFee(gas, gasPrice); // BigNumber object
const gasPriceEthmDisplay = tUtil.getEthmFromWeiDisplay(gasPrice);
const gasToDisplay = tUtil.getGasDisplay(gas);
const totalValue = tUtil.getTotalValue(fee, value);
this.setState({ gasPriceEthmDisplay, totalValue, gasToDisplay });
this.gasStore.setEthValue(value);
store.fetchBalances([from, to]);
}
render () {
return this.gasStore.isEditing
? this.renderGasEditor()
: this.renderTransaction();
}
renderTransaction () {
const { className, focus, id, isSending, isTest, store, transaction } = this.props;
const { totalValue } = this.state;
const { from, value } = transaction;
const fromBalance = store.balances[from];
return (
<div className={ `${styles.container} ${className}` }>
<TransactionMainDetails
className={ styles.transactionDetails }
from={ from }
fromBalance={ fromBalance }
gasStore={ this.gasStore }
id={ id }
isTest={ isTest }
totalValue={ totalValue }
transaction={ transaction }
value={ value }
/>
<TransactionPendingForm
address={ from }
focus={ focus }
isSending={ isSending }
onConfirm={ this.onConfirm }
onReject={ this.onReject }
/>
</div>
);
}
|
() {
const { className } = this.props;
return (
<div className={ `${styles.container} ${className}` }>
<GasPriceEditor store={ this.gasStore }>
<Button
label='view transaction'
onClick={ this.toggleGasEditor }
/>
</GasPriceEditor>
</div>
);
}
onConfirm = (data) => {
const { id, transaction } = this.props;
const { password, wallet } = data;
const { gas, gasPrice } = this.gasStore.overrideTransaction(transaction);
this.props.onConfirm({
gas,
gasPrice,
id,
password,
wallet
});
}
onReject = () => {
this.props.onReject(this.props.id);
}
toggleGasEditor = () => {
this.gasStore.setEditing(false);
}
}
|
renderGasEditor
|
identifier_name
|
transactionPending.js
|
// Copyright 2015, 2016 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
import React, { Component, PropTypes } from 'react';
import { observer } from 'mobx-react';
import { Button, GasPriceEditor } from '~/ui';
import TransactionMainDetails from '../TransactionMainDetails';
import TransactionPendingForm from '../TransactionPendingForm';
import styles from './transactionPending.css';
import * as tUtil from '../util/transaction';
@observer
export default class TransactionPending extends Component {
static contextTypes = {
api: PropTypes.object.isRequired
};
static propTypes = {
className: PropTypes.string,
date: PropTypes.instanceOf(Date).isRequired,
focus: PropTypes.bool,
gasLimit: PropTypes.object,
id: PropTypes.object.isRequired,
isSending: PropTypes.bool.isRequired,
isTest: PropTypes.bool.isRequired,
nonce: PropTypes.number,
onConfirm: PropTypes.func.isRequired,
onReject: PropTypes.func.isRequired,
store: PropTypes.object.isRequired,
transaction: PropTypes.shape({
data: PropTypes.string,
from: PropTypes.string.isRequired,
gas: PropTypes.object.isRequired,
gasPrice: PropTypes.object.isRequired,
to: PropTypes.string,
value: PropTypes.object.isRequired
}).isRequired
};
static defaultProps = {
focus: false
};
gasStore = new GasPriceEditor.Store(this.context.api, {
gas: this.props.transaction.gas.toFixed(),
gasLimit: this.props.gasLimit,
gasPrice: this.props.transaction.gasPrice.toFixed()
});
componentWillMount () {
const { store, transaction } = this.props;
const { from, gas, gasPrice, to, value } = transaction;
const fee = tUtil.getFee(gas, gasPrice); // BigNumber object
const gasPriceEthmDisplay = tUtil.getEthmFromWeiDisplay(gasPrice);
const gasToDisplay = tUtil.getGasDisplay(gas);
const totalValue = tUtil.getTotalValue(fee, value);
this.setState({ gasPriceEthmDisplay, totalValue, gasToDisplay });
this.gasStore.setEthValue(value);
store.fetchBalances([from, to]);
}
render () {
return this.gasStore.isEditing
? this.renderGasEditor()
: this.renderTransaction();
}
renderTransaction () {
const { className, focus, id, isSending, isTest, store, transaction } = this.props;
const { totalValue } = this.state;
const { from, value } = transaction;
const fromBalance = store.balances[from];
return (
<div className={ `${styles.container} ${className}` }>
<TransactionMainDetails
className={ styles.transactionDetails }
from={ from }
fromBalance={ fromBalance }
gasStore={ this.gasStore }
id={ id }
isTest={ isTest }
totalValue={ totalValue }
transaction={ transaction }
value={ value }
/>
<TransactionPendingForm
address={ from }
focus={ focus }
isSending={ isSending }
onConfirm={ this.onConfirm }
onReject={ this.onReject }
/>
</div>
);
}
renderGasEditor ()
|
onConfirm = (data) => {
const { id, transaction } = this.props;
const { password, wallet } = data;
const { gas, gasPrice } = this.gasStore.overrideTransaction(transaction);
this.props.onConfirm({
gas,
gasPrice,
id,
password,
wallet
});
}
onReject = () => {
this.props.onReject(this.props.id);
}
toggleGasEditor = () => {
this.gasStore.setEditing(false);
}
}
|
{
const { className } = this.props;
return (
<div className={ `${styles.container} ${className}` }>
<GasPriceEditor store={ this.gasStore }>
<Button
label='view transaction'
onClick={ this.toggleGasEditor }
/>
</GasPriceEditor>
</div>
);
}
|
identifier_body
|
ng_component_outlet.ts
|
/**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {ComponentFactoryResolver, ComponentRef, Directive, Injector, Input, NgModuleFactory, NgModuleRef, OnChanges, OnDestroy, SimpleChanges, StaticProvider, Type, ViewContainerRef} from '@angular/core';
/**
* Instantiates a single {@link Component} type and inserts its Host View into current View.
* `NgComponentOutlet` provides a declarative approach for dynamic component creation.
*
* `NgComponentOutlet` requires a component type, if a falsy value is set the view will clear and
* any existing component will get destroyed.
*
* @usageNotes
*
* ### Fine tune control
*
* You can control the component creation process by using the following optional attributes:
*
* * `ngComponentOutletInjector`: Optional custom {@link Injector} that will be used as parent for
* the Component. Defaults to the injector of the current view container.
*
* * `ngComponentOutletContent`: Optional list of projectable nodes to insert into the content
* section of the component, if exists.
*
* * `ngComponentOutletNgModuleFactory`: Optional module factory to allow dynamically loading other
* module, then load a component from that module.
*
* ### Syntax
*
* Simple
* ```
* <ng-container *ngComponentOutlet="componentTypeExpression"></ng-container>
* ```
*
* Customized injector/content
* ```
* <ng-container *ngComponentOutlet="componentTypeExpression;
* injector: injectorExpression;
* content: contentNodesExpression;">
* </ng-container>
* ```
*
* Customized ngModuleFactory
* ```
* <ng-container *ngComponentOutlet="componentTypeExpression;
|
* ```
*
* ### A simple example
*
* {@example common/ngComponentOutlet/ts/module.ts region='SimpleExample'}
*
* A more complete example with additional options:
*
* {@example common/ngComponentOutlet/ts/module.ts region='CompleteExample'}
* A more complete example with ngModuleFactory:
*
* {@example common/ngComponentOutlet/ts/module.ts region='NgModuleFactoryExample'}
*
* @experimental
*/
@Directive({selector: '[ngComponentOutlet]'})
export class NgComponentOutlet implements OnChanges, OnDestroy {
// TODO(issue/24571): remove '!'.
@Input() ngComponentOutlet !: Type<any>;
// TODO(issue/24571): remove '!'.
@Input() ngComponentOutletInjector !: Injector;
// TODO(issue/24571): remove '!'.
@Input() ngComponentOutletContent !: any[][];
// TODO(issue/24571): remove '!'.
@Input() ngComponentOutletNgModuleFactory !: NgModuleFactory<any>;
private _componentRef: ComponentRef<any>|null = null;
private _moduleRef: NgModuleRef<any>|null = null;
constructor(private _viewContainerRef: ViewContainerRef) {}
ngOnChanges(changes: SimpleChanges) {
this._viewContainerRef.clear();
this._componentRef = null;
if (this.ngComponentOutlet) {
const elInjector = this.ngComponentOutletInjector || this._viewContainerRef.parentInjector;
if (changes['ngComponentOutletNgModuleFactory']) {
if (this._moduleRef) this._moduleRef.destroy();
if (this.ngComponentOutletNgModuleFactory) {
const parentModule = elInjector.get(NgModuleRef);
this._moduleRef = this.ngComponentOutletNgModuleFactory.create(parentModule.injector);
} else {
this._moduleRef = null;
}
}
const componentFactoryResolver = this._moduleRef ? this._moduleRef.componentFactoryResolver :
elInjector.get(ComponentFactoryResolver);
const componentFactory =
componentFactoryResolver.resolveComponentFactory(this.ngComponentOutlet);
this._componentRef = this._viewContainerRef.createComponent(
componentFactory, this._viewContainerRef.length, elInjector,
this.ngComponentOutletContent);
}
}
ngOnDestroy() {
if (this._moduleRef) this._moduleRef.destroy();
}
}
|
* ngModuleFactory: moduleFactory;">
* </ng-container>
|
random_line_split
|
ng_component_outlet.ts
|
/**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {ComponentFactoryResolver, ComponentRef, Directive, Injector, Input, NgModuleFactory, NgModuleRef, OnChanges, OnDestroy, SimpleChanges, StaticProvider, Type, ViewContainerRef} from '@angular/core';
/**
* Instantiates a single {@link Component} type and inserts its Host View into current View.
* `NgComponentOutlet` provides a declarative approach for dynamic component creation.
*
* `NgComponentOutlet` requires a component type, if a falsy value is set the view will clear and
* any existing component will get destroyed.
*
* @usageNotes
*
* ### Fine tune control
*
* You can control the component creation process by using the following optional attributes:
*
* * `ngComponentOutletInjector`: Optional custom {@link Injector} that will be used as parent for
* the Component. Defaults to the injector of the current view container.
*
* * `ngComponentOutletContent`: Optional list of projectable nodes to insert into the content
* section of the component, if exists.
*
* * `ngComponentOutletNgModuleFactory`: Optional module factory to allow dynamically loading other
* module, then load a component from that module.
*
* ### Syntax
*
* Simple
* ```
* <ng-container *ngComponentOutlet="componentTypeExpression"></ng-container>
* ```
*
* Customized injector/content
* ```
* <ng-container *ngComponentOutlet="componentTypeExpression;
* injector: injectorExpression;
* content: contentNodesExpression;">
* </ng-container>
* ```
*
* Customized ngModuleFactory
* ```
* <ng-container *ngComponentOutlet="componentTypeExpression;
* ngModuleFactory: moduleFactory;">
* </ng-container>
* ```
*
* ### A simple example
*
* {@example common/ngComponentOutlet/ts/module.ts region='SimpleExample'}
*
* A more complete example with additional options:
*
* {@example common/ngComponentOutlet/ts/module.ts region='CompleteExample'}
* A more complete example with ngModuleFactory:
*
* {@example common/ngComponentOutlet/ts/module.ts region='NgModuleFactoryExample'}
*
* @experimental
*/
@Directive({selector: '[ngComponentOutlet]'})
export class NgComponentOutlet implements OnChanges, OnDestroy {
// TODO(issue/24571): remove '!'.
@Input() ngComponentOutlet !: Type<any>;
// TODO(issue/24571): remove '!'.
@Input() ngComponentOutletInjector !: Injector;
// TODO(issue/24571): remove '!'.
@Input() ngComponentOutletContent !: any[][];
// TODO(issue/24571): remove '!'.
@Input() ngComponentOutletNgModuleFactory !: NgModuleFactory<any>;
private _componentRef: ComponentRef<any>|null = null;
private _moduleRef: NgModuleRef<any>|null = null;
constructor(private _viewContainerRef: ViewContainerRef) {}
ngOnChanges(changes: SimpleChanges) {
this._viewContainerRef.clear();
this._componentRef = null;
if (this.ngComponentOutlet) {
const elInjector = this.ngComponentOutletInjector || this._viewContainerRef.parentInjector;
if (changes['ngComponentOutletNgModuleFactory']) {
if (this._moduleRef) this._moduleRef.destroy();
if (this.ngComponentOutletNgModuleFactory) {
const parentModule = elInjector.get(NgModuleRef);
this._moduleRef = this.ngComponentOutletNgModuleFactory.create(parentModule.injector);
} else {
this._moduleRef = null;
}
}
const componentFactoryResolver = this._moduleRef ? this._moduleRef.componentFactoryResolver :
elInjector.get(ComponentFactoryResolver);
const componentFactory =
componentFactoryResolver.resolveComponentFactory(this.ngComponentOutlet);
this._componentRef = this._viewContainerRef.createComponent(
componentFactory, this._viewContainerRef.length, elInjector,
this.ngComponentOutletContent);
}
}
|
() {
if (this._moduleRef) this._moduleRef.destroy();
}
}
|
ngOnDestroy
|
identifier_name
|
ng_component_outlet.ts
|
/**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {ComponentFactoryResolver, ComponentRef, Directive, Injector, Input, NgModuleFactory, NgModuleRef, OnChanges, OnDestroy, SimpleChanges, StaticProvider, Type, ViewContainerRef} from '@angular/core';
/**
* Instantiates a single {@link Component} type and inserts its Host View into current View.
* `NgComponentOutlet` provides a declarative approach for dynamic component creation.
*
* `NgComponentOutlet` requires a component type, if a falsy value is set the view will clear and
* any existing component will get destroyed.
*
* @usageNotes
*
* ### Fine tune control
*
* You can control the component creation process by using the following optional attributes:
*
* * `ngComponentOutletInjector`: Optional custom {@link Injector} that will be used as parent for
* the Component. Defaults to the injector of the current view container.
*
* * `ngComponentOutletContent`: Optional list of projectable nodes to insert into the content
* section of the component, if exists.
*
* * `ngComponentOutletNgModuleFactory`: Optional module factory to allow dynamically loading other
* module, then load a component from that module.
*
* ### Syntax
*
* Simple
* ```
* <ng-container *ngComponentOutlet="componentTypeExpression"></ng-container>
* ```
*
* Customized injector/content
* ```
* <ng-container *ngComponentOutlet="componentTypeExpression;
* injector: injectorExpression;
* content: contentNodesExpression;">
* </ng-container>
* ```
*
* Customized ngModuleFactory
* ```
* <ng-container *ngComponentOutlet="componentTypeExpression;
* ngModuleFactory: moduleFactory;">
* </ng-container>
* ```
*
* ### A simple example
*
* {@example common/ngComponentOutlet/ts/module.ts region='SimpleExample'}
*
* A more complete example with additional options:
*
* {@example common/ngComponentOutlet/ts/module.ts region='CompleteExample'}
* A more complete example with ngModuleFactory:
*
* {@example common/ngComponentOutlet/ts/module.ts region='NgModuleFactoryExample'}
*
* @experimental
*/
@Directive({selector: '[ngComponentOutlet]'})
export class NgComponentOutlet implements OnChanges, OnDestroy {
// TODO(issue/24571): remove '!'.
@Input() ngComponentOutlet !: Type<any>;
// TODO(issue/24571): remove '!'.
@Input() ngComponentOutletInjector !: Injector;
// TODO(issue/24571): remove '!'.
@Input() ngComponentOutletContent !: any[][];
// TODO(issue/24571): remove '!'.
@Input() ngComponentOutletNgModuleFactory !: NgModuleFactory<any>;
private _componentRef: ComponentRef<any>|null = null;
private _moduleRef: NgModuleRef<any>|null = null;
constructor(private _viewContainerRef: ViewContainerRef) {}
ngOnChanges(changes: SimpleChanges)
|
ngOnDestroy() {
if (this._moduleRef) this._moduleRef.destroy();
}
}
|
{
this._viewContainerRef.clear();
this._componentRef = null;
if (this.ngComponentOutlet) {
const elInjector = this.ngComponentOutletInjector || this._viewContainerRef.parentInjector;
if (changes['ngComponentOutletNgModuleFactory']) {
if (this._moduleRef) this._moduleRef.destroy();
if (this.ngComponentOutletNgModuleFactory) {
const parentModule = elInjector.get(NgModuleRef);
this._moduleRef = this.ngComponentOutletNgModuleFactory.create(parentModule.injector);
} else {
this._moduleRef = null;
}
}
const componentFactoryResolver = this._moduleRef ? this._moduleRef.componentFactoryResolver :
elInjector.get(ComponentFactoryResolver);
const componentFactory =
componentFactoryResolver.resolveComponentFactory(this.ngComponentOutlet);
this._componentRef = this._viewContainerRef.createComponent(
componentFactory, this._viewContainerRef.length, elInjector,
this.ngComponentOutletContent);
}
}
|
identifier_body
|
index.ts
|
import * as nodes from './nodes'
import {BaseNode, Tick, State, Tree, Blackboard} from './BaseNode'
export function buildNode(data: IDataBehaviorNode) {
var type = data.nodeType
var node: BaseNode = new nodes[type]()
node.config = data.config
if (data.children.length > 0) {
if (node instanceof nodes.Decorator) {
var childNode = buildNode(data.children[0])
node.addChild(childNode)
} else if (node instanceof nodes.Composite) {
for (var i = 0; i < data.children.length; i++) {
var childNode = buildNode(data.children[i])
node.addChild(childNode)
}
}
}
return node
}
export function registerNodeType(name: string, func: Function)
|
export {BaseNode, Tick, State, Tree, Blackboard, nodes}
|
{
if (nodes[name]) throw new Error('nodeType already exist:' + name)
nodes[name] = func
}
|
identifier_body
|
index.ts
|
import * as nodes from './nodes'
import {BaseNode, Tick, State, Tree, Blackboard} from './BaseNode'
export function buildNode(data: IDataBehaviorNode) {
var type = data.nodeType
var node: BaseNode = new nodes[type]()
node.config = data.config
if (data.children.length > 0) {
|
var childNode = buildNode(data.children[0])
node.addChild(childNode)
} else if (node instanceof nodes.Composite) {
for (var i = 0; i < data.children.length; i++) {
var childNode = buildNode(data.children[i])
node.addChild(childNode)
}
}
}
return node
}
export function registerNodeType(name: string, func: Function) {
if (nodes[name]) throw new Error('nodeType already exist:' + name)
nodes[name] = func
}
export {BaseNode, Tick, State, Tree, Blackboard, nodes}
|
if (node instanceof nodes.Decorator) {
|
random_line_split
|
index.ts
|
import * as nodes from './nodes'
import {BaseNode, Tick, State, Tree, Blackboard} from './BaseNode'
export function
|
(data: IDataBehaviorNode) {
var type = data.nodeType
var node: BaseNode = new nodes[type]()
node.config = data.config
if (data.children.length > 0) {
if (node instanceof nodes.Decorator) {
var childNode = buildNode(data.children[0])
node.addChild(childNode)
} else if (node instanceof nodes.Composite) {
for (var i = 0; i < data.children.length; i++) {
var childNode = buildNode(data.children[i])
node.addChild(childNode)
}
}
}
return node
}
export function registerNodeType(name: string, func: Function) {
if (nodes[name]) throw new Error('nodeType already exist:' + name)
nodes[name] = func
}
export {BaseNode, Tick, State, Tree, Blackboard, nodes}
|
buildNode
|
identifier_name
|
IRIS_DF_Controller.py
|
#!/usr/bin/env python
'''======================================================
Created by: D. Spencer Maughan
Last updated: May 2015
File name: IRIS_DF_Controller.py
Organization: RISC Lab, Utah State University
|
Notes:
======================================================'''
import roslib; roslib.load_manifest('risc_msgs')
import rospy
from math import *
import numpy as np
import time
#=======================#
# Messages Needed #
#=======================#
from risc_msgs.msg import *
from std_msgs.msg import Bool
from roscopter.msg import Status
#=====================#
# Gain Matrices #
#=====================#
K = np.matrix([[ 1.8, 0, 0, 1.4, 0, 0, 0],\
[ 0, 1.8, 0, 0, 1.4, 0, 0],\
[ 0, 0, 3, 0, 0, 5, 0],\
[ 0, 0, 0, 0, 0, 0,.5]])
#========================#
# Globals #
#========================#
nominal_thrust = 0 # thrust necessary to maintain hover given battery level
phi_scale = 3.053261127645355
phi_trim = 0.0#0.058941904209906
theta_scale = 3.815398742249453
theta_trim = 0.0#-0.091216767651723
ctrl_status = False
states = Cortex()
states.Obj = [States()]*1
traj = Trajectories()
traj.Obj = [Trajectory()]*1
euler_max = 45*np.pi/180
max_yaw_rate = .3490659 #in radians/sec
rate = 45 # Hz
image = 0
start_time = 0
#==================#
# Publishers #
#==================#
pub_ctrl = rospy.Publisher('/controls', Controls, queue_size = 1)
#========================#
# Get Cortex States #
#========================#
def GetStates(S):
global states
states = S
#=====================#
# Get Trajectory #
#=====================#
def GetTraj(S):
global traj
traj = S
#=========================#
# Get Battery Status #
#=========================#
def GetBatt(S):
global nominal_thrust
B = S.battery_remaining
# coefficients for fourth order fit
# determined 11 May 2015 by Spencer Maughan and Ishmaal Erekson
c0 = 0.491674747062374
c1 = -0.024809293286468
c2 = 0.000662710609466
c3 = -0.000008160593348
c4 = 0.000000033699651
nominal_thrust = c0+c1*B+c2*B**2+c3*B**3+c4*B**4
#============================#
# Get Controller Status #
#============================#
def GetStatus(S):
global ctrl_status
ctrl_status = S.data
#========================#
# Basic Controller #
#========================#
def Basic_Controller():
global states, euler_max, max_yaw_rate, pub_ctrl,K,traj
Ctrl = Controls()
Ctrl.Obj = [Control()]*1
Ctrl.header.stamp = states.header.stamp
g = 9.80665 # average value of earth's gravitational constant m/s^2
m = 1.282 # IRIS mass in kg
#===================================#
# Get State Trajectory Errors #
#===================================#
if states.Obj[0].visible:
X = np.asmatrix(np.zeros((7,1)))
X[0] = traj.Obj[0].x-states.Obj[0].x
X[1] = traj.Obj[0].y-states.Obj[0].y
X[2] = traj.Obj[0].z-states.Obj[0].z
X[3] = traj.Obj[0].xdot-states.Obj[0].u
X[4] = traj.Obj[0].ydot-states.Obj[0].v
X[5] = traj.Obj[0].zdot-states.Obj[0].w
X[6] = traj.Obj[0].psi-states.Obj[0].psi*np.pi/180
#============================================#
# Differential Flatness Control Input #
#============================================#
# LQR input
utilde = -K*X
# required input
u_r = np.asmatrix(np.zeros((4,1)))
u = utilde+u_r-np.matrix([[0],[0],[9.81],[0]])
#==================================#
# Rotate to Vehicle 1 Frame #
#==================================#
psi = states.Obj[0].psi*np.pi/180
rotZ = np.matrix([[cos(psi), sin(psi), 0],[-sin(psi), cos(psi), 0],[0, 0, 1]])
Cart = np.matrix([[1, 0, 0],[0, -1, 0],[0, 0, -1]])
u[:-1] = Cart*rotZ*u[:-1]
#===================================#
# Normalize given the Thrust #
#===================================#
T = sqrt(u[0:3].T*u[0:3])
u[:-1] = np.divide(u[:-1],-T)
#==================#
# Set Controls #
#==================#
# Controls for Ardrone
# -phi = right... +phi = left
# -theta = back... +theta = forward
# -psi = right... +psi = left
global phi_trim,theta_trim,phi_scale,theta_scale
phi_d = (asin(u[1,-1]))
theta_d = (-asin(u[0,-1]))
ctrl = Control()
ctrl.name = states.Obj[0].name
ctrl.phi = phi_trim + phi_scale*phi_d
ctrl.theta = theta_trim + theta_scale*theta_d
ctrl.psi = -u[3,-1]/max_yaw_rate
global nominal_thrust
T_d = nominal_thrust+(T-g)/g
ctrl.T = T_d
Ctrl.Obj[0] = ctrl
Ctrl.header = states.header
#rospy.loginfo("latency = %f",states.header.stamp.to_sec()-rospy.get_time())
pub_ctrl.publish(Ctrl)
#===================#
# Main #
#===================#
if __name__=='__main__':
import sys
rospy.init_node('IRIS_DF_Controller')
#=====================================#
# Set up Publish/Subscribe Loop #
#=====================================#
r = rospy.Rate(rate)
while not rospy.is_shutdown():
sub_cortex = rospy.Subscriber('/cortex_raw' , Cortex, GetStates, queue_size=1, buff_size=2**24)
sub_traj = rospy.Subscriber('/trajectory' , Trajectories, GetTraj, queue_size=1, buff_size=2**24)
sub_Batt = rospy.Subscriber('/apm/status' , Status, GetBatt)
sub_status = rospy.Subscriber('/controller_status' , Bool, GetStatus)
Basic_Controller()
r.sleep()
|
random_line_split
|
|
IRIS_DF_Controller.py
|
#!/usr/bin/env python
'''======================================================
Created by: D. Spencer Maughan
Last updated: May 2015
File name: IRIS_DF_Controller.py
Organization: RISC Lab, Utah State University
Notes:
======================================================'''
import roslib; roslib.load_manifest('risc_msgs')
import rospy
from math import *
import numpy as np
import time
#=======================#
# Messages Needed #
#=======================#
from risc_msgs.msg import *
from std_msgs.msg import Bool
from roscopter.msg import Status
#=====================#
# Gain Matrices #
#=====================#
K = np.matrix([[ 1.8, 0, 0, 1.4, 0, 0, 0],\
[ 0, 1.8, 0, 0, 1.4, 0, 0],\
[ 0, 0, 3, 0, 0, 5, 0],\
[ 0, 0, 0, 0, 0, 0,.5]])
#========================#
# Globals #
#========================#
nominal_thrust = 0 # thrust necessary to maintain hover given battery level
phi_scale = 3.053261127645355
phi_trim = 0.0#0.058941904209906
theta_scale = 3.815398742249453
theta_trim = 0.0#-0.091216767651723
ctrl_status = False
states = Cortex()
states.Obj = [States()]*1
traj = Trajectories()
traj.Obj = [Trajectory()]*1
euler_max = 45*np.pi/180
max_yaw_rate = .3490659 #in radians/sec
rate = 45 # Hz
image = 0
start_time = 0
#==================#
# Publishers #
#==================#
pub_ctrl = rospy.Publisher('/controls', Controls, queue_size = 1)
#========================#
# Get Cortex States #
#========================#
def GetStates(S):
global states
states = S
#=====================#
# Get Trajectory #
#=====================#
def GetTraj(S):
global traj
traj = S
#=========================#
# Get Battery Status #
#=========================#
def GetBatt(S):
global nominal_thrust
B = S.battery_remaining
# coefficients for fourth order fit
# determined 11 May 2015 by Spencer Maughan and Ishmaal Erekson
c0 = 0.491674747062374
c1 = -0.024809293286468
c2 = 0.000662710609466
c3 = -0.000008160593348
c4 = 0.000000033699651
nominal_thrust = c0+c1*B+c2*B**2+c3*B**3+c4*B**4
#============================#
# Get Controller Status #
#============================#
def GetStatus(S):
global ctrl_status
ctrl_status = S.data
#========================#
# Basic Controller #
#========================#
def Basic_Controller():
global states, euler_max, max_yaw_rate, pub_ctrl,K,traj
Ctrl = Controls()
Ctrl.Obj = [Control()]*1
Ctrl.header.stamp = states.header.stamp
g = 9.80665 # average value of earth's gravitational constant m/s^2
m = 1.282 # IRIS mass in kg
#===================================#
# Get State Trajectory Errors #
#===================================#
if states.Obj[0].visible:
X = np.asmatrix(np.zeros((7,1)))
X[0] = traj.Obj[0].x-states.Obj[0].x
X[1] = traj.Obj[0].y-states.Obj[0].y
X[2] = traj.Obj[0].z-states.Obj[0].z
X[3] = traj.Obj[0].xdot-states.Obj[0].u
X[4] = traj.Obj[0].ydot-states.Obj[0].v
X[5] = traj.Obj[0].zdot-states.Obj[0].w
X[6] = traj.Obj[0].psi-states.Obj[0].psi*np.pi/180
#============================================#
# Differential Flatness Control Input #
#============================================#
# LQR input
utilde = -K*X
# required input
u_r = np.asmatrix(np.zeros((4,1)))
u = utilde+u_r-np.matrix([[0],[0],[9.81],[0]])
#==================================#
# Rotate to Vehicle 1 Frame #
#==================================#
psi = states.Obj[0].psi*np.pi/180
rotZ = np.matrix([[cos(psi), sin(psi), 0],[-sin(psi), cos(psi), 0],[0, 0, 1]])
Cart = np.matrix([[1, 0, 0],[0, -1, 0],[0, 0, -1]])
u[:-1] = Cart*rotZ*u[:-1]
#===================================#
# Normalize given the Thrust #
#===================================#
T = sqrt(u[0:3].T*u[0:3])
u[:-1] = np.divide(u[:-1],-T)
#==================#
# Set Controls #
#==================#
# Controls for Ardrone
# -phi = right... +phi = left
# -theta = back... +theta = forward
# -psi = right... +psi = left
global phi_trim,theta_trim,phi_scale,theta_scale
phi_d = (asin(u[1,-1]))
theta_d = (-asin(u[0,-1]))
ctrl = Control()
ctrl.name = states.Obj[0].name
ctrl.phi = phi_trim + phi_scale*phi_d
ctrl.theta = theta_trim + theta_scale*theta_d
ctrl.psi = -u[3,-1]/max_yaw_rate
global nominal_thrust
T_d = nominal_thrust+(T-g)/g
ctrl.T = T_d
Ctrl.Obj[0] = ctrl
Ctrl.header = states.header
#rospy.loginfo("latency = %f",states.header.stamp.to_sec()-rospy.get_time())
pub_ctrl.publish(Ctrl)
#===================#
# Main #
#===================#
if __name__=='__main__':
|
import sys
rospy.init_node('IRIS_DF_Controller')
#=====================================#
# Set up Publish/Subscribe Loop #
#=====================================#
r = rospy.Rate(rate)
while not rospy.is_shutdown():
sub_cortex = rospy.Subscriber('/cortex_raw' , Cortex, GetStates, queue_size=1, buff_size=2**24)
sub_traj = rospy.Subscriber('/trajectory' , Trajectories, GetTraj, queue_size=1, buff_size=2**24)
sub_Batt = rospy.Subscriber('/apm/status' , Status, GetBatt)
sub_status = rospy.Subscriber('/controller_status' , Bool, GetStatus)
Basic_Controller()
r.sleep()
|
conditional_block
|
|
IRIS_DF_Controller.py
|
#!/usr/bin/env python
'''======================================================
Created by: D. Spencer Maughan
Last updated: May 2015
File name: IRIS_DF_Controller.py
Organization: RISC Lab, Utah State University
Notes:
======================================================'''
import roslib; roslib.load_manifest('risc_msgs')
import rospy
from math import *
import numpy as np
import time
#=======================#
# Messages Needed #
#=======================#
from risc_msgs.msg import *
from std_msgs.msg import Bool
from roscopter.msg import Status
#=====================#
# Gain Matrices #
#=====================#
K = np.matrix([[ 1.8, 0, 0, 1.4, 0, 0, 0],\
[ 0, 1.8, 0, 0, 1.4, 0, 0],\
[ 0, 0, 3, 0, 0, 5, 0],\
[ 0, 0, 0, 0, 0, 0,.5]])
#========================#
# Globals #
#========================#
nominal_thrust = 0 # thrust necessary to maintain hover given battery level
phi_scale = 3.053261127645355
phi_trim = 0.0#0.058941904209906
theta_scale = 3.815398742249453
theta_trim = 0.0#-0.091216767651723
ctrl_status = False
states = Cortex()
states.Obj = [States()]*1
traj = Trajectories()
traj.Obj = [Trajectory()]*1
euler_max = 45*np.pi/180
max_yaw_rate = .3490659 #in radians/sec
rate = 45 # Hz
image = 0
start_time = 0
#==================#
# Publishers #
#==================#
pub_ctrl = rospy.Publisher('/controls', Controls, queue_size = 1)
#========================#
# Get Cortex States #
#========================#
def GetStates(S):
global states
states = S
#=====================#
# Get Trajectory #
#=====================#
def GetTraj(S):
global traj
traj = S
#=========================#
# Get Battery Status #
#=========================#
def GetBatt(S):
global nominal_thrust
B = S.battery_remaining
# coefficients for fourth order fit
# determined 11 May 2015 by Spencer Maughan and Ishmaal Erekson
c0 = 0.491674747062374
c1 = -0.024809293286468
c2 = 0.000662710609466
c3 = -0.000008160593348
c4 = 0.000000033699651
nominal_thrust = c0+c1*B+c2*B**2+c3*B**3+c4*B**4
#============================#
# Get Controller Status #
#============================#
def
|
(S):
global ctrl_status
ctrl_status = S.data
#========================#
# Basic Controller #
#========================#
def Basic_Controller():
global states, euler_max, max_yaw_rate, pub_ctrl,K,traj
Ctrl = Controls()
Ctrl.Obj = [Control()]*1
Ctrl.header.stamp = states.header.stamp
g = 9.80665 # average value of earth's gravitational constant m/s^2
m = 1.282 # IRIS mass in kg
#===================================#
# Get State Trajectory Errors #
#===================================#
if states.Obj[0].visible:
X = np.asmatrix(np.zeros((7,1)))
X[0] = traj.Obj[0].x-states.Obj[0].x
X[1] = traj.Obj[0].y-states.Obj[0].y
X[2] = traj.Obj[0].z-states.Obj[0].z
X[3] = traj.Obj[0].xdot-states.Obj[0].u
X[4] = traj.Obj[0].ydot-states.Obj[0].v
X[5] = traj.Obj[0].zdot-states.Obj[0].w
X[6] = traj.Obj[0].psi-states.Obj[0].psi*np.pi/180
#============================================#
# Differential Flatness Control Input #
#============================================#
# LQR input
utilde = -K*X
# required input
u_r = np.asmatrix(np.zeros((4,1)))
u = utilde+u_r-np.matrix([[0],[0],[9.81],[0]])
#==================================#
# Rotate to Vehicle 1 Frame #
#==================================#
psi = states.Obj[0].psi*np.pi/180
rotZ = np.matrix([[cos(psi), sin(psi), 0],[-sin(psi), cos(psi), 0],[0, 0, 1]])
Cart = np.matrix([[1, 0, 0],[0, -1, 0],[0, 0, -1]])
u[:-1] = Cart*rotZ*u[:-1]
#===================================#
# Normalize given the Thrust #
#===================================#
T = sqrt(u[0:3].T*u[0:3])
u[:-1] = np.divide(u[:-1],-T)
#==================#
# Set Controls #
#==================#
# Controls for Ardrone
# -phi = right... +phi = left
# -theta = back... +theta = forward
# -psi = right... +psi = left
global phi_trim,theta_trim,phi_scale,theta_scale
phi_d = (asin(u[1,-1]))
theta_d = (-asin(u[0,-1]))
ctrl = Control()
ctrl.name = states.Obj[0].name
ctrl.phi = phi_trim + phi_scale*phi_d
ctrl.theta = theta_trim + theta_scale*theta_d
ctrl.psi = -u[3,-1]/max_yaw_rate
global nominal_thrust
T_d = nominal_thrust+(T-g)/g
ctrl.T = T_d
Ctrl.Obj[0] = ctrl
Ctrl.header = states.header
#rospy.loginfo("latency = %f",states.header.stamp.to_sec()-rospy.get_time())
pub_ctrl.publish(Ctrl)
#===================#
# Main #
#===================#
if __name__=='__main__':
import sys
rospy.init_node('IRIS_DF_Controller')
#=====================================#
# Set up Publish/Subscribe Loop #
#=====================================#
r = rospy.Rate(rate)
while not rospy.is_shutdown():
sub_cortex = rospy.Subscriber('/cortex_raw' , Cortex, GetStates, queue_size=1, buff_size=2**24)
sub_traj = rospy.Subscriber('/trajectory' , Trajectories, GetTraj, queue_size=1, buff_size=2**24)
sub_Batt = rospy.Subscriber('/apm/status' , Status, GetBatt)
sub_status = rospy.Subscriber('/controller_status' , Bool, GetStatus)
Basic_Controller()
r.sleep()
|
GetStatus
|
identifier_name
|
IRIS_DF_Controller.py
|
#!/usr/bin/env python
'''======================================================
Created by: D. Spencer Maughan
Last updated: May 2015
File name: IRIS_DF_Controller.py
Organization: RISC Lab, Utah State University
Notes:
======================================================'''
import roslib; roslib.load_manifest('risc_msgs')
import rospy
from math import *
import numpy as np
import time
#=======================#
# Messages Needed #
#=======================#
from risc_msgs.msg import *
from std_msgs.msg import Bool
from roscopter.msg import Status
#=====================#
# Gain Matrices #
#=====================#
K = np.matrix([[ 1.8, 0, 0, 1.4, 0, 0, 0],\
[ 0, 1.8, 0, 0, 1.4, 0, 0],\
[ 0, 0, 3, 0, 0, 5, 0],\
[ 0, 0, 0, 0, 0, 0,.5]])
#========================#
# Globals #
#========================#
nominal_thrust = 0 # thrust necessary to maintain hover given battery level
phi_scale = 3.053261127645355
phi_trim = 0.0#0.058941904209906
theta_scale = 3.815398742249453
theta_trim = 0.0#-0.091216767651723
ctrl_status = False
states = Cortex()
states.Obj = [States()]*1
traj = Trajectories()
traj.Obj = [Trajectory()]*1
euler_max = 45*np.pi/180
max_yaw_rate = .3490659 #in radians/sec
rate = 45 # Hz
image = 0
start_time = 0
#==================#
# Publishers #
#==================#
pub_ctrl = rospy.Publisher('/controls', Controls, queue_size = 1)
#========================#
# Get Cortex States #
#========================#
def GetStates(S):
global states
states = S
#=====================#
# Get Trajectory #
#=====================#
def GetTraj(S):
global traj
traj = S
#=========================#
# Get Battery Status #
#=========================#
def GetBatt(S):
|
def GetStatus(S):
global ctrl_status
ctrl_status = S.data
#========================#
# Basic Controller #
#========================#
def Basic_Controller():
global states, euler_max, max_yaw_rate, pub_ctrl,K,traj
Ctrl = Controls()
Ctrl.Obj = [Control()]*1
Ctrl.header.stamp = states.header.stamp
g = 9.80665 # average value of earth's gravitational constant m/s^2
m = 1.282 # IRIS mass in kg
#===================================#
# Get State Trajectory Errors #
#===================================#
if states.Obj[0].visible:
X = np.asmatrix(np.zeros((7,1)))
X[0] = traj.Obj[0].x-states.Obj[0].x
X[1] = traj.Obj[0].y-states.Obj[0].y
X[2] = traj.Obj[0].z-states.Obj[0].z
X[3] = traj.Obj[0].xdot-states.Obj[0].u
X[4] = traj.Obj[0].ydot-states.Obj[0].v
X[5] = traj.Obj[0].zdot-states.Obj[0].w
X[6] = traj.Obj[0].psi-states.Obj[0].psi*np.pi/180
#============================================#
# Differential Flatness Control Input #
#============================================#
# LQR input
utilde = -K*X
# required input
u_r = np.asmatrix(np.zeros((4,1)))
u = utilde+u_r-np.matrix([[0],[0],[9.81],[0]])
#==================================#
# Rotate to Vehicle 1 Frame #
#==================================#
psi = states.Obj[0].psi*np.pi/180
rotZ = np.matrix([[cos(psi), sin(psi), 0],[-sin(psi), cos(psi), 0],[0, 0, 1]])
Cart = np.matrix([[1, 0, 0],[0, -1, 0],[0, 0, -1]])
u[:-1] = Cart*rotZ*u[:-1]
#===================================#
# Normalize given the Thrust #
#===================================#
T = sqrt(u[0:3].T*u[0:3])
u[:-1] = np.divide(u[:-1],-T)
#==================#
# Set Controls #
#==================#
# Controls for Ardrone
# -phi = right... +phi = left
# -theta = back... +theta = forward
# -psi = right... +psi = left
global phi_trim,theta_trim,phi_scale,theta_scale
phi_d = (asin(u[1,-1]))
theta_d = (-asin(u[0,-1]))
ctrl = Control()
ctrl.name = states.Obj[0].name
ctrl.phi = phi_trim + phi_scale*phi_d
ctrl.theta = theta_trim + theta_scale*theta_d
ctrl.psi = -u[3,-1]/max_yaw_rate
global nominal_thrust
T_d = nominal_thrust+(T-g)/g
ctrl.T = T_d
Ctrl.Obj[0] = ctrl
Ctrl.header = states.header
#rospy.loginfo("latency = %f",states.header.stamp.to_sec()-rospy.get_time())
pub_ctrl.publish(Ctrl)
#===================#
# Main #
#===================#
if __name__=='__main__':
import sys
rospy.init_node('IRIS_DF_Controller')
#=====================================#
# Set up Publish/Subscribe Loop #
#=====================================#
r = rospy.Rate(rate)
while not rospy.is_shutdown():
sub_cortex = rospy.Subscriber('/cortex_raw' , Cortex, GetStates, queue_size=1, buff_size=2**24)
sub_traj = rospy.Subscriber('/trajectory' , Trajectories, GetTraj, queue_size=1, buff_size=2**24)
sub_Batt = rospy.Subscriber('/apm/status' , Status, GetBatt)
sub_status = rospy.Subscriber('/controller_status' , Bool, GetStatus)
Basic_Controller()
r.sleep()
|
global nominal_thrust
B = S.battery_remaining
# coefficients for fourth order fit
# determined 11 May 2015 by Spencer Maughan and Ishmaal Erekson
c0 = 0.491674747062374
c1 = -0.024809293286468
c2 = 0.000662710609466
c3 = -0.000008160593348
c4 = 0.000000033699651
nominal_thrust = c0+c1*B+c2*B**2+c3*B**3+c4*B**4
#============================#
# Get Controller Status #
#============================#
|
identifier_body
|
NodePush.ts
|
/*
* Copyright (c) 2012 - 2020, Tim Düsterhus
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
import { Socket } from "socket.io-client";
import _Push from "Bastelstu.be/_Push";
type Push = typeof _Push;
class NodePush implements Push {
private initialized = false;
private connected = false;
private waitForInit: Promise<Socket>;
private initResolve!: (value: Socket) => void;
private initReject!: (reason?: any) => void;
constructor() {
this.waitForInit = new Promise((resolve, reject) => {
this.initResolve = resolve;
this.initReject = reject;
});
}
/**
* Connect to the given host and provide the given signed authentication string.
*/
async init(host: string, connectData: string): Promise<void> {
if (this.initialized) {
return;
}
this.initialized = true;
try {
const socket = (await import("socket.io-client")).default(host);
let token: string | undefined = undefined;
socket.on("connect", () => {
if (token === undefined) {
socket.emit("connectData", connectData);
} else {
|
});
socket.on("rekey", (newToken: string) => {
token = newToken;
});
socket.on("authenticated", () => {
this.connected = true;
});
socket.on("disconnect", () => {
this.connected = false;
});
this.initResolve(socket);
} catch (err) {
console.log("Initializing nodePush failed:", err);
this.initReject(err);
}
}
getFeatureFlags(): string[] {
return [
"authentication",
"target:channels",
"target:groups",
"target:users",
"target:registered",
"target:guest",
];
}
/**
* Execute the given callback after connecting to the nodePush service.
*/
async onConnect(callback: () => unknown): Promise<void> {
const socket = await this.waitForInit;
socket.on("authenticated", () => {
callback();
});
if (this.connected) {
setTimeout(() => {
callback();
}, 0);
}
}
/**
* Execute the given callback after disconnecting from the nodePush service.
*/
async onDisconnect(callback: () => unknown): Promise<void> {
const socket = await this.waitForInit;
socket.on("disconnect", () => {
callback();
});
}
/**
* Execute the given callback after receiving the given message from the nodePush service.
*/
async onMessage(
message: string,
callback: (payload: unknown) => unknown
): Promise<void> {
if (!/^[a-zA-Z0-9-_]+\.[a-zA-Z0-9-_]+(\.[a-zA-Z0-9-_]+)+$/.test(message)) {
throw new Error("Invalid message identifier");
}
const socket = await this.waitForInit;
socket.on(message, (payload: unknown) => {
callback(payload);
});
}
}
export = new NodePush();
|
socket.emit("token", token);
}
|
conditional_block
|
NodePush.ts
|
/*
* Copyright (c) 2012 - 2020, Tim Düsterhus
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
import { Socket } from "socket.io-client";
import _Push from "Bastelstu.be/_Push";
type Push = typeof _Push;
class NodePush implements Push {
private initialized = false;
private connected = false;
private waitForInit: Promise<Socket>;
private initResolve!: (value: Socket) => void;
private initReject!: (reason?: any) => void;
constructor() {
|
/**
* Connect to the given host and provide the given signed authentication string.
*/
async init(host: string, connectData: string): Promise<void> {
if (this.initialized) {
return;
}
this.initialized = true;
try {
const socket = (await import("socket.io-client")).default(host);
let token: string | undefined = undefined;
socket.on("connect", () => {
if (token === undefined) {
socket.emit("connectData", connectData);
} else {
socket.emit("token", token);
}
});
socket.on("rekey", (newToken: string) => {
token = newToken;
});
socket.on("authenticated", () => {
this.connected = true;
});
socket.on("disconnect", () => {
this.connected = false;
});
this.initResolve(socket);
} catch (err) {
console.log("Initializing nodePush failed:", err);
this.initReject(err);
}
}
getFeatureFlags(): string[] {
return [
"authentication",
"target:channels",
"target:groups",
"target:users",
"target:registered",
"target:guest",
];
}
/**
* Execute the given callback after connecting to the nodePush service.
*/
async onConnect(callback: () => unknown): Promise<void> {
const socket = await this.waitForInit;
socket.on("authenticated", () => {
callback();
});
if (this.connected) {
setTimeout(() => {
callback();
}, 0);
}
}
/**
* Execute the given callback after disconnecting from the nodePush service.
*/
async onDisconnect(callback: () => unknown): Promise<void> {
const socket = await this.waitForInit;
socket.on("disconnect", () => {
callback();
});
}
/**
* Execute the given callback after receiving the given message from the nodePush service.
*/
async onMessage(
message: string,
callback: (payload: unknown) => unknown
): Promise<void> {
if (!/^[a-zA-Z0-9-_]+\.[a-zA-Z0-9-_]+(\.[a-zA-Z0-9-_]+)+$/.test(message)) {
throw new Error("Invalid message identifier");
}
const socket = await this.waitForInit;
socket.on(message, (payload: unknown) => {
callback(payload);
});
}
}
export = new NodePush();
|
this.waitForInit = new Promise((resolve, reject) => {
this.initResolve = resolve;
this.initReject = reject;
});
}
|
identifier_body
|
NodePush.ts
|
/*
* Copyright (c) 2012 - 2020, Tim Düsterhus
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
import { Socket } from "socket.io-client";
import _Push from "Bastelstu.be/_Push";
type Push = typeof _Push;
class NodePush implements Push {
private initialized = false;
private connected = false;
private waitForInit: Promise<Socket>;
private initResolve!: (value: Socket) => void;
private initReject!: (reason?: any) => void;
constructor() {
this.waitForInit = new Promise((resolve, reject) => {
this.initResolve = resolve;
this.initReject = reject;
});
}
/**
* Connect to the given host and provide the given signed authentication string.
*/
async init(host: string, connectData: string): Promise<void> {
if (this.initialized) {
return;
}
this.initialized = true;
try {
const socket = (await import("socket.io-client")).default(host);
let token: string | undefined = undefined;
socket.on("connect", () => {
if (token === undefined) {
socket.emit("connectData", connectData);
} else {
socket.emit("token", token);
}
});
socket.on("rekey", (newToken: string) => {
token = newToken;
});
socket.on("authenticated", () => {
this.connected = true;
});
socket.on("disconnect", () => {
this.connected = false;
});
this.initResolve(socket);
} catch (err) {
console.log("Initializing nodePush failed:", err);
this.initReject(err);
}
}
getFeatureFlags(): string[] {
return [
"authentication",
"target:channels",
"target:groups",
"target:users",
"target:registered",
"target:guest",
];
}
/**
* Execute the given callback after connecting to the nodePush service.
*/
async onConnect(callback: () => unknown): Promise<void> {
const socket = await this.waitForInit;
socket.on("authenticated", () => {
callback();
});
if (this.connected) {
setTimeout(() => {
callback();
}, 0);
}
}
/**
* Execute the given callback after disconnecting from the nodePush service.
*/
async onDisconnect(callback: () => unknown): Promise<void> {
const socket = await this.waitForInit;
socket.on("disconnect", () => {
callback();
});
}
/**
* Execute the given callback after receiving the given message from the nodePush service.
*/
async o
|
message: string,
callback: (payload: unknown) => unknown
): Promise<void> {
if (!/^[a-zA-Z0-9-_]+\.[a-zA-Z0-9-_]+(\.[a-zA-Z0-9-_]+)+$/.test(message)) {
throw new Error("Invalid message identifier");
}
const socket = await this.waitForInit;
socket.on(message, (payload: unknown) => {
callback(payload);
});
}
}
export = new NodePush();
|
nMessage(
|
identifier_name
|
NodePush.ts
|
/*
* Copyright (c) 2012 - 2020, Tim Düsterhus
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
import { Socket } from "socket.io-client";
import _Push from "Bastelstu.be/_Push";
type Push = typeof _Push;
class NodePush implements Push {
private initialized = false;
private connected = false;
private waitForInit: Promise<Socket>;
private initResolve!: (value: Socket) => void;
private initReject!: (reason?: any) => void;
constructor() {
this.waitForInit = new Promise((resolve, reject) => {
this.initResolve = resolve;
this.initReject = reject;
});
}
/**
* Connect to the given host and provide the given signed authentication string.
*/
async init(host: string, connectData: string): Promise<void> {
if (this.initialized) {
return;
}
this.initialized = true;
try {
const socket = (await import("socket.io-client")).default(host);
let token: string | undefined = undefined;
socket.on("connect", () => {
if (token === undefined) {
socket.emit("connectData", connectData);
} else {
socket.emit("token", token);
}
});
socket.on("rekey", (newToken: string) => {
token = newToken;
});
socket.on("authenticated", () => {
this.connected = true;
});
socket.on("disconnect", () => {
this.connected = false;
});
this.initResolve(socket);
} catch (err) {
console.log("Initializing nodePush failed:", err);
this.initReject(err);
}
}
getFeatureFlags(): string[] {
return [
"authentication",
"target:channels",
"target:groups",
"target:users",
"target:registered",
"target:guest",
];
}
/**
* Execute the given callback after connecting to the nodePush service.
*/
async onConnect(callback: () => unknown): Promise<void> {
const socket = await this.waitForInit;
socket.on("authenticated", () => {
callback();
});
if (this.connected) {
setTimeout(() => {
callback();
}, 0);
}
}
/**
* Execute the given callback after disconnecting from the nodePush service.
*/
async onDisconnect(callback: () => unknown): Promise<void> {
const socket = await this.waitForInit;
socket.on("disconnect", () => {
callback();
});
}
/**
* Execute the given callback after receiving the given message from the nodePush service.
*/
async onMessage(
message: string,
callback: (payload: unknown) => unknown
): Promise<void> {
if (!/^[a-zA-Z0-9-_]+\.[a-zA-Z0-9-_]+(\.[a-zA-Z0-9-_]+)+$/.test(message)) {
throw new Error("Invalid message identifier");
}
|
socket.on(message, (payload: unknown) => {
callback(payload);
});
}
}
export = new NodePush();
|
const socket = await this.waitForInit;
|
random_line_split
|
linktest_rsp_header.py
|
#####################################################################
# linktest_rsp_header.py
#
# (c) Copyright 2021, Benjamin Parzella. All rights reserved.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
|
from .header import HsmsHeader
class HsmsLinktestRspHeader(HsmsHeader):
"""
Header for Linktest Response.
Header for message with SType 6.
"""
def __init__(self, system):
"""
Initialize a hsms linktest response.
:param system: message ID
:type system: integer
**Example**::
>>> import secsgem.hsms
>>>
>>> secsgem.hsms.HsmsLinktestRspHeader(10)
HsmsLinktestRspHeader({sessionID:0xffff, stream:00, function:00, pType:0x00, sType:0x06, \
system:0x0000000a, requireResponse:False})
"""
HsmsHeader.__init__(self, system, 0xFFFF)
self.requireResponse = False
self.stream = 0x00
self.function = 0x00
self.pType = 0x00
self.sType = 0x06
|
#####################################################################
"""Header for the hsms linktest response."""
|
random_line_split
|
linktest_rsp_header.py
|
#####################################################################
# linktest_rsp_header.py
#
# (c) Copyright 2021, Benjamin Parzella. All rights reserved.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#####################################################################
"""Header for the hsms linktest response."""
from .header import HsmsHeader
class HsmsLinktestRspHeader(HsmsHeader):
"""
Header for Linktest Response.
Header for message with SType 6.
"""
def __init__(self, system):
|
"""
Initialize a hsms linktest response.
:param system: message ID
:type system: integer
**Example**::
>>> import secsgem.hsms
>>>
>>> secsgem.hsms.HsmsLinktestRspHeader(10)
HsmsLinktestRspHeader({sessionID:0xffff, stream:00, function:00, pType:0x00, sType:0x06, \
system:0x0000000a, requireResponse:False})
"""
HsmsHeader.__init__(self, system, 0xFFFF)
self.requireResponse = False
self.stream = 0x00
self.function = 0x00
self.pType = 0x00
self.sType = 0x06
|
identifier_body
|
|
linktest_rsp_header.py
|
#####################################################################
# linktest_rsp_header.py
#
# (c) Copyright 2021, Benjamin Parzella. All rights reserved.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#####################################################################
"""Header for the hsms linktest response."""
from .header import HsmsHeader
class
|
(HsmsHeader):
"""
Header for Linktest Response.
Header for message with SType 6.
"""
def __init__(self, system):
"""
Initialize a hsms linktest response.
:param system: message ID
:type system: integer
**Example**::
>>> import secsgem.hsms
>>>
>>> secsgem.hsms.HsmsLinktestRspHeader(10)
HsmsLinktestRspHeader({sessionID:0xffff, stream:00, function:00, pType:0x00, sType:0x06, \
system:0x0000000a, requireResponse:False})
"""
HsmsHeader.__init__(self, system, 0xFFFF)
self.requireResponse = False
self.stream = 0x00
self.function = 0x00
self.pType = 0x00
self.sType = 0x06
|
HsmsLinktestRspHeader
|
identifier_name
|
PostsItemMetaInfo.tsx
|
import React from 'react';
import { registerComponent, Components } from '../../lib/vulcan-lib';
import classNames from 'classnames'
const styles = (theme: ThemeType): JssStyles => ({
root: {
color: theme.palette.grey[600],
fontSize: "1.1rem",
display: "flex",
alignItems: "center",
},
})
const PostsItemMetaInfo = ({children, classes, className}: {
children?: React.ReactNode,
classes: ClassesType,
className?: string,
|
component='span'
className={classNames(classes.root, className)}
variant='body2'>
{children}
</Components.Typography>
}
const PostsItemMetaInfoComponent = registerComponent('PostsItemMetaInfo', PostsItemMetaInfo, {styles});
declare global {
interface ComponentTypes {
PostsItemMetaInfo: typeof PostsItemMetaInfoComponent
}
}
|
}) => {
return <Components.Typography
|
random_line_split
|
tabmenu.py
|
# from pytigon_js.tools import history_push_state, correct_href, remove_element, process_resize
# from pytigon_js.ajax_region import mount_html
class Page:
def __init__(self, id, page):
self.id = id
self.page = page
def set_href(self, href):
self.page.attr("_href", href)
def get_href(self):
return self.page.attr("_href")
class TabMenuItem:
def __init__(self, id, title, url, data=None):
self.id = id
self.title = jQuery.trim(title)
self.url = url
self.data = data
class TabMenu:
def __init__(self):
self.id = 0
self.titles = {}
self.active_item = None
def get_active_item(self):
return self.active_item
def is_open(self, title):
if self.titles and title in self.titles and self.titles[title]:
return True
else:
return False
def activate(self, title, push_state=True):
menu_item = self.titles[title]
jQuery(sprintf("#li_%s a", menu_item.id)).tab("show")
if push_state and window.PUSH_STATE:
history_push_state(menu_item.title, menu_item.url)
def register(self, title):
|
def new_page(self, title, data_or_html, href, title_alt=None):
_id = "tab" + self.id
menu_item = TabMenuItem(_id, title, href, data_or_html)
self.titles[title] = menu_item
if title_alt and title_alt != title:
self.titles[title_alt] = menu_item
menu_pos = vsprintf(
"<li id='li_%s' class ='nav-item'><a href='#%s' class='nav-link bg-info' data-toggle='tab' data-bs-toggle='tab' role='tab' title='%s'>%s    </a> <button id = 'button_%s' class='close btn btn-outline-danger btn-xs' title='remove page' type='button'><span class='fa fa-times'></span></button></li>",
[_id, _id, title, title, _id],
)
append_left = jQuery("#tabs2").hasClass("append-left")
if append_left:
jQuery("#tabs2").prepend(menu_pos)
else:
jQuery("#tabs2").append(menu_pos)
jQuery("#tabs2_content").append(
sprintf(
"<div class='tab-pane container-fluid ajax-region ajax-frame ajax-link win-content content page' id='%s' data-region='page' href='%s'></div>",
_id,
href,
)
)
window.ACTIVE_PAGE = Page(_id, jQuery("#" + _id))
self.active_item = menu_item
if window.PUSH_STATE:
history_push_state(title, href)
def _on_show_tab(self, e):
nonlocal menu_item
window.ACTIVE_PAGE = Page(_id, jQuery("#" + _id), menu_item)
menu = get_menu()
menu_item = menu.titles[jQuery.trim(e.target.text)]
self.active_item = menu_item
if window.PUSH_STATE:
history_push_state(menu_item.title, menu_item.url)
process_resize(document.getElementById(menu_item.id))
if append_left:
jQuery("#tabs2 a:first").on("shown.bs.tab", _on_show_tab)
jQuery("#tabs2 a:first").tab("show")
else:
jQuery("#tabs2 a:last").on("shown.bs.tab", _on_show_tab)
jQuery("#tabs2 a:last").tab("show")
mount_html(document.getElementById(_id), data_or_html, None)
def _on_button_click(self, event):
get_menu().remove_page(jQuery(this).attr("id").replace("button_", ""))
jQuery(sprintf("#button_%s", _id)).click(_on_button_click)
scripts = jQuery("#" + _id + " script")
def _local_fun(index, element):
eval(this.innerHTML)
scripts.each(_local_fun)
self.id += 1
return _id
def remove_page(self, id):
def _local_fun(index, value):
if value and value.id == id:
self.titles[index] = None
jQuery.each(self.titles, _local_fun)
remove_element(sprintf("#li_%s", id))
remove_element(sprintf("#%s", id))
last_a = jQuery("#tabs2 a:last")
if last_a.length > 0:
last_a.tab("show")
else:
window.ACTIVE_PAGE = None
if window.PUSH_STATE:
history_push_state("", window.BASE_PATH)
if jQuery("#body_desktop").find(".content").length == 0:
window.init_start_wiki_page()
jQuery("#body_desktop").show()
#'standard' 'simple', 'traditional', 'mobile', 'tablet', 'hybrid'
def on_menu_href(self, elem, data_or_html, title, title_alt=None, url=None):
if window.APPLICATION_TEMPLATE == "modern":
if self.is_open(title):
self.activate(title)
else:
self.register(title)
if url:
href = url
else:
href = jQuery(elem).attr("href")
href2 = correct_href(href)
jQuery("#body_desktop").hide()
# self.new_page(title, data_or_html.innerHTML, href2, title_alt)
self.new_page(title, data_or_html, href2, title_alt)
jQuery(".auto-hide").trigger("click")
return False
else:
mount_html(document.querySelector("#body_desktop"), data_or_html, None)
jQuery(".auto-hide").trigger("click")
return False
def get_menu():
if not window.MENU:
window.MENU = TabMenu()
return window.MENU
|
self.titles[title] = "$$$"
|
identifier_body
|
tabmenu.py
|
# from pytigon_js.tools import history_push_state, correct_href, remove_element, process_resize
# from pytigon_js.ajax_region import mount_html
class Page:
def __init__(self, id, page):
self.id = id
self.page = page
def set_href(self, href):
self.page.attr("_href", href)
def get_href(self):
return self.page.attr("_href")
class TabMenuItem:
def __init__(self, id, title, url, data=None):
self.id = id
self.title = jQuery.trim(title)
self.url = url
self.data = data
class
|
:
def __init__(self):
self.id = 0
self.titles = {}
self.active_item = None
def get_active_item(self):
return self.active_item
def is_open(self, title):
if self.titles and title in self.titles and self.titles[title]:
return True
else:
return False
def activate(self, title, push_state=True):
menu_item = self.titles[title]
jQuery(sprintf("#li_%s a", menu_item.id)).tab("show")
if push_state and window.PUSH_STATE:
history_push_state(menu_item.title, menu_item.url)
def register(self, title):
self.titles[title] = "$$$"
def new_page(self, title, data_or_html, href, title_alt=None):
_id = "tab" + self.id
menu_item = TabMenuItem(_id, title, href, data_or_html)
self.titles[title] = menu_item
if title_alt and title_alt != title:
self.titles[title_alt] = menu_item
menu_pos = vsprintf(
"<li id='li_%s' class ='nav-item'><a href='#%s' class='nav-link bg-info' data-toggle='tab' data-bs-toggle='tab' role='tab' title='%s'>%s    </a> <button id = 'button_%s' class='close btn btn-outline-danger btn-xs' title='remove page' type='button'><span class='fa fa-times'></span></button></li>",
[_id, _id, title, title, _id],
)
append_left = jQuery("#tabs2").hasClass("append-left")
if append_left:
jQuery("#tabs2").prepend(menu_pos)
else:
jQuery("#tabs2").append(menu_pos)
jQuery("#tabs2_content").append(
sprintf(
"<div class='tab-pane container-fluid ajax-region ajax-frame ajax-link win-content content page' id='%s' data-region='page' href='%s'></div>",
_id,
href,
)
)
window.ACTIVE_PAGE = Page(_id, jQuery("#" + _id))
self.active_item = menu_item
if window.PUSH_STATE:
history_push_state(title, href)
def _on_show_tab(self, e):
nonlocal menu_item
window.ACTIVE_PAGE = Page(_id, jQuery("#" + _id), menu_item)
menu = get_menu()
menu_item = menu.titles[jQuery.trim(e.target.text)]
self.active_item = menu_item
if window.PUSH_STATE:
history_push_state(menu_item.title, menu_item.url)
process_resize(document.getElementById(menu_item.id))
if append_left:
jQuery("#tabs2 a:first").on("shown.bs.tab", _on_show_tab)
jQuery("#tabs2 a:first").tab("show")
else:
jQuery("#tabs2 a:last").on("shown.bs.tab", _on_show_tab)
jQuery("#tabs2 a:last").tab("show")
mount_html(document.getElementById(_id), data_or_html, None)
def _on_button_click(self, event):
get_menu().remove_page(jQuery(this).attr("id").replace("button_", ""))
jQuery(sprintf("#button_%s", _id)).click(_on_button_click)
scripts = jQuery("#" + _id + " script")
def _local_fun(index, element):
eval(this.innerHTML)
scripts.each(_local_fun)
self.id += 1
return _id
def remove_page(self, id):
def _local_fun(index, value):
if value and value.id == id:
self.titles[index] = None
jQuery.each(self.titles, _local_fun)
remove_element(sprintf("#li_%s", id))
remove_element(sprintf("#%s", id))
last_a = jQuery("#tabs2 a:last")
if last_a.length > 0:
last_a.tab("show")
else:
window.ACTIVE_PAGE = None
if window.PUSH_STATE:
history_push_state("", window.BASE_PATH)
if jQuery("#body_desktop").find(".content").length == 0:
window.init_start_wiki_page()
jQuery("#body_desktop").show()
#'standard' 'simple', 'traditional', 'mobile', 'tablet', 'hybrid'
def on_menu_href(self, elem, data_or_html, title, title_alt=None, url=None):
if window.APPLICATION_TEMPLATE == "modern":
if self.is_open(title):
self.activate(title)
else:
self.register(title)
if url:
href = url
else:
href = jQuery(elem).attr("href")
href2 = correct_href(href)
jQuery("#body_desktop").hide()
# self.new_page(title, data_or_html.innerHTML, href2, title_alt)
self.new_page(title, data_or_html, href2, title_alt)
jQuery(".auto-hide").trigger("click")
return False
else:
mount_html(document.querySelector("#body_desktop"), data_or_html, None)
jQuery(".auto-hide").trigger("click")
return False
def get_menu():
if not window.MENU:
window.MENU = TabMenu()
return window.MENU
|
TabMenu
|
identifier_name
|
tabmenu.py
|
# from pytigon_js.tools import history_push_state, correct_href, remove_element, process_resize
# from pytigon_js.ajax_region import mount_html
class Page:
def __init__(self, id, page):
self.id = id
self.page = page
def set_href(self, href):
self.page.attr("_href", href)
def get_href(self):
return self.page.attr("_href")
class TabMenuItem:
def __init__(self, id, title, url, data=None):
self.id = id
self.title = jQuery.trim(title)
self.url = url
self.data = data
class TabMenu:
def __init__(self):
self.id = 0
self.titles = {}
self.active_item = None
def get_active_item(self):
return self.active_item
def is_open(self, title):
if self.titles and title in self.titles and self.titles[title]:
return True
else:
return False
def activate(self, title, push_state=True):
menu_item = self.titles[title]
jQuery(sprintf("#li_%s a", menu_item.id)).tab("show")
if push_state and window.PUSH_STATE:
history_push_state(menu_item.title, menu_item.url)
def register(self, title):
self.titles[title] = "$$$"
def new_page(self, title, data_or_html, href, title_alt=None):
_id = "tab" + self.id
menu_item = TabMenuItem(_id, title, href, data_or_html)
self.titles[title] = menu_item
if title_alt and title_alt != title:
self.titles[title_alt] = menu_item
menu_pos = vsprintf(
"<li id='li_%s' class ='nav-item'><a href='#%s' class='nav-link bg-info' data-toggle='tab' data-bs-toggle='tab' role='tab' title='%s'>%s    </a> <button id = 'button_%s' class='close btn btn-outline-danger btn-xs' title='remove page' type='button'><span class='fa fa-times'></span></button></li>",
[_id, _id, title, title, _id],
)
append_left = jQuery("#tabs2").hasClass("append-left")
if append_left:
jQuery("#tabs2").prepend(menu_pos)
else:
jQuery("#tabs2").append(menu_pos)
jQuery("#tabs2_content").append(
sprintf(
"<div class='tab-pane container-fluid ajax-region ajax-frame ajax-link win-content content page' id='%s' data-region='page' href='%s'></div>",
_id,
href,
)
)
window.ACTIVE_PAGE = Page(_id, jQuery("#" + _id))
self.active_item = menu_item
if window.PUSH_STATE:
history_push_state(title, href)
def _on_show_tab(self, e):
nonlocal menu_item
window.ACTIVE_PAGE = Page(_id, jQuery("#" + _id), menu_item)
menu = get_menu()
menu_item = menu.titles[jQuery.trim(e.target.text)]
self.active_item = menu_item
if window.PUSH_STATE:
history_push_state(menu_item.title, menu_item.url)
process_resize(document.getElementById(menu_item.id))
if append_left:
jQuery("#tabs2 a:first").on("shown.bs.tab", _on_show_tab)
jQuery("#tabs2 a:first").tab("show")
else:
jQuery("#tabs2 a:last").on("shown.bs.tab", _on_show_tab)
jQuery("#tabs2 a:last").tab("show")
mount_html(document.getElementById(_id), data_or_html, None)
def _on_button_click(self, event):
get_menu().remove_page(jQuery(this).attr("id").replace("button_", ""))
jQuery(sprintf("#button_%s", _id)).click(_on_button_click)
scripts = jQuery("#" + _id + " script")
def _local_fun(index, element):
eval(this.innerHTML)
scripts.each(_local_fun)
self.id += 1
return _id
def remove_page(self, id):
def _local_fun(index, value):
if value and value.id == id:
self.titles[index] = None
jQuery.each(self.titles, _local_fun)
remove_element(sprintf("#li_%s", id))
remove_element(sprintf("#%s", id))
last_a = jQuery("#tabs2 a:last")
if last_a.length > 0:
|
else:
window.ACTIVE_PAGE = None
if window.PUSH_STATE:
history_push_state("", window.BASE_PATH)
if jQuery("#body_desktop").find(".content").length == 0:
window.init_start_wiki_page()
jQuery("#body_desktop").show()
#'standard' 'simple', 'traditional', 'mobile', 'tablet', 'hybrid'
def on_menu_href(self, elem, data_or_html, title, title_alt=None, url=None):
if window.APPLICATION_TEMPLATE == "modern":
if self.is_open(title):
self.activate(title)
else:
self.register(title)
if url:
href = url
else:
href = jQuery(elem).attr("href")
href2 = correct_href(href)
jQuery("#body_desktop").hide()
# self.new_page(title, data_or_html.innerHTML, href2, title_alt)
self.new_page(title, data_or_html, href2, title_alt)
jQuery(".auto-hide").trigger("click")
return False
else:
mount_html(document.querySelector("#body_desktop"), data_or_html, None)
jQuery(".auto-hide").trigger("click")
return False
def get_menu():
if not window.MENU:
window.MENU = TabMenu()
return window.MENU
|
last_a.tab("show")
|
conditional_block
|
tabmenu.py
|
# from pytigon_js.tools import history_push_state, correct_href, remove_element, process_resize
# from pytigon_js.ajax_region import mount_html
class Page:
def __init__(self, id, page):
self.id = id
self.page = page
def set_href(self, href):
self.page.attr("_href", href)
def get_href(self):
return self.page.attr("_href")
class TabMenuItem:
def __init__(self, id, title, url, data=None):
self.id = id
self.title = jQuery.trim(title)
self.url = url
self.data = data
class TabMenu:
def __init__(self):
self.id = 0
self.titles = {}
self.active_item = None
def get_active_item(self):
return self.active_item
def is_open(self, title):
if self.titles and title in self.titles and self.titles[title]:
return True
else:
return False
def activate(self, title, push_state=True):
menu_item = self.titles[title]
jQuery(sprintf("#li_%s a", menu_item.id)).tab("show")
if push_state and window.PUSH_STATE:
history_push_state(menu_item.title, menu_item.url)
def register(self, title):
self.titles[title] = "$$$"
def new_page(self, title, data_or_html, href, title_alt=None):
_id = "tab" + self.id
menu_item = TabMenuItem(_id, title, href, data_or_html)
self.titles[title] = menu_item
if title_alt and title_alt != title:
self.titles[title_alt] = menu_item
menu_pos = vsprintf(
"<li id='li_%s' class ='nav-item'><a href='#%s' class='nav-link bg-info' data-toggle='tab' data-bs-toggle='tab' role='tab' title='%s'>%s    </a> <button id = 'button_%s' class='close btn btn-outline-danger btn-xs' title='remove page' type='button'><span class='fa fa-times'></span></button></li>",
[_id, _id, title, title, _id],
)
append_left = jQuery("#tabs2").hasClass("append-left")
if append_left:
jQuery("#tabs2").prepend(menu_pos)
else:
jQuery("#tabs2").append(menu_pos)
jQuery("#tabs2_content").append(
sprintf(
"<div class='tab-pane container-fluid ajax-region ajax-frame ajax-link win-content content page' id='%s' data-region='page' href='%s'></div>",
_id,
href,
)
)
window.ACTIVE_PAGE = Page(_id, jQuery("#" + _id))
self.active_item = menu_item
if window.PUSH_STATE:
history_push_state(title, href)
def _on_show_tab(self, e):
nonlocal menu_item
window.ACTIVE_PAGE = Page(_id, jQuery("#" + _id), menu_item)
menu = get_menu()
menu_item = menu.titles[jQuery.trim(e.target.text)]
self.active_item = menu_item
if window.PUSH_STATE:
history_push_state(menu_item.title, menu_item.url)
process_resize(document.getElementById(menu_item.id))
if append_left:
jQuery("#tabs2 a:first").on("shown.bs.tab", _on_show_tab)
|
jQuery("#tabs2 a:last").on("shown.bs.tab", _on_show_tab)
jQuery("#tabs2 a:last").tab("show")
mount_html(document.getElementById(_id), data_or_html, None)
def _on_button_click(self, event):
get_menu().remove_page(jQuery(this).attr("id").replace("button_", ""))
jQuery(sprintf("#button_%s", _id)).click(_on_button_click)
scripts = jQuery("#" + _id + " script")
def _local_fun(index, element):
eval(this.innerHTML)
scripts.each(_local_fun)
self.id += 1
return _id
def remove_page(self, id):
def _local_fun(index, value):
if value and value.id == id:
self.titles[index] = None
jQuery.each(self.titles, _local_fun)
remove_element(sprintf("#li_%s", id))
remove_element(sprintf("#%s", id))
last_a = jQuery("#tabs2 a:last")
if last_a.length > 0:
last_a.tab("show")
else:
window.ACTIVE_PAGE = None
if window.PUSH_STATE:
history_push_state("", window.BASE_PATH)
if jQuery("#body_desktop").find(".content").length == 0:
window.init_start_wiki_page()
jQuery("#body_desktop").show()
#'standard' 'simple', 'traditional', 'mobile', 'tablet', 'hybrid'
def on_menu_href(self, elem, data_or_html, title, title_alt=None, url=None):
if window.APPLICATION_TEMPLATE == "modern":
if self.is_open(title):
self.activate(title)
else:
self.register(title)
if url:
href = url
else:
href = jQuery(elem).attr("href")
href2 = correct_href(href)
jQuery("#body_desktop").hide()
# self.new_page(title, data_or_html.innerHTML, href2, title_alt)
self.new_page(title, data_or_html, href2, title_alt)
jQuery(".auto-hide").trigger("click")
return False
else:
mount_html(document.querySelector("#body_desktop"), data_or_html, None)
jQuery(".auto-hide").trigger("click")
return False
def get_menu():
if not window.MENU:
window.MENU = TabMenu()
return window.MENU
|
jQuery("#tabs2 a:first").tab("show")
else:
|
random_line_split
|
avi_cloudproperties.py
|
#!/usr/bin/python
#
# Created on Aug 25, 2016
# @author: Gaurav Rastogi ([email protected])
# Eric Anderson ([email protected])
# module_check: supported
# Avi Version: 17.1.1
#
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: avi_cloudproperties
author: Gaurav Rastogi ([email protected])
short_description: Module for setup of CloudProperties Avi RESTful Object
description:
- This module is used to configure CloudProperties object
- more examples at U(https://github.com/avinetworks/devops)
requirements: [ avisdk ]
version_added: "2.4"
options:
state:
description:
- The state that should be applied on the entity.
default: present
choices: ["absent","present"]
cc_props:
description:
- Cloudconnector properties.
cc_vtypes:
description:
- Cloud types supported by cloudconnector.
- Enum options - CLOUD_NONE, CLOUD_VCENTER, CLOUD_OPENSTACK, CLOUD_AWS, CLOUD_VCA, CLOUD_APIC, CLOUD_MESOS, CLOUD_LINUXSERVER, CLOUD_DOCKER_UCP,
- CLOUD_RANCHER, CLOUD_OSHIFT_K8S.
hyp_props:
description:
- Hypervisor properties.
info:
description:
- Properties specific to a cloud type.
url:
description:
- Avi controller URL of the object.
uuid:
description:
- Unique object identifier of the object.
extends_documentation_fragment:
- avi
'''
EXAMPLES = """
- name: Example to create CloudProperties object
avi_cloudproperties:
controller: 10.10.25.42
username: admin
password: something
state: present
name: sample_cloudproperties
"""
RETURN = '''
obj:
description: CloudProperties (api/cloudproperties) object
returned: success, changed
type: dict
'''
from ansible.module_utils.basic import AnsibleModule
try:
from ansible.module_utils.network.avi.avi import (
avi_common_argument_spec, HAS_AVI, avi_ansible_api)
except ImportError:
HAS_AVI = False
def main():
argument_specs = dict(
state=dict(default='present',
choices=['absent', 'present']),
cc_props=dict(type='dict',),
cc_vtypes=dict(type='list',),
hyp_props=dict(type='list',),
info=dict(type='list',),
url=dict(type='str',),
uuid=dict(type='str',),
)
argument_specs.update(avi_common_argument_spec())
module = AnsibleModule(
argument_spec=argument_specs, supports_check_mode=True)
if not HAS_AVI:
|
return avi_ansible_api(module, 'cloudproperties',
set([]))
if __name__ == '__main__':
main()
|
return module.fail_json(msg=(
'Avi python API SDK (avisdk>=17.1) is not installed. '
'For more details visit https://github.com/avinetworks/sdk.'))
|
conditional_block
|
avi_cloudproperties.py
|
#!/usr/bin/python
#
# Created on Aug 25, 2016
# @author: Gaurav Rastogi ([email protected])
# Eric Anderson ([email protected])
# module_check: supported
# Avi Version: 17.1.1
#
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: avi_cloudproperties
author: Gaurav Rastogi ([email protected])
short_description: Module for setup of CloudProperties Avi RESTful Object
description:
- This module is used to configure CloudProperties object
- more examples at U(https://github.com/avinetworks/devops)
requirements: [ avisdk ]
version_added: "2.4"
options:
state:
description:
- The state that should be applied on the entity.
default: present
choices: ["absent","present"]
cc_props:
description:
- Cloudconnector properties.
cc_vtypes:
description:
- Cloud types supported by cloudconnector.
- Enum options - CLOUD_NONE, CLOUD_VCENTER, CLOUD_OPENSTACK, CLOUD_AWS, CLOUD_VCA, CLOUD_APIC, CLOUD_MESOS, CLOUD_LINUXSERVER, CLOUD_DOCKER_UCP,
- CLOUD_RANCHER, CLOUD_OSHIFT_K8S.
hyp_props:
description:
- Hypervisor properties.
info:
description:
- Properties specific to a cloud type.
url:
description:
- Avi controller URL of the object.
uuid:
description:
- Unique object identifier of the object.
extends_documentation_fragment:
- avi
'''
EXAMPLES = """
- name: Example to create CloudProperties object
avi_cloudproperties:
controller: 10.10.25.42
username: admin
password: something
state: present
name: sample_cloudproperties
"""
RETURN = '''
obj:
description: CloudProperties (api/cloudproperties) object
returned: success, changed
type: dict
'''
from ansible.module_utils.basic import AnsibleModule
try:
from ansible.module_utils.network.avi.avi import (
avi_common_argument_spec, HAS_AVI, avi_ansible_api)
except ImportError:
HAS_AVI = False
def
|
():
argument_specs = dict(
state=dict(default='present',
choices=['absent', 'present']),
cc_props=dict(type='dict',),
cc_vtypes=dict(type='list',),
hyp_props=dict(type='list',),
info=dict(type='list',),
url=dict(type='str',),
uuid=dict(type='str',),
)
argument_specs.update(avi_common_argument_spec())
module = AnsibleModule(
argument_spec=argument_specs, supports_check_mode=True)
if not HAS_AVI:
return module.fail_json(msg=(
'Avi python API SDK (avisdk>=17.1) is not installed. '
'For more details visit https://github.com/avinetworks/sdk.'))
return avi_ansible_api(module, 'cloudproperties',
set([]))
if __name__ == '__main__':
main()
|
main
|
identifier_name
|
avi_cloudproperties.py
|
#!/usr/bin/python
#
# Created on Aug 25, 2016
# @author: Gaurav Rastogi ([email protected])
# Eric Anderson ([email protected])
# module_check: supported
# Avi Version: 17.1.1
#
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: avi_cloudproperties
author: Gaurav Rastogi ([email protected])
short_description: Module for setup of CloudProperties Avi RESTful Object
description:
- This module is used to configure CloudProperties object
- more examples at U(https://github.com/avinetworks/devops)
requirements: [ avisdk ]
version_added: "2.4"
options:
state:
description:
- The state that should be applied on the entity.
default: present
choices: ["absent","present"]
cc_props:
description:
- Cloudconnector properties.
cc_vtypes:
description:
- Cloud types supported by cloudconnector.
- Enum options - CLOUD_NONE, CLOUD_VCENTER, CLOUD_OPENSTACK, CLOUD_AWS, CLOUD_VCA, CLOUD_APIC, CLOUD_MESOS, CLOUD_LINUXSERVER, CLOUD_DOCKER_UCP,
- CLOUD_RANCHER, CLOUD_OSHIFT_K8S.
hyp_props:
description:
- Hypervisor properties.
info:
description:
- Properties specific to a cloud type.
url:
description:
- Avi controller URL of the object.
uuid:
description:
- Unique object identifier of the object.
extends_documentation_fragment:
- avi
'''
EXAMPLES = """
- name: Example to create CloudProperties object
avi_cloudproperties:
controller: 10.10.25.42
username: admin
password: something
state: present
name: sample_cloudproperties
"""
|
RETURN = '''
obj:
description: CloudProperties (api/cloudproperties) object
returned: success, changed
type: dict
'''
from ansible.module_utils.basic import AnsibleModule
try:
from ansible.module_utils.network.avi.avi import (
avi_common_argument_spec, HAS_AVI, avi_ansible_api)
except ImportError:
HAS_AVI = False
def main():
argument_specs = dict(
state=dict(default='present',
choices=['absent', 'present']),
cc_props=dict(type='dict',),
cc_vtypes=dict(type='list',),
hyp_props=dict(type='list',),
info=dict(type='list',),
url=dict(type='str',),
uuid=dict(type='str',),
)
argument_specs.update(avi_common_argument_spec())
module = AnsibleModule(
argument_spec=argument_specs, supports_check_mode=True)
if not HAS_AVI:
return module.fail_json(msg=(
'Avi python API SDK (avisdk>=17.1) is not installed. '
'For more details visit https://github.com/avinetworks/sdk.'))
return avi_ansible_api(module, 'cloudproperties',
set([]))
if __name__ == '__main__':
main()
|
random_line_split
|
|
avi_cloudproperties.py
|
#!/usr/bin/python
#
# Created on Aug 25, 2016
# @author: Gaurav Rastogi ([email protected])
# Eric Anderson ([email protected])
# module_check: supported
# Avi Version: 17.1.1
#
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: avi_cloudproperties
author: Gaurav Rastogi ([email protected])
short_description: Module for setup of CloudProperties Avi RESTful Object
description:
- This module is used to configure CloudProperties object
- more examples at U(https://github.com/avinetworks/devops)
requirements: [ avisdk ]
version_added: "2.4"
options:
state:
description:
- The state that should be applied on the entity.
default: present
choices: ["absent","present"]
cc_props:
description:
- Cloudconnector properties.
cc_vtypes:
description:
- Cloud types supported by cloudconnector.
- Enum options - CLOUD_NONE, CLOUD_VCENTER, CLOUD_OPENSTACK, CLOUD_AWS, CLOUD_VCA, CLOUD_APIC, CLOUD_MESOS, CLOUD_LINUXSERVER, CLOUD_DOCKER_UCP,
- CLOUD_RANCHER, CLOUD_OSHIFT_K8S.
hyp_props:
description:
- Hypervisor properties.
info:
description:
- Properties specific to a cloud type.
url:
description:
- Avi controller URL of the object.
uuid:
description:
- Unique object identifier of the object.
extends_documentation_fragment:
- avi
'''
EXAMPLES = """
- name: Example to create CloudProperties object
avi_cloudproperties:
controller: 10.10.25.42
username: admin
password: something
state: present
name: sample_cloudproperties
"""
RETURN = '''
obj:
description: CloudProperties (api/cloudproperties) object
returned: success, changed
type: dict
'''
from ansible.module_utils.basic import AnsibleModule
try:
from ansible.module_utils.network.avi.avi import (
avi_common_argument_spec, HAS_AVI, avi_ansible_api)
except ImportError:
HAS_AVI = False
def main():
|
if __name__ == '__main__':
main()
|
argument_specs = dict(
state=dict(default='present',
choices=['absent', 'present']),
cc_props=dict(type='dict',),
cc_vtypes=dict(type='list',),
hyp_props=dict(type='list',),
info=dict(type='list',),
url=dict(type='str',),
uuid=dict(type='str',),
)
argument_specs.update(avi_common_argument_spec())
module = AnsibleModule(
argument_spec=argument_specs, supports_check_mode=True)
if not HAS_AVI:
return module.fail_json(msg=(
'Avi python API SDK (avisdk>=17.1) is not installed. '
'For more details visit https://github.com/avinetworks/sdk.'))
return avi_ansible_api(module, 'cloudproperties',
set([]))
|
identifier_body
|
tiles.rs
|
use super::Tile;
use colors;
use pixset;
use pixset::PixLike;
use units;
pub struct Tiles<P: PixLike> {
size: units::Size2D,
pub tiles: Vec<Tile<P>>, // TODO impl Index
}
impl<P> Tiles<P>
where
P: pixset::PixLike,
{
pub fn new(size: units::Size2D) -> Self {
let tiles = {
// TODO area
let length = (size.width * size.height) as usize;
let mut ts = Vec::with_capacity(length);
for _ in 0..length {
let t = Tile::new();
ts.push(t);
}
ts
};
Tiles {
size: size,
tiles: tiles,
}
}
#[allow(dead_code)]
pub fn clear(&mut self) {
for t in self.tiles.iter_mut() {
t.clear();
}
}
pub fn
|
(&mut self, loc: units::ScreenTile2D, pix: P, fg: colors::Rgb, bg: colors::Rgb) {
// TODO asserts
let idx = (self.size.width * loc.y + loc.x) as usize;
self.tiles[idx].pix = pix;
self.tiles[idx].fg = fg;
self.tiles[idx].bg = bg;
}
}
|
set
|
identifier_name
|
tiles.rs
|
use super::Tile;
use colors;
use pixset;
use pixset::PixLike;
use units;
pub struct Tiles<P: PixLike> {
size: units::Size2D,
pub tiles: Vec<Tile<P>>, // TODO impl Index
}
impl<P> Tiles<P>
where
P: pixset::PixLike,
{
pub fn new(size: units::Size2D) -> Self {
let tiles = {
// TODO area
let length = (size.width * size.height) as usize;
let mut ts = Vec::with_capacity(length);
for _ in 0..length {
let t = Tile::new();
ts.push(t);
}
ts
};
Tiles {
size: size,
tiles: tiles,
}
}
|
}
}
pub fn set(&mut self, loc: units::ScreenTile2D, pix: P, fg: colors::Rgb, bg: colors::Rgb) {
// TODO asserts
let idx = (self.size.width * loc.y + loc.x) as usize;
self.tiles[idx].pix = pix;
self.tiles[idx].fg = fg;
self.tiles[idx].bg = bg;
}
}
|
#[allow(dead_code)]
pub fn clear(&mut self) {
for t in self.tiles.iter_mut() {
t.clear();
|
random_line_split
|
hw2.py
|
__author__ = 'eric'
import pandas as pd
import numpy as np
import math
import copy
import QSTK.qstkutil.qsdateutil as du
import datetime as dt
import QSTK.qstkutil.DataAccess as da
import QSTK.qstkutil.tsutil as tsu
import QSTK.qstkstudy.EventProfiler as ep
dataObj = da.DataAccess('Yahoo')
def find_events(ls_symbols, d_data):
''' Finding the event dataframe '''
df_close = d_data['actual_close']
print "Finding Events"
# Creating an empty dataframe
df_events = copy.deepcopy(df_close)
df_events = df_events * np.NAN
# Time stamps for the event range
ldt_timestamps = df_close.index
for s_sym in ls_symbols:
for i in range(1, len(ldt_timestamps)):
# Calculating the returns for this timestamp
f_symprice_today = df_close[s_sym].ix[ldt_timestamps[i]]
f_symprice_yest = df_close[s_sym].ix[ldt_timestamps[i - 1]]
# Event is found if on 2 consecutive closes the price went from
# greater than or equal to 5.00 to less than 5.00
if f_symprice_yest >= 5.0 and f_symprice_today < 5.0:
|
return df_events
def create_study(ls_symbols, ldt_timestamps, s_study_name):
global dataObj
print "Grabbing data to perform {0}".format(s_study_name)
ls_keys = ['close', 'actual_close']
ldf_data = dataObj.get_data(ldt_timestamps, ls_symbols, ls_keys)
print "Got data for study {0}".format(s_study_name)
d_data = dict(zip(ls_keys, ldf_data))
for s_key in ls_keys:
d_data[s_key] = d_data[s_key].fillna(method='ffill')
d_data[s_key] = d_data[s_key].fillna(method='bfill')
d_data[s_key] = d_data[s_key].fillna(1.0)
df_events = find_events(ls_symbols, d_data)
print "Creating Study"
ep.eventprofiler(df_events, d_data, i_lookback=20, i_lookforward=20,
s_filename=s_study_name, b_market_neutral=True, b_errorbars=True,
s_market_sym='SPY')
def main():
dt_start = dt.datetime(2008, 1, 1)
dt_end = dt.datetime(2009, 12, 31)
ldt_timestamps = du.getNYSEdays(dt_start, dt_end, dt.timedelta(hours=16))
global dataObj
ls_symbols_2012 = dataObj.get_symbols_from_list('sp5002012')
ls_symbols_2012.append('SPY')
ls_symbols_2008 = dataObj.get_symbols_from_list('sp5002008')
ls_symbols_2008.append('SPY')
#create_study(ls_symbols_2008, ldt_timestamps, '2008Study2.pdf')
create_study(ls_symbols_2012, ldt_timestamps, '2012Study2.pdf')
if __name__ == '__main__':
main()
|
df_events[s_sym].ix[ldt_timestamps[i]] = 1
|
conditional_block
|
hw2.py
|
__author__ = 'eric'
import pandas as pd
import numpy as np
import math
import copy
import QSTK.qstkutil.qsdateutil as du
import datetime as dt
import QSTK.qstkutil.DataAccess as da
import QSTK.qstkutil.tsutil as tsu
import QSTK.qstkstudy.EventProfiler as ep
dataObj = da.DataAccess('Yahoo')
def find_events(ls_symbols, d_data):
''' Finding the event dataframe '''
df_close = d_data['actual_close']
print "Finding Events"
# Creating an empty dataframe
df_events = copy.deepcopy(df_close)
df_events = df_events * np.NAN
# Time stamps for the event range
ldt_timestamps = df_close.index
for s_sym in ls_symbols:
for i in range(1, len(ldt_timestamps)):
# Calculating the returns for this timestamp
f_symprice_today = df_close[s_sym].ix[ldt_timestamps[i]]
f_symprice_yest = df_close[s_sym].ix[ldt_timestamps[i - 1]]
# Event is found if on 2 consecutive closes the price went from
# greater than or equal to 5.00 to less than 5.00
if f_symprice_yest >= 5.0 and f_symprice_today < 5.0:
df_events[s_sym].ix[ldt_timestamps[i]] = 1
return df_events
def create_study(ls_symbols, ldt_timestamps, s_study_name):
global dataObj
print "Grabbing data to perform {0}".format(s_study_name)
ls_keys = ['close', 'actual_close']
ldf_data = dataObj.get_data(ldt_timestamps, ls_symbols, ls_keys)
print "Got data for study {0}".format(s_study_name)
d_data = dict(zip(ls_keys, ldf_data))
for s_key in ls_keys:
d_data[s_key] = d_data[s_key].fillna(method='ffill')
d_data[s_key] = d_data[s_key].fillna(method='bfill')
d_data[s_key] = d_data[s_key].fillna(1.0)
df_events = find_events(ls_symbols, d_data)
print "Creating Study"
ep.eventprofiler(df_events, d_data, i_lookback=20, i_lookforward=20,
s_filename=s_study_name, b_market_neutral=True, b_errorbars=True,
s_market_sym='SPY')
def
|
():
dt_start = dt.datetime(2008, 1, 1)
dt_end = dt.datetime(2009, 12, 31)
ldt_timestamps = du.getNYSEdays(dt_start, dt_end, dt.timedelta(hours=16))
global dataObj
ls_symbols_2012 = dataObj.get_symbols_from_list('sp5002012')
ls_symbols_2012.append('SPY')
ls_symbols_2008 = dataObj.get_symbols_from_list('sp5002008')
ls_symbols_2008.append('SPY')
#create_study(ls_symbols_2008, ldt_timestamps, '2008Study2.pdf')
create_study(ls_symbols_2012, ldt_timestamps, '2012Study2.pdf')
if __name__ == '__main__':
main()
|
main
|
identifier_name
|
hw2.py
|
__author__ = 'eric'
import pandas as pd
import numpy as np
import math
import copy
import QSTK.qstkutil.qsdateutil as du
import datetime as dt
import QSTK.qstkutil.DataAccess as da
import QSTK.qstkutil.tsutil as tsu
import QSTK.qstkstudy.EventProfiler as ep
dataObj = da.DataAccess('Yahoo')
def find_events(ls_symbols, d_data):
''' Finding the event dataframe '''
df_close = d_data['actual_close']
print "Finding Events"
# Creating an empty dataframe
df_events = copy.deepcopy(df_close)
df_events = df_events * np.NAN
# Time stamps for the event range
ldt_timestamps = df_close.index
for s_sym in ls_symbols:
for i in range(1, len(ldt_timestamps)):
# Calculating the returns for this timestamp
f_symprice_today = df_close[s_sym].ix[ldt_timestamps[i]]
f_symprice_yest = df_close[s_sym].ix[ldt_timestamps[i - 1]]
# Event is found if on 2 consecutive closes the price went from
# greater than or equal to 5.00 to less than 5.00
if f_symprice_yest >= 5.0 and f_symprice_today < 5.0:
df_events[s_sym].ix[ldt_timestamps[i]] = 1
return df_events
def create_study(ls_symbols, ldt_timestamps, s_study_name):
global dataObj
print "Grabbing data to perform {0}".format(s_study_name)
ls_keys = ['close', 'actual_close']
ldf_data = dataObj.get_data(ldt_timestamps, ls_symbols, ls_keys)
print "Got data for study {0}".format(s_study_name)
d_data = dict(zip(ls_keys, ldf_data))
for s_key in ls_keys:
d_data[s_key] = d_data[s_key].fillna(method='ffill')
d_data[s_key] = d_data[s_key].fillna(method='bfill')
d_data[s_key] = d_data[s_key].fillna(1.0)
df_events = find_events(ls_symbols, d_data)
print "Creating Study"
ep.eventprofiler(df_events, d_data, i_lookback=20, i_lookforward=20,
s_filename=s_study_name, b_market_neutral=True, b_errorbars=True,
s_market_sym='SPY')
def main():
dt_start = dt.datetime(2008, 1, 1)
dt_end = dt.datetime(2009, 12, 31)
ldt_timestamps = du.getNYSEdays(dt_start, dt_end, dt.timedelta(hours=16))
global dataObj
|
ls_symbols_2008 = dataObj.get_symbols_from_list('sp5002008')
ls_symbols_2008.append('SPY')
#create_study(ls_symbols_2008, ldt_timestamps, '2008Study2.pdf')
create_study(ls_symbols_2012, ldt_timestamps, '2012Study2.pdf')
if __name__ == '__main__':
main()
|
ls_symbols_2012 = dataObj.get_symbols_from_list('sp5002012')
ls_symbols_2012.append('SPY')
|
random_line_split
|
hw2.py
|
__author__ = 'eric'
import pandas as pd
import numpy as np
import math
import copy
import QSTK.qstkutil.qsdateutil as du
import datetime as dt
import QSTK.qstkutil.DataAccess as da
import QSTK.qstkutil.tsutil as tsu
import QSTK.qstkstudy.EventProfiler as ep
dataObj = da.DataAccess('Yahoo')
def find_events(ls_symbols, d_data):
''' Finding the event dataframe '''
df_close = d_data['actual_close']
print "Finding Events"
# Creating an empty dataframe
df_events = copy.deepcopy(df_close)
df_events = df_events * np.NAN
# Time stamps for the event range
ldt_timestamps = df_close.index
for s_sym in ls_symbols:
for i in range(1, len(ldt_timestamps)):
# Calculating the returns for this timestamp
f_symprice_today = df_close[s_sym].ix[ldt_timestamps[i]]
f_symprice_yest = df_close[s_sym].ix[ldt_timestamps[i - 1]]
# Event is found if on 2 consecutive closes the price went from
# greater than or equal to 5.00 to less than 5.00
if f_symprice_yest >= 5.0 and f_symprice_today < 5.0:
df_events[s_sym].ix[ldt_timestamps[i]] = 1
return df_events
def create_study(ls_symbols, ldt_timestamps, s_study_name):
|
def main():
dt_start = dt.datetime(2008, 1, 1)
dt_end = dt.datetime(2009, 12, 31)
ldt_timestamps = du.getNYSEdays(dt_start, dt_end, dt.timedelta(hours=16))
global dataObj
ls_symbols_2012 = dataObj.get_symbols_from_list('sp5002012')
ls_symbols_2012.append('SPY')
ls_symbols_2008 = dataObj.get_symbols_from_list('sp5002008')
ls_symbols_2008.append('SPY')
#create_study(ls_symbols_2008, ldt_timestamps, '2008Study2.pdf')
create_study(ls_symbols_2012, ldt_timestamps, '2012Study2.pdf')
if __name__ == '__main__':
main()
|
global dataObj
print "Grabbing data to perform {0}".format(s_study_name)
ls_keys = ['close', 'actual_close']
ldf_data = dataObj.get_data(ldt_timestamps, ls_symbols, ls_keys)
print "Got data for study {0}".format(s_study_name)
d_data = dict(zip(ls_keys, ldf_data))
for s_key in ls_keys:
d_data[s_key] = d_data[s_key].fillna(method='ffill')
d_data[s_key] = d_data[s_key].fillna(method='bfill')
d_data[s_key] = d_data[s_key].fillna(1.0)
df_events = find_events(ls_symbols, d_data)
print "Creating Study"
ep.eventprofiler(df_events, d_data, i_lookback=20, i_lookforward=20,
s_filename=s_study_name, b_market_neutral=True, b_errorbars=True,
s_market_sym='SPY')
|
identifier_body
|
gulpfile.babel.js
|
'use strict';
import gulp from 'gulp';
import webpack from 'webpack';
import path from 'path';
import sync from 'run-sequence';
import rename from 'gulp-rename';
import template from 'gulp-template';
import fs from 'fs';
import yargs from 'yargs';
import lodash from 'lodash';
import gutil from 'gulp-util';
import serve from 'browser-sync';
import del from 'del';
import webpackDevMiddleware from 'webpack-dev-middleware';
import webpackHotMiddleware from 'webpack-hot-middleware';
import colorsSupported from 'supports-color';
import historyApiFallback from 'connect-history-api-fallback';
let root = 'client';
// helper method for resolving paths
let resolveToApp = (glob = '') => {
return path.join(root, 'app', glob); // app/{glob}
};
let resolveToComponents = (glob = '') => {
return path.join(root, 'app/components', glob); // app/components/{glob}
};
// map of all paths
let paths = {
js: resolveToComponents('**/*!(.spec.js).js'), // exclude spec files
styl: resolveToApp('**/*.scss'), // stylesheets
html: [
resolveToApp('**/*.html'),
path.join(root, 'index.html')
],
entry: [
'babel-polyfill',
path.join(__dirname, root, 'app/app.js')
],
output: root,
blankTemplates: path.join(__dirname, 'generator', 'component/**/*.**'),
dest: path.join(__dirname, 'dist')
};
// use webpack.config.js to build modules
gulp.task('webpack', ['clean'], (cb) => {
const config = require('./webpack.dist.config');
config.entry.app = paths.entry;
webpack(config, (err, stats) => {
if(err) {
throw new gutil.PluginError("webpack", err);
}
gutil.log("[webpack]", stats.toString({
colors: colorsSupported,
chunks: false,
errorDetails: true
}));
cb();
});
});
gulp.task('serve', () => {
const config = require('./webpack.dev.config');
config.entry.app = [
// this modules required to make HRM working
// it responsible for all this webpack magic
'webpack-hot-middleware/client?reload=true',
// application entry point
].concat(paths.entry);
var compiler = webpack(config);
serve({
port: process.env.PORT || 3000,
open: false,
server: {baseDir: root},
middleware: [
historyApiFallback(),
webpackDevMiddleware(compiler, {
stats: {
colors: colorsSupported,
chunks: false,
modules: false
},
publicPath: config.output.publicPath
}),
webpackHotMiddleware(compiler)
]
});
});
gulp.task('watch', ['serve']);
gulp.task('component', () => {
const cap = (val) => {
return val.charAt(0).toUpperCase() + val.slice(1);
};
const name = yargs.argv.name;
const parentPath = yargs.argv.parent || '';
const destPath = path.join(resolveToComponents(), parentPath, name);
return gulp.src(paths.blankTemplates)
.pipe(template({
name: name,
upCaseName: cap(name)
}))
.pipe(rename((path) => {
path.basename = path.basename.replace('temp', name);
}))
.pipe(gulp.dest(destPath));
});
|
gutil.log("[clean]", paths);
cb();
})
});
gulp.task('default', ['watch']);
|
gulp.task('clean', (cb) => {
del([paths.dest]).then(function (paths) {
|
random_line_split
|
gulpfile.babel.js
|
'use strict';
import gulp from 'gulp';
import webpack from 'webpack';
import path from 'path';
import sync from 'run-sequence';
import rename from 'gulp-rename';
import template from 'gulp-template';
import fs from 'fs';
import yargs from 'yargs';
import lodash from 'lodash';
import gutil from 'gulp-util';
import serve from 'browser-sync';
import del from 'del';
import webpackDevMiddleware from 'webpack-dev-middleware';
import webpackHotMiddleware from 'webpack-hot-middleware';
import colorsSupported from 'supports-color';
import historyApiFallback from 'connect-history-api-fallback';
let root = 'client';
// helper method for resolving paths
let resolveToApp = (glob = '') => {
return path.join(root, 'app', glob); // app/{glob}
};
let resolveToComponents = (glob = '') => {
return path.join(root, 'app/components', glob); // app/components/{glob}
};
// map of all paths
let paths = {
js: resolveToComponents('**/*!(.spec.js).js'), // exclude spec files
styl: resolveToApp('**/*.scss'), // stylesheets
html: [
resolveToApp('**/*.html'),
path.join(root, 'index.html')
],
entry: [
'babel-polyfill',
path.join(__dirname, root, 'app/app.js')
],
output: root,
blankTemplates: path.join(__dirname, 'generator', 'component/**/*.**'),
dest: path.join(__dirname, 'dist')
};
// use webpack.config.js to build modules
gulp.task('webpack', ['clean'], (cb) => {
const config = require('./webpack.dist.config');
config.entry.app = paths.entry;
webpack(config, (err, stats) => {
if(err)
|
gutil.log("[webpack]", stats.toString({
colors: colorsSupported,
chunks: false,
errorDetails: true
}));
cb();
});
});
gulp.task('serve', () => {
const config = require('./webpack.dev.config');
config.entry.app = [
// this modules required to make HRM working
// it responsible for all this webpack magic
'webpack-hot-middleware/client?reload=true',
// application entry point
].concat(paths.entry);
var compiler = webpack(config);
serve({
port: process.env.PORT || 3000,
open: false,
server: {baseDir: root},
middleware: [
historyApiFallback(),
webpackDevMiddleware(compiler, {
stats: {
colors: colorsSupported,
chunks: false,
modules: false
},
publicPath: config.output.publicPath
}),
webpackHotMiddleware(compiler)
]
});
});
gulp.task('watch', ['serve']);
gulp.task('component', () => {
const cap = (val) => {
return val.charAt(0).toUpperCase() + val.slice(1);
};
const name = yargs.argv.name;
const parentPath = yargs.argv.parent || '';
const destPath = path.join(resolveToComponents(), parentPath, name);
return gulp.src(paths.blankTemplates)
.pipe(template({
name: name,
upCaseName: cap(name)
}))
.pipe(rename((path) => {
path.basename = path.basename.replace('temp', name);
}))
.pipe(gulp.dest(destPath));
});
gulp.task('clean', (cb) => {
del([paths.dest]).then(function (paths) {
gutil.log("[clean]", paths);
cb();
})
});
gulp.task('default', ['watch']);
|
{
throw new gutil.PluginError("webpack", err);
}
|
conditional_block
|
story.tsx
|
/*
Copyright (C) 2017 Cloudbase Solutions SRL
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as
published by the Free Software Foundation, either version 3 of the
License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
import React from 'react'
import { storiesOf } from '@storybook/react'
import ProgressBar from '.'
// eslint-disable-next-line react/jsx-props-no-spreading
const Wrapper = (props: any) => <div style={{ width: '800px' }}><ProgressBar {...props} /></div>
storiesOf('ProgressBar', module)
.add('default 100%', () => (
<Wrapper />
))
.add('50%', () => (
<Wrapper progress={50} />
))
.add('10%', () => (
<Wrapper progress={10} />
))
.add('0%', () => (
<Wrapper progress={0} />
))
|
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
|
random_line_split
|
bljsystem.py
|
"""
start a gui for a binary lennard jones cluster.
All that is really needed to start a gui is define a system and call run_gui
system = BLJCluster(natoms, ntypeA)
run_gui(system)
"""
import sys
from PyQt4 import QtGui
from pele.systems import BLJCluster
from pele.gui import run_gui
from _blj_dialog import Ui_DialogLJSetup as UI
class BLJDialog(QtGui.QDialog):
def __init__(self):
QtGui.QDialog.__init__(self)
self.ui = UI()
self.ui.setupUi(self)
self.setWindowTitle("Create binary Lennard-Jones system")
self.natoms = None
# self.ui.buttonBox.Ok.setDefault(True)
# self.ui.buttonBox.Ok.setDefault(True)
def get_input(self):
|
def on_buttonBox_accepted(self):
self.get_input()
self.close()
def on_buttonBox_rejected(self):
self.close()
if __name__ == "__main__":
# create a pop up window to get the number of atoms
app = QtGui.QApplication(sys.argv)
dialog = BLJDialog()
dialog.exec_()
if dialog.natoms is None:
sys.exit()
print dialog.ntypeA, "A atoms interacting with eps", dialog.epsAA, "sig", dialog.sigAA
print dialog.natoms - dialog.ntypeA, "B atoms interacting with eps", dialog.epsBB, "sig", dialog.sigBB
print "The A and B atoms interact with eps", dialog.epsAB, "sig", dialog.sigAB
# create the system and start the gui
# (note: since the application is already started we need to pass it to run_gui)
system = BLJCluster(dialog.natoms, dialog.ntypeA,
sigAB=dialog.sigAB,
epsAB=dialog.epsAB,
sigBB=dialog.sigBB,
epsBB=dialog.epsBB,
)
run_gui(system, application=app)
|
self.natoms = int(self.ui.lineEdit_natoms.text())
self.ntypeA = int(self.ui.lineEdit_ntypeA.text())
self.sigAB = float(self.ui.lineEdit_sigAB.text())
self.epsAB = float(self.ui.lineEdit_epsAB.text())
self.sigBB = float(self.ui.lineEdit_sigBB.text())
self.epsBB = float(self.ui.lineEdit_epsBB.text())
self.sigAA = 1.
self.epsAA = 1.
|
identifier_body
|
bljsystem.py
|
"""
start a gui for a binary lennard jones cluster.
All that is really needed to start a gui is define a system and call run_gui
system = BLJCluster(natoms, ntypeA)
run_gui(system)
"""
import sys
from PyQt4 import QtGui
from pele.systems import BLJCluster
from pele.gui import run_gui
from _blj_dialog import Ui_DialogLJSetup as UI
class BLJDialog(QtGui.QDialog):
def __init__(self):
QtGui.QDialog.__init__(self)
self.ui = UI()
self.ui.setupUi(self)
self.setWindowTitle("Create binary Lennard-Jones system")
self.natoms = None
# self.ui.buttonBox.Ok.setDefault(True)
# self.ui.buttonBox.Ok.setDefault(True)
def
|
(self):
self.natoms = int(self.ui.lineEdit_natoms.text())
self.ntypeA = int(self.ui.lineEdit_ntypeA.text())
self.sigAB = float(self.ui.lineEdit_sigAB.text())
self.epsAB = float(self.ui.lineEdit_epsAB.text())
self.sigBB = float(self.ui.lineEdit_sigBB.text())
self.epsBB = float(self.ui.lineEdit_epsBB.text())
self.sigAA = 1.
self.epsAA = 1.
def on_buttonBox_accepted(self):
self.get_input()
self.close()
def on_buttonBox_rejected(self):
self.close()
if __name__ == "__main__":
# create a pop up window to get the number of atoms
app = QtGui.QApplication(sys.argv)
dialog = BLJDialog()
dialog.exec_()
if dialog.natoms is None:
sys.exit()
print dialog.ntypeA, "A atoms interacting with eps", dialog.epsAA, "sig", dialog.sigAA
print dialog.natoms - dialog.ntypeA, "B atoms interacting with eps", dialog.epsBB, "sig", dialog.sigBB
print "The A and B atoms interact with eps", dialog.epsAB, "sig", dialog.sigAB
# create the system and start the gui
# (note: since the application is already started we need to pass it to run_gui)
system = BLJCluster(dialog.natoms, dialog.ntypeA,
sigAB=dialog.sigAB,
epsAB=dialog.epsAB,
sigBB=dialog.sigBB,
epsBB=dialog.epsBB,
)
run_gui(system, application=app)
|
get_input
|
identifier_name
|
bljsystem.py
|
"""
start a gui for a binary lennard jones cluster.
All that is really needed to start a gui is define a system and call run_gui
system = BLJCluster(natoms, ntypeA)
run_gui(system)
"""
import sys
from PyQt4 import QtGui
from pele.systems import BLJCluster
from pele.gui import run_gui
from _blj_dialog import Ui_DialogLJSetup as UI
class BLJDialog(QtGui.QDialog):
def __init__(self):
QtGui.QDialog.__init__(self)
self.ui = UI()
self.ui.setupUi(self)
self.setWindowTitle("Create binary Lennard-Jones system")
self.natoms = None
# self.ui.buttonBox.Ok.setDefault(True)
# self.ui.buttonBox.Ok.setDefault(True)
|
def get_input(self):
self.natoms = int(self.ui.lineEdit_natoms.text())
self.ntypeA = int(self.ui.lineEdit_ntypeA.text())
self.sigAB = float(self.ui.lineEdit_sigAB.text())
self.epsAB = float(self.ui.lineEdit_epsAB.text())
self.sigBB = float(self.ui.lineEdit_sigBB.text())
self.epsBB = float(self.ui.lineEdit_epsBB.text())
self.sigAA = 1.
self.epsAA = 1.
def on_buttonBox_accepted(self):
self.get_input()
self.close()
def on_buttonBox_rejected(self):
self.close()
if __name__ == "__main__":
# create a pop up window to get the number of atoms
app = QtGui.QApplication(sys.argv)
dialog = BLJDialog()
dialog.exec_()
if dialog.natoms is None:
sys.exit()
print dialog.ntypeA, "A atoms interacting with eps", dialog.epsAA, "sig", dialog.sigAA
print dialog.natoms - dialog.ntypeA, "B atoms interacting with eps", dialog.epsBB, "sig", dialog.sigBB
print "The A and B atoms interact with eps", dialog.epsAB, "sig", dialog.sigAB
# create the system and start the gui
# (note: since the application is already started we need to pass it to run_gui)
system = BLJCluster(dialog.natoms, dialog.ntypeA,
sigAB=dialog.sigAB,
epsAB=dialog.epsAB,
sigBB=dialog.sigBB,
epsBB=dialog.epsBB,
)
run_gui(system, application=app)
|
random_line_split
|
|
bljsystem.py
|
"""
start a gui for a binary lennard jones cluster.
All that is really needed to start a gui is define a system and call run_gui
system = BLJCluster(natoms, ntypeA)
run_gui(system)
"""
import sys
from PyQt4 import QtGui
from pele.systems import BLJCluster
from pele.gui import run_gui
from _blj_dialog import Ui_DialogLJSetup as UI
class BLJDialog(QtGui.QDialog):
def __init__(self):
QtGui.QDialog.__init__(self)
self.ui = UI()
self.ui.setupUi(self)
self.setWindowTitle("Create binary Lennard-Jones system")
self.natoms = None
# self.ui.buttonBox.Ok.setDefault(True)
# self.ui.buttonBox.Ok.setDefault(True)
def get_input(self):
self.natoms = int(self.ui.lineEdit_natoms.text())
self.ntypeA = int(self.ui.lineEdit_ntypeA.text())
self.sigAB = float(self.ui.lineEdit_sigAB.text())
self.epsAB = float(self.ui.lineEdit_epsAB.text())
self.sigBB = float(self.ui.lineEdit_sigBB.text())
self.epsBB = float(self.ui.lineEdit_epsBB.text())
self.sigAA = 1.
self.epsAA = 1.
def on_buttonBox_accepted(self):
self.get_input()
self.close()
def on_buttonBox_rejected(self):
self.close()
if __name__ == "__main__":
# create a pop up window to get the number of atoms
app = QtGui.QApplication(sys.argv)
dialog = BLJDialog()
dialog.exec_()
if dialog.natoms is None:
|
print dialog.ntypeA, "A atoms interacting with eps", dialog.epsAA, "sig", dialog.sigAA
print dialog.natoms - dialog.ntypeA, "B atoms interacting with eps", dialog.epsBB, "sig", dialog.sigBB
print "The A and B atoms interact with eps", dialog.epsAB, "sig", dialog.sigAB
# create the system and start the gui
# (note: since the application is already started we need to pass it to run_gui)
system = BLJCluster(dialog.natoms, dialog.ntypeA,
sigAB=dialog.sigAB,
epsAB=dialog.epsAB,
sigBB=dialog.sigBB,
epsBB=dialog.epsBB,
)
run_gui(system, application=app)
|
sys.exit()
|
conditional_block
|
CommentsMenu.tsx
|
import React, { useState } from 'react';
import { registerComponent, Components } from '../../../lib/vulcan-lib';
import MoreVertIcon from '@material-ui/icons/MoreVert';
import Menu from '@material-ui/core/Menu';
import { useCurrentUser } from '../../common/withUser';
import { useTracking } from "../../../lib/analyticsEvents";
const styles = (theme: ThemeType): JssStyles => ({
icon: {
cursor: "pointer",
fontSize:"1.4rem"
},
menu: {
position:"absolute",
right:0,
top:0,
zIndex: theme.zIndexes.commentsMenu,
}
})
const CommentsMenu = ({classes, className, comment, post, showEdit, icon}: {
|
icon?: any,
}) => {
const [anchorEl, setAnchorEl] = useState<any>(null);
// Render menu-contents if the menu has ever been opened (keep rendering
// contents when closed after open, because of closing animation).
const [everOpened, setEverOpened] = useState(false);
const currentUser = useCurrentUser();
const { captureEvent } = useTracking({eventType: "commentMenuClicked", eventProps: {commentId: comment._id, itemType: "comment"}})
if (!currentUser) return null
return (
<span className={className}>
<span onClick={event => {
captureEvent("commentMenuClicked", {open: true})
setAnchorEl(event.currentTarget)
setEverOpened(true);
}}>
{icon ? icon : <MoreVertIcon
className={classes.icon}/>}
</span>
<Menu
onClick={event => {
captureEvent("commentMenuClicked", {open: false})
setAnchorEl(null)
}}
open={Boolean(anchorEl)}
anchorEl={anchorEl}
>
{everOpened && <Components.CommentActions
currentUser={currentUser}
comment={comment}
post={post}
showEdit={showEdit}
/>}
</Menu>
</span>
)
}
const CommentsMenuComponent = registerComponent('CommentsMenu', CommentsMenu, {styles});
declare global {
interface ComponentTypes {
CommentsMenu: typeof CommentsMenuComponent,
}
}
|
classes: ClassesType,
className?: string,
comment: CommentsList,
post?: PostsMinimumInfo,
showEdit: ()=>void,
|
random_line_split
|
feature_artist.py
|
# (C) British Crown Copyright 2011 - 2015, Met Office
#
# This file is part of cartopy.
#
# cartopy is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# cartopy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with cartopy. If not, see <http://www.gnu.org/licenses/>.
"""
This module defines the :class:`FeatureArtist` class, for drawing
:class:`Feature` instances with matplotlib.
"""
from __future__ import (absolute_import, division, print_function)
import warnings
import weakref
import matplotlib.artist
import matplotlib.collections
import cartopy.mpl.patch as cpatch
class FeatureArtist(matplotlib.artist.Artist):
|
"""
A subclass of :class:`~matplotlib.artist.Artist` capable of
drawing a :class:`cartopy.feature.Feature`.
"""
_geometry_to_path_cache = weakref.WeakKeyDictionary()
"""
A nested mapping from geometry and target projection to the
resulting transformed matplotlib paths::
{geom: {target_projection: list_of_paths}}
This provides a significant boost when producing multiple maps of the
same projection.
"""
def __init__(self, feature, **kwargs):
"""
Args:
* feature:
an instance of :class:`cartopy.feature.Feature` to draw.
* kwargs:
keyword arguments to be used when drawing the feature. These
will override those shared with the feature.
"""
super(FeatureArtist, self).__init__()
if kwargs is None:
kwargs = {}
self._kwargs = dict(kwargs)
# Set default zorder so that features are drawn before
# lines e.g. contours but after images.
# Note that the zorder of Patch, PatchCollection and PathCollection
# are all 1 by default. Assuming equal zorder drawing takes place in
# the following order: collections, patches, lines (default zorder=2),
# text (default zorder=3), then other artists e.g. FeatureArtist.
if self._kwargs.get('zorder') is not None:
self.set_zorder(self._kwargs['zorder'])
elif feature.kwargs.get('zorder') is not None:
self.set_zorder(feature.kwargs['zorder'])
else:
# The class attribute matplotlib.collections.PathCollection.zorder
# was removed after mpl v1.2.0, so the hard-coded value of 1 is
# used instead.
self.set_zorder(1)
self._feature = feature
@matplotlib.artist.allow_rasterization
def draw(self, renderer, *args, **kwargs):
"""
Draws the geometries of the feature that intersect with the extent of
the :class:`cartopy.mpl.GeoAxes` instance to which this
object has been added.
"""
if not self.get_visible():
return
ax = self.get_axes()
feature_crs = self._feature.crs
# Get geometries that we need to draw.
extent = None
try:
extent = ax.get_extent(feature_crs)
except ValueError:
warnings.warn('Unable to determine extent. Defaulting to global.')
geoms = self._feature.intersecting_geometries(extent)
# Project (if necessary) and convert geometries to matplotlib paths.
paths = []
key = ax.projection
for geom in geoms:
mapping = FeatureArtist._geometry_to_path_cache.setdefault(geom,
{})
geom_paths = mapping.get(key)
if geom_paths is None:
if ax.projection != feature_crs:
projected_geom = ax.projection.project_geometry(
geom, feature_crs)
else:
projected_geom = geom
geom_paths = cpatch.geos_to_path(projected_geom)
mapping[key] = geom_paths
paths.extend(geom_paths)
# Build path collection and draw it.
transform = ax.projection._as_mpl_transform(ax)
# Combine all the keyword args in priority order
final_kwargs = dict(self._feature.kwargs)
final_kwargs.update(self._kwargs)
final_kwargs.update(kwargs)
c = matplotlib.collections.PathCollection(paths,
transform=transform,
**final_kwargs)
c.set_clip_path(ax.patch)
c.set_figure(ax.figure)
return c.draw(renderer)
|
identifier_body
|
|
feature_artist.py
|
# (C) British Crown Copyright 2011 - 2015, Met Office
#
# This file is part of cartopy.
#
# cartopy is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# cartopy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with cartopy. If not, see <http://www.gnu.org/licenses/>.
"""
This module defines the :class:`FeatureArtist` class, for drawing
:class:`Feature` instances with matplotlib.
"""
from __future__ import (absolute_import, division, print_function)
import warnings
import weakref
import matplotlib.artist
import matplotlib.collections
import cartopy.mpl.patch as cpatch
class FeatureArtist(matplotlib.artist.Artist):
"""
A subclass of :class:`~matplotlib.artist.Artist` capable of
drawing a :class:`cartopy.feature.Feature`.
"""
_geometry_to_path_cache = weakref.WeakKeyDictionary()
"""
A nested mapping from geometry and target projection to the
resulting transformed matplotlib paths::
{geom: {target_projection: list_of_paths}}
This provides a significant boost when producing multiple maps of the
same projection.
"""
def
|
(self, feature, **kwargs):
"""
Args:
* feature:
an instance of :class:`cartopy.feature.Feature` to draw.
* kwargs:
keyword arguments to be used when drawing the feature. These
will override those shared with the feature.
"""
super(FeatureArtist, self).__init__()
if kwargs is None:
kwargs = {}
self._kwargs = dict(kwargs)
# Set default zorder so that features are drawn before
# lines e.g. contours but after images.
# Note that the zorder of Patch, PatchCollection and PathCollection
# are all 1 by default. Assuming equal zorder drawing takes place in
# the following order: collections, patches, lines (default zorder=2),
# text (default zorder=3), then other artists e.g. FeatureArtist.
if self._kwargs.get('zorder') is not None:
self.set_zorder(self._kwargs['zorder'])
elif feature.kwargs.get('zorder') is not None:
self.set_zorder(feature.kwargs['zorder'])
else:
# The class attribute matplotlib.collections.PathCollection.zorder
# was removed after mpl v1.2.0, so the hard-coded value of 1 is
# used instead.
self.set_zorder(1)
self._feature = feature
@matplotlib.artist.allow_rasterization
def draw(self, renderer, *args, **kwargs):
"""
Draws the geometries of the feature that intersect with the extent of
the :class:`cartopy.mpl.GeoAxes` instance to which this
object has been added.
"""
if not self.get_visible():
return
ax = self.get_axes()
feature_crs = self._feature.crs
# Get geometries that we need to draw.
extent = None
try:
extent = ax.get_extent(feature_crs)
except ValueError:
warnings.warn('Unable to determine extent. Defaulting to global.')
geoms = self._feature.intersecting_geometries(extent)
# Project (if necessary) and convert geometries to matplotlib paths.
paths = []
key = ax.projection
for geom in geoms:
mapping = FeatureArtist._geometry_to_path_cache.setdefault(geom,
{})
geom_paths = mapping.get(key)
if geom_paths is None:
if ax.projection != feature_crs:
projected_geom = ax.projection.project_geometry(
geom, feature_crs)
else:
projected_geom = geom
geom_paths = cpatch.geos_to_path(projected_geom)
mapping[key] = geom_paths
paths.extend(geom_paths)
# Build path collection and draw it.
transform = ax.projection._as_mpl_transform(ax)
# Combine all the keyword args in priority order
final_kwargs = dict(self._feature.kwargs)
final_kwargs.update(self._kwargs)
final_kwargs.update(kwargs)
c = matplotlib.collections.PathCollection(paths,
transform=transform,
**final_kwargs)
c.set_clip_path(ax.patch)
c.set_figure(ax.figure)
return c.draw(renderer)
|
__init__
|
identifier_name
|
feature_artist.py
|
# (C) British Crown Copyright 2011 - 2015, Met Office
#
# This file is part of cartopy.
#
# cartopy is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# cartopy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with cartopy. If not, see <http://www.gnu.org/licenses/>.
"""
This module defines the :class:`FeatureArtist` class, for drawing
:class:`Feature` instances with matplotlib.
"""
from __future__ import (absolute_import, division, print_function)
import warnings
import weakref
import matplotlib.artist
import matplotlib.collections
import cartopy.mpl.patch as cpatch
class FeatureArtist(matplotlib.artist.Artist):
"""
A subclass of :class:`~matplotlib.artist.Artist` capable of
drawing a :class:`cartopy.feature.Feature`.
"""
_geometry_to_path_cache = weakref.WeakKeyDictionary()
"""
A nested mapping from geometry and target projection to the
resulting transformed matplotlib paths::
{geom: {target_projection: list_of_paths}}
This provides a significant boost when producing multiple maps of the
same projection.
"""
def __init__(self, feature, **kwargs):
"""
Args:
* feature:
an instance of :class:`cartopy.feature.Feature` to draw.
* kwargs:
keyword arguments to be used when drawing the feature. These
will override those shared with the feature.
"""
super(FeatureArtist, self).__init__()
if kwargs is None:
|
self._kwargs = dict(kwargs)
# Set default zorder so that features are drawn before
# lines e.g. contours but after images.
# Note that the zorder of Patch, PatchCollection and PathCollection
# are all 1 by default. Assuming equal zorder drawing takes place in
# the following order: collections, patches, lines (default zorder=2),
# text (default zorder=3), then other artists e.g. FeatureArtist.
if self._kwargs.get('zorder') is not None:
self.set_zorder(self._kwargs['zorder'])
elif feature.kwargs.get('zorder') is not None:
self.set_zorder(feature.kwargs['zorder'])
else:
# The class attribute matplotlib.collections.PathCollection.zorder
# was removed after mpl v1.2.0, so the hard-coded value of 1 is
# used instead.
self.set_zorder(1)
self._feature = feature
@matplotlib.artist.allow_rasterization
def draw(self, renderer, *args, **kwargs):
"""
Draws the geometries of the feature that intersect with the extent of
the :class:`cartopy.mpl.GeoAxes` instance to which this
object has been added.
"""
if not self.get_visible():
return
ax = self.get_axes()
feature_crs = self._feature.crs
# Get geometries that we need to draw.
extent = None
try:
extent = ax.get_extent(feature_crs)
except ValueError:
warnings.warn('Unable to determine extent. Defaulting to global.')
geoms = self._feature.intersecting_geometries(extent)
# Project (if necessary) and convert geometries to matplotlib paths.
paths = []
key = ax.projection
for geom in geoms:
mapping = FeatureArtist._geometry_to_path_cache.setdefault(geom,
{})
geom_paths = mapping.get(key)
if geom_paths is None:
if ax.projection != feature_crs:
projected_geom = ax.projection.project_geometry(
geom, feature_crs)
else:
projected_geom = geom
geom_paths = cpatch.geos_to_path(projected_geom)
mapping[key] = geom_paths
paths.extend(geom_paths)
# Build path collection and draw it.
transform = ax.projection._as_mpl_transform(ax)
# Combine all the keyword args in priority order
final_kwargs = dict(self._feature.kwargs)
final_kwargs.update(self._kwargs)
final_kwargs.update(kwargs)
c = matplotlib.collections.PathCollection(paths,
transform=transform,
**final_kwargs)
c.set_clip_path(ax.patch)
c.set_figure(ax.figure)
return c.draw(renderer)
|
kwargs = {}
|
conditional_block
|
feature_artist.py
|
# (C) British Crown Copyright 2011 - 2015, Met Office
#
# This file is part of cartopy.
#
# cartopy is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# cartopy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with cartopy. If not, see <http://www.gnu.org/licenses/>.
"""
This module defines the :class:`FeatureArtist` class, for drawing
:class:`Feature` instances with matplotlib.
"""
from __future__ import (absolute_import, division, print_function)
import warnings
import weakref
import matplotlib.artist
import matplotlib.collections
import cartopy.mpl.patch as cpatch
class FeatureArtist(matplotlib.artist.Artist):
"""
A subclass of :class:`~matplotlib.artist.Artist` capable of
drawing a :class:`cartopy.feature.Feature`.
"""
_geometry_to_path_cache = weakref.WeakKeyDictionary()
"""
A nested mapping from geometry and target projection to the
resulting transformed matplotlib paths::
{geom: {target_projection: list_of_paths}}
This provides a significant boost when producing multiple maps of the
same projection.
"""
def __init__(self, feature, **kwargs):
"""
Args:
* feature:
an instance of :class:`cartopy.feature.Feature` to draw.
* kwargs:
keyword arguments to be used when drawing the feature. These
will override those shared with the feature.
"""
super(FeatureArtist, self).__init__()
if kwargs is None:
kwargs = {}
self._kwargs = dict(kwargs)
# Set default zorder so that features are drawn before
# lines e.g. contours but after images.
# Note that the zorder of Patch, PatchCollection and PathCollection
# are all 1 by default. Assuming equal zorder drawing takes place in
# the following order: collections, patches, lines (default zorder=2),
# text (default zorder=3), then other artists e.g. FeatureArtist.
if self._kwargs.get('zorder') is not None:
self.set_zorder(self._kwargs['zorder'])
elif feature.kwargs.get('zorder') is not None:
self.set_zorder(feature.kwargs['zorder'])
else:
# The class attribute matplotlib.collections.PathCollection.zorder
# was removed after mpl v1.2.0, so the hard-coded value of 1 is
# used instead.
self.set_zorder(1)
|
def draw(self, renderer, *args, **kwargs):
"""
Draws the geometries of the feature that intersect with the extent of
the :class:`cartopy.mpl.GeoAxes` instance to which this
object has been added.
"""
if not self.get_visible():
return
ax = self.get_axes()
feature_crs = self._feature.crs
# Get geometries that we need to draw.
extent = None
try:
extent = ax.get_extent(feature_crs)
except ValueError:
warnings.warn('Unable to determine extent. Defaulting to global.')
geoms = self._feature.intersecting_geometries(extent)
# Project (if necessary) and convert geometries to matplotlib paths.
paths = []
key = ax.projection
for geom in geoms:
mapping = FeatureArtist._geometry_to_path_cache.setdefault(geom,
{})
geom_paths = mapping.get(key)
if geom_paths is None:
if ax.projection != feature_crs:
projected_geom = ax.projection.project_geometry(
geom, feature_crs)
else:
projected_geom = geom
geom_paths = cpatch.geos_to_path(projected_geom)
mapping[key] = geom_paths
paths.extend(geom_paths)
# Build path collection and draw it.
transform = ax.projection._as_mpl_transform(ax)
# Combine all the keyword args in priority order
final_kwargs = dict(self._feature.kwargs)
final_kwargs.update(self._kwargs)
final_kwargs.update(kwargs)
c = matplotlib.collections.PathCollection(paths,
transform=transform,
**final_kwargs)
c.set_clip_path(ax.patch)
c.set_figure(ax.figure)
return c.draw(renderer)
|
self._feature = feature
@matplotlib.artist.allow_rasterization
|
random_line_split
|
wsregex.py
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import re
class
|
(object):
"""
A class containing all regular expressions used throughout the DataHound
application.
"""
# Class Members
# Potentially better email regex
# "([A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]{2,4})"
# http://www.webmonkey.com/2008/08/four_regular_expressions_to_check_email_addresses/
caps_alpha_regex = re.compile("^[A-Z]+$")
cc_last_four_regex = re.compile("^[0-9]{4}$")
docker_log_entry_regex = re.compile("^\[\d{4}-\d{2}-\d{2}")
# domain_name_regex = re.compile("^[a-zA-Z0-9-*]+(\.[a-zA-Z0-9-]+)*$")
domain_name_regex = re.compile("^((?!-)[A-Za-z0-9-]{1,63}(?<!-)\.)+[A-Za-z]{2,63}$")
email_regex = re.compile("^[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]{2,10}$")
export_file_name_regex = re.compile("^[0-9A-Za-z_-]{1,32}$")
file_log_entry_regex = re.compile("^\[\d{2}/\d{2}/\d{2} ")
file_name_regex = re.compile("^[A-Za-z-_0-9]+$")
first_name_regex = re.compile("^[A-Za-z\-']{1,32}$")
hostname_regex = re.compile(
"^(([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]*[a-zA-Z0-9])\.)*([A-Za-z]|[A-Za-z][A-Za-z\-]*[A-Za-z])$",
flags=re.IGNORECASE
)
html_form_regex = re.compile("<form.*?</form>", flags=re.IGNORECASE | re.DOTALL)
integer_regex = re.compile("^[0-9]+$")
ipv4_address_regex = re.compile(
"^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])$",
flags=re.IGNORECASE
)
ipv4_cidr_regex = re.compile(
"^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])(\/([0-9]|[1-2][0-9]|3[0-2]))$",
flags=re.IGNORECASE
)
last_name_regex = re.compile("^[A-Za-z\-']{1,32}$")
log_entry_stub_regex = re.compile("\[(.*?)\]")
mime_string_regex = re.compile("^[a-z\-]+/[a-z\.\-_0-9]+(;(\s?[\w=\.\-]+)+)?$", flags=re.IGNORECASE)
order_name_regex = re.compile("^[A-Za-z-_0-9]+$")
protocol_regex = re.compile("^([A-Z]{1,10})://", flags=re.IGNORECASE)
query_string_regex = re.compile(
"^([\\\\\w\-!@\$%\^\*\(\)_`~+\[\]{}|;'\",<>]+=([\\\\\w\-!@\$%\^\*\(\)_`~+\[\]{}|;'\",<>]*)?(&[\\\\\w\-!@\$%\^\*\(\)_`~+\[\]{}|;'\",<>]+=([\\\\\w\-!@\$%\^\*\(\)_`~+\[\]{}|;'\",<>]*)?)*)$",
flags=re.IGNORECASE,
)
url_port_regex = re.compile(".+:([1-9]([0-9]{1,10})?)$", flags=re.IGNORECASE)
url_protocol_regex = re.compile("^([A-Z0-9-_]+?):", flags=re.IGNORECASE)
url_scheme_regex = re.compile("^([A-Z0-9]{1,25})://", flags=re.IGNORECASE)
user_name_regex = re.compile("^[A-Z0-9]{1,32}$", flags=re.IGNORECASE)
uuid4_string_regex = re.compile(
"^[a-z0-9]{8}-[a-z0-9]{4}-[a-z0-9]{4}-[a-z0-9]{4}-[a-z0-9]{12}$",
flags=re.IGNORECASE,
)
zmap_bandwidth_regex = re.compile("^\d+[GMK]$")
zmap_empty_bandwidth_regex = re.compile("^0+[GMK]$")
ssl_certificate_regex = re.compile("(-----BEGIN CERTIFICATE-----.*?-----END CERTIFICATE-----)", flags=re.DOTALL)
authority_info_uri_regex = re.compile("URI:(.*)")
basic_auth_realm_regex = re.compile("realm=\"(.*?)\"")
card_last_four_regex = re.compile("^\d\d\d\d$")
# Instantiation
# Static Methods
# Class Methods
# Public Methods
# Protected Methods
# Private Methods
# Properties
# Representation and Comparison
|
RegexLib
|
identifier_name
|
wsregex.py
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import re
class RegexLib(object):
|
"""
A class containing all regular expressions used throughout the DataHound
application.
"""
# Class Members
# Potentially better email regex
# "([A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]{2,4})"
# http://www.webmonkey.com/2008/08/four_regular_expressions_to_check_email_addresses/
caps_alpha_regex = re.compile("^[A-Z]+$")
cc_last_four_regex = re.compile("^[0-9]{4}$")
docker_log_entry_regex = re.compile("^\[\d{4}-\d{2}-\d{2}")
# domain_name_regex = re.compile("^[a-zA-Z0-9-*]+(\.[a-zA-Z0-9-]+)*$")
domain_name_regex = re.compile("^((?!-)[A-Za-z0-9-]{1,63}(?<!-)\.)+[A-Za-z]{2,63}$")
email_regex = re.compile("^[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]{2,10}$")
export_file_name_regex = re.compile("^[0-9A-Za-z_-]{1,32}$")
file_log_entry_regex = re.compile("^\[\d{2}/\d{2}/\d{2} ")
file_name_regex = re.compile("^[A-Za-z-_0-9]+$")
first_name_regex = re.compile("^[A-Za-z\-']{1,32}$")
hostname_regex = re.compile(
"^(([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]*[a-zA-Z0-9])\.)*([A-Za-z]|[A-Za-z][A-Za-z\-]*[A-Za-z])$",
flags=re.IGNORECASE
)
html_form_regex = re.compile("<form.*?</form>", flags=re.IGNORECASE | re.DOTALL)
integer_regex = re.compile("^[0-9]+$")
ipv4_address_regex = re.compile(
"^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])$",
flags=re.IGNORECASE
)
ipv4_cidr_regex = re.compile(
"^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])(\/([0-9]|[1-2][0-9]|3[0-2]))$",
flags=re.IGNORECASE
)
last_name_regex = re.compile("^[A-Za-z\-']{1,32}$")
log_entry_stub_regex = re.compile("\[(.*?)\]")
mime_string_regex = re.compile("^[a-z\-]+/[a-z\.\-_0-9]+(;(\s?[\w=\.\-]+)+)?$", flags=re.IGNORECASE)
order_name_regex = re.compile("^[A-Za-z-_0-9]+$")
protocol_regex = re.compile("^([A-Z]{1,10})://", flags=re.IGNORECASE)
query_string_regex = re.compile(
"^([\\\\\w\-!@\$%\^\*\(\)_`~+\[\]{}|;'\",<>]+=([\\\\\w\-!@\$%\^\*\(\)_`~+\[\]{}|;'\",<>]*)?(&[\\\\\w\-!@\$%\^\*\(\)_`~+\[\]{}|;'\",<>]+=([\\\\\w\-!@\$%\^\*\(\)_`~+\[\]{}|;'\",<>]*)?)*)$",
flags=re.IGNORECASE,
)
url_port_regex = re.compile(".+:([1-9]([0-9]{1,10})?)$", flags=re.IGNORECASE)
url_protocol_regex = re.compile("^([A-Z0-9-_]+?):", flags=re.IGNORECASE)
url_scheme_regex = re.compile("^([A-Z0-9]{1,25})://", flags=re.IGNORECASE)
user_name_regex = re.compile("^[A-Z0-9]{1,32}$", flags=re.IGNORECASE)
uuid4_string_regex = re.compile(
"^[a-z0-9]{8}-[a-z0-9]{4}-[a-z0-9]{4}-[a-z0-9]{4}-[a-z0-9]{12}$",
flags=re.IGNORECASE,
)
zmap_bandwidth_regex = re.compile("^\d+[GMK]$")
zmap_empty_bandwidth_regex = re.compile("^0+[GMK]$")
ssl_certificate_regex = re.compile("(-----BEGIN CERTIFICATE-----.*?-----END CERTIFICATE-----)", flags=re.DOTALL)
authority_info_uri_regex = re.compile("URI:(.*)")
basic_auth_realm_regex = re.compile("realm=\"(.*?)\"")
card_last_four_regex = re.compile("^\d\d\d\d$")
# Instantiation
# Static Methods
# Class Methods
# Public Methods
# Protected Methods
# Private Methods
# Properties
# Representation and Comparison
|
identifier_body
|
|
wsregex.py
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import re
class RegexLib(object):
"""
A class containing all regular expressions used throughout the DataHound
application.
"""
# Class Members
# Potentially better email regex
# "([A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]{2,4})"
# http://www.webmonkey.com/2008/08/four_regular_expressions_to_check_email_addresses/
caps_alpha_regex = re.compile("^[A-Z]+$")
cc_last_four_regex = re.compile("^[0-9]{4}$")
docker_log_entry_regex = re.compile("^\[\d{4}-\d{2}-\d{2}")
# domain_name_regex = re.compile("^[a-zA-Z0-9-*]+(\.[a-zA-Z0-9-]+)*$")
domain_name_regex = re.compile("^((?!-)[A-Za-z0-9-]{1,63}(?<!-)\.)+[A-Za-z]{2,63}$")
email_regex = re.compile("^[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]{2,10}$")
export_file_name_regex = re.compile("^[0-9A-Za-z_-]{1,32}$")
file_log_entry_regex = re.compile("^\[\d{2}/\d{2}/\d{2} ")
file_name_regex = re.compile("^[A-Za-z-_0-9]+$")
first_name_regex = re.compile("^[A-Za-z\-']{1,32}$")
hostname_regex = re.compile(
"^(([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]*[a-zA-Z0-9])\.)*([A-Za-z]|[A-Za-z][A-Za-z\-]*[A-Za-z])$",
flags=re.IGNORECASE
)
html_form_regex = re.compile("<form.*?</form>", flags=re.IGNORECASE | re.DOTALL)
integer_regex = re.compile("^[0-9]+$")
ipv4_address_regex = re.compile(
"^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])$",
flags=re.IGNORECASE
)
ipv4_cidr_regex = re.compile(
"^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])(\/([0-9]|[1-2][0-9]|3[0-2]))$",
flags=re.IGNORECASE
)
last_name_regex = re.compile("^[A-Za-z\-']{1,32}$")
log_entry_stub_regex = re.compile("\[(.*?)\]")
mime_string_regex = re.compile("^[a-z\-]+/[a-z\.\-_0-9]+(;(\s?[\w=\.\-]+)+)?$", flags=re.IGNORECASE)
|
protocol_regex = re.compile("^([A-Z]{1,10})://", flags=re.IGNORECASE)
query_string_regex = re.compile(
"^([\\\\\w\-!@\$%\^\*\(\)_`~+\[\]{}|;'\",<>]+=([\\\\\w\-!@\$%\^\*\(\)_`~+\[\]{}|;'\",<>]*)?(&[\\\\\w\-!@\$%\^\*\(\)_`~+\[\]{}|;'\",<>]+=([\\\\\w\-!@\$%\^\*\(\)_`~+\[\]{}|;'\",<>]*)?)*)$",
flags=re.IGNORECASE,
)
url_port_regex = re.compile(".+:([1-9]([0-9]{1,10})?)$", flags=re.IGNORECASE)
url_protocol_regex = re.compile("^([A-Z0-9-_]+?):", flags=re.IGNORECASE)
url_scheme_regex = re.compile("^([A-Z0-9]{1,25})://", flags=re.IGNORECASE)
user_name_regex = re.compile("^[A-Z0-9]{1,32}$", flags=re.IGNORECASE)
uuid4_string_regex = re.compile(
"^[a-z0-9]{8}-[a-z0-9]{4}-[a-z0-9]{4}-[a-z0-9]{4}-[a-z0-9]{12}$",
flags=re.IGNORECASE,
)
zmap_bandwidth_regex = re.compile("^\d+[GMK]$")
zmap_empty_bandwidth_regex = re.compile("^0+[GMK]$")
ssl_certificate_regex = re.compile("(-----BEGIN CERTIFICATE-----.*?-----END CERTIFICATE-----)", flags=re.DOTALL)
authority_info_uri_regex = re.compile("URI:(.*)")
basic_auth_realm_regex = re.compile("realm=\"(.*?)\"")
card_last_four_regex = re.compile("^\d\d\d\d$")
# Instantiation
# Static Methods
# Class Methods
# Public Methods
# Protected Methods
# Private Methods
# Properties
# Representation and Comparison
|
order_name_regex = re.compile("^[A-Za-z-_0-9]+$")
|
random_line_split
|
__init__.py
|
"""
Read & write Java .properties files
``javaproperties`` provides support for reading & writing Java ``.properties``
files (both the simple line-oriented format and XML) with a simple API based on
the ``json`` module — though, for recovering Java addicts, it also includes a
``Properties`` class intended to match the behavior of Java 8's
``java.util.Properties`` as much as is Pythonically possible.
Visit <https://github.com/jwodder/javaproperties> or
<http://javaproperties.rtfd.io> for more information.
"""
import codecs
from .propclass import Properties
from .propfile import PropertiesFile
from .reading import (
Comment,
InvalidUEscapeError,
KeyValue,
PropertiesElement,
Whitespace,
load,
loads,
parse,
unescape,
)
from .writing import (
dump,
dumps,
escape,
java_timestamp,
javapropertiesreplace_errors,
join_key_value,
to_comment,
)
from .xmlprops import dump_xml, dumps_xml, load_xml, loads_xml
__version__ = "0.8.1"
__author__ = "John Thorvald Wodder II"
__author_email__ = "[email protected]"
__license__ = "MIT"
__url__ = "https://github.com/jwodder/javaproperties"
__all__ = [
"Comment",
"InvalidUEscapeError",
"KeyValue",
"Properties",
"PropertiesElement",
"PropertiesFile",
"Whitespace",
"dump",
"dump_xml",
"dumps",
"dumps_xml",
"escape",
"java_timestamp",
"javapropertiesreplace_errors",
|
"join_key_value",
"load",
"load_xml",
"loads",
"loads_xml",
"parse",
"to_comment",
"unescape",
]
codecs.register_error("javapropertiesreplace", javapropertiesreplace_errors)
|
random_line_split
|
|
menu-panel.js
|
'use strict';
define([
'jquery',
'underscore',
'view/widget/panel/panel',
'view/widget/body/body',
'view/widget/navigation/navigation',
], function ($, _, Panel, Body, Navigation) {
return Panel.extend({
initialize: function () {
|
positionFixed: true,
display: 'overlay',
body: new Body({
items: {
navigation: new Navigation({
activeState: false,
rows: [
{ dashboard: {text: '<i class="fa fa-list-ul"></i> Dashboard'} },
{ providers: {text: '<i class="fa fa-copyright"></i> Providers'} },
{ products: {text: '<i class="fa fa-database"></i> Products'} },
{ blocks: {text: '<i class="fa fa-th-large"></i> Blocks'} },
{ beds: {text: '<i class="fa fa-bars"></i> Beds'} },
{ crops: {text: '<i class="fa fa-crop"></i> Crops'} },
{ tasks: {text: '<i class="fa fa-tasks"></i> Tasks'} },
{ works: {text: '<i class="fa fa-wrench"></i> Works'} },
{ varieties: {text: '<i class="fa fa-pagelines"></i> Varieties'} },
{ poss: {text: '<i class="fa fa-truck"></i> Points of sale'} },
{ logout: {text: '<i class="fa fa-sign-out"></i> Logout'} },
],
events: {
click: function (route) {
this.redirect(route)
}.bind(this),
},
}),
},
}),
});
},
redirect: function (route) {
this.close().done(function() {
app.router.navigate(route);
}.bind(this));
},
});
});
|
Panel.prototype.initialize.call(this, {
id: 'menu-panel',
position: 'right',
|
random_line_split
|
block-scoping.js
|
import traverse from "../../../traversal";
import object from "../../../helpers/object";
import * as util from "../../../util";
import * as t from "../../../types";
import values from "lodash/object/values";
import extend from "lodash/object/extend";
function isLet(node, parent) {
if (!t.isVariableDeclaration(node)) return false;
if (node._let) return true;
if (node.kind !== "let") return false;
// https://github.com/babel/babel/issues/255
if (isLetInitable(node, parent)) {
for (var i = 0; i < node.declarations.length; i++) {
var declar = node.declarations[i];
declar.init ||= t.identifier("undefined");
}
}
node._let = true;
node.kind = "var";
return true;
}
function isLetInitable(node, parent) {
return !t.isFor(parent) || !t.isFor(parent, { left: node });
}
function isVar(node, parent) {
return t.isVariableDeclaration(node, { kind: "var" }) && !isLet(node, parent);
}
function standardizeLets(declars) {
for (var i = 0; i < declars.length; i++) {
delete declars[i]._let;
}
}
export function check(node) {
return t.isVariableDeclaration(node) && (node.kind === "let" || node.kind === "const");
}
export function VariableDeclaration(node, parent, scope, file) {
if (!isLet(node, parent)) return;
if (isLetInitable(node) && file.transformers["es6.blockScopingTDZ"].canRun()) {
var nodes = [node];
for (var i = 0; i < node.declarations.length; i++) {
var decl = node.declarations[i];
if (decl.init) {
var assign = t.assignmentExpression("=", decl.id, decl.init);
assign._ignoreBlockScopingTDZ = true;
nodes.push(t.expressionStatement(assign));
}
decl.init = file.addHelper("temporal-undefined");
}
node._blockHoist = 2;
return nodes;
}
}
export function Loop(node, parent, scope, file) {
var init = node.left || node.init;
if (isLet(init, node)) {
t.ensureBlock(node);
node.body._letDeclarators = [init];
}
var blockScoping = new BlockScoping(this, node.body, parent, scope, file);
return blockScoping.run();
}
export function BlockStatement(block, parent, scope, file) {
if (!t.isLoop(parent)) {
var blockScoping = new BlockScoping(null, block, parent, scope, file);
blockScoping.run();
}
}
export { BlockStatement as Program };
function replace(node, parent, scope, remaps) {
if (!t.isReferencedIdentifier(node, parent)) return;
var remap = remaps[node.name];
if (!remap) return;
var ownBinding = scope.getBindingIdentifier(node.name);
if (ownBinding === remap.binding) {
node.name = remap.uid;
} else {
// scope already has it's own binding that doesn't
// match the one we have a stored replacement for
if (this) this.skip();
}
}
var replaceVisitor = {
enter: replace
};
function traverseReplace(node, parent, scope, remaps) {
replace(node, parent, scope, remaps);
scope.traverse(node, replaceVisitor, remaps);
}
var letReferenceBlockVisitor = {
enter(node, parent, scope, state) {
if (this.isFunction()) {
scope.traverse(node, letReferenceFunctionVisitor, state);
return this.skip();
}
}
};
var letReferenceFunctionVisitor = {
enter(node, parent, scope, state) {
// not a direct reference
if (!this.isReferencedIdentifier()) return;
// this scope has a variable with the same name so it couldn't belong
// to our let scope
if (scope.hasOwnBinding(node.name)) return;
// not a part of our scope
if (!state.letReferences[node.name]) return;
state.closurify = true;
}
};
var hoistVarDeclarationsVisitor = {
enter(node, parent, scope, self) {
if (this.isForStatement()) {
if (isVar(node.init, node)) {
node.init = t.sequenceExpression(self.pushDeclar(node.init));
}
} else if (this.isFor()) {
if (isVar(node.left, node)) {
node.left = node.left.declarations[0].id;
}
} else if (isVar(node, parent)) {
return self.pushDeclar(node).map(t.expressionStatement);
} else if (this.isFunction()) {
return this.skip();
}
}
};
var loopLabelVisitor = {
enter(node, parent, scope, state) {
if (this.isLabeledStatement()) {
state.innerLabels.push(node.label.name);
}
}
};
var loopNodeTo = function (node) {
if (t.isBreakStatement(node)) {
return "break";
} else if (t.isContinueStatement(node)) {
return "continue";
}
};
var loopVisitor = {
enter(node, parent, scope, state) {
var replace;
if (this.isLoop()) {
state.ignoreLabeless = true;
scope.traverse(node, loopVisitor, state);
state.ignoreLabeless = false;
}
if (this.isFunction() || this.isLoop()) {
return this.skip();
}
var loopText = loopNodeTo(node);
if (loopText) {
if (node.label) {
// we shouldn't be transforming this because it exists somewhere inside
if (state.innerLabels.indexOf(node.label.name) >= 0) {
return;
}
loopText = `${loopText}|${node.label.name}`;
} else {
// we shouldn't be transforming these statements because
// they don't refer to the actual loop we're scopifying
if (state.ignoreLabeless) return;
// break statements mean something different in this context
if (t.isBreakStatement(node) && t.isSwitchCase(parent)) return;
}
state.hasBreakContinue = true;
state.map[loopText] = node;
replace = t.literal(loopText);
}
if (this.isReturnStatement()) {
state.hasReturn = true;
replace = t.objectExpression([
t.property("init", t.identifier("v"), node.argument || t.identifier("undefined"))
]);
}
if (replace) {
replace = t.returnStatement(replace);
return t.inherits(replace, node);
}
}
};
class BlockScoping {
/**
* Description
*/
constructor(loopPath?: TraversalPath, block: Object, parent: Object, scope: Scope, file: File) {
this.parent = parent;
this.scope = scope;
this.block = block;
this.file = file;
this.outsideLetReferences = object();
this.hasLetReferences = false;
this.letReferences = block._letReferences = object();
this.body = [];
if (loopPath) {
this.loopParent = loopPath.parent;
this.loopLabel = t.isLabeledStatement(this.loopParent) && this.loopParent.label;
this.loop = loopPath.node;
}
}
/**
* Start the ball rolling.
*/
run()
|
/**
* Description
*/
remap() {
var hasRemaps = false;
var letRefs = this.letReferences;
var scope = this.scope;
// alright, so since we aren't wrapping this block in a closure
// we have to check if any of our let variables collide with
// those in upper scopes and then if they do, generate a uid
// for them and replace all references with it
var remaps = object();
for (var key in letRefs) {
// just an Identifier node we collected in `getLetReferences`
// this is the defining identifier of a declaration
var ref = letRefs[key];
if (scope.parentHasBinding(key) || scope.hasGlobal(key)) {
var uid = scope.generateUidIdentifier(ref.name).name;
ref.name = uid;
hasRemaps = true;
remaps[key] = remaps[uid] = {
binding: ref,
uid: uid
};
}
}
if (!hasRemaps) return;
//
var loop = this.loop;
if (loop) {
traverseReplace(loop.right, loop, scope, remaps);
traverseReplace(loop.test, loop, scope, remaps);
traverseReplace(loop.update, loop, scope, remaps);
}
scope.traverse(this.block, replaceVisitor, remaps);
}
/**
* Description
*/
wrapClosure() {
var block = this.block;
var outsideRefs = this.outsideLetReferences;
// remap loop heads with colliding variables
if (this.loop) {
for (var name in outsideRefs) {
var id = outsideRefs[name];
if (this.scope.hasGlobal(id.name) || this.scope.parentHasBinding(id.name)) {
delete outsideRefs[id.name];
delete this.letReferences[id.name];
this.scope.rename(id.name);
this.letReferences[id.name] = id;
outsideRefs[id.name] = id;
}
}
}
// if we're inside of a for loop then we search to see if there are any
// `break`s, `continue`s, `return`s etc
this.has = this.checkLoop();
// hoist var references to retain scope
this.hoistVarDeclarations();
// turn outsideLetReferences into an array
var params = values(outsideRefs);
// build the closure that we're going to wrap the block with
var fn = t.functionExpression(null, params, t.blockStatement(block.body));
fn._aliasFunction = true;
// replace the current block body with the one we're going to build
block.body = this.body;
// build a call and a unique id that we can assign the return value to
var call = t.callExpression(fn, params);
var ret = this.scope.generateUidIdentifier("ret");
// handle generators
var hasYield = traverse.hasType(fn.body, this.scope, "YieldExpression", t.FUNCTION_TYPES);
if (hasYield) {
fn.generator = true;
call = t.yieldExpression(call, true);
}
// handlers async functions
var hasAsync = traverse.hasType(fn.body, this.scope, "AwaitExpression", t.FUNCTION_TYPES);
if (hasAsync) {
fn.async = true;
call = t.awaitExpression(call, true);
}
this.build(ret, call);
}
/**
* Description
*/
getLetReferences() {
var block = this.block;
var declarators = block._letDeclarators || [];
var declar;
//
for (var i = 0; i < declarators.length; i++) {
declar = declarators[i];
extend(this.outsideLetReferences, t.getBindingIdentifiers(declar));
}
//
if (block.body) {
for (i = 0; i < block.body.length; i++) {
declar = block.body[i];
if (isLet(declar, block)) {
declarators = declarators.concat(declar.declarations);
}
}
}
//
for (i = 0; i < declarators.length; i++) {
declar = declarators[i];
var keys = t.getBindingIdentifiers(declar);
extend(this.letReferences, keys);
this.hasLetReferences = true;
}
// no let references so we can just quit
if (!this.hasLetReferences) return;
// set let references to plain var references
standardizeLets(declarators);
var state = {
letReferences: this.letReferences,
closurify: false
};
// traverse through this block, stopping on functions and checking if they
// contain any local let references
this.scope.traverse(this.block, letReferenceBlockVisitor, state);
return state.closurify;
}
/**
* If we're inside of a loop then traverse it and check if it has one of
* the following node types `ReturnStatement`, `BreakStatement`,
* `ContinueStatement` and replace it with a return value that we can track
* later on.
*
* @returns {Object}
*/
checkLoop() {
var state = {
hasBreakContinue: false,
ignoreLabeless: false,
innerLabels: [],
hasReturn: false,
isLoop: !!this.loop,
map: {}
};
this.scope.traverse(this.block, loopLabelVisitor, state);
this.scope.traverse(this.block, loopVisitor, state);
return state;
}
/**
* Hoist all var declarations in this block to before it so they retain scope
* once we wrap everything in a closure.
*/
hoistVarDeclarations() {
traverse(this.block, hoistVarDeclarationsVisitor, this.scope, this);
}
/**
* Turn a `VariableDeclaration` into an array of `AssignmentExpressions` with
* their declarations hoisted to before the closure wrapper.
*/
pushDeclar(node: { type: "VariableDeclaration" }): Array<Object> {
this.body.push(t.variableDeclaration(node.kind, node.declarations.map(function (declar) {
return t.variableDeclarator(declar.id);
})));
var replace = [];
for (var i = 0; i < node.declarations.length; i++) {
var declar = node.declarations[i];
if (!declar.init) continue;
var expr = t.assignmentExpression("=", declar.id, declar.init);
replace.push(t.inherits(expr, declar));
}
return replace;
}
/**
* Push the closure to the body.
*/
build(ret: { type: "Identifier" }, call: { type: "CallExpression" }) {
var has = this.has;
if (has.hasReturn || has.hasBreakContinue) {
this.buildHas(ret, call);
} else {
this.body.push(t.expressionStatement(call));
}
}
/**
* Description
*/
buildHas(ret: { type: "Identifier" }, call: { type: "CallExpression" }) {
var body = this.body;
body.push(t.variableDeclaration("var", [
t.variableDeclarator(ret, call)
]));
var loop = this.loop;
var retCheck;
var has = this.has;
var cases = [];
if (has.hasReturn) {
// typeof ret === "object"
retCheck = util.template("let-scoping-return", {
RETURN: ret
});
}
if (has.hasBreakContinue) {
for (var key in has.map) {
cases.push(t.switchCase(t.literal(key), [has.map[key]]));
}
if (has.hasReturn) {
cases.push(t.switchCase(null, [retCheck]));
}
if (cases.length === 1) {
var single = cases[0];
body.push(this.file.attachAuxiliaryComment(t.ifStatement(
t.binaryExpression("===", ret, single.test),
single.consequent[0]
)));
} else {
// #998
for (var i = 0; i < cases.length; i++) {
var caseConsequent = cases[i].consequent[0];
if (t.isBreakStatement(caseConsequent) && !caseConsequent.label) {
caseConsequent.label = this.loopLabel ||= this.file.scope.generateUidIdentifier("loop");
}
}
body.push(this.file.attachAuxiliaryComment(t.switchStatement(ret, cases)));
}
} else {
if (has.hasReturn) {
body.push(this.file.attachAuxiliaryComment(retCheck));
}
}
}
}
|
{
var block = this.block;
if (block._letDone) return;
block._letDone = true;
var needsClosure = this.getLetReferences();
// this is a block within a `Function/Program` so we can safely leave it be
if (t.isFunction(this.parent) || t.isProgram(this.block)) return;
// we can skip everything
if (!this.hasLetReferences) return;
if (needsClosure) {
this.wrapClosure();
} else {
this.remap();
}
if (this.loopLabel && !t.isLabeledStatement(this.loopParent)) {
return t.labeledStatement(this.loopLabel, this.loop);
}
}
|
identifier_body
|
block-scoping.js
|
import traverse from "../../../traversal";
import object from "../../../helpers/object";
import * as util from "../../../util";
import * as t from "../../../types";
import values from "lodash/object/values";
import extend from "lodash/object/extend";
function isLet(node, parent) {
if (!t.isVariableDeclaration(node)) return false;
if (node._let) return true;
if (node.kind !== "let") return false;
// https://github.com/babel/babel/issues/255
if (isLetInitable(node, parent)) {
for (var i = 0; i < node.declarations.length; i++) {
var declar = node.declarations[i];
declar.init ||= t.identifier("undefined");
}
}
node._let = true;
node.kind = "var";
return true;
}
function isLetInitable(node, parent) {
return !t.isFor(parent) || !t.isFor(parent, { left: node });
}
function isVar(node, parent) {
return t.isVariableDeclaration(node, { kind: "var" }) && !isLet(node, parent);
}
function standardizeLets(declars) {
for (var i = 0; i < declars.length; i++) {
delete declars[i]._let;
}
}
export function check(node) {
return t.isVariableDeclaration(node) && (node.kind === "let" || node.kind === "const");
}
export function VariableDeclaration(node, parent, scope, file) {
if (!isLet(node, parent)) return;
if (isLetInitable(node) && file.transformers["es6.blockScopingTDZ"].canRun()) {
var nodes = [node];
for (var i = 0; i < node.declarations.length; i++) {
var decl = node.declarations[i];
|
var assign = t.assignmentExpression("=", decl.id, decl.init);
assign._ignoreBlockScopingTDZ = true;
nodes.push(t.expressionStatement(assign));
}
decl.init = file.addHelper("temporal-undefined");
}
node._blockHoist = 2;
return nodes;
}
}
export function Loop(node, parent, scope, file) {
var init = node.left || node.init;
if (isLet(init, node)) {
t.ensureBlock(node);
node.body._letDeclarators = [init];
}
var blockScoping = new BlockScoping(this, node.body, parent, scope, file);
return blockScoping.run();
}
export function BlockStatement(block, parent, scope, file) {
if (!t.isLoop(parent)) {
var blockScoping = new BlockScoping(null, block, parent, scope, file);
blockScoping.run();
}
}
export { BlockStatement as Program };
function replace(node, parent, scope, remaps) {
if (!t.isReferencedIdentifier(node, parent)) return;
var remap = remaps[node.name];
if (!remap) return;
var ownBinding = scope.getBindingIdentifier(node.name);
if (ownBinding === remap.binding) {
node.name = remap.uid;
} else {
// scope already has it's own binding that doesn't
// match the one we have a stored replacement for
if (this) this.skip();
}
}
var replaceVisitor = {
enter: replace
};
function traverseReplace(node, parent, scope, remaps) {
replace(node, parent, scope, remaps);
scope.traverse(node, replaceVisitor, remaps);
}
var letReferenceBlockVisitor = {
enter(node, parent, scope, state) {
if (this.isFunction()) {
scope.traverse(node, letReferenceFunctionVisitor, state);
return this.skip();
}
}
};
var letReferenceFunctionVisitor = {
enter(node, parent, scope, state) {
// not a direct reference
if (!this.isReferencedIdentifier()) return;
// this scope has a variable with the same name so it couldn't belong
// to our let scope
if (scope.hasOwnBinding(node.name)) return;
// not a part of our scope
if (!state.letReferences[node.name]) return;
state.closurify = true;
}
};
var hoistVarDeclarationsVisitor = {
enter(node, parent, scope, self) {
if (this.isForStatement()) {
if (isVar(node.init, node)) {
node.init = t.sequenceExpression(self.pushDeclar(node.init));
}
} else if (this.isFor()) {
if (isVar(node.left, node)) {
node.left = node.left.declarations[0].id;
}
} else if (isVar(node, parent)) {
return self.pushDeclar(node).map(t.expressionStatement);
} else if (this.isFunction()) {
return this.skip();
}
}
};
var loopLabelVisitor = {
enter(node, parent, scope, state) {
if (this.isLabeledStatement()) {
state.innerLabels.push(node.label.name);
}
}
};
var loopNodeTo = function (node) {
if (t.isBreakStatement(node)) {
return "break";
} else if (t.isContinueStatement(node)) {
return "continue";
}
};
var loopVisitor = {
enter(node, parent, scope, state) {
var replace;
if (this.isLoop()) {
state.ignoreLabeless = true;
scope.traverse(node, loopVisitor, state);
state.ignoreLabeless = false;
}
if (this.isFunction() || this.isLoop()) {
return this.skip();
}
var loopText = loopNodeTo(node);
if (loopText) {
if (node.label) {
// we shouldn't be transforming this because it exists somewhere inside
if (state.innerLabels.indexOf(node.label.name) >= 0) {
return;
}
loopText = `${loopText}|${node.label.name}`;
} else {
// we shouldn't be transforming these statements because
// they don't refer to the actual loop we're scopifying
if (state.ignoreLabeless) return;
// break statements mean something different in this context
if (t.isBreakStatement(node) && t.isSwitchCase(parent)) return;
}
state.hasBreakContinue = true;
state.map[loopText] = node;
replace = t.literal(loopText);
}
if (this.isReturnStatement()) {
state.hasReturn = true;
replace = t.objectExpression([
t.property("init", t.identifier("v"), node.argument || t.identifier("undefined"))
]);
}
if (replace) {
replace = t.returnStatement(replace);
return t.inherits(replace, node);
}
}
};
class BlockScoping {
/**
* Description
*/
constructor(loopPath?: TraversalPath, block: Object, parent: Object, scope: Scope, file: File) {
this.parent = parent;
this.scope = scope;
this.block = block;
this.file = file;
this.outsideLetReferences = object();
this.hasLetReferences = false;
this.letReferences = block._letReferences = object();
this.body = [];
if (loopPath) {
this.loopParent = loopPath.parent;
this.loopLabel = t.isLabeledStatement(this.loopParent) && this.loopParent.label;
this.loop = loopPath.node;
}
}
/**
* Start the ball rolling.
*/
run() {
var block = this.block;
if (block._letDone) return;
block._letDone = true;
var needsClosure = this.getLetReferences();
// this is a block within a `Function/Program` so we can safely leave it be
if (t.isFunction(this.parent) || t.isProgram(this.block)) return;
// we can skip everything
if (!this.hasLetReferences) return;
if (needsClosure) {
this.wrapClosure();
} else {
this.remap();
}
if (this.loopLabel && !t.isLabeledStatement(this.loopParent)) {
return t.labeledStatement(this.loopLabel, this.loop);
}
}
/**
* Description
*/
remap() {
var hasRemaps = false;
var letRefs = this.letReferences;
var scope = this.scope;
// alright, so since we aren't wrapping this block in a closure
// we have to check if any of our let variables collide with
// those in upper scopes and then if they do, generate a uid
// for them and replace all references with it
var remaps = object();
for (var key in letRefs) {
// just an Identifier node we collected in `getLetReferences`
// this is the defining identifier of a declaration
var ref = letRefs[key];
if (scope.parentHasBinding(key) || scope.hasGlobal(key)) {
var uid = scope.generateUidIdentifier(ref.name).name;
ref.name = uid;
hasRemaps = true;
remaps[key] = remaps[uid] = {
binding: ref,
uid: uid
};
}
}
if (!hasRemaps) return;
//
var loop = this.loop;
if (loop) {
traverseReplace(loop.right, loop, scope, remaps);
traverseReplace(loop.test, loop, scope, remaps);
traverseReplace(loop.update, loop, scope, remaps);
}
scope.traverse(this.block, replaceVisitor, remaps);
}
/**
* Description
*/
wrapClosure() {
var block = this.block;
var outsideRefs = this.outsideLetReferences;
// remap loop heads with colliding variables
if (this.loop) {
for (var name in outsideRefs) {
var id = outsideRefs[name];
if (this.scope.hasGlobal(id.name) || this.scope.parentHasBinding(id.name)) {
delete outsideRefs[id.name];
delete this.letReferences[id.name];
this.scope.rename(id.name);
this.letReferences[id.name] = id;
outsideRefs[id.name] = id;
}
}
}
// if we're inside of a for loop then we search to see if there are any
// `break`s, `continue`s, `return`s etc
this.has = this.checkLoop();
// hoist var references to retain scope
this.hoistVarDeclarations();
// turn outsideLetReferences into an array
var params = values(outsideRefs);
// build the closure that we're going to wrap the block with
var fn = t.functionExpression(null, params, t.blockStatement(block.body));
fn._aliasFunction = true;
// replace the current block body with the one we're going to build
block.body = this.body;
// build a call and a unique id that we can assign the return value to
var call = t.callExpression(fn, params);
var ret = this.scope.generateUidIdentifier("ret");
// handle generators
var hasYield = traverse.hasType(fn.body, this.scope, "YieldExpression", t.FUNCTION_TYPES);
if (hasYield) {
fn.generator = true;
call = t.yieldExpression(call, true);
}
// handlers async functions
var hasAsync = traverse.hasType(fn.body, this.scope, "AwaitExpression", t.FUNCTION_TYPES);
if (hasAsync) {
fn.async = true;
call = t.awaitExpression(call, true);
}
this.build(ret, call);
}
/**
* Description
*/
getLetReferences() {
var block = this.block;
var declarators = block._letDeclarators || [];
var declar;
//
for (var i = 0; i < declarators.length; i++) {
declar = declarators[i];
extend(this.outsideLetReferences, t.getBindingIdentifiers(declar));
}
//
if (block.body) {
for (i = 0; i < block.body.length; i++) {
declar = block.body[i];
if (isLet(declar, block)) {
declarators = declarators.concat(declar.declarations);
}
}
}
//
for (i = 0; i < declarators.length; i++) {
declar = declarators[i];
var keys = t.getBindingIdentifiers(declar);
extend(this.letReferences, keys);
this.hasLetReferences = true;
}
// no let references so we can just quit
if (!this.hasLetReferences) return;
// set let references to plain var references
standardizeLets(declarators);
var state = {
letReferences: this.letReferences,
closurify: false
};
// traverse through this block, stopping on functions and checking if they
// contain any local let references
this.scope.traverse(this.block, letReferenceBlockVisitor, state);
return state.closurify;
}
/**
* If we're inside of a loop then traverse it and check if it has one of
* the following node types `ReturnStatement`, `BreakStatement`,
* `ContinueStatement` and replace it with a return value that we can track
* later on.
*
* @returns {Object}
*/
checkLoop() {
var state = {
hasBreakContinue: false,
ignoreLabeless: false,
innerLabels: [],
hasReturn: false,
isLoop: !!this.loop,
map: {}
};
this.scope.traverse(this.block, loopLabelVisitor, state);
this.scope.traverse(this.block, loopVisitor, state);
return state;
}
/**
* Hoist all var declarations in this block to before it so they retain scope
* once we wrap everything in a closure.
*/
hoistVarDeclarations() {
traverse(this.block, hoistVarDeclarationsVisitor, this.scope, this);
}
/**
* Turn a `VariableDeclaration` into an array of `AssignmentExpressions` with
* their declarations hoisted to before the closure wrapper.
*/
pushDeclar(node: { type: "VariableDeclaration" }): Array<Object> {
this.body.push(t.variableDeclaration(node.kind, node.declarations.map(function (declar) {
return t.variableDeclarator(declar.id);
})));
var replace = [];
for (var i = 0; i < node.declarations.length; i++) {
var declar = node.declarations[i];
if (!declar.init) continue;
var expr = t.assignmentExpression("=", declar.id, declar.init);
replace.push(t.inherits(expr, declar));
}
return replace;
}
/**
* Push the closure to the body.
*/
build(ret: { type: "Identifier" }, call: { type: "CallExpression" }) {
var has = this.has;
if (has.hasReturn || has.hasBreakContinue) {
this.buildHas(ret, call);
} else {
this.body.push(t.expressionStatement(call));
}
}
/**
* Description
*/
buildHas(ret: { type: "Identifier" }, call: { type: "CallExpression" }) {
var body = this.body;
body.push(t.variableDeclaration("var", [
t.variableDeclarator(ret, call)
]));
var loop = this.loop;
var retCheck;
var has = this.has;
var cases = [];
if (has.hasReturn) {
// typeof ret === "object"
retCheck = util.template("let-scoping-return", {
RETURN: ret
});
}
if (has.hasBreakContinue) {
for (var key in has.map) {
cases.push(t.switchCase(t.literal(key), [has.map[key]]));
}
if (has.hasReturn) {
cases.push(t.switchCase(null, [retCheck]));
}
if (cases.length === 1) {
var single = cases[0];
body.push(this.file.attachAuxiliaryComment(t.ifStatement(
t.binaryExpression("===", ret, single.test),
single.consequent[0]
)));
} else {
// #998
for (var i = 0; i < cases.length; i++) {
var caseConsequent = cases[i].consequent[0];
if (t.isBreakStatement(caseConsequent) && !caseConsequent.label) {
caseConsequent.label = this.loopLabel ||= this.file.scope.generateUidIdentifier("loop");
}
}
body.push(this.file.attachAuxiliaryComment(t.switchStatement(ret, cases)));
}
} else {
if (has.hasReturn) {
body.push(this.file.attachAuxiliaryComment(retCheck));
}
}
}
}
|
if (decl.init) {
|
random_line_split
|
block-scoping.js
|
import traverse from "../../../traversal";
import object from "../../../helpers/object";
import * as util from "../../../util";
import * as t from "../../../types";
import values from "lodash/object/values";
import extend from "lodash/object/extend";
function isLet(node, parent) {
if (!t.isVariableDeclaration(node)) return false;
if (node._let) return true;
if (node.kind !== "let") return false;
// https://github.com/babel/babel/issues/255
if (isLetInitable(node, parent)) {
for (var i = 0; i < node.declarations.length; i++) {
var declar = node.declarations[i];
declar.init ||= t.identifier("undefined");
}
}
node._let = true;
node.kind = "var";
return true;
}
function isLetInitable(node, parent) {
return !t.isFor(parent) || !t.isFor(parent, { left: node });
}
function isVar(node, parent) {
return t.isVariableDeclaration(node, { kind: "var" }) && !isLet(node, parent);
}
function standardizeLets(declars) {
for (var i = 0; i < declars.length; i++) {
delete declars[i]._let;
}
}
export function check(node) {
return t.isVariableDeclaration(node) && (node.kind === "let" || node.kind === "const");
}
export function VariableDeclaration(node, parent, scope, file) {
if (!isLet(node, parent)) return;
if (isLetInitable(node) && file.transformers["es6.blockScopingTDZ"].canRun()) {
var nodes = [node];
for (var i = 0; i < node.declarations.length; i++) {
var decl = node.declarations[i];
if (decl.init) {
var assign = t.assignmentExpression("=", decl.id, decl.init);
assign._ignoreBlockScopingTDZ = true;
nodes.push(t.expressionStatement(assign));
}
decl.init = file.addHelper("temporal-undefined");
}
node._blockHoist = 2;
return nodes;
}
}
export function Loop(node, parent, scope, file) {
var init = node.left || node.init;
if (isLet(init, node)) {
t.ensureBlock(node);
node.body._letDeclarators = [init];
}
var blockScoping = new BlockScoping(this, node.body, parent, scope, file);
return blockScoping.run();
}
export function BlockStatement(block, parent, scope, file) {
if (!t.isLoop(parent)) {
var blockScoping = new BlockScoping(null, block, parent, scope, file);
blockScoping.run();
}
}
export { BlockStatement as Program };
function replace(node, parent, scope, remaps) {
if (!t.isReferencedIdentifier(node, parent)) return;
var remap = remaps[node.name];
if (!remap) return;
var ownBinding = scope.getBindingIdentifier(node.name);
if (ownBinding === remap.binding) {
node.name = remap.uid;
} else {
// scope already has it's own binding that doesn't
// match the one we have a stored replacement for
if (this) this.skip();
}
}
var replaceVisitor = {
enter: replace
};
function traverseReplace(node, parent, scope, remaps) {
replace(node, parent, scope, remaps);
scope.traverse(node, replaceVisitor, remaps);
}
var letReferenceBlockVisitor = {
enter(node, parent, scope, state) {
if (this.isFunction()) {
scope.traverse(node, letReferenceFunctionVisitor, state);
return this.skip();
}
}
};
var letReferenceFunctionVisitor = {
enter(node, parent, scope, state) {
// not a direct reference
if (!this.isReferencedIdentifier()) return;
// this scope has a variable with the same name so it couldn't belong
// to our let scope
if (scope.hasOwnBinding(node.name)) return;
// not a part of our scope
if (!state.letReferences[node.name]) return;
state.closurify = true;
}
};
var hoistVarDeclarationsVisitor = {
enter(node, parent, scope, self) {
if (this.isForStatement()) {
if (isVar(node.init, node)) {
node.init = t.sequenceExpression(self.pushDeclar(node.init));
}
} else if (this.isFor()) {
if (isVar(node.left, node)) {
node.left = node.left.declarations[0].id;
}
} else if (isVar(node, parent)) {
return self.pushDeclar(node).map(t.expressionStatement);
} else if (this.isFunction()) {
return this.skip();
}
}
};
var loopLabelVisitor = {
enter(node, parent, scope, state) {
if (this.isLabeledStatement()) {
state.innerLabels.push(node.label.name);
}
}
};
var loopNodeTo = function (node) {
if (t.isBreakStatement(node)) {
return "break";
} else if (t.isContinueStatement(node)) {
return "continue";
}
};
var loopVisitor = {
enter(node, parent, scope, state) {
var replace;
if (this.isLoop()) {
state.ignoreLabeless = true;
scope.traverse(node, loopVisitor, state);
state.ignoreLabeless = false;
}
if (this.isFunction() || this.isLoop()) {
return this.skip();
}
var loopText = loopNodeTo(node);
if (loopText)
|
if (this.isReturnStatement()) {
state.hasReturn = true;
replace = t.objectExpression([
t.property("init", t.identifier("v"), node.argument || t.identifier("undefined"))
]);
}
if (replace) {
replace = t.returnStatement(replace);
return t.inherits(replace, node);
}
}
};
class BlockScoping {
/**
* Description
*/
constructor(loopPath?: TraversalPath, block: Object, parent: Object, scope: Scope, file: File) {
this.parent = parent;
this.scope = scope;
this.block = block;
this.file = file;
this.outsideLetReferences = object();
this.hasLetReferences = false;
this.letReferences = block._letReferences = object();
this.body = [];
if (loopPath) {
this.loopParent = loopPath.parent;
this.loopLabel = t.isLabeledStatement(this.loopParent) && this.loopParent.label;
this.loop = loopPath.node;
}
}
/**
* Start the ball rolling.
*/
run() {
var block = this.block;
if (block._letDone) return;
block._letDone = true;
var needsClosure = this.getLetReferences();
// this is a block within a `Function/Program` so we can safely leave it be
if (t.isFunction(this.parent) || t.isProgram(this.block)) return;
// we can skip everything
if (!this.hasLetReferences) return;
if (needsClosure) {
this.wrapClosure();
} else {
this.remap();
}
if (this.loopLabel && !t.isLabeledStatement(this.loopParent)) {
return t.labeledStatement(this.loopLabel, this.loop);
}
}
/**
* Description
*/
remap() {
var hasRemaps = false;
var letRefs = this.letReferences;
var scope = this.scope;
// alright, so since we aren't wrapping this block in a closure
// we have to check if any of our let variables collide with
// those in upper scopes and then if they do, generate a uid
// for them and replace all references with it
var remaps = object();
for (var key in letRefs) {
// just an Identifier node we collected in `getLetReferences`
// this is the defining identifier of a declaration
var ref = letRefs[key];
if (scope.parentHasBinding(key) || scope.hasGlobal(key)) {
var uid = scope.generateUidIdentifier(ref.name).name;
ref.name = uid;
hasRemaps = true;
remaps[key] = remaps[uid] = {
binding: ref,
uid: uid
};
}
}
if (!hasRemaps) return;
//
var loop = this.loop;
if (loop) {
traverseReplace(loop.right, loop, scope, remaps);
traverseReplace(loop.test, loop, scope, remaps);
traverseReplace(loop.update, loop, scope, remaps);
}
scope.traverse(this.block, replaceVisitor, remaps);
}
/**
* Description
*/
wrapClosure() {
var block = this.block;
var outsideRefs = this.outsideLetReferences;
// remap loop heads with colliding variables
if (this.loop) {
for (var name in outsideRefs) {
var id = outsideRefs[name];
if (this.scope.hasGlobal(id.name) || this.scope.parentHasBinding(id.name)) {
delete outsideRefs[id.name];
delete this.letReferences[id.name];
this.scope.rename(id.name);
this.letReferences[id.name] = id;
outsideRefs[id.name] = id;
}
}
}
// if we're inside of a for loop then we search to see if there are any
// `break`s, `continue`s, `return`s etc
this.has = this.checkLoop();
// hoist var references to retain scope
this.hoistVarDeclarations();
// turn outsideLetReferences into an array
var params = values(outsideRefs);
// build the closure that we're going to wrap the block with
var fn = t.functionExpression(null, params, t.blockStatement(block.body));
fn._aliasFunction = true;
// replace the current block body with the one we're going to build
block.body = this.body;
// build a call and a unique id that we can assign the return value to
var call = t.callExpression(fn, params);
var ret = this.scope.generateUidIdentifier("ret");
// handle generators
var hasYield = traverse.hasType(fn.body, this.scope, "YieldExpression", t.FUNCTION_TYPES);
if (hasYield) {
fn.generator = true;
call = t.yieldExpression(call, true);
}
// handlers async functions
var hasAsync = traverse.hasType(fn.body, this.scope, "AwaitExpression", t.FUNCTION_TYPES);
if (hasAsync) {
fn.async = true;
call = t.awaitExpression(call, true);
}
this.build(ret, call);
}
/**
* Description
*/
getLetReferences() {
var block = this.block;
var declarators = block._letDeclarators || [];
var declar;
//
for (var i = 0; i < declarators.length; i++) {
declar = declarators[i];
extend(this.outsideLetReferences, t.getBindingIdentifiers(declar));
}
//
if (block.body) {
for (i = 0; i < block.body.length; i++) {
declar = block.body[i];
if (isLet(declar, block)) {
declarators = declarators.concat(declar.declarations);
}
}
}
//
for (i = 0; i < declarators.length; i++) {
declar = declarators[i];
var keys = t.getBindingIdentifiers(declar);
extend(this.letReferences, keys);
this.hasLetReferences = true;
}
// no let references so we can just quit
if (!this.hasLetReferences) return;
// set let references to plain var references
standardizeLets(declarators);
var state = {
letReferences: this.letReferences,
closurify: false
};
// traverse through this block, stopping on functions and checking if they
// contain any local let references
this.scope.traverse(this.block, letReferenceBlockVisitor, state);
return state.closurify;
}
/**
* If we're inside of a loop then traverse it and check if it has one of
* the following node types `ReturnStatement`, `BreakStatement`,
* `ContinueStatement` and replace it with a return value that we can track
* later on.
*
* @returns {Object}
*/
checkLoop() {
var state = {
hasBreakContinue: false,
ignoreLabeless: false,
innerLabels: [],
hasReturn: false,
isLoop: !!this.loop,
map: {}
};
this.scope.traverse(this.block, loopLabelVisitor, state);
this.scope.traverse(this.block, loopVisitor, state);
return state;
}
/**
* Hoist all var declarations in this block to before it so they retain scope
* once we wrap everything in a closure.
*/
hoistVarDeclarations() {
traverse(this.block, hoistVarDeclarationsVisitor, this.scope, this);
}
/**
* Turn a `VariableDeclaration` into an array of `AssignmentExpressions` with
* their declarations hoisted to before the closure wrapper.
*/
pushDeclar(node: { type: "VariableDeclaration" }): Array<Object> {
this.body.push(t.variableDeclaration(node.kind, node.declarations.map(function (declar) {
return t.variableDeclarator(declar.id);
})));
var replace = [];
for (var i = 0; i < node.declarations.length; i++) {
var declar = node.declarations[i];
if (!declar.init) continue;
var expr = t.assignmentExpression("=", declar.id, declar.init);
replace.push(t.inherits(expr, declar));
}
return replace;
}
/**
* Push the closure to the body.
*/
build(ret: { type: "Identifier" }, call: { type: "CallExpression" }) {
var has = this.has;
if (has.hasReturn || has.hasBreakContinue) {
this.buildHas(ret, call);
} else {
this.body.push(t.expressionStatement(call));
}
}
/**
* Description
*/
buildHas(ret: { type: "Identifier" }, call: { type: "CallExpression" }) {
var body = this.body;
// `var _ret = (closure)();` — capture the signal the closure returned:
// a break/continue marker string or a `{ v: value }` return box.
body.push(t.variableDeclaration("var", [
t.variableDeclarator(ret, call)
]));
var loop = this.loop;
var retCheck;
var has = this.has;
var cases = [];
if (has.hasReturn) {
// typeof ret === "object"
// template expands to: if (typeof _ret === "object") return _ret.v;
retCheck = util.template("let-scoping-return", {
RETURN: ret
});
}
if (has.hasBreakContinue) {
// one switch case per distinct break/continue marker collected by loopVisitor
for (var key in has.map) {
cases.push(t.switchCase(t.literal(key), [has.map[key]]));
}
if (has.hasReturn) {
// default case handles the `{ v: value }` return box
cases.push(t.switchCase(null, [retCheck]));
}
if (cases.length === 1) {
// a single case collapses to a plain `if`; no switch needed
var single = cases[0];
body.push(this.file.attachAuxiliaryComment(t.ifStatement(
t.binaryExpression("===", ret, single.test),
single.consequent[0]
)));
} else {
// #998
// an unlabelled `break` inside the generated `switch` would break the
// switch itself, so label the enclosing loop and retarget those breaks
for (var i = 0; i < cases.length; i++) {
var caseConsequent = cases[i].consequent[0];
if (t.isBreakStatement(caseConsequent) && !caseConsequent.label) {
caseConsequent.label = this.loopLabel ||= this.file.scope.generateUidIdentifier("loop");
}
}
body.push(this.file.attachAuxiliaryComment(t.switchStatement(ret, cases)));
}
} else {
if (has.hasReturn) {
body.push(this.file.attachAuxiliaryComment(retCheck));
}
}
}
}
|
{
if (node.label) {
// we shouldn't be transforming this because it exists somewhere inside
if (state.innerLabels.indexOf(node.label.name) >= 0) {
return;
}
loopText = `${loopText}|${node.label.name}`;
} else {
// we shouldn't be transforming these statements because
// they don't refer to the actual loop we're scopifying
if (state.ignoreLabeless) return;
// break statements mean something different in this context
if (t.isBreakStatement(node) && t.isSwitchCase(parent)) return;
}
state.hasBreakContinue = true;
state.map[loopText] = node;
replace = t.literal(loopText);
}
|
conditional_block
|
block-scoping.js
|
import traverse from "../../../traversal";
import object from "../../../helpers/object";
import * as util from "../../../util";
import * as t from "../../../types";
import values from "lodash/object/values";
import extend from "lodash/object/extend";
// Test whether `node` is a `let` declaration (or one we already converted),
// converting it to `var` in place on first sight.
function isLet(node, parent) {
  if (!t.isVariableDeclaration(node)) return false;
  if (node._let) return true;
  if (node.kind !== "let") return false;

  // https://github.com/babel/babel/issues/255
  // Give every uninitialised declarator an explicit `undefined`.
  if (isLetInitable(node, parent)) {
    for (var declar of node.declarations) {
      declar.init ||= t.identifier("undefined");
    }
  }

  // Downgrade to `var` and remember that this used to be a `let`.
  node._let = true;
  node.kind = "var";
  return true;
}
// A declaration used as the left-hand side of a `for...in`/`for...of` head
// must not be given initialisers.
function isLetInitable(node, parent) {
  return !(t.isFor(parent) && t.isFor(parent, { left: node }));
}
function isVar(node, parent) {
// True for declarations that are literally `kind: "var"` and were not one
// of our converted `let`s. NOTE: `isLet` is evaluated second on purpose —
// it mutates real `let` declarations (downgrading them to `var`), and it
// reports converted ones via the `_let` flag.
return t.isVariableDeclaration(node, { kind: "var" }) && !isLet(node, parent);
}
// Strip the `_let` bookkeeping flag so the declarations read as plain vars
// from here on.
function standardizeLets(declars) {
  for (var declar of declars) {
    delete declar._let;
  }
}
// Entry gate for this transformer: fires only on block-scoped declarations.
export function check(node) {
return t.isVariableDeclaration(node) && (node.kind === "let" || node.kind === "const");
}
export function VariableDeclaration(node, parent, scope, file) {
// `isLet` converts the declaration to `var` in place when it matches.
if (!isLet(node, parent)) return;
// When the TDZ transformer is active, split `let x = v` into a declaration
// initialised with the `temporal-undefined` sentinel plus a separate
// assignment, so TDZ checks can observe the sentinel before assignment.
// NOTE(review): `isLetInitable(node)` is called without `parent`, which
// makes it always return true here — confirm this is intended.
if (isLetInitable(node) && file.transformers["es6.blockScopingTDZ"].canRun()) {
var nodes = [node];
for (var i = 0; i < node.declarations.length; i++) {
var decl = node.declarations[i];
if (decl.init) {
var assign = t.assignmentExpression("=", decl.id, decl.init);
// flag so the TDZ transformer leaves this synthetic assignment alone
assign._ignoreBlockScopingTDZ = true;
nodes.push(t.expressionStatement(assign));
}
decl.init = file.addHelper("temporal-undefined");
}
// hoist the declaration above the generated assignments
node._blockHoist = 2;
return nodes;
}
}
export function Loop(node, parent, scope, file) {
  // A `let` in the loop head (`init` of `for`, `left` of `for-in`/`for-of`)
  // scopes to the body, so record its declarators on the body before
  // running block scoping over it.
  var head = node.left || node.init;
  if (isLet(head, node)) {
    t.ensureBlock(node);
    node.body._letDeclarators = [head];
  }
  return new BlockScoping(this, node.body, parent, scope, file).run();
}
export function BlockStatement(block, parent, scope, file) {
  // Loop bodies are handled by the `Loop` visitor above; everything else
  // gets its own block-scoping pass here.
  if (t.isLoop(parent)) return;
  new BlockScoping(null, block, parent, scope, file).run();
}
export { BlockStatement as Program };
// Rewrite one identifier reference according to `remaps`. Callable both as
// a traversal visitor (with a path as `this`) and as a plain function.
function replace(node, parent, scope, remaps) {
  if (!t.isReferencedIdentifier(node, parent)) return;

  var remap = remaps[node.name];
  if (!remap) return;

  // Only rewrite when this scope's binding is the one we renamed; a
  // shadowing binding with the same name must be left untouched.
  var ownBinding = scope.getBindingIdentifier(node.name);
  if (ownBinding !== remap.binding) {
    // scope already has its own, different binding — don't descend further
    if (this) this.skip();
    return;
  }
  node.name = remap.uid;
}
// Traversal visitor form of `replace`.
var replaceVisitor = {
enter: replace
};
// Apply the remap to `node` itself (traversal visitors do not visit the
// root node) and then to everything beneath it.
function traverseReplace(node, parent, scope, remaps) {
replace(node, parent, scope, remaps);
scope.traverse(node, replaceVisitor, remaps);
}
// Walks the block looking for nested functions; only references made from
// inside a function can force the closure wrapper, so each function found
// is handed to `letReferenceFunctionVisitor` and then skipped.
var letReferenceBlockVisitor = {
  enter(node, parent, scope, state) {
    if (!this.isFunction()) return;
    scope.traverse(node, letReferenceFunctionVisitor, state);
    return this.skip();
  }
};
// Inside a nested function: mark the block as needing a closure when the
// function references one of the block's `let` bindings.
var letReferenceFunctionVisitor = {
  enter(node, parent, scope, state) {
    var isOurLetRef =
      this.isReferencedIdentifier() &&   // an actual reference, not a key/label
      !scope.hasOwnBinding(node.name) && // not shadowed by a local binding
      state.letReferences[node.name];    // and it names one of our lets
    if (isOurLetRef) state.closurify = true;
  }
};
// Visitor used by `hoistVarDeclarations`: turns every `var` declaration in
// the block into assignment expressions; `self.pushDeclar` hoists the bare
// declarators onto the outer body.
var hoistVarDeclarationsVisitor = {
enter(node, parent, scope, self) {
if (this.isForStatement()) {
// `for (var i = 0; ...)` — keep the assignments in the init slot
if (isVar(node.init, node)) {
node.init = t.sequenceExpression(self.pushDeclar(node.init));
}
} else if (this.isFor()) {
// `for (var x in/of ...)` — only the identifier stays in the head
if (isVar(node.left, node)) {
node.left = node.left.declarations[0].id;
}
} else if (isVar(node, parent)) {
// plain statement-position `var`
return self.pushDeclar(node).map(t.expressionStatement);
} else if (this.isFunction()) {
// nested functions keep their own `var` scope — don't descend
return this.skip();
}
}
};
// Records every label declared inside the loop body so `loopVisitor` can
// ignore break/continue statements that target those inner labels.
var loopLabelVisitor = {
enter(node, parent, scope, state) {
if (this.isLabeledStatement()) {
state.innerLabels.push(node.label.name);
}
}
};
// Map a break/continue node to its marker keyword; undefined for any
// other node type.
var loopNodeTo = function (node) {
  if (t.isBreakStatement(node)) return "break";
  if (t.isContinueStatement(node)) return "continue";
};
// Replaces `break`/`continue`/`return` statements that escape the loop body
// with `return <marker>` so the closure can signal the outer dispatch code
// built by `buildHas`.
var loopVisitor = {
enter(node, parent, scope, state) {
var replace;
if (this.isLoop()) {
// inner loops own their own unlabelled break/continue; only labelled
// ones targeting an outer label matter inside them
state.ignoreLabeless = true;
scope.traverse(node, loopVisitor, state);
state.ignoreLabeless = false;
}
if (this.isFunction() || this.isLoop()) {
return this.skip();
}
var loopText = loopNodeTo(node);
if (loopText) {
if (node.label) {
// we shouldn't be transforming this because it exists somewhere inside
if (state.innerLabels.indexOf(node.label.name) >= 0) {
return;
}
// marker string encodes both the keyword and its target label
loopText = `${loopText}|${node.label.name}`;
} else {
// we shouldn't be transforming these statements because
// they don't refer to the actual loop we're scopifying
if (state.ignoreLabeless) return;
// break statements mean something different in this context
if (t.isBreakStatement(node) && t.isSwitchCase(parent)) return;
}
state.hasBreakContinue = true;
state.map[loopText] = node;
replace = t.literal(loopText);
}
if (this.isReturnStatement()) {
// box the return value as `{ v: value }` so it is distinguishable from
// the string markers above
state.hasReturn = true;
replace = t.objectExpression([
t.property("init", t.identifier("v"), node.argument || t.identifier("undefined"))
]);
}
if (replace) {
replace = t.returnStatement(replace);
return t.inherits(replace, node);
}
}
};
class BlockScoping {
/**
* Description
*/
|
(loopPath?: TraversalPath, block: Object, parent: Object, scope: Scope, file: File) {
this.parent = parent;
this.scope = scope;
this.block = block;
this.file = file;
this.outsideLetReferences = object();
this.hasLetReferences = false;
this.letReferences = block._letReferences = object();
this.body = [];
if (loopPath) {
this.loopParent = loopPath.parent;
this.loopLabel = t.isLabeledStatement(this.loopParent) && this.loopParent.label;
this.loop = loopPath.node;
}
}
/**
* Start the ball rolling.
*/
run() {
var block = this.block;
// guard: never process the same block twice
if (block._letDone) return;
block._letDone = true;
var needsClosure = this.getLetReferences();
// this is a block within a `Function/Program` so we can safely leave it be
if (t.isFunction(this.parent) || t.isProgram(this.block)) return;
// we can skip everything
if (!this.hasLetReferences) return;
if (needsClosure) {
this.wrapClosure();
} else {
this.remap();
}
// re-attach a label that `buildHas` synthesised for the loop
if (this.loopLabel && !t.isLabeledStatement(this.loopParent)) {
return t.labeledStatement(this.loopLabel, this.loop);
}
}
/**
* Description
*/
remap() {
// Rename colliding `let` bindings in place instead of wrapping in a closure.
var hasRemaps = false;
var letRefs = this.letReferences;
var scope = this.scope;
// alright, so since we aren't wrapping this block in a closure
// we have to check if any of our let variables collide with
// those in upper scopes and then if they do, generate a uid
// for them and replace all references with it
var remaps = object();
for (var key in letRefs) {
// just an Identifier node we collected in `getLetReferences`
// this is the defining identifier of a declaration
var ref = letRefs[key];
if (scope.parentHasBinding(key) || scope.hasGlobal(key)) {
var uid = scope.generateUidIdentifier(ref.name).name;
// mutate the defining identifier directly; references are rewritten below
ref.name = uid;
hasRemaps = true;
// index under both names so lookups succeed before and after renaming
remaps[key] = remaps[uid] = {
binding: ref,
uid: uid
};
}
}
if (!hasRemaps) return;
//
// the loop head (right/test/update) lives outside the block node, so it
// must be remapped explicitly before walking the block body
var loop = this.loop;
if (loop) {
traverseReplace(loop.right, loop, scope, remaps);
traverseReplace(loop.test, loop, scope, remaps);
traverseReplace(loop.update, loop, scope, remaps);
}
scope.traverse(this.block, replaceVisitor, remaps);
}
/**
* Description
*/
wrapClosure() {
// Replace the block's body with `(function (refs) { ...body... })(refs)`
// so each iteration/entry gets fresh bindings.
var block = this.block;
var outsideRefs = this.outsideLetReferences;
// remap loop heads with colliding variables
if (this.loop) {
for (var name in outsideRefs) {
var id = outsideRefs[name];
if (this.scope.hasGlobal(id.name) || this.scope.parentHasBinding(id.name)) {
delete outsideRefs[id.name];
delete this.letReferences[id.name];
this.scope.rename(id.name);
this.letReferences[id.name] = id;
outsideRefs[id.name] = id;
}
}
}
// if we're inside of a for loop then we search to see if there are any
// `break`s, `continue`s, `return`s etc
this.has = this.checkLoop();
// hoist var references to retain scope
this.hoistVarDeclarations();
// turn outsideLetReferences into an array
var params = values(outsideRefs);
// build the closure that we're going to wrap the block with
// (same identifiers as params and args: the call passes the outer values in)
var fn = t.functionExpression(null, params, t.blockStatement(block.body));
fn._aliasFunction = true;
// replace the current block body with the one we're going to build
block.body = this.body;
// build a call and a unique id that we can assign the return value to
var call = t.callExpression(fn, params);
var ret = this.scope.generateUidIdentifier("ret");
// handle generators: `yield` can't cross a plain function boundary, so the
// closure becomes a generator delegated to with `yield*`
var hasYield = traverse.hasType(fn.body, this.scope, "YieldExpression", t.FUNCTION_TYPES);
if (hasYield) {
fn.generator = true;
call = t.yieldExpression(call, true);
}
// handlers async functions: same trick with `await`
var hasAsync = traverse.hasType(fn.body, this.scope, "AwaitExpression", t.FUNCTION_TYPES);
if (hasAsync) {
fn.async = true;
call = t.awaitExpression(call, true);
}
this.build(ret, call);
}
/**
* Description
*/
getLetReferences() {
// Collect every `let` declarator owned by this block (including ones the
// `Loop` visitor stashed on `_letDeclarators`) and decide whether a
// closure is required. Returns truthy when nested functions reference
// the collected bindings.
var block = this.block;
var declarators = block._letDeclarators || [];
var declar;
//
// loop-head declarators also count as references from outside the body
for (var i = 0; i < declarators.length; i++) {
declar = declarators[i];
extend(this.outsideLetReferences, t.getBindingIdentifiers(declar));
}
//
// gather `let` declarations from the block's own statements; `isLet`
// converts each matching declaration to `var` as a side effect
if (block.body) {
for (i = 0; i < block.body.length; i++) {
declar = block.body[i];
if (isLet(declar, block)) {
declarators = declarators.concat(declar.declarations);
}
}
}
//
for (i = 0; i < declarators.length; i++) {
declar = declarators[i];
var keys = t.getBindingIdentifiers(declar);
extend(this.letReferences, keys);
this.hasLetReferences = true;
}
// no let references so we can just quit
if (!this.hasLetReferences) return;
// set let references to plain var references
standardizeLets(declarators);
var state = {
letReferences: this.letReferences,
closurify: false
};
// traverse through this block, stopping on functions and checking if they
// contain any local let references
this.scope.traverse(this.block, letReferenceBlockVisitor, state);
return state.closurify;
}
/**
* If we're inside of a loop then traverse it and check if it has one of
* the following node types `ReturnStatement`, `BreakStatement`,
* `ContinueStatement` and replace it with a return value that we can track
* later on.
*
* @returns {Object}
*/
checkLoop() {
var state = {
hasBreakContinue: false,
ignoreLabeless: false,
innerLabels: [],
hasReturn: false,
isLoop: !!this.loop,
map: {}
};
this.scope.traverse(this.block, loopLabelVisitor, state);
this.scope.traverse(this.block, loopVisitor, state);
return state;
}
/**
* Hoist all var declarations in this block to before it so they retain scope
* once we wrap everything in a closure.
*/
hoistVarDeclarations() {
// Rewrite every `var` declaration inside the block into plain assignments;
// the visitor calls back into `this.pushDeclar`, which hoists the bare
// declarators onto `this.body` so they survive the closure wrapper.
traverse(this.block, hoistVarDeclarationsVisitor, this.scope, this);
}
/**
* Turn a `VariableDeclaration` into an array of `AssignmentExpressions` with
* their declarations hoisted to before the closure wrapper.
*/
pushDeclar(node: { type: "VariableDeclaration" }): Array<Object> {
this.body.push(t.variableDeclaration(node.kind, node.declarations.map(function (declar) {
return t.variableDeclarator(declar.id);
})));
var replace = [];
for (var i = 0; i < node.declarations.length; i++) {
var declar = node.declarations[i];
if (!declar.init) continue;
var expr = t.assignmentExpression("=", declar.id, declar.init);
replace.push(t.inherits(expr, declar));
}
return replace;
}
/**
* Push the closure to the body.
*/
build(ret: { type: "Identifier" }, call: { type: "CallExpression" }) {
var has = this.has;
if (has.hasReturn || has.hasBreakContinue) {
this.buildHas(ret, call);
} else {
this.body.push(t.expressionStatement(call));
}
}
/**
* Description
*/
buildHas(ret: { type: "Identifier" }, call: { type: "CallExpression" }) {
var body = this.body;
// `var _ret = (closure)();` — capture the signal the closure returned:
// a break/continue marker string or a `{ v: value }` return box.
body.push(t.variableDeclaration("var", [
t.variableDeclarator(ret, call)
]));
var loop = this.loop;
var retCheck;
var has = this.has;
var cases = [];
if (has.hasReturn) {
// typeof ret === "object"
// template expands to: if (typeof _ret === "object") return _ret.v;
retCheck = util.template("let-scoping-return", {
RETURN: ret
});
}
if (has.hasBreakContinue) {
// one switch case per distinct break/continue marker collected by loopVisitor
for (var key in has.map) {
cases.push(t.switchCase(t.literal(key), [has.map[key]]));
}
if (has.hasReturn) {
// default case handles the `{ v: value }` return box
cases.push(t.switchCase(null, [retCheck]));
}
if (cases.length === 1) {
// a single case collapses to a plain `if`; no switch needed
var single = cases[0];
body.push(this.file.attachAuxiliaryComment(t.ifStatement(
t.binaryExpression("===", ret, single.test),
single.consequent[0]
)));
} else {
// #998
// an unlabelled `break` inside the generated `switch` would break the
// switch itself, so label the enclosing loop and retarget those breaks
for (var i = 0; i < cases.length; i++) {
var caseConsequent = cases[i].consequent[0];
if (t.isBreakStatement(caseConsequent) && !caseConsequent.label) {
caseConsequent.label = this.loopLabel ||= this.file.scope.generateUidIdentifier("loop");
}
}
body.push(this.file.attachAuxiliaryComment(t.switchStatement(ret, cases)));
}
} else {
if (has.hasReturn) {
body.push(this.file.attachAuxiliaryComment(retCheck));
}
}
}
}
|
constructor
|
identifier_name
|
import_visits.py
|
import csv, os
from Products.CMFCore.utils import getToolByName
def get_folder(self, type, name):
    """Return the first catalogued object of portal_type ``type`` titled ``name``.

    ``self`` is the context providing ``queryCatalog``. Raises IndexError when
    the catalog has no matching brain.
    """
    folder_brains = self.queryCatalog({'portal_type': type, 'title': name})[0]
    return folder_brains.getObject()
def create_object_in_directory(self, container, type):
    """Create a new content object of ``type`` inside ``container`` and return it.

    The id comes from ``container.generateUniqueId``, so repeated calls create
    distinct objects. ``self`` is unused but kept for call-site compatibility.
    """
    id = container.generateUniqueId(type)
    container.invokeFactory(id=id, type_name=type)
    return container[id]
def get_type_or_create(self, type, folder, cmp, val):
    """Return the first object of ``type`` whose catalog index ``cmp`` equals
    ``val``, creating a fresh one inside ``folder`` when nothing matches.
    """
    brains = self.queryCatalog({'portal_type': type, cmp: val})
    if brains:
        return brains[0].getObject()
    return create_object_in_directory(self, folder, type)
def set_reference(self, object, visit):
    """Append ``visit`` to ``object``'s visit references, skipping duplicates.

    ``self`` is unused but kept for call-site compatibility; ``object`` must
    provide ``getVisits``/``setVisits``.
    """
    existing_visits = object.getVisits()
    if visit not in existing_visits:
        existing_visits.append(visit)
        # only write back when something changed
        object.setVisits(existing_visits)
def import_visits(self):
    """Import visit records from a tab-separated upload in the request.

    Expected columns, in order: School, Student Name, Instrument,
    Student Email, Student Phone, Student Address, Student City,
    Student Zip, Contact Name, Contact Title, Contact Phone,
    Contact Email, Is Contact Alumni, Date.

    For every row the referenced Student/Contact/FacultyMember/School
    objects are looked up (or created), a new Visit is created, all objects
    are cross-referenced, and their fields are updated from the row.
    """
    reader = csv.reader(self.REQUEST.get('csv-file-contents').split(os.linesep), delimiter="\t")

    def clean(field):
        # Strip whitespace plus a single layer of surrounding quotes.
        return field.strip().strip('"').strip("'")

    for row in reader:
        if not row:
            continue

        school_name = clean(row[0])
        student_name = clean(row[1])
        instrument = clean(row[2])
        student_email = clean(row[3])
        student_phone = clean(row[4])
        student_address = clean(row[5])
        student_city = clean(row[6])
        student_zip = clean(row[7])
        contact_name = clean(row[8])
        contact_title = clean(row[9])
        contact_phone = clean(row[10])
        contact_email = clean(row[11])
        # boolean column is compared unquoted in the original data
        is_contact_alumni = row[12].strip().upper() == 'TRUE'
        date = clean(row[13])

        user_id = self.portal_membership.getAuthenticatedMember().id

        # Find-or-create every object the visit references.
        student = get_type_or_create(self, 'Student', get_folder(self, 'StudentFolder', 'Students'), 'title', student_name)
        contact = get_type_or_create(self, 'Contact', get_folder(self, 'ContactFolder', 'Contacts'), 'title', contact_name)
        faculty = get_type_or_create(self, 'FacultyMember', get_folder(self, 'FacultyMemberFolder', 'FacultyMembers'), 'title', user_id)
        school = get_type_or_create(self, 'School', get_folder(self, 'SchoolFolder', 'Schools'), 'title', school_name)
        visit = create_object_in_directory(self, get_folder(self, 'VisitFolder', 'Visits'), 'Visit')

        # Back-link the visit from each related object. set_reference takes
        # (self, object, visit); the original calls passed only two
        # positional arguments, which raises TypeError at runtime.
        set_reference(None, student, visit)
        set_reference(None, contact, visit)
        set_reference(None, faculty, visit)
        set_reference(None, school, visit)

        # Update fields from the row.
        school.edit(title=school_name)
        student.edit(title=student_name, instrument=instrument, email=student_email, phone=student_phone, address=student_address, city=student_city, zip=student_zip)
        contact.edit(title=contact_name, type=contact_title, phone=contact_phone, email=contact_email, isAlumni=is_contact_alumni)
        faculty.edit(title=user_id)

        visit_title = "%s-%s-%s-%s" % (school_name, student_name, contact_name, user_id)
        visit.edit(title=visit_title, dateOfVisit=date, schools=school, contacts=contact, students=student, facultymembers=faculty)
|
create_object_in_directory
|
identifier_name
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.