file_name
stringlengths 3
137
| prefix
stringlengths 0
918k
| suffix
stringlengths 0
962k
| middle
stringlengths 0
812k
|
---|---|---|---|
hakari.rs
|
// Copyright (c) The cargo-guppy Contributors
// SPDX-License-Identifier: MIT OR Apache-2.0
use crate::{
toml_out::{write_toml, TomlOptions},
CargoTomlError, HakariCargoToml, TomlOutError,
};
use guppy::{
debug_ignore::DebugIgnore,
graph::{
cargo::{BuildPlatform, CargoOptions, CargoResolverVersion, CargoSet, InitialsPlatform},
feature::{FeatureId, FeatureSet, StandardFeatures},
DependencyDirection, PackageGraph, PackageMetadata,
},
PackageId, Platform,
};
use rayon::prelude::*;
use std::{
collections::{BTreeMap, BTreeSet, HashMap, HashSet},
fmt,
};
/// Configures and constructs [`Hakari`](Hakari) instances.
///
/// This struct provides a number of options that determine how `Hakari` instances are generated.
#[derive(Clone, Debug)]
pub struct HakariBuilder<'g, 'a> {
    // The package graph all computations run against. `DebugIgnore` keeps the (large) graph out
    // of `Debug` output.
    graph: DebugIgnore<&'g PackageGraph>,
    // The workspace package that will hold the unified dependencies, if one was specified.
    hakari_package: Option<PackageMetadata<'g>>,
    // Platforms to evaluate; empty means platform-independent unification.
    platforms: Vec<Platform<'a>>,
    // Cargo feature resolver version used for build simulations.
    version: CargoResolverVersion,
    // See `set_verify_mode`: include rather than omit the Hakari package during resolution.
    verify_mode: bool,
    // Package IDs excluded from consideration.
    omitted_packages: HashSet<&'g PackageId>,
    // How target and host feature sets influence each other; see `set_unify_target_host`.
    unify_target_host: UnifyTargetHost,
    // If true, output all dependencies, not just those built with multiple feature sets.
    unify_all: bool,
}
impl<'g, 'a> HakariBuilder<'g, 'a> {
    /// Creates a new `HakariBuilder` instance from a `PackageGraph`.
    ///
    /// The Hakari package itself is usually present in the workspace. If so, specify its
    /// package ID, otherwise pass in `None`. If specified, this package is marked as special: see
    /// the documentation for [`set_verify_mode`](Self::set_verify_mode) for more.
    ///
    /// Returns an error if a Hakari package ID is specified but it isn't known to the graph, or
    /// isn't in the workspace.
    pub fn new(
        graph: &'g PackageGraph,
        hakari_id: Option<&PackageId>,
    ) -> Result<Self, guppy::Error> {
        let hakari_package = hakari_id
            .map(|package_id| {
                // `metadata` errors out if the ID isn't in the graph at all.
                let package = graph.metadata(package_id)?;
                if !package.in_workspace() {
                    return Err(guppy::Error::UnknownWorkspaceName(
                        package.name().to_string(),
                    ));
                }
                Ok(package)
            })
            .transpose()?;
        Ok(Self {
            graph: DebugIgnore(graph),
            hakari_package,
            platforms: vec![],
            version: CargoResolverVersion::V1,
            verify_mode: false,
            omitted_packages: HashSet::new(),
            unify_target_host: UnifyTargetHost::default(),
            unify_all: false,
        })
    }

    /// Returns the `PackageGraph` used to construct this `Hakari` instance.
    pub fn graph(&self) -> &'g PackageGraph {
        *self.graph
    }

    /// Returns the Hakari package, or `None` if it wasn't passed into [`new`](Self::new).
    pub fn hakari_package(&self) -> Option<&PackageMetadata<'g>> {
        self.hakari_package.as_ref()
    }

    /// Reads the existing TOML file for the Hakari package from disk, returning a
    /// `HakariCargoToml`.
    ///
    /// This can be used with [`Hakari::to_toml_string`](Hakari::to_toml_string) to manage the
    /// contents of the Hakari package's TOML file on disk.
    ///
    /// Returns an error if there was an issue reading the TOML file from disk, or `None` if
    /// this builder was created without a Hakari package.
    pub fn read_toml(&self) -> Option<Result<HakariCargoToml, CargoTomlError>> {
        let hakari_package = self.hakari_package()?;
        let workspace_path = hakari_package
            .source()
            .workspace_path()
            // Enforced by `new`: the Hakari package is always a workspace member.
            .expect("hakari_package is in workspace");
        Some(HakariCargoToml::new_relative(
            self.graph.workspace().root(),
            workspace_path,
        ))
    }

    /// Sets a list of platforms for `hakari` to use.
    ///
    /// By default, `hakari` unifies features across all platforms. This may not always be desired,
    /// so it is possible to set a list of platforms. If the features for a particular dependency
    /// only need to be unified on some platforms, `hakari` will output platform-specific
    /// instructions.
    ///
    /// Call `set_platforms` with an empty list to reset to default behavior.
    pub fn set_platforms(
        &mut self,
        platforms: impl IntoIterator<Item = Platform<'a>>,
    ) -> &mut Self {
        self.platforms = platforms.into_iter().collect();
        self
    }

    /// Returns the platforms set through `set_platforms`, or an empty list if no platforms are
    /// set.
    pub fn platforms(&self) -> &[Platform<'a>] {
        &self.platforms
    }

    /// Sets the Cargo resolver version.
    ///
    /// By default, `HakariBuilder` uses [version 1](CargoResolverVersion::V1) of the Cargo
    /// resolver. For more about Cargo resolvers, see the documentation for
    /// [`CargoResolverVersion`](CargoResolverVersion).
    pub fn set_resolver_version(&mut self, version: CargoResolverVersion) -> &mut Self {
        self.version = version;
        self
    }

    /// Returns the current Cargo resolver version.
    pub fn resolver_version(&self) -> CargoResolverVersion {
        self.version
    }

    /// Adds packages to not consider while performing unification.
    ///
    /// Users may wish to not consider certain packages while figuring out the unified feature set.
    /// Setting this option prevents those packages from being considered.
    ///
    /// Practically, this means that:
    /// * If a workspace package is specified, Cargo build simulations for it will not be run.
    /// * If a third-party package is specified, it will not be present in the output, nor will
    ///   any features enabled by it that aren't enabled any other way.
    ///
    /// Returns an error if any package IDs specified aren't known to the graph.
    pub fn add_omitted_packages<'b>(
        &mut self,
        omitted_packages: impl IntoIterator<Item = &'b PackageId>,
    ) -> Result<&mut Self, guppy::Error> {
        // Validate all IDs against the graph (and normalize to graph-owned `&'g` references)
        // before mutating, so an error leaves the set unchanged.
        let omitted_packages: Vec<&'g PackageId> = omitted_packages
            .into_iter()
            .map(|package_id| Ok(self.graph.metadata(package_id)?.id()))
            .collect::<Result<_, _>>()?;
        self.omitted_packages.extend(omitted_packages);
        Ok(self)
    }

    /// Returns the currently omitted packages.
    ///
    /// If `verify_mode` is currently false (the default), also returns the Hakari package if
    /// specified. This is because the Hakari package is treated as omitted by the algorithm.
    pub fn omitted_packages<'b>(&'b self) -> impl Iterator<Item = &'g PackageId> + 'b {
        let hakari_omitted = self.make_hakari_omitted();
        hakari_omitted.iter()
    }

    /// Returns true if a package ID is currently omitted from the set.
    ///
    /// If `verify_mode` is currently false (the default), also returns true for the Hakari package
    /// if specified. This is because the Hakari package is treated as omitted by the algorithm.
    ///
    /// Returns an error if this package ID isn't known to the underlying graph.
    pub fn omits_package(&self, package_id: &PackageId) -> Result<bool, guppy::Error> {
        // Validate the ID first; the lookup result itself isn't needed.
        self.graph.metadata(package_id)?;
        let hakari_omitted = self.make_hakari_omitted();
        Ok(hakari_omitted.is_omitted(package_id))
    }

    /// If set to true, runs Hakari in verify mode.
    ///
    /// By default, Hakari runs in generate mode: the goal of this mode is to update an existing
    /// Hakari package's TOML. In this mode, the Hakari package is always omitted from
    /// consideration and added to the omitted packages.
    ///
    /// In verify mode, the goal is to ensure that Cargo builds actually produce a unique set of
    /// features. In this mode, instead of being omitted, the Hakari package is always *included*
    /// in feature resolution (default features), through the `features_only` argument to
    /// [`CargoSet::new`](CargoSet::new). If, in the result, [`output_map`](Hakari::output_map)
    /// is empty, then features were unified.
    ///
    /// Setting this to true has no effect if the Hakari package is not specified at construction
    /// time.
    pub fn set_verify_mode(&mut self, verify_mode: bool) -> &mut Self {
        self.verify_mode = verify_mode;
        self
    }

    /// Returns the current value of `verify_mode`.
    pub fn verify_mode(&self) -> bool {
        self.verify_mode
    }

    /// Whether to unify feature sets across target and host platforms.
    ///
    /// By default, `hakari` does not perform any unification across the target and host platforms.
    /// This means that if a dependency is a target (regular) dependency with one set of features,
    /// and a host (build) dependency with a different set of features, the two are treated
    /// separately.
    ///
    /// For more information about this option, see the documentation for
    /// [`UnifyTargetHost`](UnifyTargetHost).
    pub fn set_unify_target_host(&mut self, unify_target_host: UnifyTargetHost) -> &mut Self {
        self.unify_target_host = unify_target_host;
        self
    }

    /// Returns the current value of `unify_target_host`.
    pub fn unify_target_host(&self) -> UnifyTargetHost {
        self.unify_target_host
    }

    /// Whether to unify feature sets for all dependencies.
    ///
    /// By default, Hakari only produces output for dependencies that are built with more
    /// than one feature set. If set to true, Hakari will produce outputs for all dependencies,
    /// including those that don't need to be unified.
    ///
    /// This is rarely needed in production, and is most useful for testing and debugging scenarios.
    pub fn set_unify_all(&mut self, unify_all: bool) -> &mut Self {
        self.unify_all = unify_all;
        self
    }

    /// Returns the current value of `unify_all`.
    pub fn unify_all(&self) -> bool {
        self.unify_all
    }

    /// Computes the `Hakari` for this builder.
    pub fn compute(self) -> Hakari<'g, 'a> {
        Hakari::build(self)
    }

    // ---
    // Helper methods
    // ---

    /// Returns only the explicitly omitted packages (never the Hakari package), for
    /// summary serialization.
    #[cfg(feature = "summaries")]
    pub(crate) fn omitted_packages_only<'b>(&'b self) -> impl Iterator<Item = &'g PackageId> + 'b {
        self.omitted_packages.iter().copied()
    }

    /// Builds the effective omitted set: in generate mode the Hakari package is treated as
    /// omitted; in verify mode it is not.
    fn make_hakari_omitted<'b>(&'b self) -> HakariOmitted<'g, 'b> {
        let hakari_package = if self.verify_mode {
            None
        } else {
            self.hakari_package.map(|package| package.id())
        };
        HakariOmitted {
            omitted: &self.omitted_packages,
            hakari_package,
        }
    }

    /// Builds the `features_only` set passed to `CargoSet::new`: in verify mode this is the
    /// Hakari package with default features, otherwise the empty set.
    fn make_features_only<'b>(&'b self) -> FeatureSet<'g> {
        if self.verify_mode {
            match &self.hakari_package {
                Some(package) => package.to_package_set(),
                None => self.graph.resolve_none(),
            }
            .to_feature_set(StandardFeatures::Default)
        } else {
            self.graph.feature_graph().resolve_none()
        }
    }
}
impl<'g, 'a> PartialEq for HakariBuilder<'g, 'a> {
    /// Two builders are equal if they reference the same `PackageGraph` *instance* (pointer
    /// equality, not structural equality) and every configuration option matches.
    fn eq(&self, other: &Self) -> bool {
        // Compare graphs by address: structurally identical graphs at different addresses are
        // still considered different builders.
        std::ptr::eq(self.graph.0, other.graph.0)
            // Compare Hakari packages by ID. Both sides read the field directly for symmetry
            // (previously one side went through the accessor, the other through the field).
            && self.hakari_package.map(|package| package.id())
                == other.hakari_package.map(|package| package.id())
            && self.platforms == other.platforms
            && self.version == other.version
            && self.verify_mode == other.verify_mode
            && self.omitted_packages == other.omitted_packages
            && self.unify_target_host == other.unify_target_host
            && self.unify_all == other.unify_all
    }
}

impl<'g, 'a> Eq for HakariBuilder<'g, 'a> {}
#[cfg(feature = "summaries")]
mod summaries {
    use super::*;
    use crate::summaries::HakariBuilderSummary;

    impl<'g> HakariBuilder<'g, 'static> {
        /// Constructs a `HakariBuilder` from a `PackageGraph` and a serialized summary.
        ///
        /// Requires the `summaries` feature to be enabled.
        ///
        /// Returns an error if the summary references a package that's not present, or if there was
        /// some other issue while creating a `HakariBuilder` from the summary.
        pub fn from_summary(
            graph: &'g PackageGraph,
            summary: &HakariBuilderSummary,
        ) -> Result<Self, guppy::Error> {
            // The Hakari package is stored by workspace name in the summary.
            let hakari_package = summary
                .hakari_package
                .as_ref()
                .map(|name| graph.workspace().member_by_name(name))
                .transpose()?;
            let platforms = summary
                .platforms
                .iter()
                .map(|platform| {
                    platform.to_platform().map_err(|err| {
                        guppy::Error::TargetSpecError("deserializing Hakari platforms".into(), err)
                    })
                })
                .collect::<Result<Vec<_>, _>>()?;
            // Normalize summary IDs to graph-owned `&'g PackageId` references, erroring out on
            // packages unknown to this graph.
            let omitted_packages = summary
                .omitted_packages
                .iter()
                .map(|summary_id| Ok(graph.metadata_by_summary_id(summary_id)?.id()))
                .collect::<Result<HashSet<_>, _>>()?;
            Ok(Self {
                graph: DebugIgnore(graph),
                hakari_package,
                version: summary.version,
                verify_mode: summary.verify_mode,
                unify_target_host: summary.unify_target_host,
                unify_all: summary.unify_all,
                platforms,
                omitted_packages,
            })
        }
    }
}
/// Whether to unify feature sets for a given dependency across target and host platforms.
///
/// Call `HakariBuilder::set_unify_target_host` to configure this option.
// Serialized in kebab-case (e.g. `unify-on-both`) when the `summaries` feature is enabled;
// `non_exhaustive` allows adding strategies without a breaking change.
#[derive(Copy, Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
#[cfg_attr(feature = "proptest1", derive(proptest_derive::Arbitrary))]
#[cfg_attr(feature = "summaries", derive(serde::Serialize, serde::Deserialize))]
#[cfg_attr(feature = "summaries", serde(rename_all = "kebab-case"))]
#[non_exhaustive]
pub enum UnifyTargetHost {
    /// Perform no unification across the target and host feature sets.
    ///
    /// This is the default behavior.
    None,
    /// Perform unification across target and host feature sets, but only if a dependency is built
    /// on both the target and the host.
    ///
    /// This is useful if cross-compilations are uncommon and one wishes to avoid the same package
    /// being built two different ways: once for the target and once for the host.
    UnifyOnBoth,
    /// Perform unification across target and host feature sets, and also replicate all target-only
    /// lines to the host.
    ///
    /// This is most useful if every package in the workspace depends on the Hakari package, and
    /// some of those packages are built on the host (e.g. proc macros or build dependencies).
    ReplicateTargetAsHost,
}
/// The default for `UnifyTargetHost`: perform no unification.
impl Default for UnifyTargetHost {
fn default() -> Self {
UnifyTargetHost::None
}
}
/// A key representing a platform and host/target. Returned by `Hakari`.
// Ordered so that `OutputMap` (a `BTreeMap`) groups entries by platform first, then by
// target-vs-host.
#[derive(Copy, Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub struct OutputKey {
    /// The index of the build platform for this key, or `None` if the computation was done in a
    /// platform-independent manner.
    pub platform_idx: Option<usize>,
    /// The build platform: target or host.
    pub build_platform: BuildPlatform,
}
/// The result of a Hakari computation.
///
/// This contains all the data required to generate a workspace package.
///
/// Produced by [`HakariBuilder::compute`](HakariBuilder::compute).
#[derive(Clone, Debug)]
#[non_exhaustive]
pub struct Hakari<'g, 'a> {
    // The builder this result was computed from; exposed through `builder()`.
    builder: HakariBuilder<'g, 'a>,
    /// The map built by Hakari of dependencies that need to be unified.
    ///
    /// This map is used to construct the TOML output. Public access is provided in case some
    /// post-processing needs to be done.
    pub output_map: OutputMap<'g>,
    /// The complete map of dependency build results built by Hakari.
    ///
    /// This map is not used to generate the TOML output.
    pub computed_map: ComputedMap<'g>,
}
impl<'g, 'a> Hakari<'g, 'a> {
    /// Returns the `HakariBuilder` used to create this instance.
    pub fn builder(&self) -> &HakariBuilder<'g, 'a> {
        &self.builder
    }

    /// Reads the existing TOML file for the Hakari package from disk, returning a
    /// `HakariCargoToml`.
    ///
    /// This can be used with [`to_toml_string`](Self::to_toml_string) to manage the contents of
    /// the given TOML file on disk.
    ///
    /// Returns an error if there was an issue reading the TOML file from disk, or `None` if
    /// the builder's [`hakari_package`](HakariBuilder::hakari_package) is `None`.
    pub fn read_toml(&self) -> Option<Result<HakariCargoToml, CargoTomlError>> {
        self.builder.read_toml()
    }

    /// Writes `[dependencies]` and other `Cargo.toml` lines to the given `fmt::Write` instance.
    ///
    /// `&mut String` and `fmt::Formatter` both implement `fmt::Write`.
    pub fn write_toml(
        &self,
        options: &TomlOptions,
        out: impl fmt::Write,
    ) -> Result<(), TomlOutError> {
        write_toml(&self.builder, &self.output_map, options, out)
    }

    /// A convenience method around `write_toml` that returns a new string with `Cargo.toml` lines.
    ///
    /// The returned string is guaranteed to be valid TOML, and can be provided to
    /// a [`HakariCargoToml`](crate::HakariCargoToml) obtained from [`read_toml`](Self::read_toml).
    pub fn to_toml_string(&self, options: &TomlOptions) -> Result<String, TomlOutError> {
        let mut out = String::new();
        self.write_toml(options, &mut out)?;
        Ok(out)
    }

    // ---
    // Helper methods
    // ---

    /// Runs the full Hakari computation for `builder`.
    fn build(builder: HakariBuilder<'g, 'a>) -> Self {
        let graph = *builder.graph;
        // Phase 1: simulate builds of every workspace package under every
        // (platform, standard-feature-set) combination and record, per third-party dependency,
        // every feature set it is built with.
        let computed_map_build = ComputedMapBuild::new(&builder);
        // Collect all the dependencies that need to be unified, by platform and build type.
        let mut map_build: OutputMapBuild<'g> = OutputMapBuild::new(graph);
        map_build.insert_all(
            computed_map_build.iter(),
            builder.unify_all,
            builder.unify_target_host,
        );
        if !builder.unify_all {
            // Adding packages might cause different feature sets for some dependencies. Simulate
            // further builds with the given target and host features, and use that to add in any
            // extra features that need to be considered.
            //
            // Phase 2: iterate to a fixed point — each pass simulates a build of the current
            // output map and adds any dependency whose resulting feature list differs from what
            // the map would produce. The loop ends when a pass adds nothing.
            loop {
                let mut add_extra = HashMap::new();
                for (output_key, features) in map_build.iter_feature_sets() {
                    let initials_platform = match output_key.build_platform {
                        BuildPlatform::Target => InitialsPlatform::Standard,
                        BuildPlatform::Host => InitialsPlatform::Host,
                    };
                    let mut cargo_opts = CargoOptions::new();
                    // Third-party dependencies are built without including dev.
                    cargo_opts
                        .set_include_dev(false)
                        .set_initials_platform(initials_platform)
                        .set_platform(
                            output_key
                                .platform_idx
                                .map(|platform_idx| &builder.platforms[platform_idx]),
                        )
                        .set_version(builder.version)
                        .add_omitted_packages(computed_map_build.hakari_omitted.iter());
                    let cargo_set = features
                        .into_cargo_set(&cargo_opts)
                        .expect("into_cargo_set processed successfully");
                    // Check the features for the cargo set to see if any further dependencies were
                    // built with a different result and weren't included in the hakari map
                    // originally.
                    for &(build_platform, feature_set) in cargo_set.all_features().iter() {
                        for feature_list in
                            feature_set.packages_with_features(DependencyDirection::Forward)
                        {
                            let dep = feature_list.package();
                            let dep_id = dep.id();
                            let v = computed_map_build
                                .get(output_key.platform_idx, dep_id)
                                .expect("full value should be present");
                            let new_key = OutputKey {
                                platform_idx: output_key.platform_idx,
                                build_platform,
                            };
                            if map_build.is_inserted(new_key, dep_id) {
                                continue;
                            }
                            // Figure out what *would* be inserted for this key. Does it match?
                            let mut any_inserted = false;
                            let mut to_insert = BTreeSet::new();
                            v.describe().insert(
                                true,
                                builder.unify_target_host,
                                |insert_platform, inner_map| {
                                    if insert_platform == build_platform {
                                        any_inserted = true;
                                        to_insert.extend(
                                            inner_map.keys().flat_map(|f| f.iter().copied()),
                                        );
                                    }
                                },
                            );
                            if any_inserted
                                && feature_list.features()
                                    != to_insert.iter().copied().collect::<Vec<_>>()
                            {
                                // The feature list added by this dependency is non-unique.
                                add_extra.insert((output_key.platform_idx, dep_id), v);
                            }
                        }
                    }
                }
                if add_extra.is_empty() {
                    // Fixed point reached: no pass produced new non-unique feature lists.
                    break;
                }
                map_build.insert_all(
                    add_extra
                        .iter()
                        .map(|(&(platform_idx, dep_id), &v)| (platform_idx, dep_id, v)),
                    // Force insert by setting unify_all to true.
                    true,
                    builder.unify_target_host,
                );
            }
        }
        let computed_map = computed_map_build.computed_map;
        Self {
            builder,
            output_map: map_build.output_map,
            computed_map,
        }
    }
}
/// The map used by Hakari to generate output TOML.
///
/// This is a two-level `BTreeMap`, where:
/// * the top-level keys are [`OutputKey`](OutputKey) instances.
/// * the inner map is keyed by dependency [`PackageId`](PackageId) instances, and the values are
///   the corresponding [`PackageMetadata`](PackageMetadata) for this dependency, and the set of
///   features enabled for this package.
///
/// `BTreeMap`s are used at both levels so that iteration (and therefore TOML output) is in a
/// deterministic order.
///
/// This is an alias for the type of [`Hakari::output_map`](Hakari::output_map).
pub type OutputMap<'g> =
    BTreeMap<OutputKey, BTreeMap<&'g PackageId, (PackageMetadata<'g>, BTreeSet<&'g str>)>>;
/// The map of all build results computed by Hakari.
///
/// The keys are the platform index and the dependency's package ID, and the values are
/// [`ComputedValue`](ComputedValue) instances that represent the different feature sets this
/// dependency is built with on both the host and target platforms.
///
/// The values that are most interesting are the ones where maps have two elements or more: they
/// indicate dependencies with features that need to be unified.
///
/// This is an alias for the type of [`Hakari::computed_map`](Hakari::computed_map).
pub type ComputedMap<'g> = BTreeMap<(Option<usize>, &'g PackageId), ComputedValue<'g>>;
/// The values of a [`ComputedMap`](ComputedMap).
///
/// This represents a pair of `ComputedInnerMap` instances: one for the target platform and one for
/// the host. For more about the values, see the documentation for
/// [`ComputedInnerMap`](ComputedInnerMap).
// `Default` gives two empty inner maps, which is what `ComputedMap::entry(..).or_default()`
// relies on during accumulation.
#[derive(Clone, Debug, Default)]
pub struct ComputedValue<'g> {
    /// The feature sets built on the target platform.
    pub target_inner: ComputedInnerMap<'g>,
    /// The feature sets built on the host platform.
    pub host_inner: ComputedInnerMap<'g>,
}
/// A target map or a host map in a [`ComputedValue`](ComputedValue).
///
/// * The keys are sets of feature names (or empty for no features).
/// * The values are the workspace packages and selected features that cause the key in
///   `ComputedMap` to be built with the given feature set. They are not defined to be in any
///   particular order.
pub type ComputedInnerMap<'g> =
    BTreeMap<BTreeSet<&'g str>, Vec<(PackageMetadata<'g>, StandardFeatures)>>;
// The effective omitted set for one computation: the explicitly omitted packages plus, outside
// of verify mode, the Hakari package itself. Built by `HakariBuilder::make_hakari_omitted`.
#[derive(Debug)]
struct HakariOmitted<'g, 'b> {
    // Explicitly omitted package IDs, borrowed from the builder.
    omitted: &'b HashSet<&'g PackageId>,
    // The Hakari package's ID, or `None` in verify mode or when no Hakari package was given.
    hakari_package: Option<&'g PackageId>,
}
impl<'g, 'b> HakariOmitted<'g, 'b> {
    /// Iterates over every omitted package ID, including the Hakari package if present.
    fn iter(&self) -> impl Iterator<Item = &'g PackageId> + 'b {
        let explicit = self.omitted.iter().copied();
        explicit.chain(self.hakari_package.into_iter())
    }

    /// Returns true if `package_id` is treated as omitted.
    fn is_omitted(&self, package_id: &PackageId) -> bool {
        if self.hakari_package == Some(package_id) {
            return true;
        }
        self.omitted.contains(package_id)
    }
}
/// Intermediate build state used by Hakari.
#[derive(Debug)]
struct ComputedMapBuild<'g, 'b> {
    // The omitted set in effect for this computation.
    hakari_omitted: HakariOmitted<'g, 'b>,
    // Build results accumulated across all (platform, feature set) simulations.
    computed_map: ComputedMap<'g>,
}
impl<'g, 'b> ComputedMapBuild<'g, 'b> {
    /// Runs Cargo build simulations for every workspace package under every
    /// (platform, standard feature set) combination, in parallel, and accumulates the
    /// per-dependency results.
    fn new(builder: &'b HakariBuilder<'g, '_>) -> Self {
        // Cross product of platforms and standard feature sets to simulate. With no platforms
        // configured, simulations are platform-independent (`None` platform/index).
        let platforms_features: Vec<_> = if builder.platforms.is_empty() {
            StandardFeatures::VALUES
                .iter()
                .map(|&features| (None, None, features))
                .collect()
        } else {
            StandardFeatures::VALUES
                .iter()
                .flat_map(|&features| {
                    builder
                        .platforms
                        .iter()
                        .enumerate()
                        .map(move |(idx, platform)| (Some(idx), Some(platform), features))
                })
                .collect()
        };
        let workspace = builder.graph.workspace();
        let hakari_omitted = builder.make_hakari_omitted();
        let features_only = builder.make_features_only();
        // Plain references so the `move` closure below captures these by reference rather than
        // taking ownership.
        let hakari_omitted_ref = &hakari_omitted;
        let features_only_ref = &features_only;
        let computed_map: ComputedMap<'g> = platforms_features
            .into_par_iter()
            // The cargo_set computation in the inner iterator is the most expensive part of the
            // process, so use flat_map instead of flat_map_iter.
            .flat_map(|(platform_idx, platform, feature_filter)| {
                let mut cargo_options = CargoOptions::new();
                cargo_options
                    .set_include_dev(true)
                    .set_version(builder.version)
                    .set_platform(platform)
                    .add_omitted_packages(hakari_omitted.iter());
                workspace.par_iter().map(move |workspace_package| {
                    if hakari_omitted_ref.is_omitted(workspace_package.id()) {
                        // Skip this package since it was omitted.
                        return BTreeMap::new();
                    }
                    let initials = workspace_package
                        .to_package_set()
                        .to_feature_set(feature_filter);
                    let cargo_set =
                        CargoSet::new(initials, features_only_ref.clone(), &cargo_options)
                            .expect("cargo resolution should succeed");
                    let all_features = cargo_set.all_features();
                    // One tuple per (build platform, third-party dependency, feature set).
                    let values = all_features.iter().flat_map(|&(build_platform, features)| {
                        features
                            .packages_with_features(DependencyDirection::Forward)
                            .filter_map(move |feature_list| {
                                let dep = feature_list.package();
                                if dep.in_workspace() {
                                    // Only looking at third-party packages for hakari.
                                    return None;
                                }
                                let features: BTreeSet<&'g str> =
                                    feature_list.features().iter().copied().collect();
                                Some((
                                    platform_idx,
                                    build_platform,
                                    dep.id(),
                                    features,
                                    workspace_package,
                                    feature_filter,
                                ))
                            })
                    });
                    let mut map = ComputedMap::new();
                    for (
                        platform_idx,
                        build_platform,
                        package_id,
                        features,
                        package,
                        feature_filter,
                    ) in values
                    {
                        // Accumulate the features and package for each key.
                        map.entry((platform_idx, package_id)).or_default().insert(
                            build_platform,
                            features,
                            package,
                            feature_filter,
                        );
                    }
                    map
                })
            })
            .reduce(ComputedMap::new, |mut acc, map| {
                // Accumulate across all threads.
                for (k, v) in map {
                    acc.entry(k).or_default().merge(v);
                }
                acc
            });
        Self {
            hakari_omitted,
            computed_map,
        }
    }

    /// Looks up the computed value for a (platform index, package ID) pair.
    fn get(
        &self,
        platform_idx: Option<usize>,
        package_id: &'g PackageId,
    ) -> Option<&ComputedValue<'g>> {
        self.computed_map.get(&(platform_idx, package_id))
    }

    /// Iterates over all computed entries, flattening the key tuple.
    fn iter<'a>(
        &'a self,
    ) -> impl Iterator<Item = (Option<usize>, &'g PackageId, &'a ComputedValue<'g>)> + 'a {
        self.computed_map
            .iter()
            .map(move |(&(platform_idx, package_id), v)| (platform_idx, package_id, v))
    }
}
impl<'g> ComputedValue<'g> {
    /// Returns both the inner maps along with the build platforms they represent.
    pub fn inner_maps(&self) -> [(BuildPlatform, &ComputedInnerMap<'g>); 2] {
        [
            (BuildPlatform::Target, &self.target_inner),
            (BuildPlatform::Host, &self.host_inner),
        ]
    }

    /// Returns a mutable reference to the inner map corresponding to the given build platform.
    pub fn get_inner_mut(&mut self, build_platform: BuildPlatform) -> &mut ComputedInnerMap<'g> {
        match build_platform {
            BuildPlatform::Target => &mut self.target_inner,
            BuildPlatform::Host => &mut self.host_inner,
        }
    }

    /// Adds all the instances in `other` to `self`.
    // Used by the parallel reduce step: extends the per-feature-set witness lists rather than
    // overwriting them.
    fn merge(&mut self, other: ComputedValue<'g>) {
        for (features, details) in other.target_inner {
            self.target_inner
                .entry(features)
                .or_default()
                .extend(details);
        }
        for (features, details) in other.host_inner {
            self.host_inner.entry(features).or_default().extend(details);
        }
    }

    // Records that building `package` with `feature_filter` causes this dependency to be built
    // with `features` on `build_platform`.
    fn insert(
        &mut self,
        build_platform: BuildPlatform,
        features: BTreeSet<&'g str>,
        package: PackageMetadata<'g>,
        feature_filter: StandardFeatures,
    ) {
        self.get_inner_mut(build_platform)
            .entry(features)
            .or_default()
            .push((package, feature_filter));
    }

    // Classifies this value by how many distinct feature sets exist on each platform.
    //
    // Arm ordering matters: the exact-count arms (0 and 1) match first, so the `_m`/`_n`
    // wildcards in later arms always mean "two or more".
    fn describe<'a>(&'a self) -> ValueDescribe<'g, 'a> {
        match (self.target_inner.len(), self.host_inner.len()) {
            (0, 0) => ValueDescribe::None,
            (0, 1) => ValueDescribe::SingleHost(&self.host_inner),
            (1, 0) => ValueDescribe::SingleTarget(&self.target_inner),
            (1, 1) => {
                let target_features = self.target_inner.keys().next().expect("1 element");
                let host_features = self.host_inner.keys().next().expect("1 element");
                if target_features == host_features {
                    ValueDescribe::SingleMatchingBoth {
                        target_inner: &self.target_inner,
                        host_inner: &self.host_inner,
                    }
                } else {
                    ValueDescribe::SingleNonMatchingBoth {
                        target_inner: &self.target_inner,
                        host_inner: &self.host_inner,
                    }
                }
            }
            (_m, 0) => ValueDescribe::MultiTarget(&self.target_inner),
            (_m, 1) => ValueDescribe::MultiTargetSingleHost {
                target_inner: &self.target_inner,
                host_inner: &self.host_inner,
            },
            (0, _n) => ValueDescribe::MultiHost(&self.host_inner),
            (1, _n) => ValueDescribe::MultiHostSingleTarget {
                target_inner: &self.target_inner,
                host_inner: &self.host_inner,
            },
            (_m, _n) => ValueDescribe::MultiBoth {
                target_inner: &self.target_inner,
                host_inner: &self.host_inner,
            },
        }
    }
}
// A compact classification of how many distinct feature sets a dependency is built with on the
// target and host platforms. Produced by `ComputedValue::describe` and consumed by
// `ValueDescribe::insert` to decide what goes into the output map.
#[derive(Copy, Clone, Debug)]
enum ValueDescribe<'g, 'a> {
    // Not built on either platform.
    None,
    // Exactly one feature set, target platform only.
    SingleTarget(&'a ComputedInnerMap<'g>),
    // Exactly one feature set, host platform only.
    SingleHost(&'a ComputedInnerMap<'g>),
    // Two or more feature sets, target platform only.
    MultiTarget(&'a ComputedInnerMap<'g>),
    // Two or more feature sets, host platform only.
    MultiHost(&'a ComputedInnerMap<'g>),
    // One feature set on each platform, and the two are identical.
    SingleMatchingBoth {
        target_inner: &'a ComputedInnerMap<'g>,
        host_inner: &'a ComputedInnerMap<'g>,
    },
    // One feature set on each platform, but they differ.
    SingleNonMatchingBoth {
        target_inner: &'a ComputedInnerMap<'g>,
        host_inner: &'a ComputedInnerMap<'g>,
    },
    // Two or more feature sets on the target, exactly one on the host.
    MultiTargetSingleHost {
        target_inner: &'a ComputedInnerMap<'g>,
        host_inner: &'a ComputedInnerMap<'g>,
    },
    // Two or more feature sets on the host, exactly one on the target.
    MultiHostSingleTarget {
        target_inner: &'a ComputedInnerMap<'g>,
        host_inner: &'a ComputedInnerMap<'g>,
    },
    // Two or more feature sets on both platforms.
    MultiBoth {
        target_inner: &'a ComputedInnerMap<'g>,
        host_inner: &'a ComputedInnerMap<'g>,
    },
}
impl<'g, 'a> ValueDescribe<'g, 'a> {
    /// Determines which (build platform, inner map) pairs should be recorded in the output,
    /// invoking `insert_cb` once per pair.
    ///
    /// `unify_all` forces insertion even for dependencies already built with a single,
    /// consistent feature set. `unify_target_host` controls how the target and host feature
    /// sets influence each other.
    fn insert(
        self,
        unify_all: bool,
        unify_target_host: UnifyTargetHost,
        mut insert_cb: impl FnMut(BuildPlatform, &'a ComputedInnerMap<'g>),
    ) {
        use BuildPlatform::*;
        match self {
            ValueDescribe::None => {
                // Empty, ignore. (This should probably never happen anyway.)
            }
            ValueDescribe::SingleTarget(target_inner) => {
                // Just one way to unify these.
                if unify_all {
                    insert_cb(Target, target_inner);
                    if unify_target_host == UnifyTargetHost::ReplicateTargetAsHost {
                        insert_cb(Host, target_inner);
                    }
                }
            }
            ValueDescribe::SingleHost(host_inner) => {
                // Just one way to unify these.
                if unify_all {
                    insert_cb(Host, host_inner);
                }
            }
            ValueDescribe::MultiTarget(target_inner) => {
                // Unify features for target.
                insert_cb(Target, target_inner);
                if unify_target_host == UnifyTargetHost::ReplicateTargetAsHost {
                    insert_cb(Host, target_inner);
                }
            }
            ValueDescribe::MultiHost(host_inner) => {
                // Unify features for host.
                insert_cb(Host, host_inner);
            }
            ValueDescribe::SingleMatchingBoth {
                target_inner,
                host_inner,
            } => {
                // Just one way to unify across both.
                if unify_all {
                    insert_cb(Target, target_inner);
                    insert_cb(Host, host_inner);
                }
            }
            // The remaining variants all have at least two distinct feature sets across the two
            // platforms and are handled identically (previously four copies of the same arm
            // body): unify on each platform, and optionally cross-pollinate target and host.
            ValueDescribe::SingleNonMatchingBoth {
                target_inner,
                host_inner,
            }
            | ValueDescribe::MultiTargetSingleHost {
                target_inner,
                host_inner,
            }
            | ValueDescribe::MultiHostSingleTarget {
                target_inner,
                host_inner,
            }
            | ValueDescribe::MultiBoth {
                target_inner,
                host_inner,
            } => {
                // Unify features for both across both.
                insert_cb(Target, target_inner);
                insert_cb(Host, host_inner);
                if unify_target_host != UnifyTargetHost::None {
                    insert_cb(Target, host_inner);
                    insert_cb(Host, target_inner);
                }
            }
        }
    }
}
// Mutable builder for the final `OutputMap`, used during `Hakari::build`.
#[derive(Debug)]
struct OutputMapBuild<'g> {
    // Needed to resolve package IDs and feature IDs while inserting.
    graph: &'g PackageGraph,
    // The output map under construction; moved into `Hakari::output_map` at the end.
    output_map: OutputMap<'g>,
}
impl<'g> OutputMapBuild<'g> {
    // Creates an empty build state over `graph`.
    fn new(graph: &'g PackageGraph) -> Self {
        Self {
            graph,
            output_map: OutputMap::new(),
        }
    }

    // Returns true if `package_id` has already been recorded under `output_key`.
    fn is_inserted(&self, output_key: OutputKey, package_id: &'g PackageId) -> bool {
        match self.output_map.get(&output_key) {
            Some(inner_map) => inner_map.contains_key(package_id),
            None => false,
        }
    }

    // Inserts the described feature sets for each computed value, honoring `unify_all` and
    // `unify_target_host`.
    fn insert_all<'a>(
        &mut self,
        values: impl IntoIterator<Item = (Option<usize>, &'g PackageId, &'a ComputedValue<'g>)>,
        unify_all: bool,
        unify_target_host: UnifyTargetHost,
    ) where
        'g: 'a,
    {
        for (platform_idx, dep_id, v) in values {
            let describe = v.describe();
            describe.insert(unify_all, unify_target_host, |build_platform, inner| {
                self.insert_inner(platform_idx, build_platform, dep_id, inner);
            });
        }
    }

    // Records the union of every feature name in `inner` for the given package and platform.
    fn insert_inner(
        &mut self,
        platform_idx: Option<usize>,
        build_platform: BuildPlatform,
        package_id: &'g PackageId,
        inner: &ComputedInnerMap<'g>,
    ) {
        let output_key = OutputKey {
            platform_idx,
            build_platform,
        };
        self.insert(
            output_key,
            package_id,
            // Union of all feature names across the inner map's keys.
            inner.keys().flat_map(|f| f.iter().copied()),
        )
    }

    // Unions `features` into the entry for (`output_key`, `package_id`), creating the entry
    // (with its package metadata) on first use.
    fn insert(
        &mut self,
        output_key: OutputKey,
        package_id: &'g PackageId,
        features: impl IntoIterator<Item = &'g str>,
    ) {
        let map = self.output_map.entry(output_key).or_default();
        let graph = self.graph;
        let (_, inner) = map.entry(package_id).or_insert_with(|| {
            (
                graph.metadata(package_id).expect("valid package ID"),
                BTreeSet::new(),
            )
        });
        inner.extend(features);
    }

    // For each output key, resolves the accumulated (package, feature) pairs into a `FeatureSet`
    // suitable for further Cargo build simulations.
    fn iter_feature_sets<'a>(&'a self) -> impl Iterator<Item = (OutputKey, FeatureSet<'g>)> + 'a {
        self.output_map.iter().map(move |(&output_key, deps)| {
            let feature_ids = deps.iter().flat_map(|(&package_id, (_, features))| {
                features
                    .iter()
                    .map(move |&feature| FeatureId::new(package_id, feature))
            });
            (
                output_key,
                self.graph
                    .feature_graph()
                    .resolve_ids(feature_ids)
                    .expect("specified feature IDs are valid"),
            )
        })
    }
}
| |
test_timedelta.py
|
from datetime import timedelta
import numpy as np
import pytest
import pandas as pd
from pandas import (
    Index,
Int64Index,
Series,
Timedelta,
TimedeltaIndex,
array,
date_range,
timedelta_range,
)
import pandas._testing as tm
from ..datetimelike import DatetimeLike
randn = np.random.randn
class TestTimedeltaIndex(DatetimeLike):
_holder = TimedeltaIndex
    @pytest.fixture
    def indices(self):
        # 10-element TimedeltaIndex fixture consumed by the shared DatetimeLike base tests.
        return tm.makeTimedeltaIndex(10)
    def create_index(self) -> TimedeltaIndex:
        # Build a 5-element daily range, let pandas infer the "D" frequency, then shift by an
        # hour so values are not aligned to whole days.
        index = pd.to_timedelta(range(5), unit="d")._with_freq("infer")
        assert index.freq == "D"
        return index + pd.offsets.Hour(1)
    def test_numeric_compat(self):
        # Dummy method to override super's version; this test is now done
        # in test_arithmetic.py
        pass

    def test_shift(self):
        pass  # this is handled in test_arithmetic.py

    def test_pickle_compat_construction(self):
        # Override of the base-class test; intentionally a no-op for this index type.
        pass
def test_isin(self):
index = tm.makeTimedeltaIndex(4)
result = index.isin(index)
assert result.all()
result = index.isin(list(index))
assert result.all()
tm.assert_almost_equal(
index.isin([index[2], 5]), np.array([False, False, True, False])
)
def test_factorize(self):
idx1 = TimedeltaIndex(["1 day", "1 day", "2 day", "2 day", "3 day", "3 day"])
exp_arr = np.array([0, 0, 1, 1, 2, 2], dtype=np.intp)
exp_idx = TimedeltaIndex(["1 day", "2 day", "3 day"])
arr, idx = idx1.factorize()
tm.assert_numpy_array_equal(arr, exp_arr)
tm.assert_index_equal(idx, exp_idx)
arr, idx = idx1.factorize(sort=True)
tm.assert_numpy_array_equal(arr, exp_arr)
tm.assert_index_equal(idx, exp_idx)
# freq must be preserved
idx3 = timedelta_range("1 day", periods=4, freq="s")
exp_arr = np.array([0, 1, 2, 3], dtype=np.intp)
arr, idx = idx3.factorize()
tm.assert_numpy_array_equal(arr, exp_arr)
tm.assert_index_equal(idx, idx3)
def test_sort_values(self):
idx = TimedeltaIndex(["4d", "1d", "2d"])
ordered = idx.sort_values()
assert ordered.is_monotonic
ordered = idx.sort_values(ascending=False)
assert ordered[::-1].is_monotonic
ordered, dexer = idx.sort_values(return_indexer=True)
assert ordered.is_monotonic
tm.assert_numpy_array_equal(dexer, np.array([1, 2, 0]), check_dtype=False)
ordered, dexer = idx.sort_values(return_indexer=True, ascending=False)
assert ordered[::-1].is_monotonic
tm.assert_numpy_array_equal(dexer, np.array([0, 2, 1]), check_dtype=False)
@pytest.mark.parametrize("klass", [list, np.array, array, Series])
def test_searchsorted_different_argument_classes(self, klass):
idx = TimedeltaIndex(["1 day", "2 days", "3 days"])
result = idx.searchsorted(klass(idx))
expected = np.arange(len(idx), dtype=result.dtype)
tm.assert_numpy_array_equal(result, expected)
result = idx._data.searchsorted(klass(idx))
tm.assert_numpy_array_equal(result, expected)
@pytest.mark.parametrize(
"arg",
[[1, 2], ["a", "b"], [pd.Timestamp("2020-01-01", tz="Europe/London")] * 2],
)
def test_searchsorted_invalid_argument_dtype(self, arg):
idx = TimedeltaIndex(["1 day", "2 days", "3 days"])
msg = "searchsorted requires compatible dtype"
with pytest.raises(TypeError, match=msg):
idx.searchsorted(arg)
def test_argmin_argmax(self):
idx = TimedeltaIndex(["1 day 00:00:05", "1 day 00:00:01", "1 day 00:00:02"])
assert idx.argmin() == 1
assert idx.argmax() == 0
def test_misc_coverage(self):
rng = timedelta_range("1 day", periods=5)
result = rng.groupby(rng.days)
assert isinstance(list(result.values())[0][0], Timedelta)
idx = TimedeltaIndex(["3d", "1d", "2d"])
assert not idx.equals(list(idx))
non_td = Index(list("abc"))
assert not idx.equals(list(non_td))
def test_map(self):
# test_map_dictlike generally tests
rng = timedelta_range("1 day", periods=10)
f = lambda x: x.days
result = rng.map(f)
exp = Int64Index([f(x) for x in rng])
tm.assert_index_equal(result, exp)
def test_pass_TimedeltaIndex_to_index(self):
rng = timedelta_range("1 days", "10 days")
idx = Index(rng, dtype=object)
expected = Index(rng.to_pytimedelta(), dtype=object)
tm.assert_numpy_array_equal(idx.values, expected.values)
def test_append_numpy_bug_1681(self):
td = timedelta_range("1 days", "10 days", freq="2D")
a = DataFrame()
c = DataFrame({"A": "foo", "B": td}, index=td)
str(c)
result = a.append(c)
assert (result["B"] == td).all()
def test_fields(self):
rng = timedelta_range("1 days, 10:11:12.100123456", periods=2, freq="s")
tm.assert_index_equal(rng.days, Index([1, 1], dtype="int64"))
tm.assert_index_equal(
rng.seconds,
Index([10 * 3600 + 11 * 60 + 12, 10 * 3600 + 11 * 60 + 13], dtype="int64"),
)
tm.assert_index_equal(
rng.microseconds, Index([100 * 1000 + 123, 100 * 1000 + 123], dtype="int64")
)
tm.assert_index_equal(rng.nanoseconds, Index([456, 456], dtype="int64"))
msg = "'TimedeltaIndex' object has no attribute '{}'"
with pytest.raises(AttributeError, match=msg.format("hours")):
rng.hours
with pytest.raises(AttributeError, match=msg.format("minutes")):
rng.minutes
with pytest.raises(AttributeError, match=msg.format("milliseconds")):
rng.milliseconds
# with nat
s = Series(rng)
s[1] = np.nan
tm.assert_series_equal(s.dt.days, Series([1, np.nan], index=[0, 1]))
tm.assert_series_equal(
s.dt.seconds, Series([10 * 3600 + 11 * 60 + 12, np.nan], index=[0, 1])
)
# preserve name (GH15589)
rng.name = "name"
assert rng.days.name == "name"
def test_freq_conversion(self):
# doc example
# series
td = Series(date_range("20130101", periods=4)) - Series(
date_range("20121201", periods=4)
)
td[2] += timedelta(minutes=5, seconds=3)
td[3] = np.nan
result = td / np.timedelta64(1, "D")
expected = Series([31, 31, (31 * 86400 + 5 * 60 + 3) / 86400.0, np.nan])
tm.assert_series_equal(result, expected)
result = td.astype("timedelta64[D]")
expected = Series([31, 31, 31, np.nan])
tm.assert_series_equal(result, expected)
result = td / np.timedelta64(1, "s")
expected = Series([31 * 86400, 31 * 86400, 31 * 86400 + 5 * 60 + 3, np.nan])
tm.assert_series_equal(result, expected)
result = td.astype("timedelta64[s]")
tm.assert_series_equal(result, expected)
# tdi
td = TimedeltaIndex(td)
result = td / np.timedelta64(1, "D")
expected = Index([31, 31, (31 * 86400 + 5 * 60 + 3) / 86400.0, np.nan])
tm.assert_index_equal(result, expected)
result = td.astype("timedelta64[D]")
expected = Index([31, 31, 31, np.nan])
tm.assert_index_equal(result, expected)
result = td / np.timedelta64(1, "s")
expected = Index([31 * 86400, 31 * 86400, 31 * 86400 + 5 * 60 + 3, np.nan])
tm.assert_index_equal(result, expected)
result = td.astype("timedelta64[s]")
tm.assert_index_equal(result, expected)
|
DataFrame,
|
transaction.go
|
// This source file is part of the EdgeDB open source project.
//
// Copyright 2020-present EdgeDB Inc. and the EdgeDB authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package edgedb
import (
"context"
"fmt"
"github.com/edgedb/edgedb-go/internal/cardinality"
"github.com/edgedb/edgedb-go/internal/format"
)
// transactionState tracks where a Tx is in its lifecycle. It is advanced
// by execute() (via start/commit/rollback) and checked by assertNotDone /
// assertStarted before every operation.
type transactionState int

const (
	// newTx: the Tx exists but its start query has not been executed.
	newTx transactionState = iota
	// startedTx: the start query succeeded; queries may run.
	startedTx
	// committedTx: COMMIT succeeded; the transaction is done.
	committedTx
	// rolledBackTx: ROLLBACK succeeded; the transaction is done.
	rolledBackTx
	// failedTx: a command failed; the transaction is in an error state.
	failedTx
)
// Tx is a transaction. Use RetryingTx() or RawTx() to get a transaction.
type Tx struct {
	conn    *baseConn        // connection all commands are sent on
	state   transactionState // current lifecycle state (see transactionState)
	options TxOptions        // options used to build the start query
}
// execute runs cmd in a script flow and advances the transaction's state
// machine: the state becomes successState when the command succeeds, or
// failedTx when it fails. The command error, if any, is returned as-is.
func (t *Tx) execute(
	ctx context.Context,
	cmd string,
	successState transactionState, // fixed misspelling: was "sucessState"
) error {
	err := t.conn.ScriptFlow(ctx, sfQuery{cmd: cmd})
	if err == nil {
		t.state = successState
	} else {
		t.state = failedTx
	}
	return err
}
// assertNotDone returns an error if the transaction is in a done state
// (committed, rolled back, or failed); opName names the attempted
// operation in the error message.
func (t *Tx) assertNotDone(opName string) error {
	if t.state == committedTx {
		return &interfaceError{msg: fmt.Sprintf(
			"cannot %v; the transaction is already committed", opName,
		)}
	}
	if t.state == rolledBackTx {
		return &interfaceError{msg: fmt.Sprintf(
			"cannot %v; the transaction is already rolled back", opName,
		)}
	}
	if t.state == failedTx {
		return &interfaceError{msg: fmt.Sprintf(
			"cannot %v; the transaction is in error state", opName,
		)}
	}
	return nil
}
// assertStarted returns an error unless the transaction is in the
// started state; opName names the attempted operation in the message.
func (t *Tx) assertStarted(opName string) error {
	if t.state == startedTx {
		return nil
	}
	if t.state == newTx {
		return &interfaceError{msg: fmt.Sprintf(
			"cannot %v; the transaction is not yet started", opName,
		)}
	}
	// Any remaining state is a done state; reuse its error message.
	return t.assertNotDone(opName)
}
// start begins the transaction by executing the start query built from
// the transaction's options, moving the state to startedTx on success.
func (t *Tx) start(ctx context.Context) error {
	if err := t.assertNotDone("start"); err != nil {
		return err
	}
	// Starting twice is an explicit error rather than a no-op.
	if t.state == startedTx {
		return &interfaceError{
			msg: "cannot start; the transaction is already started",
		}
	}
	return t.execute(ctx, t.options.startTxQuery(), startedTx)
}
// commit finalizes a started transaction with COMMIT, moving its state
// to committedTx on success.
func (t *Tx) commit(ctx context.Context) error {
	err := t.assertStarted("commit")
	if err != nil {
		return err
	}
	return t.execute(ctx, "COMMIT;", committedTx)
}
// rollback aborts a started transaction with ROLLBACK, moving its state
// to rolledBackTx on success.
func (t *Tx) rollback(ctx context.Context) error {
	err := t.assertStarted("rollback")
	if err != nil {
		return err
	}
	return t.execute(ctx, "ROLLBACK;", rolledBackTx)
}
// Execute an EdgeQL command (or commands).
// The transaction must be started and not yet done.
func (t *Tx) Execute(ctx context.Context, cmd string) error {
	err := t.assertStarted("Execute")
	if err != nil {
		return err
	}
	return t.conn.ScriptFlow(ctx, sfQuery{cmd: cmd})
}
// Query runs a query and returns the results.
// Results are decoded into out; args are the query's parameters.
func (t *Tx) Query(
	ctx context.Context,
	cmd string,
	out interface{},
	args ...interface{},
) error {
	if err := t.assertStarted("Query"); err != nil {
		return err
	}
	query, err := newQuery(cmd, format.Binary, cardinality.Many, args, nil, out)
	if err != nil {
		return err
	}
	return t.conn.GranularFlow(ctx, query)
}
// QueryOne runs a singleton-returning query and returns its element.
// If the query executes successfully but doesn't return a result
// a NoDataError is returned.
func (t *Tx) QueryOne(
	ctx context.Context,
	cmd string,
	out interface{},
	args ...interface{},
) error {
	if err := t.assertStarted("QueryOne"); err != nil {
		return err
	}
	query, err := newQuery(cmd, format.Binary, cardinality.One, args, nil, out)
	if err != nil {
		return err
	}
	return t.conn.GranularFlow(ctx, query)
}
// QueryJSON runs a query and returns the results as JSON.
// The encoded result is written into out — presumably raw JSON bytes;
// verify against newQuery/GranularFlow.
func (t *Tx) QueryJSON(
	ctx context.Context,
	cmd string,
	out *[]byte,
	args ...interface{},
) error {
	// Queries are only legal while the transaction is started.
	if e := t.assertStarted("QueryJSON"); e != nil {
		return e
	}
	q, err := newQuery(cmd, format.JSON, cardinality.Many, args, nil, out)
	if err != nil {
		return err
	}
	return t.conn.GranularFlow(ctx, q)
// If the query executes successfully but doesn't have a result
// a NoDataError is returned.
func (t *Tx) QueryOneJSON(
	ctx context.Context,
	cmd string,
	out *[]byte,
	args ...interface{},
) error {
	// Bug fix: state errors previously reported the wrong operation name
	// ("QueryJSON" was copy-pasted here); report "QueryOneJSON".
	if e := t.assertStarted("QueryOneJSON"); e != nil {
		return e
	}
	q, err := newQuery(cmd, format.JSON, cardinality.One, args, nil, out)
	if err != nil {
		return err
	}
	return t.conn.GranularFlow(ctx, q)
}
|
}
// QueryOneJSON runs a singleton-returning query.
|
48.py
|
"""
PASSENGERS
"""
numPassengers = 26645  # presumably the total passenger count across passenger_arriving — TODO confirm against the generator
passenger_arriving = (
(9, 10, 5, 5, 3, 2, 2, 3, 3, 1, 1, 0, 0, 6, 9, 0, 8, 12, 3, 4, 1, 0, 2, 2, 2, 0), # 0
(5, 10, 9, 11, 6, 2, 0, 5, 1, 1, 1, 0, 0, 11, 5, 5, 6, 6, 1, 2, 2, 1, 4, 0, 0, 0), # 1
(7, 9, 3, 3, 3, 3, 3, 5, 4, 4, 2, 0, 0, 9, 6, 5, 7, 9, 1, 5, 4, 2, 2, 2, 2, 0), # 2
(5, 10, 11, 9, 12, 3, 6, 6, 4, 1, 0, 0, 0, 6, 10, 2, 8, 8, 8, 2, 2, 3, 2, 2, 0, 0), # 3
(11, 9, 6, 8, 7, 3, 4, 5, 4, 1, 0, 0, 0, 8, 12, 3, 5, 10, 3, 0, 4, 1, 1, 0, 3, 0), # 4
(11, 9, 9, 13, 3, 3, 7, 7, 4, 2, 0, 2, 0, 7, 13, 7, 8, 9, 5, 5, 1, 2, 5, 1, 1, 0), # 5
(12, 13, 8, 8, 8, 4, 1, 4, 3, 4, 3, 2, 0, 9, 8, 7, 9, 6, 2, 7, 3, 6, 3, 0, 1, 0), # 6
(12, 8, 8, 11, 13, 3, 3, 1, 4, 1, 1, 0, 0, 12, 7, 11, 4, 9, 4, 3, 2, 3, 2, 1, 1, 0), # 7
(14, 11, 17, 10, 8, 3, 3, 2, 3, 1, 3, 0, 0, 11, 7, 9, 6, 11, 2, 7, 4, 1, 3, 1, 0, 0), # 8
(11, 10, 7, 9, 8, 8, 7, 5, 5, 0, 3, 3, 0, 17, 12, 9, 6, 10, 9, 4, 2, 3, 2, 4, 1, 0), # 9
(10, 12, 11, 10, 7, 3, 6, 3, 7, 3, 2, 2, 0, 18, 13, 12, 9, 12, 6, 2, 3, 6, 2, 3, 2, 0), # 10
(15, 11, 10, 11, 12, 7, 4, 3, 3, 1, 3, 2, 0, 10, 5, 13, 8, 12, 6, 6, 3, 1, 1, 4, 1, 0), # 11
(11, 16, 10, 11, 9, 2, 7, 4, 3, 3, 1, 2, 0, 17, 8, 9, 5, 8, 5, 5, 0, 6, 4, 1, 1, 0), # 12
(11, 17, 10, 10, 6, 7, 4, 2, 6, 1, 3, 1, 0, 14, 12, 4, 3, 5, 6, 4, 5, 3, 7, 3, 1, 0), # 13
(12, 21, 12, 12, 11, 7, 4, 4, 4, 3, 5, 0, 0, 10, 12, 6, 7, 12, 6, 7, 1, 4, 4, 0, 0, 0), # 14
(6, 12, 9, 21, 12, 3, 3, 4, 6, 2, 1, 2, 0, 9, 19, 8, 3, 6, 7, 7, 6, 8, 0, 0, 2, 0), # 15
(7, 12, 10, 13, 12, 7, 5, 4, 4, 3, 2, 3, 0, 12, 8, 4, 6, 14, 10, 4, 2, 4, 4, 1, 1, 0), # 16
(15, 15, 18, 15, 7, 9, 4, 3, 5, 6, 3, 1, 0, 12, 10, 7, 11, 10, 6, 2, 5, 7, 3, 1, 0, 0), # 17
(14, 14, 16, 17, 15, 4, 6, 4, 4, 1, 2, 1, 0, 16, 17, 7, 4, 7, 7, 9, 5, 6, 4, 2, 2, 0), # 18
(23, 16, 12, 13, 6, 6, 6, 3, 7, 0, 1, 1, 0, 17, 18, 11, 11, 15, 13, 6, 3, 5, 6, 4, 2, 0), # 19
(12, 12, 13, 13, 10, 7, 6, 4, 6, 2, 2, 1, 0, 10, 17, 13, 7, 11, 7, 5, 5, 6, 5, 4, 1, 0), # 20
(16, 18, 12, 13, 8, 6, 2, 6, 11, 2, 1, 1, 0, 7, 12, 10, 10, 16, 4, 4, 7, 4, 3, 1, 2, 0), # 21
(14, 13, 8, 11, 7, 7, 2, 3, 9, 4, 2, 0, 0, 18, 9, 6, 12, 12, 7, 3, 3, 3, 8, 0, 3, 0), # 22
(16, 9, 10, 8, 14, 9, 6, 2, 9, 3, 2, 1, 0, 16, 21, 9, 4, 5, 7, 6, 6, 1, 4, 1, 1, 0), # 23
(11, 13, 11, 10, 6, 4, 10, 6, 6, 1, 5, 1, 0, 18, 17, 16, 3, 12, 13, 8, 2, 4, 1, 4, 1, 0), # 24
(11, 16, 17, 7, 12, 5, 8, 5, 6, 2, 0, 2, 0, 10, 14, 10, 7, 20, 5, 12, 4, 3, 3, 3, 2, 0), # 25
(18, 15, 18, 8, 12, 4, 14, 2, 7, 1, 1, 1, 0, 9, 10, 10, 9, 16, 3, 4, 2, 4, 4, 1, 1, 0), # 26
(9, 9, 8, 9, 5, 7, 11, 5, 8, 2, 3, 1, 0, 10, 13, 9, 8, 12, 13, 3, 1, 3, 4, 1, 1, 0), # 27
(18, 14, 17, 16, 17, 2, 7, 7, 7, 7, 2, 2, 0, 20, 13, 12, 8, 7, 1, 4, 2, 6, 1, 2, 2, 0), # 28
(16, 16, 9, 15, 6, 4, 4, 7, 5, 3, 8, 1, 0, 12, 14, 11, 9, 11, 7, 2, 7, 2, 7, 6, 0, 0), # 29
(13, 12, 17, 14, 12, 7, 9, 4, 2, 1, 3, 0, 0, 22, 14, 9, 9, 20, 6, 9, 3, 11, 3, 0, 2, 0), # 30
(17, 9, 10, 22, 14, 6, 4, 2, 3, 5, 3, 0, 0, 19, 13, 9, 7, 15, 3, 2, 3, 5, 2, 1, 1, 0), # 31
(12, 14, 16, 12, 11, 11, 5, 5, 6, 1, 2, 1, 0, 15, 12, 13, 7, 8, 13, 4, 5, 4, 7, 1, 2, 0), # 32
(11, 16, 15, 17, 3, 7, 1, 6, 6, 1, 4, 1, 0, 18, 10, 9, 8, 14, 2, 3, 3, 5, 8, 1, 1, 0), # 33
(8, 11, 12, 15, 18, 9, 4, 6, 5, 2, 4, 0, 0, 13, 13, 5, 4, 11, 7, 6, 5, 2, 2, 1, 0, 0), # 34
(23, 16, 11, 5, 13, 3, 4, 4, 4, 3, 3, 1, 0, 13, 12, 8, 6, 9, 7, 4, 5, 6, 6, 6, 1, 0), # 35
(13, 12, 17, 16, 14, 3, 4, 4, 5, 3, 1, 0, 0, 11, 5, 6, 11, 6, 9, 6, 3, 6, 5, 1, 1, 0), # 36
(15, 9, 13, 12, 8, 5, 9, 8, 8, 1, 1, 1, 0, 22, 18, 11, 7, 14, 11, 8, 2, 5, 7, 3, 0, 0), # 37
(15, 14, 16, 13, 8, 4, 4, 4, 4, 6, 1, 1, 0, 13, 8, 16, 3, 4, 7, 7, 3, 7, 6, 2, 1, 0), # 38
(16, 17, 8, 14, 9, 4, 4, 3, 6, 0, 4, 0, 0, 17, 15, 11, 7, 13, 6, 4, 3, 5, 6, 0, 0, 0), # 39
(11, 13, 11, 7, 9, 3, 1, 6, 8, 3, 3, 0, 0, 15, 7, 6, 12, 11, 6, 5, 7, 5, 6, 3, 1, 0), # 40
(15, 11, 11, 7, 10, 5, 6, 3, 8, 5, 1, 1, 0, 9, 9, 13, 7, 9, 12, 6, 3, 3, 3, 3, 1, 0), # 41
(21, 12, 14, 12, 7, 0, 4, 5, 4, 0, 0, 2, 0, 22, 14, 7, 4, 14, 13, 6, 5, 7, 6, 1, 0, 0), # 42
(17, 16, 8, 12, 13, 1, 7, 4, 6, 1, 2, 0, 0, 15, 5, 9, 8, 13, 4, 9, 4, 1, 3, 2, 1, 0), # 43
(11, 17, 16, 10, 7, 5, 7, 4, 4, 3, 1, 4, 0, 18, 14, 8, 8, 15, 7, 5, 7, 4, 4, 0, 3, 0), # 44
(12, 14, 12, 14, 10, 4, 4, 4, 3, 2, 1, 2, 0, 13, 13, 13, 8, 6, 5, 5, 2, 4, 2, 4, 1, 0), # 45
(18, 21, 10, 16, 12, 4, 4, 6, 6, 3, 1, 0, 0, 8, 5, 7, 7, 13, 8, 5, 4, 10, 2, 0, 3, 0), # 46
(7, 10, 11, 16, 9, 8, 5, 3, 8, 0, 3, 1, 0, 20, 17, 8, 3, 7, 12, 6, 4, 7, 4, 2, 3, 0), # 47
(19, 12, 8, 5, 7, 4, 3, 3, 5, 3, 1, 1, 0, 9, 7, 12, 11, 19, 9, 8, 4, 8, 2, 3, 1, 0), # 48
(9, 7, 14, 20, 17, 2, 7, 3, 4, 3, 4, 3, 0, 13, 11, 10, 9, 13, 7, 4, 3, 3, 6, 1, 1, 0), # 49
(15, 21, 13, 12, 7, 6, 7, 4, 4, 1, 1, 2, 0, 18, 12, 11, 8, 13, 10, 7, 2, 4, 6, 3, 2, 0), # 50
(11, 9, 13, 15, 9, 2, 6, 2, 4, 7, 1, 2, 0, 21, 18, 9, 9, 14, 5, 5, 1, 5, 3, 3, 3, 0), # 51
(15, 15, 11, 10, 8, 4, 3, 5, 6, 1, 1, 1, 0, 7, 7, 12, 11, 14, 5, 5, 2, 4, 0, 1, 3, 0), # 52
(11, 11, 8, 13, 9, 4, 5, 5, 2, 3, 3, 1, 0, 13, 15, 8, 4, 16, 5, 6, 6, 5, 3, 2, 1, 0), # 53
(17, 18, 6, 15, 9, 4, 5, 6, 10, 4, 2, 2, 0, 21, 17, 14, 8, 16, 6, 4, 6, 6, 3, 1, 1, 0), # 54
(10, 12, 18, 10, 11, 7, 1, 7, 7, 3, 1, 0, 0, 14, 7, 15, 7, 7, 7, 5, 3, 1, 10, 0, 0, 0), # 55
(6, 12, 13, 14, 15, 2, 4, 4, 4, 1, 3, 1, 0, 21, 16, 11, 6, 15, 4, 7, 7, 5, 4, 4, 1, 0), # 56
(6, 16, 19, 17, 4, 4, 7, 6, 8, 2, 0, 0, 0, 19, 14, 6, 2, 13, 10, 6, 4, 4, 0, 2, 1, 0), # 57
(15, 13, 15, 19, 8, 3, 6, 6, 5, 0, 2, 2, 0, 14, 9, 8, 6, 14, 3, 9, 6, 7, 4, 4, 1, 0), # 58
(13, 4, 10, 14, 7, 4, 7, 3, 7, 6, 0, 2, 0, 10, 12, 9, 10, 11, 6, 3, 5, 7, 4, 2, 0, 0), # 59
(14, 24, 6, 17, 14, 6, 4, 1, 1, 2, 1, 3, 0, 14, 10, 12, 12, 19, 5, 9, 1, 6, 6, 1, 0, 0), # 60
(13, 20, 9, 10, 11, 7, 5, 9, 9, 1, 1, 2, 0, 16, 12, 11, 7, 16, 13, 3, 6, 6, 4, 0, 1, 0), # 61
(17, 7, 12, 12, 8, 5, 5, 4, 6, 3, 1, 0, 0, 13, 8, 10, 10, 11, 4, 4, 5, 3, 6, 4, 3, 0), # 62
(8, 14, 19, 12, 9, 5, 5, 5, 2, 0, 7, 0, 0, 12, 13, 3, 7, 10, 5, 1, 4, 3, 1, 1, 0, 0), # 63
(16, 10, 11, 8, 12, 2, 7, 8, 5, 1, 4, 0, 0, 14, 16, 8, 11, 14, 7, 5, 4, 8, 4, 1, 1, 0), # 64
(7, 13, 15, 14, 9, 2, 4, 4, 3, 1, 2, 0, 0, 16, 12, 19, 4, 13, 6, 5, 1, 11, 4, 1, 1, 0), # 65
(18, 14, 11, 11, 11, 2, 0, 5, 6, 6, 1, 1, 0, 15, 10, 7, 8, 14, 10, 2, 2, 5, 4, 0, 1, 0), # 66
(15, 17, 9, 12, 15, 4, 7, 7, 8, 0, 2, 3, 0, 17, 10, 6, 11, 9, 4, 12, 2, 1, 8, 3, 0, 0), # 67
(10, 11, 6, 11, 12, 4, 7, 4, 8, 2, 2, 1, 0, 10, 8, 9, 7, 11, 3, 8, 3, 4, 4, 5, 0, 0), # 68
(14, 10, 9, 16, 6, 4, 8, 7, 3, 1, 0, 4, 0, 12, 10, 9, 5, 12, 7, 9, 3, 4, 4, 2, 0, 0), # 69
(19, 11, 8, 18, 13, 6, 7, 4, 4, 0, 5, 1, 0, 16, 8, 10, 6, 11, 6, 6, 2, 6, 4, 1, 2, 0), # 70
(17, 6, 13, 11, 15, 9, 2, 1, 9, 4, 2, 0, 0, 11, 10, 8, 4, 7, 5, 9, 4, 6, 5, 0, 0, 0), # 71
(15, 9, 19, 17, 10, 5, 9, 6, 7, 3, 1, 0, 0, 16, 11, 12, 17, 9, 2, 8, 7, 7, 4, 2, 3, 0), # 72
(17, 11, 11, 15, 9, 6, 3, 7, 9, 2, 2, 0, 0, 16, 9, 5, 5, 16, 6, 7, 5, 2, 1, 2, 0, 0), # 73
(9, 8, 8, 14, 12, 11, 3, 2, 3, 3, 0, 0, 0, 18, 17, 13, 4, 13, 5, 5, 1, 5, 4, 4, 0, 0), # 74
(11, 13, 14, 11, 13, 7, 6, 3, 7, 3, 2, 1, 0, 14, 14, 9, 9, 7, 5, 6, 1, 8, 4, 3, 1, 0), # 75
(19, 12, 16, 10, 11, 5, 8, 3, 3, 6, 1, 0, 0, 7, 12, 10, 8, 19, 9, 7, 2, 5, 6, 4, 0, 0), # 76
(10, 8, 14, 12, 13, 5, 9, 5, 5, 1, 3, 0, 0, 18, 21, 9, 11, 6, 2, 2, 4, 8, 2, 3, 1, 0), # 77
(12, 14, 6, 17, 15, 6, 4, 4, 6, 2, 2, 0, 0, 11, 12, 9, 5, 11, 6, 4, 2, 5, 7, 0, 0, 0), # 78
(12, 8, 10, 13, 10, 6, 4, 6, 6, 3, 2, 0, 0, 16, 7, 7, 8, 5, 4, 5, 6, 9, 2, 1, 0, 0), # 79
(17, 17, 12, 9, 15, 8, 2, 3, 5, 1, 1, 1, 0, 10, 15, 13, 5, 13, 4, 6, 4, 7, 3, 1, 0, 0), # 80
(13, 11, 12, 8, 12, 6, 8, 6, 8, 2, 3, 0, 0, 19, 9, 12, 10, 12, 4, 5, 2, 5, 1, 1, 1, 0), # 81
(13, 13, 7, 15, 11, 7, 6, 6, 5, 2, 0, 7, 0, 18, 14, 7, 12, 7, 3, 3, 8, 3, 6, 3, 2, 0), # 82
(12, 6, 13, 6, 6, 5, 8, 3, 6, 4, 2, 0, 0, 17, 9, 8, 8, 12, 7, 5, 3, 7, 4, 2, 0, 0), # 83
(11, 13, 14, 16, 11, 7, 8, 7, 6, 2, 1, 1, 0, 7, 16, 8, 5, 5, 3, 6, 4, 5, 3, 2, 0, 0), # 84
(12, 14, 21, 14, 14, 7, 6, 3, 8, 4, 1, 1, 0, 9, 15, 10, 2, 13, 3, 6, 4, 6, 3, 3, 0, 0), # 85
(16, 11, 11, 11, 15, 4, 4, 4, 6, 1, 2, 1, 0, 11, 7, 8, 11, 10, 8, 3, 3, 5, 8, 2, 0, 0), # 86
(17, 7, 11, 12, 6, 3, 4, 2, 5, 2, 1, 0, 0, 11, 16, 8, 10, 7, 8, 5, 6, 7, 9, 1, 0, 0), # 87
(11, 17, 10, 9, 10, 6, 6, 2, 3, 4, 5, 0, 0, 21, 12, 9, 10, 13, 1, 2, 5, 6, 5, 2, 1, 0), # 88
(15, 9, 14, 15, 7, 4, 4, 5, 4, 1, 3, 1, 0, 18, 14, 9, 4, 9, 6, 9, 4, 5, 4, 2, 0, 0), # 89
(13, 8, 9, 11, 11, 9, 7, 2, 3, 2, 0, 0, 0, 13, 13, 7, 3, 6, 9, 4, 4, 6, 1, 3, 1, 0), # 90
(18, 13, 7, 14, 9, 4, 4, 0, 8, 2, 2, 0, 0, 14, 10, 11, 5, 7, 4, 10, 3, 2, 2, 5, 2, 0), # 91
(12, 12, 7, 13, 13, 7, 1, 8, 5, 4, 5, 1, 0, 14, 17, 8, 8, 11, 4, 5, 3, 5, 5, 2, 1, 0), # 92
(11, 5, 12, 12, 4, 4, 3, 4, 10, 3, 1, 0, 0, 13, 10, 10, 6, 21, 6, 4, 3, 2, 3, 2, 1, 0), # 93
(14, 13, 12, 13, 13, 2, 4, 7, 3, 2, 2, 1, 0, 12, 13, 4, 7, 15, 6, 5, 1, 6, 5, 0, 0, 0), # 94
(9, 19, 11, 11, 7, 5, 2, 4, 3, 4, 0, 3, 0, 12, 17, 7, 11, 11, 6, 5, 2, 3, 3, 0, 2, 0), # 95
(9, 8, 14, 10, 7, 6, 8, 7, 9, 3, 1, 2, 0, 10, 10, 9, 8, 7, 6, 3, 9, 9, 5, 5, 0, 0), # 96
(13, 10, 8, 14, 10, 4, 2, 10, 5, 2, 2, 3, 0, 11, 3, 8, 8, 12, 7, 5, 1, 8, 1, 2, 1, 0), # 97
(17, 8, 12, 12, 12, 7, 4, 2, 4, 4, 0, 2, 0, 8, 9, 5, 6, 10, 5, 3, 1, 7, 4, 4, 2, 0), # 98
(14, 10, 11, 15, 12, 4, 5, 3, 4, 1, 0, 2, 0, 14, 13, 13, 7, 9, 4, 4, 0, 2, 6, 4, 2, 0), # 99
(9, 9, 11, 10, 11, 5, 2, 5, 8, 1, 0, 5, 0, 12, 8, 12, 8, 12, 2, 7, 3, 10, 4, 4, 1, 0), # 100
(16, 11, 10, 7, 12, 3, 2, 3, 6, 1, 1, 1, 0, 13, 12, 4, 5, 10, 9, 6, 3, 6, 4, 2, 0, 0), # 101
(17, 12, 8, 14, 5, 6, 5, 5, 4, 2, 1, 0, 0, 15, 7, 5, 12, 9, 6, 2, 5, 3, 7, 3, 3, 0), # 102
(14, 12, 8, 12, 8, 5, 4, 5, 7, 2, 1, 0, 0, 20, 14, 10, 8, 6, 4, 4, 2, 8, 3, 0, 1, 0), # 103
(14, 6, 11, 14, 11, 4, 4, 5, 5, 3, 1, 1, 0, 10, 6, 14, 6, 8, 9, 4, 5, 2, 3, 1, 0, 0), # 104
(16, 11, 9, 13, 12, 5, 8, 5, 8, 2, 2, 0, 0, 17, 10, 15, 8, 10, 3, 6, 1, 6, 4, 3, 0, 0), # 105
(8, 12, 13, 10, 6, 5, 5, 2, 8, 2, 1, 1, 0, 19, 14, 7, 4, 12, 4, 3, 4, 5, 3, 1, 1, 0), # 106
(11, 12, 16, 5, 3, 9, 3, 2, 7, 2, 0, 2, 0, 15, 13, 3, 3, 8, 5, 7, 4, 6, 4, 3, 1, 0), # 107
(12, 9, 7, 9, 7, 5, 4, 3, 2, 1, 2, 3, 0, 24, 10, 11, 9, 9, 5, 6, 2, 6, 4, 1, 4, 0), # 108
(10, 15, 14, 10, 7, 7, 9, 5, 8, 2, 1, 2, 0, 17, 9, 6, 5, 9, 9, 3, 5, 9, 1, 3, 0, 0), # 109
(15, 7, 10, 8, 8, 4, 4, 4, 5, 0, 0, 0, 0, 13, 9, 13, 9, 9, 6, 5, 5, 5, 4, 0, 2, 0), # 110
(19, 13, 9, 13, 16, 2, 2, 2, 9, 4, 1, 0, 0, 8, 8, 11, 4, 10, 1, 5, 2, 6, 2, 3, 0, 0), # 111
(15, 18, 10, 14, 4, 2, 3, 2, 9, 0, 1, 0, 0, 12, 10, 6, 11, 7, 2, 2, 3, 10, 3, 2, 0, 0), # 112
(9, 7, 13, 17, 5, 2, 0, 1, 8, 1, 3, 1, 0, 18, 9, 11, 6, 9, 11, 2, 2, 5, 5, 4, 1, 0), # 113
(14, 4, 12, 10, 8, 7, 2, 2, 6, 2, 2, 0, 0, 13, 11, 9, 6, 10, 1, 6, 3, 4, 3, 2, 1, 0), # 114
(15, 7, 9, 13, 10, 3, 7, 1, 4, 0, 2, 2, 0, 11, 11, 15, 4, 12, 4, 3, 3, 4, 5, 4, 3, 0), # 115
(4, 14, 12, 13, 12, 6, 1, 6, 4, 0, 0, 1, 0, 11, 9, 9, 4, 7, 11, 3, 2, 4, 5, 1, 1, 0), # 116
(15, 9, 13, 10, 9, 6, 4, 3, 9, 5, 1, 2, 0, 10, 10, 9, 9, 12, 1, 3, 4, 5, 2, 1, 0, 0), # 117
(10, 10, 13, 17, 10, 6, 3, 3, 4, 1, 2, 2, 0, 12, 15, 10, 9, 5, 4, 3, 3, 6, 4, 0, 1, 0), # 118
(5, 8, 8, 7, 11, 3, 3, 5, 5, 2, 4, 1, 0, 10, 9, 11, 3, 11, 7, 3, 3, 3, 2, 4, 3, 0), # 119
(9, 9, 9, 9, 11, 4, 4, 2, 5, 3, 1, 0, 0, 11, 12, 7, 8, 8, 8, 3, 3, 8, 2, 4, 2, 0), # 120
(10, 15, 17, 15, 15, 5, 4, 4, 10, 3, 3, 0, 0, 17, 9, 6, 7, 4, 4, 2, 3, 4, 5, 4, 0, 0), # 121
(23, 10, 9, 8, 11, 4, 2, 3, 9, 1, 2, 0, 0, 17, 12, 6, 7, 8, 5, 2, 3, 8, 3, 3, 0, 0), # 122
(16, 5, 5, 14, 10, 3, 4, 1, 4, 3, 2, 1, 0, 17, 11, 11, 4, 9, 3, 5, 5, 3, 2, 4, 1, 0), # 123
(10, 19, 10, 15, 5, 6, 5, 2, 5, 2, 0, 0, 0, 7, 11, 6, 6, 10, 6, 2, 4, 7, 3, 0, 1, 0), # 124
(7, 8, 9, 6, 11, 4, 5, 2, 4, 3, 1, 0, 0, 13, 11, 11, 10, 11, 6, 7, 4, 5, 2, 4, 0, 0), # 125
(15, 13, 6, 12, 4, 5, 6, 2, 0, 3, 1, 1, 0, 19, 6, 6, 6, 8, 4, 4, 3, 4, 3, 1, 1, 0), # 126
(7, 8, 9, 11, 12, 4, 4, 2, 6, 2, 0, 0, 0, 8, 7, 4, 3, 9, 5, 2, 2, 6, 1, 5, 1, 0), # 127
(15, 12, 9, 9, 4, 7, 4, 2, 8, 5, 1, 2, 0, 11, 5, 9, 5, 11, 3, 4, 3, 3, 3, 4, 1, 0), # 128
(13, 9, 14, 9, 10, 5, 4, 1, 6, 1, 1, 1, 0, 14, 10, 6, 5, 10, 9, 5, 2, 1, 8, 0, 0, 0), # 129
(8, 12, 10, 11, 11, 5, 2, 6, 5, 1, 2, 1, 0, 8, 5, 5, 6, 9, 3, 7, 1, 2, 2, 3, 0, 0), # 130
(5, 6, 19, 7, 5, 2, 1, 4, 6, 1, 0, 0, 0, 9, 10, 9, 5, 8, 7, 3, 4, 3, 1, 2, 0, 0), # 131
(12, 10, 10, 5, 10, 6, 4, 7, 2, 1, 1, 0, 0, 17, 9, 5, 10, 13, 3, 2, 3, 1, 4, 7, 1, 0), # 132
(6, 10, 12, 11, 5, 4, 4, 4, 3, 1, 2, 3, 0, 9, 11, 6, 4, 14, 4, 6, 4, 10, 5, 3, 1, 0), # 133
(15, 8, 11, 16, 9, 5, 2, 3, 7, 2, 1, 1, 0, 11, 5, 12, 5, 12, 1, 5, 5, 5, 4, 2, 0, 0), # 134
(12, 11, 13, 13, 13, 2, 0, 2, 4, 2, 3, 0, 0, 10, 11, 7, 5, 15, 7, 6, 2, 3, 3, 1, 3, 0), # 135
(17, 7, 13, 15, 7, 6, 5, 2, 5, 1, 0, 1, 0, 10, 7, 8, 4, 14, 4, 4, 4, 5, 1, 2, 0, 0), # 136
(10, 11, 12, 12, 12, 5, 2, 3, 2, 2, 2, 0, 0, 18, 11, 8, 6, 12, 4, 7, 2, 6, 3, 3, 1, 0), # 137
(17, 4, 5, 12, 11, 3, 7, 2, 6, 6, 0, 1, 0, 11, 11, 6, 5, 8, 2, 3, 3, 7, 6, 3, 1, 0), # 138
(16, 12, 12, 8, 8, 6, 6, 3, 4, 1, 3, 1, 0, 13, 8, 8, 7, 9, 8, 2, 4, 4, 4, 5, 1, 0), # 139
(13, 9, 5, 14, 8, 5, 3, 5, 6, 2, 3, 1, 0, 6, 9, 7, 3, 11, 6, 5, 7, 5, 6, 3, 1, 0), # 140
(16, 6, 11, 8, 7, 6, 6, 2, 6, 0, 1, 1, 0, 14, 13, 8, 7, 10, 2, 4, 1, 5, 2, 1, 0, 0), # 141
(7, 5, 9, 11, 8, 5, 2, 6, 5, 2, 0, 2, 0, 12, 5, 4, 2, 10, 6, 5, 3, 3, 4, 0, 3, 0), # 142
(17, 14, 13, 10, 15, 4, 5, 5, 7, 0, 3, 4, 0, 13, 10, 6, 6, 15, 5, 6, 3, 2, 5, 1, 1, 0), # 143
(11, 11, 15, 13, 5, 4, 6, 6, 1, 4, 5, 0, 0, 11, 11, 7, 4, 11, 4, 4, 8, 7, 3, 1, 0, 0), # 144
(6, 13, 11, 4, 7, 6, 2, 7, 7, 1, 1, 1, 0, 9, 7, 13, 5, 8, 11, 6, 8, 5, 5, 2, 3, 0), # 145
(15, 12, 8, 10, 9, 5, 4, 5, 4, 0, 1, 1, 0, 11, 9, 13, 5, 11, 4, 5, 2, 3, 3, 1, 0, 0), # 146
(12, 12, 9, 8, 7, 4, 2, 5, 2, 1, 2, 1, 0, 16, 12, 6, 5, 9, 4, 3, 3, 4, 3, 2, 2, 0), # 147
(10, 9, 15, 12, 8, 7, 6, 6, 4, 2, 1, 2, 0, 14, 8, 9, 6, 8, 4, 4, 3, 2, 4, 2, 0, 0), # 148
(18, 13, 6, 15, 12, 4, 6, 2, 5, 4, 2, 0, 0, 10, 7, 8, 5, 8, 2, 3, 4, 7, 2, 3, 2, 0), # 149
(18, 3, 10, 10, 4, 5, 2, 3, 2, 1, 2, 0, 0, 9, 11, 4, 5, 18, 3, 7, 4, 6, 3, 2, 2, 0), # 150
(9, 5, 4, 3, 7, 5, 3, 3, 5, 0, 3, 0, 0, 10, 12, 8, 6, 10, 5, 1, 1, 1, 3, 1, 1, 0), # 151
(9, 3, 9, 12, 8, 3, 1, 1, 4, 3, 1, 0, 0, 14, 6, 3, 4, 9, 4, 3, 7, 3, 5, 0, 0, 0), # 152
(7, 5, 7, 12, 5, 6, 3, 3, 4, 3, 2, 0, 0, 10, 11, 5, 8, 9, 5, 2, 4, 7, 3, 2, 1, 0), # 153
(7, 10, 8, 8, 8, 3, 3, 0, 2, 2, 1, 1, 0, 9, 11, 10, 7, 14, 4, 4, 2, 3, 2, 1, 2, 0), # 154
(17, 6, 10, 13, 6, 1, 5, 2, 1, 0, 1, 0, 0, 14, 3, 5, 3, 7, 3, 2, 4, 5, 4, 1, 0, 0), # 155
(4, 5, 4, 8, 4, 6, 3, 4, 3, 0, 1, 1, 0, 10, 9, 5, 4, 7, 10, 3, 7, 4, 5, 1, 0, 0), # 156
(3, 7, 6, 5, 14, 6, 3, 1, 2, 2, 1, 2, 0, 13, 8, 6, 5, 11, 2, 4, 3, 3, 5, 4, 0, 0), # 157
(9, 5, 16, 8, 8, 4, 6, 4, 7, 3, 2, 0, 0, 5, 9, 1, 8, 12, 5, 4, 2, 3, 1, 4, 0, 0), # 158
(7, 6, 10, 5, 8, 5, 4, 5, 6, 1, 1, 0, 0, 6, 13, 7, 6, 7, 6, 3, 4, 4, 1, 1, 0, 0), # 159
(13, 5, 12, 5, 6, 4, 1, 5, 2, 2, 2, 0, 0, 11, 7, 6, 5, 7, 4, 1, 5, 5, 1, 1, 0, 0), # 160
(11, 6, 9, 7, 4, 2, 1, 6, 7, 2, 0, 0, 0, 6, 11, 7, 3, 6, 7, 7, 3, 6, 3, 2, 1, 0), # 161
(12, 10, 5, 6, 9, 6, 2, 5, 7, 1, 1, 0, 0, 10, 5, 10, 4, 13, 3, 1, 1, 7, 2, 3, 0, 0), # 162
(9, 6, 12, 8, 4, 3, 2, 3, 7, 2, 3, 0, 0, 12, 6, 7, 1, 5, 2, 3, 6, 3, 4, 2, 0, 0), # 163
(9, 5, 7, 9, 8, 3, 2, 3, 6, 1, 0, 0, 0, 9, 5, 4, 2, 8, 3, 3, 4, 5, 3, 1, 1, 0), # 164
(11, 8, 12, 7, 5, 4, 6, 1, 6, 3, 1, 1, 0, 6, 9, 3, 1, 4, 5, 2, 3, 2, 4, 0, 0, 0), # 165
(5, 3, 13, 9, 4, 2, 1, 4, 5, 1, 1, 0, 0, 12, 5, 5, 5, 9, 6, 2, 1, 2, 4, 1, 0, 0), # 166
(6, 7, 8, 8, 4, 3, 6, 5, 6, 1, 3, 0, 0, 13, 6, 10, 5, 5, 4, 1, 3, 5, 1, 0, 1, 0), # 167
(9, 5, 11, 11, 5, 3, 3, 3, 2, 2, 1, 2, 0, 8, 8, 6, 3, 8, 3, 3, 1, 1, 2, 1, 0, 0), # 168
(5, 5, 8, 10, 3, 2, 5, 3, 4, 0, 2, 0, 0, 13, 6, 4, 2, 6, 2, 2, 3, 0, 2, 3, 0, 0), # 169
(11, 5, 5, 6, 5, 2, 3, 3, 3, 0, 2, 1, 0, 9, 6, 9, 6, 7, 3, 3, 4, 6, 1, 2, 2, 0), # 170
(10, 4, 5, 4, 11, 4, 1, 4, 2, 1, 1, 1, 0, 7, 6, 11, 4, 13, 7, 0, 0, 2, 4, 0, 0, 0), # 171
(9, 2, 4, 6, 3, 5, 1, 1, 2, 3, 2, 0, 0, 4, 5, 4, 3, 8, 2, 2, 3, 1, 1, 3, 0, 0), # 172
(8, 6, 2, 4, 6, 3, 2, 0, 2, 1, 0, 0, 0, 8, 7, 9, 5, 4, 1, 3, 2, 3, 6, 0, 1, 0), # 173
(6, 5, 6, 6, 7, 3, 2, 0, 2, 2, 0, 0, 0, 8, 6, 3, 2, 6, 4, 0, 2, 4, 2, 0, 0, 0), # 174
(8, 3, 5, 7, 5, 1, 2, 0, 2, 2, 3, 1, 0, 6, 3, 1, 5, 5, 1, 0, 3, 2, 2, 2, 2, 0), # 175
(5, 5, 4, 7, 8, 4, 2, 1, 2, 1, 1, 2, 0, 5, 0, 2, 2, 5, 3, 1, 1, 2, 3, 0, 0, 0), # 176
(11, 4, 9, 6, 5, 3, 3, 2, 1, 2, 0, 1, 0, 8, 1, 4, 3, 5, 1, 3, 0, 3, 1, 0, 0, 0), # 177
(5, 4, 4, 3, 6, 2, 1, 3, 5, 1, 0, 2, 0, 9, 1, 1, 2, 4, 0, 2, 0, 5, 2, 2, 1, 0), # 178
(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0), # 179
)
station_arriving_intensity = (
(7.029211809720476, 7.735403983570434, 7.29579652145751, 8.700534883408807, 7.776559850653457, 4.394116904852274, 5.804449861523481, 6.514446642171193, 8.52613868703521, 5.541221021731318, 5.887371229439844, 6.857081109628643, 7.117432297609708), # 0
(7.496058012827964, 8.246084971802663, 7.777485227862214, 9.275201954587263, 8.291486472463932, 4.684377017659578, 6.187256517769172, 6.943319212067992, 9.089143456866074, 5.90657296918801, 6.2763345903385845, 7.309703325140097, 7.587708306415797), # 1
(7.9614122125716245, 8.754739239247371, 8.257259199766379, 9.847582786530712, 8.804548163249642, 4.9734791603174235, 6.568545911144986, 7.370475347066188, 9.64990152962857, 6.270479285028765, 6.663752408286839, 7.760525712874277, 8.056110759493567), # 2
(8.423460910405188, 9.259348702711026, 8.733215217047796, 10.415406970544904, 9.313726346402664, 5.260276871619158, 6.946805098307138, 7.79422162049231, 10.206189225289531, 6.631495777796654, 7.0480877765583365, 8.207759958902646, 8.520781928755916), # 3
(8.880390607782374, 9.757895279000085, 9.203450059584252, 10.976404097935598, 9.81700244531509, 5.543623690358135, 7.320521135911843, 8.212864605672882, 10.75578286381579, 6.988178256034751, 7.4278037884268056, 8.64961774929667, 8.979864086115745), # 4
(9.330387806156915, 10.248360884921025, 9.666060507253526, 11.528303760008551, 10.312357883378994, 5.822373155327701, 7.688181080615314, 8.62471087593443, 11.296458765174183, 7.339082528286129, 7.801363537165986, 9.084310770127807, 9.43149950348596), # 5
(9.771639006982534, 10.728727437280302, 10.119143339933412, 12.068835548069513, 10.79777408398646, 6.09537880532121, 8.048271989073768, 9.028067004603484, 11.825993249331543, 7.682764403093862, 8.167230116049597, 9.510050707467531, 9.87383045277945), # 6
(10.202330711712957, 11.196976852884385, 10.56079533750169, 12.595729053424249, 11.271232470529577, 6.36149417913201, 8.39928091794342, 9.421239565006573, 12.342162636254702, 8.017779689001022, 8.523866618351377, 9.925049247387301, 10.304999205909127), # 7
(10.62064942180191, 11.651091048539739, 10.989113279836156, 13.1067138673785, 11.730714466400421, 6.619572815553446, 8.739694923880478, 9.802535130470215, 12.842743245910489, 8.342684194550685, 8.86973613734505, 10.327518075958585, 10.723148034787885), # 8
(11.02478163870312, 12.089051941052832, 11.402193946814586, 13.599519581238038, 12.174201494991074, 6.868468253378878, 9.068001063541168, 10.170260274320949, 13.325511398265744, 8.65603372828592, 9.20330176630435, 10.71566887925284, 11.126419211328628), # 9
(11.412913863870306, 12.508841447230123, 11.798134118314776, 14.071875786308604, 12.599674979693622, 7.107034031401651, 9.382686393581697, 10.522721569885295, 13.7882434132873, 8.956384098749801, 9.523026598503003, 11.087713343341534, 11.512955007444255), # 10
(11.783232598757209, 12.90844148387809, 12.175030574214501, 14.521512073895957, 13.005116343900148, 7.334123688415116, 9.682237970658283, 10.85822559048978, 14.228715610941991, 9.242291114485408, 9.82737372721475, 11.441863154296136, 11.880897695047656), # 11
(12.133924344817538, 13.285833967803178, 12.530980094391557, 14.946158035305858, 13.38850701100273, 7.5485907632126175, 9.965142851427137, 11.17507890946093, 14.644704311196652, 9.512310584035802, 10.114806245713309, 11.776329998188096, 12.22838954605175), # 12
(12.463175603505027, 13.639000815811869, 12.864079458723728, 15.343543261844063, 13.747828404393443, 7.749288794587514, 10.22988809254448, 11.471588100125276, 15.033985834018106, 9.764998315944066, 10.383787247272418, 12.08932556108889, 12.55357283236943), # 13
(12.769172876273403, 13.965923944710624, 13.172425447088806, 15.71139734481631, 14.081061947464386, 7.935071321333148, 10.474960750666526, 11.746059735809345, 15.39433649937319, 9.998910118753269, 10.6327798251658, 12.379061529069986, 12.85458982591359), # 14
(13.050102664576398, 14.264585271305906, 13.45411483936456, 16.047449875528383, 14.386189063607633, 8.104791882242878, 10.698847882449478, 11.99680038983966, 15.723532627228748, 10.212601801006487, 10.860247072667189, 12.64374958820284, 13.129582798597134), # 15
(13.30415146986772, 14.532966712404187, 13.707244415428796, 16.349430445286004, 14.661191176215267, 8.257304016110044, 10.900036544549568, 12.222116635542745, 16.019350537551603, 10.404629171246796, 11.06465208305032, 12.881601424558916, 13.376694022332964), # 16
(13.529505793601107, 14.769050184811926, 13.929910955159293, 16.61506864539496, 14.904049708679375, 8.391461261728, 11.077013793622996, 12.420315046245145, 16.27956655030858, 10.573548038017254, 11.24445794958892, 13.090828724209679, 13.594065769033982), # 17
(13.724352137230287, 14.970817605335585, 14.120211238433834, 16.842094067160993, 15.112746084392025, 8.506117157890104, 11.228266686325993, 12.589702195273366, 16.501956985466535, 10.717914209860952, 11.398127765556712, 13.269643173226603, 13.779840310613086), # 18
(13.88687700220898, 15.136250890781643, 14.27624204513021, 17.02823630188984, 15.285261726745313, 8.600125243389693, 11.352282279314753, 12.728584655953943, 16.68429816299229, 10.83628349532096, 11.52412462422743, 13.416256457681136, 13.932159918983176), # 19
(14.015266889990915, 15.263331957956549, 14.396100155126206, 17.171224940887296, 15.419578059131322, 8.672339057020126, 11.44754762924551, 12.835269001613405, 16.82436640285268, 10.927211702940342, 11.62091161887481, 13.528880263644748, 14.049166866057154), # 20
(14.107708302029813, 15.350042723666784, 14.477882348299607, 17.26878957545908, 15.513676504942126, 8.72161213757475, 11.512549792774463, 12.908061805578273, 16.91993802501453, 10.989254641262178, 11.686951842772585, 13.60572627718891, 14.12900342374791), # 21
(14.162387739779412, 15.394365104718803, 14.5196854045282, 17.31865979691097, 15.565538487569807, 8.746798023846914, 11.54577582655784, 12.945269641175082, 16.968789349444684, 11.02096811882954, 11.720708389194478, 13.645006184385087, 14.16981186396836), # 22
(14.182550708679697, 15.39961303155007, 14.524892455418383, 17.324903137860087, 15.578824878445637, 8.75, 11.549725603163076, 12.949291358024693, 16.974896728395063, 11.024709181527207, 11.724941252026436, 13.649856607224509, 14.175), # 23
(14.197417378247815, 15.396551851851854, 14.524040740740743, 17.324134722222226, 15.586350659060795, 8.75, 11.547555337690634, 12.943700000000002, 16.974078333333335, 11.02241086419753, 11.724474410774413, 13.648720987654322, 14.175), # 24
(14.211970122296213, 15.390517832647463, 14.522359396433473, 17.322614454732513, 15.593710923832306, 8.75, 11.543278463648836, 12.932716049382718, 16.97246141975309, 11.01788637402835, 11.723548759196907, 13.646479195244629, 14.175), # 25
(14.226207826667249, 15.381603155006863, 14.519871467764064, 17.320359619341563, 15.600905415789548, 8.75, 11.53696140563221, 12.916546913580248, 16.97006672839506, 11.011210992226795, 11.722172677391198, 13.643161957018751, 14.175), # 26
(14.240129377203292, 15.3699, 14.5166, 17.3173875, 15.607933877961901, 8.75, 11.528670588235297, 12.895400000000002, 16.966915, 11.00246, 11.720354545454546, 13.638800000000003, 14.175), # 27
(14.253733659746702, 15.355500548696845, 14.51256803840878, 17.313715380658437, 15.614796053378763, 8.75, 11.518472436052612, 12.869482716049385, 16.963026975308644, 10.9917086785551, 11.718102743484225, 13.633424051211708, 14.175), # 28
(14.26701956013985, 15.338496982167355, 14.50779862825789, 17.30936054526749, 15.62149168506951, 8.75, 11.506433373678693, 12.839002469135803, 16.95842339506173, 10.979032309099225, 11.715425651577503, 13.627064837677183, 14.175), # 29
(14.279985964225098, 15.318981481481483, 14.502314814814815, 17.30434027777778, 15.628020516063533, 8.75, 11.492619825708061, 12.804166666666665, 16.953125, 10.964506172839508, 11.71233164983165, 13.619753086419752, 14.175), # 30
(14.292631757844802, 15.297046227709194, 14.496139643347053, 17.29867186213992, 15.634382289390214, 8.75, 11.477098216735257, 12.765182716049384, 16.947152530864198, 10.948205550983083, 11.708829118343933, 13.611519524462738, 14.175), # 31
(14.304955826841338, 15.27278340192044, 14.489296159122084, 17.29237258230453, 15.640576748078935, 8.75, 11.4599349713548, 12.72225802469136, 16.940526728395064, 10.930205724737084, 11.704926437211622, 13.602394878829449, 14.175), # 32
(14.316957057057056, 15.246285185185185, 14.481807407407409, 17.28545972222222, 15.646603635159089, 8.75, 11.441196514161222, 12.675600000000001, 16.933268333333334, 10.910581975308643, 11.700631986531986, 13.59240987654321, 14.175), # 33
(14.328634334334335, 15.217643758573388, 14.473696433470508, 17.27795056584362, 15.652462693660054, 8.75, 11.420949269749054, 12.625416049382716, 16.925398086419758, 10.889409583904893, 11.695954146402293, 13.581595244627344, 14.175), # 34
(14.339986544515531, 15.186951303155007, 14.464986282578877, 17.26986239711934, 15.65815366661122, 8.75, 11.399259662712824, 12.571913580246914, 16.916936728395065, 10.866763831732968, 11.690901296919815, 13.569981710105168, 14.175), # 35
(14.35101257344301, 15.1543, 14.455700000000002, 17.2612125, 15.663676297041972, 8.75, 11.37619411764706, 12.515300000000002, 16.907905, 10.84272, 11.685481818181819, 13.557600000000003, 14.175), # 36
(14.361711306959135, 15.119782030178326, 14.445860631001374, 17.252018158436215, 15.669030327981691, 8.75, 11.351819059146292, 12.455782716049384, 16.89832364197531, 10.817353369913125, 11.679704090285574, 13.544480841335163, 14.175), # 37
(14.372081630906267, 15.083489574759948, 14.43549122085048, 17.242296656378603, 15.674215502459768, 8.75, 11.326200911805053, 12.393569135802473, 16.88821339506173, 10.790739222679472, 11.673576493328346, 13.530654961133976, 14.175), # 38
(14.382122431126781, 15.045514814814815, 14.424614814814818, 17.232065277777778, 15.679231563505585, 8.75, 11.299406100217867, 12.328866666666666, 16.877595000000003, 10.762952839506175, 11.667107407407409, 13.516153086419752, 14.175), # 39
(14.39183259346303, 15.005949931412895, 14.413254458161866, 17.221341306584364, 15.684078254148528, 8.75, 11.271501048979264, 12.261882716049385, 16.866489197530868, 10.734069501600368, 11.660305212620028, 13.501005944215823, 14.175), # 40
(14.40121100375738, 14.964887105624143, 14.401433196159124, 17.210142026748972, 15.688755317417984, 8.75, 11.242552182683774, 12.192824691358027, 16.85491672839506, 10.704164490169182, 11.653178289063476, 13.485244261545498, 14.175), # 41
(14.410256547852201, 14.922418518518521, 14.389174074074077, 17.198484722222226, 15.693262496343333, 8.75, 11.212625925925927, 12.121900000000002, 16.842898333333338, 10.673313086419753, 11.645735016835017, 13.4688987654321, 14.175), # 42
(14.418968111589852, 14.878636351165984, 14.376500137174213, 17.186386676954736, 15.697599533953966, 8.75, 11.181788703300251, 12.049316049382718, 16.83045475308642, 10.641590571559215, 11.637983776031925, 13.452000182898951, 14.175), # 43
(14.427344580812699, 14.83363278463649, 14.363434430727025, 17.173865174897124, 15.701766173279264, 8.75, 11.150106939401276, 11.975280246913583, 16.817606728395063, 10.609072226794698, 11.629932946751465, 13.434579240969367, 14.175), # 44
(14.435384841363105, 14.787500000000001, 14.350000000000001, 17.160937500000003, 15.705762157348616, 8.75, 11.11764705882353, 11.9, 16.804375, 10.575833333333335, 11.62159090909091, 13.416666666666666, 14.175), # 45
(14.443087779083434, 14.740330178326476, 14.336219890260631, 17.147620936213993, 15.709587229191404, 8.75, 11.084475486161544, 11.823682716049385, 16.790780308641974, 10.541949172382258, 11.612966043147525, 13.398293187014175, 14.175), # 46
(14.45045227981605, 14.692215500685872, 14.322117146776408, 17.133932767489714, 15.713241131837016, 8.75, 11.050658646009847, 11.746535802469136, 16.776843395061732, 10.507495025148607, 11.604066729018582, 13.37948952903521, 14.175), # 47
(14.457477229403315, 14.64324814814815, 14.307714814814817, 17.11989027777778, 15.716723608314837, 8.75, 11.016262962962964, 11.668766666666668, 16.762585, 10.472546172839506, 11.594901346801347, 13.360286419753088, 14.175), # 48
(14.464161513687602, 14.593520301783265, 14.29303593964335, 17.10551075102881, 15.720034401654251, 8.75, 10.981354861615428, 11.590582716049383, 16.748025864197533, 10.437177896662096, 11.585478276593093, 13.340714586191131, 14.175), # 49
(14.470504018511264, 14.543124142661183, 14.278103566529495, 17.090811471193415, 15.723173254884642, 8.75, 10.94600076656177, 11.512191358024692, 16.73318672839506, 10.401465477823503, 11.575805898491085, 13.32080475537266, 14.175), # 50
(14.476503629716676, 14.492151851851853, 14.262940740740742, 17.075809722222225, 15.726139911035398, 8.75, 10.910267102396515, 11.433800000000002, 16.718088333333338, 10.365484197530865, 11.565892592592595, 13.30058765432099, 14.175), # 51
(14.482159233146191, 14.440695610425243, 14.247570507544584, 17.060522788065846, 15.728934113135901, 8.75, 10.874220293714194, 11.355616049382716, 16.70275141975309, 10.329309336991313, 11.555746738994888, 13.280094010059445, 14.175), # 52
(14.487469714642183, 14.388847599451307, 14.232015912208508, 17.0449679526749, 15.731555604215542, 8.75, 10.837926765109337, 11.277846913580248, 16.687196728395065, 10.293016177411982, 11.545376717795238, 13.259354549611341, 14.175), # 53
(14.492433960047004, 14.336700000000002, 14.2163, 17.0291625, 15.734004127303704, 8.75, 10.801452941176471, 11.2007, 16.671445000000002, 10.256680000000001, 11.534790909090908, 13.2384, 14.175), # 54
(14.497050855203032, 14.284344993141291, 14.200445816186559, 17.01312371399177, 15.736279425429768, 8.75, 10.764865246510128, 11.124382716049384, 16.655516975308643, 10.220376085962506, 11.523997692979176, 13.217261088248744, 14.175), # 55
(14.501319285952622, 14.231874759945132, 14.184476406035667, 16.996868878600825, 15.738381241623124, 8.75, 10.728230105704835, 11.049102469135804, 16.63943339506173, 10.184179716506632, 11.513005449557303, 13.195968541380887, 14.175), # 56
(14.505238138138138, 14.179381481481483, 14.168414814814819, 16.98041527777778, 15.740309318913155, 8.75, 10.69161394335512, 10.975066666666669, 16.623215000000002, 10.148166172839508, 11.50182255892256, 13.174553086419753, 14.175), # 57
(14.508806297601952, 14.126957338820304, 14.152284087791497, 16.96378019547325, 15.742063400329245, 8.75, 10.655083184055517, 10.902482716049382, 16.606882530864198, 10.112410736168268, 11.490457401172218, 13.153045450388662, 14.175), # 58
(14.51202265018642, 14.07469451303155, 14.136107270233198, 16.946980915637862, 15.743643228900785, 8.75, 10.61870425240055, 10.83155802469136, 16.590456728395065, 10.076988687700048, 11.478918356403542, 13.131476360310929, 14.175), # 59
(14.51488608173391, 14.022685185185187, 14.119907407407407, 16.930034722222224, 15.745048547657152, 8.75, 10.582543572984749, 10.762500000000001, 16.573958333333337, 10.041975308641977, 11.467213804713806, 13.109876543209879, 14.175), # 60
(14.517395478086781, 13.971021536351168, 14.10370754458162, 16.912958899176957, 15.746279099627737, 8.75, 10.546667570402647, 10.695516049382718, 16.557408086419755, 10.00744588020119, 11.455352126200275, 13.088276726108827, 14.175), # 61
(14.519549725087407, 13.919795747599453, 14.087530727023323, 16.89577073045268, 15.74733462784193, 8.75, 10.51114266924877, 10.630813580246915, 16.540826728395064, 9.973475683584821, 11.44334170096022, 13.066707636031095, 14.175), # 62
(14.521347708578144, 13.869100000000001, 14.071400000000002, 16.878487500000002, 15.7482148753291, 8.75, 10.476035294117647, 10.568600000000002, 16.524235, 9.94014, 11.43119090909091, 13.045200000000001, 14.175), # 63
(14.522788314401359, 13.819026474622772, 14.05533840877915, 16.86112649176955, 15.74891958511865, 8.75, 10.44141186960381, 10.509082716049384, 16.50765364197531, 9.907514110653864, 11.41890813068961, 13.023784545038868, 14.175), # 64
(14.523870428399414, 13.769667352537724, 14.03936899862826, 16.843704989711934, 15.749448500239955, 8.75, 10.407338820301785, 10.45246913580247, 16.49110339506173, 9.875673296753543, 11.4065017458536, 13.00249199817101, 14.175), # 65
(14.524592936414676, 13.721114814814818, 14.023514814814817, 16.826240277777778, 15.749801363722403, 8.75, 10.373882570806101, 10.398966666666668, 16.474605000000004, 9.844692839506173, 11.393980134680135, 12.981353086419755, 14.175), # 66
(14.524954724289511, 13.673461042524005, 14.00779890260631, 16.808749639917696, 15.749977918595382, 8.75, 10.341109545711289, 10.348782716049385, 16.458179197530864, 9.814648020118886, 11.381351677266494, 12.960398536808412, 14.175), # 67
(14.524708260273156, 13.626548095048452, 13.99216832990398, 16.7910984366613, 15.749829137416285, 8.74983761621704, 10.308921272761506, 10.301681390032009, 16.44172298811157, 9.785468618306034, 11.368400383956526, 12.939542030659641, 14.174825210048013), # 68
(14.522398389694043, 13.578943727598569, 13.976183796296295, 16.772396920289854, 15.748474945533768, 8.748553909465022, 10.27637545388526, 10.25513827160494, 16.424516975308645, 9.756328946986201, 11.35380797448166, 12.918106562703056, 14.17344039351852), # 69
(14.517840102582454, 13.5304294437807, 13.95977580589849, 16.752521973966722, 15.74579903978052, 8.746025758268557, 10.243324188385918, 10.208733424782809, 16.40646404892547, 9.727087334247829, 11.337408441136512, 12.895991865809934, 14.170705268347055), # 70
(14.511097524900102, 13.481034236028144, 13.942950120027435, 16.731502905260335, 15.74183531025579, 8.742294131992075, 10.209782323354585, 10.162482213077277, 16.387591095107457, 9.697744503079695, 11.319262319097408, 12.873214112097802, 14.166655842764062), # 71
(14.502234782608697, 13.430787096774193, 13.9257125, 16.709369021739132, 15.736617647058825, 8.737400000000001, 10.175764705882354, 10.1164, 16.367925000000003, 9.668301176470589, 11.299430143540672, 12.849789473684211, 14.161328125), # 72
(14.491316001669949, 13.379717018452144, 13.90806870713306, 16.686149630971553, 15.730179940288872, 8.73138433165676, 10.141286183060329, 10.070502149062644, 16.347492649748517, 9.63875807740929, 11.277972449642624, 12.825734122686688, 14.154758123285324), # 73
(14.478405308045566, 13.32785299349529, 13.890024502743485, 16.661874040526033, 15.722556080045187, 8.72428809632678, 10.106361601979613, 10.024804023776863, 16.3263209304984, 9.609115928884586, 11.254949772579598, 12.801064231222776, 14.146981845850483), # 74
(14.463566827697262, 13.275224014336917, 13.871585648148148, 16.636571557971017, 15.713779956427018, 8.716152263374488, 10.0710058097313, 9.979320987654322, 16.30443672839506, 9.579375453885259, 11.23042264752791, 12.775795971410007, 14.138035300925928), # 75
(14.44686468658675, 13.22185907341033, 13.852757904663925, 16.610271490874936, 15.703885459533609, 8.707017802164305, 10.035233653406493, 9.934068404206677, 16.281866929583906, 9.549537375400092, 11.20445160966389, 12.749945515365916, 14.127954496742113), # 76
(14.428363010675731, 13.167787163148816, 13.833547033607681, 16.583003146806227, 15.692906479464213, 8.696925682060662, 9.999059980096293, 9.88906163694559, 16.258638420210335, 9.519602416417872, 11.177097194163862, 12.723529035208049, 14.116775441529496), # 77
(14.408125925925928, 13.113037275985667, 13.813958796296298, 16.554795833333333, 15.680876906318085, 8.685916872427983, 9.962499636891796, 9.844316049382718, 16.23477808641975, 9.489571299927379, 11.148419936204148, 12.696562703053933, 14.10453414351852), # 78
(14.386217558299041, 13.057638404354178, 13.793998954046641, 16.525678858024694, 15.667830630194468, 8.674032342630696, 9.925567470884102, 9.799847005029722, 16.210312814357568, 9.4594447489174, 11.118480370961072, 12.669062691021107, 14.091266610939643), # 79
(14.362702033756786, 13.001619540687642, 13.773673268175584, 16.495681528448742, 15.653801541192612, 8.661313062033226, 9.888278329164315, 9.755669867398264, 16.185269490169183, 9.429223486376719, 11.087339033610965, 12.64104517122711, 14.07700885202332), # 80
(14.337643478260873, 12.945009677419357, 13.752987500000001, 16.464833152173917, 15.638823529411765, 8.6478, 9.85064705882353, 9.711800000000002, 16.159675, 9.398908235294119, 11.055056459330146, 12.612526315789475, 14.061796875), # 81
(14.311106017773009, 12.887837806982612, 13.731947410836765, 16.433163036768654, 15.622930484951183, 8.633534125895444, 9.812688506952853, 9.668252766346594, 16.133556229995428, 9.368499718658382, 11.02169318329494, 12.583522296825743, 14.045666688100141), # 82
(14.283153778254908, 12.8301329218107, 13.710558762002744, 16.400700489801395, 15.606156297910111, 8.618556409083983, 9.774417520643375, 9.625043529949703, 16.10694006630087, 9.337998659458297, 10.987309740681672, 12.554049286453447, 14.028654299554185), # 83
(14.253850885668278, 12.77192401433692, 13.688827314814816, 16.36747481884058, 15.588534858387801, 8.602907818930042, 9.735848946986202, 9.582187654320988, 16.07985339506173, 9.307405780682645, 10.951966666666667, 12.524123456790125, 14.010795717592593), # 84
(14.223261465974833, 12.713240076994557, 13.666758830589849, 16.333515331454645, 15.5701000564835, 8.58662932479805, 9.696997633072435, 9.53970050297211, 16.05232310242341, 9.276721805320209, 10.915724496426252, 12.493760979953313, 13.992126950445819), # 85
(14.191449645136279, 12.654110102216913, 13.644359070644722, 16.298851335212028, 15.550885782296458, 8.569761896052432, 9.65787842599317, 9.497597439414724, 16.024376074531325, 9.245947456359774, 10.878643765136749, 12.462978028060553, 13.97268400634431), # 86
(14.15847954911433, 12.594563082437277, 13.621633796296296, 16.26351213768116, 15.53092592592593, 8.552346502057613, 9.618506172839506, 9.455893827160494, 15.996039197530868, 9.215083456790124, 10.840785007974482, 12.43179077322937, 13.95250289351852), # 87
(14.124415303870702, 12.534628010088941, 13.598588768861456, 16.22752704643049, 15.510254377471155, 8.534424112178023, 9.578895720702548, 9.414605029721079, 15.967339357567447, 9.184130529600042, 10.802208760115779, 12.400215387577312, 13.931619620198905), # 88
(14.089321035367092, 12.474333877605204, 13.575229749657066, 16.19092536902845, 15.488905027031391, 8.516035695778085, 9.539061916673392, 9.37374641060814, 15.938303440786468, 9.153089397778317, 10.762975556736963, 12.36826804322191, 13.910070194615912), # 89
(14.053260869565218, 12.413709677419357, 13.551562500000001, 16.153736413043482, 15.466911764705886, 8.497222222222224, 9.499019607843138, 9.333333333333334, 15.908958333333336, 9.121960784313726, 10.723145933014354, 12.335964912280703, 13.887890625), # 90
(14.016298932426789, 12.352784401964689, 13.527592781207133, 16.11598948604402, 15.444308480593882, 8.478024660874867, 9.458783641302887, 9.293381161408323, 15.879330921353455, 9.090745412195057, 10.682780424124285, 12.303322166871226, 13.865116919581618), # 91
(13.978499349913523, 12.2915870436745, 13.503326354595337, 16.0777138955985, 15.421129064794641, 8.458483981100443, 9.418368864143739, 9.253905258344766, 15.84944809099223, 9.059444004411093, 10.641939565243074, 12.270355979111017, 13.841785086591221), # 92
(13.939926247987117, 12.230146594982081, 13.478768981481483, 16.038938949275366, 15.397407407407409, 8.438641152263374, 9.37779012345679, 9.214920987654322, 15.819336728395063, 9.028057283950616, 10.600683891547051, 12.23708252111761, 13.81793113425926), # 93
(13.900643752609293, 12.168492048320722, 13.453926423182445, 15.999693954643051, 15.37317739853143, 8.418537143728091, 9.337062266333147, 9.176443712848654, 15.789023719707364, 8.996585973802416, 10.559073938212535, 12.203517965008546, 13.793591070816188), # 94
(13.860715989741754, 12.106652396123724, 13.42880444101509, 15.960008219269996, 15.34847292826596, 8.398212924859017, 9.296200139863902, 9.138488797439416, 15.758535951074533, 8.96503079695527, 10.517170240415854, 12.169678482901354, 13.768800904492457), # 95
(13.820207085346219, 12.044656630824377, 13.403408796296299, 15.91991105072464, 15.32332788671024, 8.377709465020576, 9.25521859114016, 9.101071604938273, 15.727900308641976, 8.933392476397968, 10.475033333333334, 12.135580246913582, 13.74359664351852), # 96
(13.779181165384388, 11.98253374485597, 13.377745250342937, 15.879431756575416, 15.297776163963531, 8.357067733577198, 9.21413246725302, 9.064207498856883, 15.6971436785551, 8.901671735119288, 10.432723752141296, 12.101239429162758, 13.718014296124831), # 97
(13.737702355817978, 11.9203127306518, 13.35181956447188, 15.83859964439077, 15.271851650125074, 8.336328699893311, 9.17295661529358, 9.027911842706905, 15.666292946959304, 8.86986929610802, 10.390302032016068, 12.066672201766417, 13.69208987054184), # 98
(13.695834782608697, 11.858022580645162, 13.325637500000003, 15.797444021739132, 15.24558823529412, 8.315533333333335, 9.131705882352943, 8.9922, 15.635375000000002, 8.83798588235294, 10.347828708133973, 12.031894736842107, 13.665859375000002), # 99
(13.653642571718258, 11.795692287269347, 13.29920481824417, 15.755994196188944, 15.21901980956992, 8.294722603261699, 9.090395115522204, 8.957087334247829, 15.60441672382259, 8.806022216842843, 10.305364315671335, 11.996923206507354, 13.639358817729768), # 100
(13.611189849108369, 11.733350842957654, 13.272527280521263, 15.714279475308645, 15.192180263051725, 8.273937479042829, 9.049039161892468, 8.922589208962048, 15.573445004572475, 8.773979022566504, 10.262969389804478, 11.961773782879694, 13.612624206961591), # 101
(13.568540740740744, 11.67102724014337, 13.245610648148148, 15.67232916666667, 15.165103485838781, 8.253218930041154, 9.00765286855483, 8.888720987654322, 15.542486728395062, 8.741857022512711, 10.22070446570973, 11.926462638076675, 13.585691550925928), # 102
(13.525759372577088, 11.60875047125979, 13.218460682441702, 15.630172577831457, 15.137823368030341, 8.232607925621096, 8.966251082600394, 8.855498033836307, 15.511568781435757, 8.709656939670245, 10.178630078563414, 11.891005944215824, 13.558596857853223), # 103
(13.482909870579116, 11.546549528740211, 13.191083144718794, 15.587839016371445, 15.110373799725652, 8.212145435147082, 8.924848651120257, 8.822935711019662, 15.480718049839965, 8.677379497027893, 10.13680676354185, 11.855419873414677, 13.53137613597394), # 104
(13.440056360708535, 11.484453405017922, 13.163483796296298, 15.545357789855073, 15.082788671023966, 8.19187242798354, 8.883460421205521, 8.79104938271605, 15.449961419753087, 8.64502541757444, 10.095295055821373, 11.819720597790775, 13.50406539351852), # 105
(13.39726296892706, 11.42249109252622, 13.135668398491084, 15.50275820585078, 15.055101872024531, 8.171829873494895, 8.842101239947283, 8.759854412437129, 15.41932577732053, 8.612595424298663, 10.054155490578298, 11.783924289461654, 13.476700638717421), # 106
(13.3545938211964, 11.360691583698395, 13.10764271262003, 15.460069571927, 15.027347292826596, 8.152058741045574, 8.800785954436646, 8.72936616369456, 15.388838008687703, 8.580090240189355, 10.013448602988953, 11.748047120544847, 13.449317879801098), # 107
(13.312113043478263, 11.299083870967744, 13.079412500000002, 15.417321195652177, 14.999558823529412, 8.132600000000002, 8.759529411764706, 8.699600000000002, 15.358525000000002, 8.547510588235296, 9.973234928229665, 11.712105263157897, 13.421953125000002), # 108
(13.26988476173436, 11.237696946767558, 13.050983521947876, 15.374542384594738, 14.97177035423223, 8.113494619722603, 8.718346459022568, 8.670571284865114, 15.328413637402836, 8.514857191425268, 9.933575001476758, 11.676114889418335, 13.394642382544584), # 109
(13.227973101926404, 11.176559803531132, 13.022361539780524, 15.331762446323136, 14.944015775034297, 8.094783569577809, 8.677251943301325, 8.642295381801555, 15.29853080704161, 8.482130772748057, 9.894529357906551, 11.640092171443701, 13.367421660665297), # 110
(13.186442190016104, 11.11570143369176, 12.993552314814819, 15.2890106884058, 14.91632897603486, 8.076507818930043, 8.636260711692085, 8.614787654320988, 15.26890339506173, 8.449332055192448, 9.856158532695375, 11.60405328135153, 13.340326967592594), # 111
(13.14535615196517, 11.055150829682729, 12.96456160836763, 15.246316418411165, 14.888743847333174, 8.05870833714373, 8.595387611285942, 8.588063465935072, 15.239558287608595, 8.416461761747223, 9.818523061019553, 11.568014391259355, 13.313394311556928), # 112
(13.104705913184263, 10.995038066300333, 12.935464959552897, 15.203767435488858, 14.861245952243188, 8.04141767690032, 8.554736349119478, 8.562193596292849, 15.21059793576207, 8.383626631257822, 9.781693468614014, 11.5320701111062, 13.286621461180511), # 113
(13.064073257060091, 10.935956056935751, 12.906663945030267, 15.161705189788272, 14.833550696392859, 8.024596451941862, 8.514825491774811, 8.537495763307168, 15.182466649998286, 8.351441235077896, 9.745742071958476, 11.496677040958165, 13.25978557982405), # 114
(13.023338864205595, 10.877926078156266, 12.878175705790246, 15.120118307254492, 14.805570749044042, 8.008200917498272, 8.475683510268187, 8.513963715990194, 15.155174970136306, 8.319955459183308, 9.710616315997932, 11.461852615582393, 13.232809284324528), # 115
(12.982451822532688, 10.820863593808383, 12.849945065977423, 15.078932610372966, 14.777263936937292, 7.992192428201937, 8.43724674453905, 8.491532438058591, 15.128653874918964, 8.289110701829367, 9.676248303780074, 11.427532476482286, 13.205650163658248), # 116
(12.941361219953283, 10.76468406773861, 12.82191684973638, 15.038073921629142, 14.748588086813156, 7.976532338685248, 8.399451534526854, 8.47013691322902, 15.102834343089086, 8.258848361271381, 9.642570138352598, 11.39365226516125, 13.178265806801516), # 117
(12.900016144379297, 10.709302963793455, 12.794035881211714, 14.997468063508467, 14.71950102541218, 7.9611820035805945, 8.362234220171041, 8.449712125218136, 15.07764735338951, 8.229109835764664, 9.609513922763194, 11.36014762312269, 13.150613802730636), # 118
(12.858365683722639, 10.654635745819421, 12.766246984548014, 14.95704085849639, 14.689960579474912, 7.946102777520366, 8.325531141411059, 8.430193057742605, 15.053023884563062, 8.199836523564521, 9.577011760059559, 11.326954191870009, 13.122651740421906), # 119
(12.816358925895228, 10.600597877663022, 12.738494983889867, 14.916718129078353, 14.659924575741897, 7.931256015136952, 8.289278638186355, 8.41151469451908, 15.028894915352582, 8.170969822926269, 9.544995753289383, 11.294007612906617, 13.094337208851638), # 120
(12.773944958808976, 10.547104823170763, 12.710724703381864, 14.876425697739808, 14.629350840953688, 7.9166030710627435, 8.253413050436373, 8.39361201926423, 15.0051914245009, 8.142451132105215, 9.513398005500363, 11.261243527735912, 13.065627796996127), # 121
(12.731072870375797, 10.494072046189146, 12.682880967168597, 14.836089386966199, 14.598197201850828, 7.902105299930128, 8.217870718100565, 8.376420015694709, 14.981844390750846, 8.11422184935667, 9.482150619740192, 11.228597577861303, 13.036481093831679), # 122
(12.687691748507607, 10.441415010564684, 12.65490859939465, 14.795635019242972, 14.56642148517387, 7.887724056371495, 8.182587981118376, 8.359873667527177, 14.958784792845258, 8.086223372935942, 9.451185699056563, 11.19600540478619, 13.0068546883346), # 123
(12.643750681116316, 10.389049180143882, 12.62675242420462, 14.754988417055582, 14.533981517663353, 7.873420695019235, 8.147501179429248, 8.343907958478297, 14.935943609526962, 8.058397101098347, 9.420435346497168, 11.163402650013985, 12.976706169481197), # 124
(12.599198756113843, 10.33689001877325, 12.598357265743093, 14.714075402889465, 14.500835126059833, 7.859156570505739, 8.112546652972636, 8.328457872264728, 14.913251819538791, 8.030684432099187, 9.389831665109703, 11.130724955048088, 12.94599312624776), # 125
(12.553985061412101, 10.284852990299292, 12.56966794815466, 14.672821799230077, 14.466940137103851, 7.844893037463395, 8.077660741687978, 8.31345839260313, 14.890640401623585, 8.00302676419378, 9.359306757941859, 11.097907961391908, 12.91467314761061), # 126
(12.508058684923006, 10.232853558568515, 12.540629295583907, 14.63115342856286, 14.432254377535958, 7.830591450524592, 8.042779785514732, 8.298844503210164, 14.86804033452417, 7.975365495637434, 9.32879272804133, 11.064887310548842, 12.88270382254604), # 127
(12.461368714558466, 10.18080718742743, 12.51118613217543, 14.588996113373266, 14.396735674096707, 7.816213164321722, 8.007840124392336, 8.284551187802489, 14.845382596983379, 7.947642024685458, 9.298221678455814, 11.031598644022305, 12.850042740030352), # 128
(12.413864238230394, 10.128629340722538, 12.481283282073816, 14.546275676146736, 14.360341853526638, 7.801719533487173, 7.972778098260239, 8.270513430096765, 14.822598167744045, 7.919797749593164, 9.267525712233, 10.997977603315691, 12.816647489039854), # 129
(12.365494343850713, 10.076235482300353, 12.450865569423652, 14.502917939368722, 14.3230307425663, 7.7870719126533325, 7.937530047057888, 8.256666213809652, 14.799618025549002, 7.89177406861586, 9.236636932420582, 10.963959829932413, 12.78247565855085), # 130
(12.316208119331334, 10.023541076007378, 12.419877818369534, 14.458848725524668, 14.284760167956243, 7.772231656452593, 7.902032310724733, 8.24294452265781, 14.776373149141081, 7.86351238000886, 9.205487442066255, 10.929480965375875, 12.747484837539638), # 131
(12.265954652584163, 9.970461585690122, 12.388264853056045, 14.413993857100023, 14.245487956437017, 7.757160119517344, 7.8662212292002165, 8.229283340357902, 14.752794517263117, 7.834954082027471, 9.17400934421771, 10.894476651149478, 12.711632614982527), # 132
(12.21468303152113, 9.91691247519509, 12.355971497627777, 14.368279156580234, 14.205171934749162, 7.741818656479974, 7.830033142423786, 8.215617650626585, 14.728813108657938, 7.806040572927006, 9.142134741922645, 10.85888252875663, 12.674876579855821), # 133
(12.162342344054133, 9.862809208368793, 12.322942576229327, 14.321630446450746, 14.163769929633231, 7.726168621972872, 7.79340439033489, 8.201882437180522, 14.704359902068381, 7.776713250962773, 9.109795738228751, 10.822634239700733, 12.637174321135817), # 134
(12.108881678095097, 9.808067249057736, 12.289122913005274, 14.273973549197011, 14.12123976782977, 7.710171370628429, 7.756271312872975, 8.18801268373637, 14.679365876237274, 7.746913514390087, 9.07692443618372, 10.785667425485194, 12.59848342779883), # 135
(12.05425012155593, 9.752602061108423, 12.254457332100213, 14.225234287304469, 14.077539276079325, 7.693788257079036, 7.718570249977489, 8.173943374010788, 14.65376200990745, 7.716582761464252, 9.043452938835248, 10.747917727613418, 12.558761488821151), # 136
(11.998396762348548, 9.696329108367367, 12.218890657658735, 14.175338483258576, 14.032626281122448, 7.6769806359570785, 7.6802375415878785, 8.159609491720442, 14.627479281821747, 7.685662390440583, 9.009313349231029, 10.709320787588808, 12.517966093179089), # 137
(11.941270688384867, 9.639163854681073, 12.182367713825425, 14.12421195954477, 13.986458609699687, 7.6597098618949495, 7.6412095276435865, 8.144946020581987, 14.600448670722995, 7.654093799574386, 8.974437770418753, 10.66981224691477, 12.476054829848946), # 138
(11.882820987576796, 9.581021763896047, 12.144833324744877, 14.071780538648504, 13.938994088551583, 7.641937289525037, 7.601422548084064, 8.129887944312085, 14.572601155354022, 7.621818387120976, 8.938758305446116, 10.62932774709471, 12.432985287807028), # 139
(11.822996747836257, 9.521818299858795, 12.106232314561684, 14.017970043055223, 13.890190544418692, 7.623624273479732, 7.560812942848756, 8.114370246627395, 14.543867714457667, 7.588777551335661, 8.902207057360812, 10.58780292963203, 12.38871505602964), # 140
(11.761747057075162, 9.46146892641583, 12.066509507420426, 13.962706295250376, 13.840005804041555, 7.604732168391422, 7.519317051877113, 8.09832791124458, 14.514179326776754, 7.554912690473753, 8.864716129210535, 10.545173436030137, 12.34320172349308), # 141
(11.69902100320542, 9.399889107413653, 12.0256097274657, 13.90591511771941, 13.788397694160723, 7.585222328892499, 7.476871215108577, 8.081695921880296, 14.48346697105412, 7.52016520279056, 8.826217624042977, 10.501374907792433, 12.296402879173653), # 142
(11.634767674138946, 9.336994306698774, 11.983477798842097, 13.847522332947767, 13.735324041516742, 7.56505610961535, 7.4334117724825965, 8.064409262251205, 14.451661626032607, 7.484476486541395, 8.786643644905832, 10.456342986422326, 12.248276112047666), # 143
(11.56893615778766, 9.2726999881177, 11.9400585456942, 13.787453763420901, 13.680742672850162, 7.544194865192366, 7.3888750639386185, 8.04640291607397, 14.418694270455035, 7.4477879399815645, 8.745926294846791, 10.41001331342322, 12.198779011091421), # 144
(11.501475542063469, 9.20692161551694, 11.895296792166606, 13.725635231624254, 13.624611414901528, 7.5225999502559375, 7.343197429416091, 8.027611867065247, 14.384495883064238, 7.410040961366383, 8.703997676913554, 10.36232153029852, 12.14786916528122), # 145
(11.432334914878291, 9.139574652742999, 11.849137362403903, 13.661992560043277, 13.566888094411391, 7.500232719438453, 7.2963152088544625, 8.007971098941699, 14.34899744260305, 7.37117694895116, 8.660789894153808, 10.313203278551628, 12.095504163593366), # 146
(11.361463364144042, 9.070574563642383, 11.801525080550675, 13.596451571163414, 13.507530538120294, 7.477054527372301, 7.2481647421931745, 7.987415595419982, 14.312129927814308, 7.331137300991204, 8.616235049615252, 10.262594199685955, 12.041641595004167), # 147
(11.288809977772631, 8.999836812061604, 11.752404770751518, 13.528938087470117, 13.446496572768787, 7.453026728689875, 7.198682369371678, 7.965880340216761, 14.273824317440841, 7.289863415741826, 8.570265246345576, 10.210429935204898, 11.986239048489919), # 148
(11.214323843675977, 8.927276861847163, 11.701721257151021, 13.459377931448826, 13.38374402509742, 7.42811067802356, 7.147804430329418, 7.943300317048694, 14.234011590225474, 7.247296691458339, 8.522812587392474, 10.156646126611868, 11.929254113026934), # 149
(11.137954049765991, 8.852810176845571, 11.649419363893772, 13.387696925584994, 13.319230721846738, 7.402267730005749, 7.0954672650058415, 7.91961050963244, 14.192622724911054, 7.2033785263960475, 8.473809175803641, 10.101178415410269, 11.870644377591507), # 150
(11.059649683954586, 8.776352220903336, 11.59544391512436, 13.313820892364063, 13.252914489757288, 7.375459239268828, 7.041607213340397, 7.8947459016846615, 14.149588700240406, 7.15805031881027, 8.423187114626767, 10.043962443103501, 11.810367431159946), # 151
(10.979359834153682, 8.697818457866962, 11.539739734987382, 13.237675654271488, 13.184753155569618, 7.34764656044519, 6.986160615272531, 7.8686414769220185, 14.10484049495636, 7.11125346695631, 8.37087850690955, 9.984933851194974, 11.748380862708558), # 152
(10.897033588275185, 8.61712435158296, 11.482251647627416, 13.159187033792707, 13.11470454602428, 7.318791048167222, 6.929063810741687, 7.841232219061167, 14.058309087801755, 7.062929369089481, 8.316815455699683, 9.92402828118809, 11.68464226121364), # 153
(10.81262003423102, 8.534185365897834, 11.422924477189063, 13.078280853413174, 13.042726487861813, 7.288854057067317, 6.87025313968732, 7.8124531118187726, 14.009925457519413, 7.013019423465095, 8.260930064044857, 9.861181374586256, 11.6191092156515), # 154
(10.72606825993309, 8.448916964658093, 11.361703047816906, 12.99488293561833, 12.968776807822776, 7.257796941777861, 6.809664942048866, 7.782239138911491, 13.95962058285218, 6.9614650283384565, 8.203154434992767, 9.796328772892876, 11.551739314998438), # 155
(10.637327353293314, 8.361234611710243, 11.298532183655539, 12.908919102893627, 12.892813332647707, 7.225581056931246, 6.74723555776578, 7.750525284055986, 13.907325442542877, 6.9082075819648825, 8.143420671591107, 9.729406117611353, 11.48249014823076), # 156
(10.546346402223609, 8.271053770900794, 11.233356708849547, 12.820315177724513, 12.81479388907716, 7.19216775715986, 6.6829013267775075, 7.717246530968915, 13.852971015334345, 6.853188482599679, 8.08166087688757, 9.660349050245092, 11.411319304324769), # 157
(10.450553324967336, 8.176634369081162, 11.163028735463298, 12.725677414311741, 12.731153548219398, 7.155434266843955, 6.615149409299001, 7.680115733289122, 13.792326928238738, 6.794712282807602, 8.01583405355452, 9.586639389872076, 11.335080203181485), # 158
(10.335201473769764, 8.06829144743927, 11.069432945764184, 12.605568022303835, 12.62126783369428, 7.103165507209945, 6.535497868740003, 7.626098945870136, 13.700998165711002, 6.723193391738244, 7.934383709866593, 9.493907533156353, 11.235598705688274), # 159
(10.198820932866035, 7.945135419957, 10.950689341138245, 12.458008514572404, 12.482988183885514, 7.034077814466758, 6.443141247737298, 7.553838865338286, 13.576395318120113, 6.637687912608051, 7.8361633120533565, 9.380702728442985, 11.110988852451014), # 160
(10.042510876420344, 7.8079692153126565, 10.808065760674433, 12.28440150525942, 12.317750373994958, 6.94900813819844, 6.338754024409627, 7.464240746353693, 13.420161673798626, 6.5389214704393135, 7.7220383164395905, 9.248074456470599, 10.962523662746737), # 161
(9.8673704785969, 7.657595762184535, 10.642830043461695, 12.086149608506858, 12.126990179224487, 6.848793427989039, 6.223010676875733, 7.358209843576484, 13.233940521079093, 6.427619690254325, 7.592874179350069, 9.09707219797781, 10.791476155852466), # 162
(9.674498913559898, 7.494817989250934, 10.456250028588983, 11.864655438456708, 11.912143374775964, 6.734270633422602, 6.096585683254362, 7.2366514116667755, 13.019375148294069, 6.304508197075376, 7.449536357109572, 8.928745433703247, 10.599119351045232), # 163
(9.464995355473539, 7.320438825190149, 10.249593555145248, 11.621321609250947, 11.674645735851264, 6.606276704083181, 5.960153521664253, 7.100470705284697, 12.778108843776113, 6.170312615924756, 7.292890306042875, 8.744143644385526, 10.386726267602059), # 164
(9.239958978502024, 7.135261198680485, 10.024128462219437, 11.357550735031554, 11.415933037652254, 6.465648589554821, 5.814388670224151, 6.950572979090365, 12.511784895857772, 6.02575857182476, 7.123801482474756, 8.544316310763268, 10.155569924799979), # 165
(9.000488956809557, 6.940088038400237, 9.7811225889005, 11.074745429940503, 11.137441055380801, 6.313223239421572, 5.659965607052801, 6.787863487743908, 12.222046592871603, 5.871571689797677, 6.943135342729992, 8.330312913575103, 9.906923341916015), # 166
(8.747684464560333, 6.735722273027703, 9.521843774277388, 10.774308308119782, 10.840605564238773, 6.149837603267482, 5.497558810268945, 6.613247485905448, 11.91053722315016, 5.7084775948658, 6.751757343133359, 8.103182933559642, 9.642059538227196), # 167
(8.482644675918554, 6.52296683124118, 9.247559857439049, 10.457641983711365, 10.526862339428039, 5.9763286306765995, 5.327842757991326, 6.427630228235103, 11.578900075025999, 5.5372019120514215, 6.550532940009634, 7.863975851455517, 9.362251533010546), # 168
(8.206468765048422, 6.302624641718972, 8.959538677474432, 10.126149070857236, 10.197647156150468, 5.793533271232973, 5.151491928338689, 6.231916969393004, 11.228778436831673, 5.358470266376831, 6.3403275896835956, 7.613741148001342, 9.0687723455431), # 169
(7.9202559061141375, 6.0754986331393726, 8.659048073472489, 9.781232183699368, 9.854395789607928, 5.60228847452065, 4.9691807994297745, 6.027012964039266, 10.861815596899735, 5.173008282864322, 6.122006748480023, 7.353528303935743, 8.762894995101878), # 170
(7.6251052732799005, 5.842391734180682, 8.34735588452217, 9.424293936379751, 9.498544015002288, 5.403431190123678, 4.781583849383328, 5.813823466834017, 10.47965484356274, 4.981541586536184, 5.896435872723688, 7.0843867999973416, 8.445892500963913), # 171
(7.322116040709912, 5.604106873521197, 8.025729949712423, 9.056736943040356, 9.131527607535416, 5.197798367626108, 4.5893755563180925, 5.593253732437379, 10.083939465153241, 4.784795802414712, 5.664480418739371, 6.80736611692476, 8.119037882406225), # 172
(7.012387382568372, 5.3614469798392195, 7.695438108132197, 8.679963817823166, 8.754782342409182, 4.9862269566119855, 4.39323039835281, 5.366209015509473, 9.676312750003792, 4.583496555522195, 5.427005842851849, 6.523515735456615, 7.783604158705848), # 173
(6.697018473019482, 5.115214981813045, 7.357748198870443, 8.295377174870158, 8.369743994825454, 4.76955390666536, 4.193822853606226, 5.133594570710425, 9.25841798644695, 4.3783694708809255, 5.1848776013858995, 6.233885136331535, 7.440864349139807), # 174
(6.377108486227438, 4.866213808120973, 7.013928061016112, 7.904379628323315, 7.977848339986097, 4.54861616737028, 3.9918274001970815, 4.896315652700355, 8.831898462815268, 4.170140173513194, 4.938961150666297, 5.939523800288141, 7.092091472985131), # 175
(6.053756596356447, 4.615246387441302, 6.66524553365815, 7.508373792324615, 7.580531153092983, 4.324250688310793, 3.787918516244121, 4.655277516139389, 8.3983974674413, 3.959534288441294, 4.690121947017822, 5.641481208065051, 6.738558549518844), # 176
(5.7280619775707065, 4.363115648452332, 6.3129684558855095, 7.108762281016037, 7.179228209347984, 4.097294419070949, 3.582770679866088, 4.411385415687646, 7.959558288657599, 3.7472774406875144, 4.43922544676525, 5.340806840400891, 6.381538598017975), # 177
(5.401123804034416, 4.11062451983236, 5.95836466678714, 6.7069477085395635, 6.775375283952959, 3.8685843092347962, 3.3770583691817246, 4.165544606005252, 7.51702421479672, 3.5340952552741505, 4.187137106233358, 5.038550178034279, 6.022304637759553), # 178
(0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0), # 179
)
passenger_arriving_acc = (
(9, 10, 5, 5, 3, 2, 2, 3, 3, 1, 1, 0, 0, 6, 9, 0, 8, 12, 3, 4, 1, 0, 2, 2, 2, 0), # 0
(14, 20, 14, 16, 9, 4, 2, 8, 4, 2, 2, 0, 0, 17, 14, 5, 14, 18, 4, 6, 3, 1, 6, 2, 2, 0), # 1
(21, 29, 17, 19, 12, 7, 5, 13, 8, 6, 4, 0, 0, 26, 20, 10, 21, 27, 5, 11, 7, 3, 8, 4, 4, 0), # 2
(26, 39, 28, 28, 24, 10, 11, 19, 12, 7, 4, 0, 0, 32, 30, 12, 29, 35, 13, 13, 9, 6, 10, 6, 4, 0), # 3
(37, 48, 34, 36, 31, 13, 15, 24, 16, 8, 4, 0, 0, 40, 42, 15, 34, 45, 16, 13, 13, 7, 11, 6, 7, 0), # 4
(48, 57, 43, 49, 34, 16, 22, 31, 20, 10, 4, 2, 0, 47, 55, 22, 42, 54, 21, 18, 14, 9, 16, 7, 8, 0), # 5
(60, 70, 51, 57, 42, 20, 23, 35, 23, 14, 7, 4, 0, 56, 63, 29, 51, 60, 23, 25, 17, 15, 19, 7, 9, 0), # 6
(72, 78, 59, 68, 55, 23, 26, 36, 27, 15, 8, 4, 0, 68, 70, 40, 55, 69, 27, 28, 19, 18, 21, 8, 10, 0), # 7
(86, 89, 76, 78, 63, 26, 29, 38, 30, 16, 11, 4, 0, 79, 77, 49, 61, 80, 29, 35, 23, 19, 24, 9, 10, 0), # 8
(97, 99, 83, 87, 71, 34, 36, 43, 35, 16, 14, 7, 0, 96, 89, 58, 67, 90, 38, 39, 25, 22, 26, 13, 11, 0), # 9
(107, 111, 94, 97, 78, 37, 42, 46, 42, 19, 16, 9, 0, 114, 102, 70, 76, 102, 44, 41, 28, 28, 28, 16, 13, 0), # 10
(122, 122, 104, 108, 90, 44, 46, 49, 45, 20, 19, 11, 0, 124, 107, 83, 84, 114, 50, 47, 31, 29, 29, 20, 14, 0), # 11
(133, 138, 114, 119, 99, 46, 53, 53, 48, 23, 20, 13, 0, 141, 115, 92, 89, 122, 55, 52, 31, 35, 33, 21, 15, 0), # 12
(144, 155, 124, 129, 105, 53, 57, 55, 54, 24, 23, 14, 0, 155, 127, 96, 92, 127, 61, 56, 36, 38, 40, 24, 16, 0), # 13
(156, 176, 136, 141, 116, 60, 61, 59, 58, 27, 28, 14, 0, 165, 139, 102, 99, 139, 67, 63, 37, 42, 44, 24, 16, 0), # 14
(162, 188, 145, 162, 128, 63, 64, 63, 64, 29, 29, 16, 0, 174, 158, 110, 102, 145, 74, 70, 43, 50, 44, 24, 18, 0), # 15
(169, 200, 155, 175, 140, 70, 69, 67, 68, 32, 31, 19, 0, 186, 166, 114, 108, 159, 84, 74, 45, 54, 48, 25, 19, 0), # 16
(184, 215, 173, 190, 147, 79, 73, 70, 73, 38, 34, 20, 0, 198, 176, 121, 119, 169, 90, 76, 50, 61, 51, 26, 19, 0), # 17
(198, 229, 189, 207, 162, 83, 79, 74, 77, 39, 36, 21, 0, 214, 193, 128, 123, 176, 97, 85, 55, 67, 55, 28, 21, 0), # 18
(221, 245, 201, 220, 168, 89, 85, 77, 84, 39, 37, 22, 0, 231, 211, 139, 134, 191, 110, 91, 58, 72, 61, 32, 23, 0), # 19
(233, 257, 214, 233, 178, 96, 91, 81, 90, 41, 39, 23, 0, 241, 228, 152, 141, 202, 117, 96, 63, 78, 66, 36, 24, 0), # 20
(249, 275, 226, 246, 186, 102, 93, 87, 101, 43, 40, 24, 0, 248, 240, 162, 151, 218, 121, 100, 70, 82, 69, 37, 26, 0), # 21
(263, 288, 234, 257, 193, 109, 95, 90, 110, 47, 42, 24, 0, 266, 249, 168, 163, 230, 128, 103, 73, 85, 77, 37, 29, 0), # 22
(279, 297, 244, 265, 207, 118, 101, 92, 119, 50, 44, 25, 0, 282, 270, 177, 167, 235, 135, 109, 79, 86, 81, 38, 30, 0), # 23
(290, 310, 255, 275, 213, 122, 111, 98, 125, 51, 49, 26, 0, 300, 287, 193, 170, 247, 148, 117, 81, 90, 82, 42, 31, 0), # 24
(301, 326, 272, 282, 225, 127, 119, 103, 131, 53, 49, 28, 0, 310, 301, 203, 177, 267, 153, 129, 85, 93, 85, 45, 33, 0), # 25
(319, 341, 290, 290, 237, 131, 133, 105, 138, 54, 50, 29, 0, 319, 311, 213, 186, 283, 156, 133, 87, 97, 89, 46, 34, 0), # 26
(328, 350, 298, 299, 242, 138, 144, 110, 146, 56, 53, 30, 0, 329, 324, 222, 194, 295, 169, 136, 88, 100, 93, 47, 35, 0), # 27
(346, 364, 315, 315, 259, 140, 151, 117, 153, 63, 55, 32, 0, 349, 337, 234, 202, 302, 170, 140, 90, 106, 94, 49, 37, 0), # 28
(362, 380, 324, 330, 265, 144, 155, 124, 158, 66, 63, 33, 0, 361, 351, 245, 211, 313, 177, 142, 97, 108, 101, 55, 37, 0), # 29
(375, 392, 341, 344, 277, 151, 164, 128, 160, 67, 66, 33, 0, 383, 365, 254, 220, 333, 183, 151, 100, 119, 104, 55, 39, 0), # 30
(392, 401, 351, 366, 291, 157, 168, 130, 163, 72, 69, 33, 0, 402, 378, 263, 227, 348, 186, 153, 103, 124, 106, 56, 40, 0), # 31
(404, 415, 367, 378, 302, 168, 173, 135, 169, 73, 71, 34, 0, 417, 390, 276, 234, 356, 199, 157, 108, 128, 113, 57, 42, 0), # 32
(415, 431, 382, 395, 305, 175, 174, 141, 175, 74, 75, 35, 0, 435, 400, 285, 242, 370, 201, 160, 111, 133, 121, 58, 43, 0), # 33
(423, 442, 394, 410, 323, 184, 178, 147, 180, 76, 79, 35, 0, 448, 413, 290, 246, 381, 208, 166, 116, 135, 123, 59, 43, 0), # 34
(446, 458, 405, 415, 336, 187, 182, 151, 184, 79, 82, 36, 0, 461, 425, 298, 252, 390, 215, 170, 121, 141, 129, 65, 44, 0), # 35
(459, 470, 422, 431, 350, 190, 186, 155, 189, 82, 83, 36, 0, 472, 430, 304, 263, 396, 224, 176, 124, 147, 134, 66, 45, 0), # 36
(474, 479, 435, 443, 358, 195, 195, 163, 197, 83, 84, 37, 0, 494, 448, 315, 270, 410, 235, 184, 126, 152, 141, 69, 45, 0), # 37
(489, 493, 451, 456, 366, 199, 199, 167, 201, 89, 85, 38, 0, 507, 456, 331, 273, 414, 242, 191, 129, 159, 147, 71, 46, 0), # 38
(505, 510, 459, 470, 375, 203, 203, 170, 207, 89, 89, 38, 0, 524, 471, 342, 280, 427, 248, 195, 132, 164, 153, 71, 46, 0), # 39
(516, 523, 470, 477, 384, 206, 204, 176, 215, 92, 92, 38, 0, 539, 478, 348, 292, 438, 254, 200, 139, 169, 159, 74, 47, 0), # 40
(531, 534, 481, 484, 394, 211, 210, 179, 223, 97, 93, 39, 0, 548, 487, 361, 299, 447, 266, 206, 142, 172, 162, 77, 48, 0), # 41
(552, 546, 495, 496, 401, 211, 214, 184, 227, 97, 93, 41, 0, 570, 501, 368, 303, 461, 279, 212, 147, 179, 168, 78, 48, 0), # 42
(569, 562, 503, 508, 414, 212, 221, 188, 233, 98, 95, 41, 0, 585, 506, 377, 311, 474, 283, 221, 151, 180, 171, 80, 49, 0), # 43
(580, 579, 519, 518, 421, 217, 228, 192, 237, 101, 96, 45, 0, 603, 520, 385, 319, 489, 290, 226, 158, 184, 175, 80, 52, 0), # 44
(592, 593, 531, 532, 431, 221, 232, 196, 240, 103, 97, 47, 0, 616, 533, 398, 327, 495, 295, 231, 160, 188, 177, 84, 53, 0), # 45
(610, 614, 541, 548, 443, 225, 236, 202, 246, 106, 98, 47, 0, 624, 538, 405, 334, 508, 303, 236, 164, 198, 179, 84, 56, 0), # 46
(617, 624, 552, 564, 452, 233, 241, 205, 254, 106, 101, 48, 0, 644, 555, 413, 337, 515, 315, 242, 168, 205, 183, 86, 59, 0), # 47
(636, 636, 560, 569, 459, 237, 244, 208, 259, 109, 102, 49, 0, 653, 562, 425, 348, 534, 324, 250, 172, 213, 185, 89, 60, 0), # 48
(645, 643, 574, 589, 476, 239, 251, 211, 263, 112, 106, 52, 0, 666, 573, 435, 357, 547, 331, 254, 175, 216, 191, 90, 61, 0), # 49
(660, 664, 587, 601, 483, 245, 258, 215, 267, 113, 107, 54, 0, 684, 585, 446, 365, 560, 341, 261, 177, 220, 197, 93, 63, 0), # 50
(671, 673, 600, 616, 492, 247, 264, 217, 271, 120, 108, 56, 0, 705, 603, 455, 374, 574, 346, 266, 178, 225, 200, 96, 66, 0), # 51
(686, 688, 611, 626, 500, 251, 267, 222, 277, 121, 109, 57, 0, 712, 610, 467, 385, 588, 351, 271, 180, 229, 200, 97, 69, 0), # 52
(697, 699, 619, 639, 509, 255, 272, 227, 279, 124, 112, 58, 0, 725, 625, 475, 389, 604, 356, 277, 186, 234, 203, 99, 70, 0), # 53
(714, 717, 625, 654, 518, 259, 277, 233, 289, 128, 114, 60, 0, 746, 642, 489, 397, 620, 362, 281, 192, 240, 206, 100, 71, 0), # 54
(724, 729, 643, 664, 529, 266, 278, 240, 296, 131, 115, 60, 0, 760, 649, 504, 404, 627, 369, 286, 195, 241, 216, 100, 71, 0), # 55
(730, 741, 656, 678, 544, 268, 282, 244, 300, 132, 118, 61, 0, 781, 665, 515, 410, 642, 373, 293, 202, 246, 220, 104, 72, 0), # 56
(736, 757, 675, 695, 548, 272, 289, 250, 308, 134, 118, 61, 0, 800, 679, 521, 412, 655, 383, 299, 206, 250, 220, 106, 73, 0), # 57
(751, 770, 690, 714, 556, 275, 295, 256, 313, 134, 120, 63, 0, 814, 688, 529, 418, 669, 386, 308, 212, 257, 224, 110, 74, 0), # 58
(764, 774, 700, 728, 563, 279, 302, 259, 320, 140, 120, 65, 0, 824, 700, 538, 428, 680, 392, 311, 217, 264, 228, 112, 74, 0), # 59
(778, 798, 706, 745, 577, 285, 306, 260, 321, 142, 121, 68, 0, 838, 710, 550, 440, 699, 397, 320, 218, 270, 234, 113, 74, 0), # 60
(791, 818, 715, 755, 588, 292, 311, 269, 330, 143, 122, 70, 0, 854, 722, 561, 447, 715, 410, 323, 224, 276, 238, 113, 75, 0), # 61
(808, 825, 727, 767, 596, 297, 316, 273, 336, 146, 123, 70, 0, 867, 730, 571, 457, 726, 414, 327, 229, 279, 244, 117, 78, 0), # 62
(816, 839, 746, 779, 605, 302, 321, 278, 338, 146, 130, 70, 0, 879, 743, 574, 464, 736, 419, 328, 233, 282, 245, 118, 78, 0), # 63
(832, 849, 757, 787, 617, 304, 328, 286, 343, 147, 134, 70, 0, 893, 759, 582, 475, 750, 426, 333, 237, 290, 249, 119, 79, 0), # 64
(839, 862, 772, 801, 626, 306, 332, 290, 346, 148, 136, 70, 0, 909, 771, 601, 479, 763, 432, 338, 238, 301, 253, 120, 80, 0), # 65
(857, 876, 783, 812, 637, 308, 332, 295, 352, 154, 137, 71, 0, 924, 781, 608, 487, 777, 442, 340, 240, 306, 257, 120, 81, 0), # 66
(872, 893, 792, 824, 652, 312, 339, 302, 360, 154, 139, 74, 0, 941, 791, 614, 498, 786, 446, 352, 242, 307, 265, 123, 81, 0), # 67
(882, 904, 798, 835, 664, 316, 346, 306, 368, 156, 141, 75, 0, 951, 799, 623, 505, 797, 449, 360, 245, 311, 269, 128, 81, 0), # 68
(896, 914, 807, 851, 670, 320, 354, 313, 371, 157, 141, 79, 0, 963, 809, 632, 510, 809, 456, 369, 248, 315, 273, 130, 81, 0), # 69
(915, 925, 815, 869, 683, 326, 361, 317, 375, 157, 146, 80, 0, 979, 817, 642, 516, 820, 462, 375, 250, 321, 277, 131, 83, 0), # 70
(932, 931, 828, 880, 698, 335, 363, 318, 384, 161, 148, 80, 0, 990, 827, 650, 520, 827, 467, 384, 254, 327, 282, 131, 83, 0), # 71
(947, 940, 847, 897, 708, 340, 372, 324, 391, 164, 149, 80, 0, 1006, 838, 662, 537, 836, 469, 392, 261, 334, 286, 133, 86, 0), # 72
(964, 951, 858, 912, 717, 346, 375, 331, 400, 166, 151, 80, 0, 1022, 847, 667, 542, 852, 475, 399, 266, 336, 287, 135, 86, 0), # 73
(973, 959, 866, 926, 729, 357, 378, 333, 403, 169, 151, 80, 0, 1040, 864, 680, 546, 865, 480, 404, 267, 341, 291, 139, 86, 0), # 74
(984, 972, 880, 937, 742, 364, 384, 336, 410, 172, 153, 81, 0, 1054, 878, 689, 555, 872, 485, 410, 268, 349, 295, 142, 87, 0), # 75
(1003, 984, 896, 947, 753, 369, 392, 339, 413, 178, 154, 81, 0, 1061, 890, 699, 563, 891, 494, 417, 270, 354, 301, 146, 87, 0), # 76
(1013, 992, 910, 959, 766, 374, 401, 344, 418, 179, 157, 81, 0, 1079, 911, 708, 574, 897, 496, 419, 274, 362, 303, 149, 88, 0), # 77
(1025, 1006, 916, 976, 781, 380, 405, 348, 424, 181, 159, 81, 0, 1090, 923, 717, 579, 908, 502, 423, 276, 367, 310, 149, 88, 0), # 78
(1037, 1014, 926, 989, 791, 386, 409, 354, 430, 184, 161, 81, 0, 1106, 930, 724, 587, 913, 506, 428, 282, 376, 312, 150, 88, 0), # 79
(1054, 1031, 938, 998, 806, 394, 411, 357, 435, 185, 162, 82, 0, 1116, 945, 737, 592, 926, 510, 434, 286, 383, 315, 151, 88, 0), # 80
(1067, 1042, 950, 1006, 818, 400, 419, 363, 443, 187, 165, 82, 0, 1135, 954, 749, 602, 938, 514, 439, 288, 388, 316, 152, 89, 0), # 81
(1080, 1055, 957, 1021, 829, 407, 425, 369, 448, 189, 165, 89, 0, 1153, 968, 756, 614, 945, 517, 442, 296, 391, 322, 155, 91, 0), # 82
(1092, 1061, 970, 1027, 835, 412, 433, 372, 454, 193, 167, 89, 0, 1170, 977, 764, 622, 957, 524, 447, 299, 398, 326, 157, 91, 0), # 83
(1103, 1074, 984, 1043, 846, 419, 441, 379, 460, 195, 168, 90, 0, 1177, 993, 772, 627, 962, 527, 453, 303, 403, 329, 159, 91, 0), # 84
(1115, 1088, 1005, 1057, 860, 426, 447, 382, 468, 199, 169, 91, 0, 1186, 1008, 782, 629, 975, 530, 459, 307, 409, 332, 162, 91, 0), # 85
(1131, 1099, 1016, 1068, 875, 430, 451, 386, 474, 200, 171, 92, 0, 1197, 1015, 790, 640, 985, 538, 462, 310, 414, 340, 164, 91, 0), # 86
(1148, 1106, 1027, 1080, 881, 433, 455, 388, 479, 202, 172, 92, 0, 1208, 1031, 798, 650, 992, 546, 467, 316, 421, 349, 165, 91, 0), # 87
(1159, 1123, 1037, 1089, 891, 439, 461, 390, 482, 206, 177, 92, 0, 1229, 1043, 807, 660, 1005, 547, 469, 321, 427, 354, 167, 92, 0), # 88
(1174, 1132, 1051, 1104, 898, 443, 465, 395, 486, 207, 180, 93, 0, 1247, 1057, 816, 664, 1014, 553, 478, 325, 432, 358, 169, 92, 0), # 89
(1187, 1140, 1060, 1115, 909, 452, 472, 397, 489, 209, 180, 93, 0, 1260, 1070, 823, 667, 1020, 562, 482, 329, 438, 359, 172, 93, 0), # 90
(1205, 1153, 1067, 1129, 918, 456, 476, 397, 497, 211, 182, 93, 0, 1274, 1080, 834, 672, 1027, 566, 492, 332, 440, 361, 177, 95, 0), # 91
(1217, 1165, 1074, 1142, 931, 463, 477, 405, 502, 215, 187, 94, 0, 1288, 1097, 842, 680, 1038, 570, 497, 335, 445, 366, 179, 96, 0), # 92
(1228, 1170, 1086, 1154, 935, 467, 480, 409, 512, 218, 188, 94, 0, 1301, 1107, 852, 686, 1059, 576, 501, 338, 447, 369, 181, 97, 0), # 93
(1242, 1183, 1098, 1167, 948, 469, 484, 416, 515, 220, 190, 95, 0, 1313, 1120, 856, 693, 1074, 582, 506, 339, 453, 374, 181, 97, 0), # 94
(1251, 1202, 1109, 1178, 955, 474, 486, 420, 518, 224, 190, 98, 0, 1325, 1137, 863, 704, 1085, 588, 511, 341, 456, 377, 181, 99, 0), # 95
(1260, 1210, 1123, 1188, 962, 480, 494, 427, 527, 227, 191, 100, 0, 1335, 1147, 872, 712, 1092, 594, 514, 350, 465, 382, 186, 99, 0), # 96
(1273, 1220, 1131, 1202, 972, 484, 496, 437, 532, 229, 193, 103, 0, 1346, 1150, 880, 720, 1104, 601, 519, 351, 473, 383, 188, 100, 0), # 97
(1290, 1228, 1143, 1214, 984, 491, 500, 439, 536, 233, 193, 105, 0, 1354, 1159, 885, 726, 1114, 606, 522, 352, 480, 387, 192, 102, 0), # 98
(1304, 1238, 1154, 1229, 996, 495, 505, 442, 540, 234, 193, 107, 0, 1368, 1172, 898, 733, 1123, 610, 526, 352, 482, 393, 196, 104, 0), # 99
(1313, 1247, 1165, 1239, 1007, 500, 507, 447, 548, 235, 193, 112, 0, 1380, 1180, 910, 741, 1135, 612, 533, 355, 492, 397, 200, 105, 0), # 100
(1329, 1258, 1175, 1246, 1019, 503, 509, 450, 554, 236, 194, 113, 0, 1393, 1192, 914, 746, 1145, 621, 539, 358, 498, 401, 202, 105, 0), # 101
(1346, 1270, 1183, 1260, 1024, 509, 514, 455, 558, 238, 195, 113, 0, 1408, 1199, 919, 758, 1154, 627, 541, 363, 501, 408, 205, 108, 0), # 102
(1360, 1282, 1191, 1272, 1032, 514, 518, 460, 565, 240, 196, 113, 0, 1428, 1213, 929, 766, 1160, 631, 545, 365, 509, 411, 205, 109, 0), # 103
(1374, 1288, 1202, 1286, 1043, 518, 522, 465, 570, 243, 197, 114, 0, 1438, 1219, 943, 772, 1168, 640, 549, 370, 511, 414, 206, 109, 0), # 104
(1390, 1299, 1211, 1299, 1055, 523, 530, 470, 578, 245, 199, 114, 0, 1455, 1229, 958, 780, 1178, 643, 555, 371, 517, 418, 209, 109, 0), # 105
(1398, 1311, 1224, 1309, 1061, 528, 535, 472, 586, 247, 200, 115, 0, 1474, 1243, 965, 784, 1190, 647, 558, 375, 522, 421, 210, 110, 0), # 106
(1409, 1323, 1240, 1314, 1064, 537, 538, 474, 593, 249, 200, 117, 0, 1489, 1256, 968, 787, 1198, 652, 565, 379, 528, 425, 213, 111, 0), # 107
(1421, 1332, 1247, 1323, 1071, 542, 542, 477, 595, 250, 202, 120, 0, 1513, 1266, 979, 796, 1207, 657, 571, 381, 534, 429, 214, 115, 0), # 108
(1431, 1347, 1261, 1333, 1078, 549, 551, 482, 603, 252, 203, 122, 0, 1530, 1275, 985, 801, 1216, 666, 574, 386, 543, 430, 217, 115, 0), # 109
(1446, 1354, 1271, 1341, 1086, 553, 555, 486, 608, 252, 203, 122, 0, 1543, 1284, 998, 810, 1225, 672, 579, 391, 548, 434, 217, 117, 0), # 110
(1465, 1367, 1280, 1354, 1102, 555, 557, 488, 617, 256, 204, 122, 0, 1551, 1292, 1009, 814, 1235, 673, 584, 393, 554, 436, 220, 117, 0), # 111
(1480, 1385, 1290, 1368, 1106, 557, 560, 490, 626, 256, 205, 122, 0, 1563, 1302, 1015, 825, 1242, 675, 586, 396, 564, 439, 222, 117, 0), # 112
(1489, 1392, 1303, 1385, 1111, 559, 560, 491, 634, 257, 208, 123, 0, 1581, 1311, 1026, 831, 1251, 686, 588, 398, 569, 444, 226, 118, 0), # 113
(1503, 1396, 1315, 1395, 1119, 566, 562, 493, 640, 259, 210, 123, 0, 1594, 1322, 1035, 837, 1261, 687, 594, 401, 573, 447, 228, 119, 0), # 114
(1518, 1403, 1324, 1408, 1129, 569, 569, 494, 644, 259, 212, 125, 0, 1605, 1333, 1050, 841, 1273, 691, 597, 404, 577, 452, 232, 122, 0), # 115
(1522, 1417, 1336, 1421, 1141, 575, 570, 500, 648, 259, 212, 126, 0, 1616, 1342, 1059, 845, 1280, 702, 600, 406, 581, 457, 233, 123, 0), # 116
(1537, 1426, 1349, 1431, 1150, 581, 574, 503, 657, 264, 213, 128, 0, 1626, 1352, 1068, 854, 1292, 703, 603, 410, 586, 459, 234, 123, 0), # 117
(1547, 1436, 1362, 1448, 1160, 587, 577, 506, 661, 265, 215, 130, 0, 1638, 1367, 1078, 863, 1297, 707, 606, 413, 592, 463, 234, 124, 0), # 118
(1552, 1444, 1370, 1455, 1171, 590, 580, 511, 666, 267, 219, 131, 0, 1648, 1376, 1089, 866, 1308, 714, 609, 416, 595, 465, 238, 127, 0), # 119
(1561, 1453, 1379, 1464, 1182, 594, 584, 513, 671, 270, 220, 131, 0, 1659, 1388, 1096, 874, 1316, 722, 612, 419, 603, 467, 242, 129, 0), # 120
(1571, 1468, 1396, 1479, 1197, 599, 588, 517, 681, 273, 223, 131, 0, 1676, 1397, 1102, 881, 1320, 726, 614, 422, 607, 472, 246, 129, 0), # 121
(1594, 1478, 1405, 1487, 1208, 603, 590, 520, 690, 274, 225, 131, 0, 1693, 1409, 1108, 888, 1328, 731, 616, 425, 615, 475, 249, 129, 0), # 122
(1610, 1483, 1410, 1501, 1218, 606, 594, 521, 694, 277, 227, 132, 0, 1710, 1420, 1119, 892, 1337, 734, 621, 430, 618, 477, 253, 130, 0), # 123
(1620, 1502, 1420, 1516, 1223, 612, 599, 523, 699, 279, 227, 132, 0, 1717, 1431, 1125, 898, 1347, 740, 623, 434, 625, 480, 253, 131, 0), # 124
(1627, 1510, 1429, 1522, 1234, 616, 604, 525, 703, 282, 228, 132, 0, 1730, 1442, 1136, 908, 1358, 746, 630, 438, 630, 482, 257, 131, 0), # 125
(1642, 1523, 1435, 1534, 1238, 621, 610, 527, 703, 285, 229, 133, 0, 1749, 1448, 1142, 914, 1366, 750, 634, 441, 634, 485, 258, 132, 0), # 126
(1649, 1531, 1444, 1545, 1250, 625, 614, 529, 709, 287, 229, 133, 0, 1757, 1455, 1146, 917, 1375, 755, 636, 443, 640, 486, 263, 133, 0), # 127
(1664, 1543, 1453, 1554, 1254, 632, 618, 531, 717, 292, 230, 135, 0, 1768, 1460, 1155, 922, 1386, 758, 640, 446, 643, 489, 267, 134, 0), # 128
(1677, 1552, 1467, 1563, 1264, 637, 622, 532, 723, 293, 231, 136, 0, 1782, 1470, 1161, 927, 1396, 767, 645, 448, 644, 497, 267, 134, 0), # 129
(1685, 1564, 1477, 1574, 1275, 642, 624, 538, 728, 294, 233, 137, 0, 1790, 1475, 1166, 933, 1405, 770, 652, 449, 646, 499, 270, 134, 0), # 130
(1690, 1570, 1496, 1581, 1280, 644, 625, 542, 734, 295, 233, 137, 0, 1799, 1485, 1175, 938, 1413, 777, 655, 453, 649, 500, 272, 134, 0), # 131
(1702, 1580, 1506, 1586, 1290, 650, 629, 549, 736, 296, 234, 137, 0, 1816, 1494, 1180, 948, 1426, 780, 657, 456, 650, 504, 279, 135, 0), # 132
(1708, 1590, 1518, 1597, 1295, 654, 633, 553, 739, 297, 236, 140, 0, 1825, 1505, 1186, 952, 1440, 784, 663, 460, 660, 509, 282, 136, 0), # 133
(1723, 1598, 1529, 1613, 1304, 659, 635, 556, 746, 299, 237, 141, 0, 1836, 1510, 1198, 957, 1452, 785, 668, 465, 665, 513, 284, 136, 0), # 134
(1735, 1609, 1542, 1626, 1317, 661, 635, 558, 750, 301, 240, 141, 0, 1846, 1521, 1205, 962, 1467, 792, 674, 467, 668, 516, 285, 139, 0), # 135
(1752, 1616, 1555, 1641, 1324, 667, 640, 560, 755, 302, 240, 142, 0, 1856, 1528, 1213, 966, 1481, 796, 678, 471, 673, 517, 287, 139, 0), # 136
(1762, 1627, 1567, 1653, 1336, 672, 642, 563, 757, 304, 242, 142, 0, 1874, 1539, 1221, 972, 1493, 800, 685, 473, 679, 520, 290, 140, 0), # 137
(1779, 1631, 1572, 1665, 1347, 675, 649, 565, 763, 310, 242, 143, 0, 1885, 1550, 1227, 977, 1501, 802, 688, 476, 686, 526, 293, 141, 0), # 138
(1795, 1643, 1584, 1673, 1355, 681, 655, 568, 767, 311, 245, 144, 0, 1898, 1558, 1235, 984, 1510, 810, 690, 480, 690, 530, 298, 142, 0), # 139
(1808, 1652, 1589, 1687, 1363, 686, 658, 573, 773, 313, 248, 145, 0, 1904, 1567, 1242, 987, 1521, 816, 695, 487, 695, 536, 301, 143, 0), # 140
(1824, 1658, 1600, 1695, 1370, 692, 664, 575, 779, 313, 249, 146, 0, 1918, 1580, 1250, 994, 1531, 818, 699, 488, 700, 538, 302, 143, 0), # 141
(1831, 1663, 1609, 1706, 1378, 697, 666, 581, 784, 315, 249, 148, 0, 1930, 1585, 1254, 996, 1541, 824, 704, 491, 703, 542, 302, 146, 0), # 142
(1848, 1677, 1622, 1716, 1393, 701, 671, 586, 791, 315, 252, 152, 0, 1943, 1595, 1260, 1002, 1556, 829, 710, 494, 705, 547, 303, 147, 0), # 143
(1859, 1688, 1637, 1729, 1398, 705, 677, 592, 792, 319, 257, 152, 0, 1954, 1606, 1267, 1006, 1567, 833, 714, 502, 712, 550, 304, 147, 0), # 144
(1865, 1701, 1648, 1733, 1405, 711, 679, 599, 799, 320, 258, 153, 0, 1963, 1613, 1280, 1011, 1575, 844, 720, 510, 717, 555, 306, 150, 0), # 145
(1880, 1713, 1656, 1743, 1414, 716, 683, 604, 803, 320, 259, 154, 0, 1974, 1622, 1293, 1016, 1586, 848, 725, 512, 720, 558, 307, 150, 0), # 146
(1892, 1725, 1665, 1751, 1421, 720, 685, 609, 805, 321, 261, 155, 0, 1990, 1634, 1299, 1021, 1595, 852, 728, 515, 724, 561, 309, 152, 0), # 147
(1902, 1734, 1680, 1763, 1429, 727, 691, 615, 809, 323, 262, 157, 0, 2004, 1642, 1308, 1027, 1603, 856, 732, 518, 726, 565, 311, 152, 0), # 148
(1920, 1747, 1686, 1778, 1441, 731, 697, 617, 814, 327, 264, 157, 0, 2014, 1649, 1316, 1032, 1611, 858, 735, 522, 733, 567, 314, 154, 0), # 149
(1938, 1750, 1696, 1788, 1445, 736, 699, 620, 816, 328, 266, 157, 0, 2023, 1660, 1320, 1037, 1629, 861, 742, 526, 739, 570, 316, 156, 0), # 150
(1947, 1755, 1700, 1791, 1452, 741, 702, 623, 821, 328, 269, 157, 0, 2033, 1672, 1328, 1043, 1639, 866, 743, 527, 740, 573, 317, 157, 0), # 151
(1956, 1758, 1709, 1803, 1460, 744, 703, 624, 825, 331, 270, 157, 0, 2047, 1678, 1331, 1047, 1648, 870, 746, 534, 743, 578, 317, 157, 0), # 152
(1963, 1763, 1716, 1815, 1465, 750, 706, 627, 829, 334, 272, 157, 0, 2057, 1689, 1336, 1055, 1657, 875, 748, 538, 750, 581, 319, 158, 0), # 153
(1970, 1773, 1724, 1823, 1473, 753, 709, 627, 831, 336, 273, 158, 0, 2066, 1700, 1346, 1062, 1671, 879, 752, 540, 753, 583, 320, 160, 0), # 154
(1987, 1779, 1734, 1836, 1479, 754, 714, 629, 832, 336, 274, 158, 0, 2080, 1703, 1351, 1065, 1678, 882, 754, 544, 758, 587, 321, 160, 0), # 155
(1991, 1784, 1738, 1844, 1483, 760, 717, 633, 835, 336, 275, 159, 0, 2090, 1712, 1356, 1069, 1685, 892, 757, 551, 762, 592, 322, 160, 0), # 156
(1994, 1791, 1744, 1849, 1497, 766, 720, 634, 837, 338, 276, 161, 0, 2103, 1720, 1362, 1074, 1696, 894, 761, 554, 765, 597, 326, 160, 0), # 157
(2003, 1796, 1760, 1857, 1505, 770, 726, 638, 844, 341, 278, 161, 0, 2108, 1729, 1363, 1082, 1708, 899, 765, 556, 768, 598, 330, 160, 0), # 158
(2010, 1802, 1770, 1862, 1513, 775, 730, 643, 850, 342, 279, 161, 0, 2114, 1742, 1370, 1088, 1715, 905, 768, 560, 772, 599, 331, 160, 0), # 159
(2023, 1807, 1782, 1867, 1519, 779, 731, 648, 852, 344, 281, 161, 0, 2125, 1749, 1376, 1093, 1722, 909, 769, 565, 777, 600, 332, 160, 0), # 160
(2034, 1813, 1791, 1874, 1523, 781, 732, 654, 859, 346, 281, 161, 0, 2131, 1760, 1383, 1096, 1728, 916, 776, 568, 783, 603, 334, 161, 0), # 161
(2046, 1823, 1796, 1880, 1532, 787, 734, 659, 866, 347, 282, 161, 0, 2141, 1765, 1393, 1100, 1741, 919, 777, 569, 790, 605, 337, 161, 0), # 162
(2055, 1829, 1808, 1888, 1536, 790, 736, 662, 873, 349, 285, 161, 0, 2153, 1771, 1400, 1101, 1746, 921, 780, 575, 793, 609, 339, 161, 0), # 163
(2064, 1834, 1815, 1897, 1544, 793, 738, 665, 879, 350, 285, 161, 0, 2162, 1776, 1404, 1103, 1754, 924, 783, 579, 798, 612, 340, 162, 0), # 164
(2075, 1842, 1827, 1904, 1549, 797, 744, 666, 885, 353, 286, 162, 0, 2168, 1785, 1407, 1104, 1758, 929, 785, 582, 800, 616, 340, 162, 0), # 165
(2080, 1845, 1840, 1913, 1553, 799, 745, 670, 890, 354, 287, 162, 0, 2180, 1790, 1412, 1109, 1767, 935, 787, 583, 802, 620, 341, 162, 0), # 166
(2086, 1852, 1848, 1921, 1557, 802, 751, 675, 896, 355, 290, 162, 0, 2193, 1796, 1422, 1114, 1772, 939, 788, 586, 807, 621, 341, 163, 0), # 167
(2095, 1857, 1859, 1932, 1562, 805, 754, 678, 898, 357, 291, 164, 0, 2201, 1804, 1428, 1117, 1780, 942, 791, 587, 808, 623, 342, 163, 0), # 168
(2100, 1862, 1867, 1942, 1565, 807, 759, 681, 902, 357, 293, 164, 0, 2214, 1810, 1432, 1119, 1786, 944, 793, 590, 808, 625, 345, 163, 0), # 169
(2111, 1867, 1872, 1948, 1570, 809, 762, 684, 905, 357, 295, 165, 0, 2223, 1816, 1441, 1125, 1793, 947, 796, 594, 814, 626, 347, 165, 0), # 170
(2121, 1871, 1877, 1952, 1581, 813, 763, 688, 907, 358, 296, 166, 0, 2230, 1822, 1452, 1129, 1806, 954, 796, 594, 816, 630, 347, 165, 0), # 171
(2130, 1873, 1881, 1958, 1584, 818, 764, 689, 909, 361, 298, 166, 0, 2234, 1827, 1456, 1132, 1814, 956, 798, 597, 817, 631, 350, 165, 0), # 172
(2138, 1879, 1883, 1962, 1590, 821, 766, 689, 911, 362, 298, 166, 0, 2242, 1834, 1465, 1137, 1818, 957, 801, 599, 820, 637, 350, 166, 0), # 173
(2144, 1884, 1889, 1968, 1597, 824, 768, 689, 913, 364, 298, 166, 0, 2250, 1840, 1468, 1139, 1824, 961, 801, 601, 824, 639, 350, 166, 0), # 174
(2152, 1887, 1894, 1975, 1602, 825, 770, 689, 915, 366, 301, 167, 0, 2256, 1843, 1469, 1144, 1829, 962, 801, 604, 826, 641, 352, 168, 0), # 175
(2157, 1892, 1898, 1982, 1610, 829, 772, 690, 917, 367, 302, 169, 0, 2261, 1843, 1471, 1146, 1834, 965, 802, 605, 828, 644, 352, 168, 0), # 176
(2168, 1896, 1907, 1988, 1615, 832, 775, 692, 918, 369, 302, 170, 0, 2269, 1844, 1475, 1149, 1839, 966, 805, 605, 831, 645, 352, 168, 0), # 177
(2173, 1900, 1911, 1991, 1621, 834, 776, 695, 923, 370, 302, 172, 0, 2278, 1845, 1476, 1151, 1843, 966, 807, 605, 836, 647, 354, 169, 0), # 178
(2173, 1900, 1911, 1991, 1621, 834, 776, 695, 923, 370, 302, 172, 0, 2278, 1845, 1476, 1151, 1843, 966, 807, 605, 836, 647, 354, 169, 0), # 179
)
passenger_arriving_rate = (
(7.029211809720476, 7.090786984939564, 6.079830434547925, 6.525401162556605, 5.184373233768971, 2.563234861163827, 2.9022249307617405, 2.7143527675713304, 2.8420462290117365, 1.3853052554328298, 0.9812285382399741, 0.571423425802387, 0.0, 7.117432297609708, 6.285657683826256, 4.90614269119987, 4.155915766298489, 5.684092458023473, 3.8000938745998627, 2.9022249307617405, 1.8308820436884476, 2.5921866168844856, 2.175133720852202, 1.2159660869095852, 0.6446169986308695, 0.0), # 0
(7.496058012827964, 7.558911224152441, 6.4812376898851785, 6.956401465940448, 5.527657648309288, 2.7325532603014207, 3.093628258884586, 2.893049671694997, 3.0297144856220246, 1.4766432422970026, 1.0460557650564308, 0.6091419437616749, 0.0, 7.587708306415797, 6.700561381378422, 5.230278825282154, 4.429929726891007, 6.059428971244049, 4.050269540372995, 3.093628258884586, 1.9518237573581576, 2.763828824154644, 2.3188004886468163, 1.2962475379770357, 0.687173747650222, 0.0), # 1
(7.9614122125716245, 8.025177635976757, 6.881049333138649, 7.385687089898034, 5.869698775499761, 2.9011961768518306, 3.284272955572493, 3.071031394610912, 3.2166338432095234, 1.5676198212571917, 1.1106254013811399, 0.6467104760728565, 0.0, 8.056110759493567, 7.113815236801421, 5.553127006905699, 4.702859463771574, 6.433267686419047, 4.2994439524552766, 3.284272955572493, 2.0722829834655934, 2.9348493877498805, 2.4618956966326784, 1.37620986662773, 0.7295616032706144, 0.0), # 2
(8.423460910405188, 8.487736310818441, 7.277679347539831, 7.811555227908678, 6.209150897601775, 3.0684948417778424, 3.473402549153569, 3.2475923418717962, 3.4020630750965104, 1.657873944449164, 1.1746812960930562, 0.6839799965752206, 0.0, 8.520781928755916, 7.523779962327425, 5.873406480465281, 4.97362183334749, 6.804126150193021, 4.5466292786205145, 3.473402549153569, 2.191782029841316, 3.1045754488008876, 2.6038517426362264, 1.455535869507966, 0.7716123918925856, 0.0), # 3
(8.880390607782374, 8.94473733908341, 7.669541716320211, 8.232303073451698, 6.5446682968767265, 3.233780486042246, 3.6602605679559215, 3.4220269190303676, 3.585260954605263, 1.7470445640086882, 1.2379672980711345, 0.7208014791080559, 0.0, 8.979864086115745, 7.928816270188614, 6.189836490355671, 5.241133692026064, 7.170521909210526, 4.790837686642515, 3.6602605679559215, 2.30984320431589, 3.2723341484383632, 2.7441010244839, 1.5339083432640421, 0.8131579399166738, 0.0), # 4
(9.330387806156915, 9.394330811177607, 8.055050422711272, 8.646227820006413, 6.874905255585995, 3.396384340607826, 3.844090540307657, 3.593629531639346, 3.765486255058061, 1.8347706320715327, 1.300227256194331, 0.7570258975106506, 0.0, 9.43149950348596, 8.327284872617156, 6.501136280971655, 5.504311896214597, 7.530972510116122, 5.031081344295084, 3.844090540307657, 2.4259888147198754, 3.4374526277929975, 2.8820759400021383, 1.6110100845422546, 0.8540300737434189, 0.0), # 5
(9.771639006982534, 9.834666817506942, 8.43261944994451, 9.051626661052135, 7.198516055990973, 3.5556376364373725, 4.024135994536884, 3.7616945852514516, 3.9419977497771805, 1.920691100773466, 1.3612050193415997, 0.7925042256222944, 0.0, 9.87383045277945, 8.717546481845236, 6.806025096707997, 5.762073302320396, 7.883995499554361, 5.266372419352033, 4.024135994536884, 2.5397411688838374, 3.5992580279954867, 3.017208887017379, 1.6865238899889023, 0.8940606197733586, 0.0), # 6
(10.202330711712957, 10.263895448477353, 8.800662781251408, 9.446796790068186, 7.514154980353052, 3.710871604493673, 4.19964045897171, 3.9255164854194056, 4.1140542120849, 2.004444922250256, 1.4206444363918964, 0.8270874372822752, 0.0, 10.304999205909127, 9.097961810105026, 7.103222181959481, 6.013334766750766, 8.2281084241698, 5.495723079587168, 4.19964045897171, 2.6506225746383376, 3.757077490176526, 3.148932263356063, 1.7601325562502819, 0.9330814044070321, 0.0), # 7
(10.62064942180191, 10.68016679449476, 9.157594399863463, 9.830035400533875, 7.820476310933614, 3.8614174757395103, 4.369847461940239, 4.0843896376959234, 4.280914415303496, 2.0856710486376717, 1.4782893562241752, 0.8606265063298821, 0.0, 10.723148034787885, 9.466891569628702, 7.391446781120876, 6.257013145913014, 8.561828830606991, 5.718145492774292, 4.369847461940239, 2.758155339813936, 3.910238155466807, 3.276678466844626, 1.831518879972693, 0.9709242540449783, 0.0), # 8
(11.02478163870312, 11.081630945965095, 9.501828289012156, 10.199639685928528, 8.116134329994049, 4.006606481137679, 4.534000531770584, 4.237608447633729, 4.441837132755248, 2.1640084320714803, 1.5338836277173917, 0.8929724066044035, 0.0, 11.126419211328628, 9.822696472648436, 7.669418138586958, 6.49202529621444, 8.883674265510496, 5.932651826687221, 4.534000531770584, 2.861861772241199, 4.058067164997024, 3.3998798953095104, 1.9003656578024313, 1.0074209950877362, 0.0), # 9
(11.412913863870306, 11.46643799329428, 9.83177843192898, 10.553906839731454, 8.399783319795748, 4.145769851650964, 4.691343196790848, 4.38446732078554, 4.596081137762433, 2.2390960246874507, 1.5871710997505006, 0.923976111945128, 0.0, 11.512955007444255, 10.163737231396405, 7.935855498752503, 6.717288074062351, 9.192162275524867, 6.138254249099756, 4.691343196790848, 2.961264179750688, 4.199891659897874, 3.517968946577152, 1.9663556863857963, 1.0424034539358438, 0.0), # 10
(11.783232598757209, 11.832738026888249, 10.145858811845418, 10.891134055421968, 8.670077562600099, 4.278238818242151, 4.841118985329142, 4.524260662704076, 4.7429052036473305, 2.3105727786213524, 1.6378956212024585, 0.9534885961913449, 0.0, 11.880897695047656, 10.488374558104791, 8.189478106012292, 6.931718335864056, 9.485810407294661, 6.333964927785706, 4.841118985329142, 3.055884870172965, 4.3350387813000495, 3.63037801847399, 2.0291717623690837, 1.075703456989841, 0.0), # 11
(12.133924344817538, 12.178681137152912, 10.442483411992965, 11.209618526479394, 8.925671340668487, 4.403344611874027, 4.9825714257135685, 4.656282878942054, 4.881568103732217, 2.378077646008951, 1.6858010409522184, 0.9813608331823415, 0.0, 12.22838954605175, 10.794969165005755, 8.429005204761092, 7.134232938026852, 9.763136207464434, 6.518796030518876, 4.9825714257135685, 3.1452461513385908, 4.462835670334243, 3.7365395088264655, 2.0884966823985933, 1.107152830650265, 0.0), # 12
(12.463175603505027, 12.502417414494213, 10.720066215603106, 11.507657446383048, 9.165218936262296, 4.520418463509383, 5.11494404627224, 4.779828375052198, 5.011328611339368, 2.441249578986017, 1.7306312078787365, 1.0074437967574077, 0.0, 12.55357283236943, 11.08188176433148, 8.653156039393682, 7.323748736958049, 10.022657222678736, 6.691759725073078, 5.11494404627224, 3.228870331078131, 4.582609468131148, 3.8358858154610167, 2.1440132431206216, 1.136583401317656, 0.0), # 13
(12.769172876273403, 12.802096949318072, 10.977021205907338, 11.783548008612232, 9.387374631642924, 4.6287916041110035, 5.237480375333263, 4.894191556587227, 5.131445499791063, 2.4997275296883177, 1.7721299708609668, 1.0315884607558323, 0.0, 12.85458982591359, 11.347473068314153, 8.860649854304834, 7.499182589064952, 10.262890999582126, 6.8518681792221185, 5.237480375333263, 3.306279717222145, 4.693687315821462, 3.9278493362040785, 2.195404241181468, 1.1638269953925522, 0.0), # 14
(13.050102664576398, 13.075869832030413, 11.211762366137135, 12.035587406646286, 9.590792709071755, 4.72779526464168, 5.349423941224739, 4.998666829099858, 5.241177542409583, 2.5531504502516222, 1.810041178777865, 1.0536457990169035, 0.0, 13.129582798597134, 11.590103789185937, 9.050205893889325, 7.659451350754866, 10.482355084819165, 6.998133560739801, 5.349423941224739, 3.3769966176011996, 4.795396354535877, 4.0118624688820965, 2.242352473227427, 1.1887154392754924, 0.0), # 15
(13.30415146986772, 13.321886153037171, 11.422703679523998, 12.262072833964503, 9.774127450810177, 4.816760676064193, 5.450018272274784, 5.092548598142811, 5.339783512517201, 2.6011572928116995, 1.8441086805083868, 1.0734667853799098, 0.0, 13.376694022332964, 11.808134639179006, 9.220543402541933, 7.803471878435097, 10.679567025034402, 7.1295680373999355, 5.450018272274784, 3.440543340045852, 4.887063725405088, 4.087357611321502, 2.2845407359047996, 1.2110805593670158, 0.0), # 16
(13.529505793601107, 13.538296002744264, 11.608259129299412, 12.46130148404622, 9.936033139119584, 4.895019069341334, 5.538506896811498, 5.17513126926881, 5.426522183436193, 2.643387009504314, 1.874076324931487, 1.09090239368414, 0.0, 13.594065769033982, 11.999926330525538, 9.370381624657433, 7.9301610285129405, 10.853044366872385, 7.245183776976335, 5.538506896811498, 3.496442192386667, 4.968016569559792, 4.153767161348741, 2.3216518258598824, 1.2307541820676606, 0.0), # 17
(13.724352137230287, 13.723249471557619, 11.766842698694862, 12.631570550370744, 10.07516405626135, 4.961901675435895, 5.6141333431629965, 5.245709248030569, 5.500652328488845, 2.6794785524652385, 1.8996879609261188, 1.1058035977688838, 0.0, 13.779840310613086, 12.163839575457718, 9.498439804630594, 8.038435657395715, 11.00130465697769, 7.343992947242797, 5.6141333431629965, 3.5442154824542103, 5.037582028130675, 4.210523516790249, 2.3533685397389728, 1.2475681337779656, 0.0), # 18
(13.88687700220898, 13.874896649883173, 11.896868370941842, 12.77117722641738, 10.190174484496875, 5.0167397253106545, 5.676141139657377, 5.30357693998081, 5.561432720997431, 2.7090708738302403, 1.9206874373712384, 1.1180213714734282, 0.0, 13.932159918983176, 12.298235086207708, 9.603437186856192, 8.12721262149072, 11.122865441994861, 7.425007715973134, 5.676141139657377, 3.5833855180790386, 5.095087242248438, 4.257059075472461, 2.379373674188369, 1.2613542408984704, 0.0), # 19
(14.015266889990915, 13.991387628126835, 11.996750129271838, 12.87841870566547, 10.279718706087547, 5.058864449928407, 5.723773814622755, 5.348028750672253, 5.608122134284226, 2.731802925735086, 1.936818603145802, 1.1274066886370624, 0.0, 14.049166866057154, 12.401473575007685, 9.68409301572901, 8.195408777205257, 11.216244268568452, 7.487240250941153, 5.723773814622755, 3.6134746070917196, 5.139859353043773, 4.292806235221825, 2.399350025854368, 1.2719443298297126, 0.0), # 20
(14.107708302029813, 14.070872496694552, 12.064901956916339, 12.951592181594311, 10.34245100329475, 5.087607080251938, 5.756274896387231, 5.378359085657614, 5.63997934167151, 2.747313660315545, 1.9478253071287643, 1.133810523099076, 0.0, 14.12900342374791, 12.471915754089835, 9.739126535643821, 8.241940980946634, 11.27995868334302, 7.529702719920659, 5.756274896387231, 3.634005057322813, 5.171225501647375, 4.317197393864771, 2.412980391383268, 1.279170226972232, 0.0), # 21
(14.162387739779412, 14.111501345992236, 12.099737837106835, 12.988994847683228, 10.377025658379871, 5.102298847244033, 5.77288791327892, 5.393862350489618, 5.656263116481561, 2.7552420297073854, 1.9534513981990798, 1.1370838486987573, 0.0, 14.16981186396836, 12.50792233568633, 9.7672569909954, 8.265726089122154, 11.312526232963123, 7.551407290685465, 5.77288791327892, 3.644499176602881, 5.188512829189936, 4.329664949227744, 2.419947567421367, 1.282863758726567, 0.0), # 22
(14.182550708679697, 14.116311945587563, 12.104077046181986, 12.993677353395064, 10.385883252297091, 5.104166666666667, 5.774862801581538, 5.395538065843622, 5.658298909465021, 2.7561772953818022, 1.9541568753377396, 1.1374880506020426, 0.0, 14.175, 12.512368556622466, 9.770784376688697, 8.268531886145405, 11.316597818930042, 7.553753292181072, 5.774862801581538, 3.6458333333333335, 5.192941626148546, 4.331225784465023, 2.4208154092363974, 1.283301085962506, 0.0), # 23
(14.197417378247815, 14.113505864197531, 12.10336728395062, 12.99310104166667, 10.390900439373862, 5.104166666666667, 5.773777668845317, 5.393208333333334, 5.658026111111111, 2.755602716049383, 1.9540790684624023, 1.1373934156378602, 0.0, 14.175, 12.51132757201646, 9.77039534231201, 8.266808148148147, 11.316052222222222, 7.550491666666668, 5.773777668845317, 3.6458333333333335, 5.195450219686931, 4.331033680555557, 2.4206734567901242, 1.2830459876543212, 0.0), # 24
(14.211970122296213, 14.10797467992684, 12.101966163694561, 12.991960841049384, 10.39580728255487, 5.104166666666667, 5.771639231824418, 5.388631687242799, 5.657487139917696, 2.754471593507088, 1.9539247931994848, 1.1372065996037193, 0.0, 14.175, 12.509272595640908, 9.769623965997424, 8.263414780521263, 11.314974279835392, 7.544084362139919, 5.771639231824418, 3.6458333333333335, 5.197903641277435, 4.330653613683129, 2.4203932327389124, 1.2825431527206221, 0.0), # 25
(14.226207826667249, 14.099802892089624, 12.099892889803387, 12.990269714506173, 10.400603610526364, 5.104166666666667, 5.768480702816105, 5.381894547325103, 5.65668890946502, 2.7528027480566992, 1.9536954462318665, 1.136930163084896, 0.0, 14.175, 12.506231793933855, 9.768477231159332, 8.258408244170097, 11.31337781893004, 7.534652366255146, 5.768480702816105, 3.6458333333333335, 5.200301805263182, 4.330089904835392, 2.4199785779606775, 1.2818002629172387, 0.0), # 26
(14.240129377203292, 14.089075, 12.097166666666668, 12.988040625, 10.405289251974601, 5.104166666666667, 5.7643352941176484, 5.3730833333333345, 5.655638333333333, 2.7506150000000003, 1.9533924242424245, 1.1365666666666672, 0.0, 14.175, 12.502233333333336, 9.766962121212122, 8.251845, 11.311276666666666, 7.5223166666666685, 5.7643352941176484, 3.6458333333333335, 5.2026446259873005, 4.329346875000001, 2.4194333333333335, 1.280825, 0.0), # 27
(14.253733659746702, 14.075875502972108, 12.093806698673983, 12.985286535493827, 10.40986403558584, 5.104166666666667, 5.759236218026306, 5.362284465020577, 5.654342325102881, 2.7479271696387753, 1.9530171239140377, 1.1361186709343092, 0.0, 14.175, 12.4973053802774, 9.765085619570188, 8.243781508916324, 11.308684650205763, 7.507198251028808, 5.759236218026306, 3.6458333333333335, 5.20493201779292, 4.32842884516461, 2.418761339734797, 1.2796250457247373, 0.0), # 28
(14.26701956013985, 14.060288900320074, 12.089832190214908, 12.982020408950618, 10.41432779004634, 5.104166666666667, 5.753216686839346, 5.349584362139918, 5.652807798353909, 2.7447580772748066, 1.952570941929584, 1.1355887364730988, 0.0, 14.175, 12.491476101204084, 9.76285470964792, 8.234274231824418, 11.305615596707819, 7.489418106995886, 5.753216686839346, 3.6458333333333335, 5.20716389502317, 4.327340136316874, 2.4179664380429817, 1.2782080818472796, 0.0), # 29
(14.279985964225098, 14.042399691358026, 12.085262345679013, 12.978255208333334, 10.418680344042354, 5.104166666666667, 5.746309912854031, 5.335069444444444, 5.651041666666666, 2.7411265432098775, 1.952055274971942, 1.1349794238683129, 0.0, 14.175, 12.48477366255144, 9.760276374859709, 8.223379629629632, 11.302083333333332, 7.469097222222222, 5.746309912854031, 3.6458333333333335, 5.209340172021177, 4.326085069444446, 2.4170524691358026, 1.276581790123457, 0.0), # 30
(14.292631757844802, 14.022292375400093, 12.080116369455878, 12.97400389660494, 10.422921526260142, 5.104166666666667, 5.7385491083676285, 5.318826131687244, 5.649050843621399, 2.737051387745771, 1.9514715197239891, 1.1342932937052284, 0.0, 14.175, 12.477226230757509, 9.757357598619945, 8.211154163237312, 11.298101687242799, 7.4463565843621415, 5.7385491083676285, 3.6458333333333335, 5.211460763130071, 4.324667965534981, 2.416023273891176, 1.2747538523090995, 0.0), # 31
(14.304955826841338, 14.000051451760402, 12.07441346593507, 12.969279436728398, 10.427051165385956, 5.104166666666667, 5.7299674856774, 5.3009408436214, 5.646842242798354, 2.7325514311842714, 1.950821072868604, 1.1335329065691209, 0.0, 14.175, 12.468861972260328, 9.754105364343019, 8.197654293552812, 11.293684485596708, 7.421317181069961, 5.7299674856774, 3.6458333333333335, 5.213525582692978, 4.3230931455761334, 2.4148826931870144, 1.272731950160037, 0.0), # 32
(14.316957057057056, 13.975761419753086, 12.068172839506175, 12.964094791666666, 10.431069090106059, 5.104166666666667, 5.720598257080611, 5.2815, 5.644422777777778, 2.7276454938271613, 1.9501053310886647, 1.1327008230452675, 0.0, 14.175, 12.459709053497942, 9.750526655443322, 8.182936481481482, 11.288845555555556, 7.394100000000001, 5.720598257080611, 3.6458333333333335, 5.215534545053029, 4.321364930555556, 2.413634567901235, 1.2705237654320989, 0.0), # 33
(14.328634334334335, 13.949506778692271, 12.061413694558757, 12.958462924382715, 10.434975129106702, 5.104166666666667, 5.710474634874527, 5.260590020576132, 5.641799362139919, 2.7223523959762237, 1.9493256910670491, 1.1317996037189455, 0.0, 14.175, 12.449795640908398, 9.746628455335244, 8.16705718792867, 11.283598724279837, 7.3648260288065845, 5.710474634874527, 3.6458333333333335, 5.217487564553351, 4.319487641460906, 2.4122827389117516, 1.2681369798811157, 0.0), # 34
(14.339986544515531, 13.92137202789209, 12.054155235482398, 12.952396797839505, 10.438769111074146, 5.104166666666667, 5.699629831356412, 5.238297325102881, 5.638978909465021, 2.7166909579332423, 1.9484835494866362, 1.1308318091754308, 0.0, 14.175, 12.439149900929737, 9.74241774743318, 8.150072873799726, 11.277957818930043, 7.333616255144034, 5.699629831356412, 3.6458333333333335, 5.219384555537073, 4.317465599279836, 2.41083104709648, 1.2655792752629174, 0.0), # 35
(14.35101257344301, 13.891441666666665, 12.04641666666667, 12.945909375, 10.442450864694647, 5.104166666666667, 5.68809705882353, 5.214708333333334, 5.635968333333333, 2.7106800000000004, 1.9475803030303034, 1.1298000000000004, 0.0, 14.175, 12.427800000000001, 9.737901515151515, 8.13204, 11.271936666666665, 7.300591666666668, 5.68809705882353, 3.6458333333333335, 5.221225432347324, 4.315303125000001, 2.409283333333334, 1.2628583333333334, 0.0), # 36
(14.361711306959135, 13.859800194330132, 12.038217192501145, 12.939013618827161, 10.44602021865446, 5.104166666666667, 5.675909529573146, 5.189909465020577, 5.632774547325103, 2.7043383424782816, 1.9466173483809293, 1.1287067367779304, 0.0, 14.175, 12.415774104557233, 9.733086741904645, 8.113015027434844, 11.265549094650206, 7.265873251028808, 5.675909529573146, 3.6458333333333335, 5.22301010932723, 4.313004539609055, 2.407643438500229, 1.259981835848194, 0.0), # 37
(14.372081630906267, 13.826532110196618, 12.029576017375401, 12.931722492283953, 10.449477001639845, 5.104166666666667, 5.663100455902526, 5.1639871399176975, 5.629404465020576, 2.6976848056698683, 1.9455960822213911, 1.1275545800944982, 0.0, 14.175, 12.403100381039478, 9.727980411106955, 8.093054417009604, 11.258808930041152, 7.229581995884776, 5.663100455902526, 3.6458333333333335, 5.224738500819923, 4.3105741640946516, 2.40591520347508, 1.2569574645633292, 0.0), # 38
(14.382122431126781, 13.791721913580247, 12.020512345679016, 12.924048958333334, 10.452821042337057, 5.104166666666667, 5.649703050108934, 5.137027777777778, 5.625865000000001, 2.690738209876544, 1.9445179012345684, 1.1263460905349796, 0.0, 14.175, 12.389806995884772, 9.722589506172842, 8.07221462962963, 11.251730000000002, 7.191838888888889, 5.649703050108934, 3.6458333333333335, 5.226410521168528, 4.308016319444445, 2.4041024691358035, 1.253792901234568, 0.0), # 39
(14.39183259346303, 13.755454103795152, 12.011045381801555, 12.916005979938273, 10.45605216943235, 5.104166666666667, 5.635750524489632, 5.1091177983539104, 5.622163065843623, 2.6835173754000925, 1.943384202103338, 1.125083828684652, 0.0, 14.175, 12.375922115531171, 9.71692101051669, 8.050552126200277, 11.244326131687245, 7.1527649176954755, 5.635750524489632, 3.6458333333333335, 5.228026084716175, 4.305335326646092, 2.4022090763603114, 1.2504958276177414, 0.0), # 40
(14.40121100375738, 13.717813180155463, 12.001194330132604, 12.90760652006173, 10.459170211611989, 5.104166666666667, 5.621276091341887, 5.080343621399178, 5.618305576131687, 2.676041122542296, 1.9421963815105796, 1.1237703551287916, 0.0, 14.175, 12.361473906416705, 9.710981907552897, 8.028123367626886, 11.236611152263373, 7.112481069958849, 5.621276091341887, 3.6458333333333335, 5.229585105805994, 4.302535506687244, 2.400238866026521, 1.2470739254686787, 0.0), # 41
(14.410256547852201, 13.678883641975311, 11.990978395061731, 12.89886354166667, 10.462174997562222, 5.104166666666667, 5.6063129629629636, 5.050791666666668, 5.614299444444446, 2.668328271604939, 1.9409558361391697, 1.122408230452675, 0.0, 14.175, 12.346490534979424, 9.704779180695848, 8.004984814814815, 11.228598888888891, 7.071108333333335, 5.6063129629629636, 3.6458333333333335, 5.231087498781111, 4.299621180555557, 2.3981956790123466, 1.2435348765432102, 0.0), # 42
(14.418968111589852, 13.638749988568819, 11.980416780978512, 12.889790007716051, 10.46506635596931, 5.104166666666667, 5.5908943516501255, 5.020548353909466, 5.61015158436214, 2.660397642889804, 1.9396639626719878, 1.1210000152415793, 0.0, 14.175, 12.331000167657372, 9.698319813359937, 7.981192928669412, 11.22030316872428, 7.0287676954732525, 5.5908943516501255, 3.6458333333333335, 5.232533177984655, 4.296596669238685, 2.3960833561957027, 1.2398863625971654, 0.0), # 43
(14.427344580812699, 13.597496719250115, 11.969528692272522, 12.880398881172843, 10.467844115519508, 5.104166666666667, 5.575053469700638, 4.98970010288066, 5.605868909465021, 2.652268056698675, 1.938322157791911, 1.1195482700807806, 0.0, 14.175, 12.315030970888586, 9.691610788959554, 7.9568041700960235, 11.211737818930041, 6.985580144032924, 5.575053469700638, 3.6458333333333335, 5.233922057759754, 4.293466293724282, 2.3939057384545044, 1.2361360653863744, 0.0), # 44
(14.435384841363105, 13.555208333333335, 11.958333333333336, 12.870703125000002, 10.470508104899077, 5.104166666666667, 5.558823529411765, 4.958333333333334, 5.601458333333333, 2.6439583333333343, 1.9369318181818187, 1.1180555555555556, 0.0, 14.175, 12.29861111111111, 9.684659090909092, 7.931875000000002, 11.202916666666667, 6.941666666666667, 5.558823529411765, 3.6458333333333335, 5.235254052449538, 4.290234375000002, 2.391666666666667, 1.232291666666667, 0.0), # 45
(14.443087779083434, 13.511969330132603, 11.946849908550526, 12.860715702160494, 10.47305815279427, 5.104166666666667, 5.542237743080772, 4.926534465020577, 5.596926769547324, 2.635487293095565, 1.9354943405245877, 1.1165244322511814, 0.0, 14.175, 12.281768754762993, 9.677471702622938, 7.906461879286693, 11.193853539094649, 6.897148251028808, 5.542237743080772, 3.6458333333333335, 5.236529076397135, 4.286905234053499, 2.3893699817101055, 1.228360848193873, 0.0), # 46
(14.45045227981605, 13.46786420896205, 11.935097622313673, 12.850449575617287, 10.475494087891343, 5.104166666666667, 5.525329323004923, 4.894389917695474, 5.592281131687244, 2.6268737562871523, 1.9340111215030973, 1.1149574607529342, 0.0, 14.175, 12.264532068282275, 9.670055607515485, 7.880621268861455, 11.184562263374488, 6.852145884773663, 5.525329323004923, 3.6458333333333335, 5.237747043945672, 4.283483191872429, 2.387019524462735, 1.2243512917238228, 0.0), # 47
(14.457477229403315, 13.422977469135803, 11.923095679012349, 12.839917708333335, 10.477815738876558, 5.104166666666667, 5.508131481481482, 4.861986111111112, 5.587528333333333, 2.618136543209877, 1.9324835578002246, 1.1133572016460909, 0.0, 14.175, 12.246929218106997, 9.662417789001124, 7.854409629629629, 11.175056666666666, 6.806780555555557, 5.508131481481482, 3.6458333333333335, 5.238907869438279, 4.279972569444446, 2.38461913580247, 1.2202706790123459, 0.0), # 48
(14.464161513687602, 13.377393609967992, 11.910863283036125, 12.829133063271607, 10.480022934436168, 5.104166666666667, 5.490677430807714, 4.829409465020577, 5.582675288065844, 2.6092944741655244, 1.930913046098849, 1.1117262155159278, 0.0, 14.175, 12.228988370675204, 9.654565230494246, 7.827883422496572, 11.165350576131688, 6.761173251028807, 5.490677430807714, 3.6458333333333335, 5.240011467218084, 4.276377687757203, 2.382172656607225, 1.2161266918152722, 0.0), # 49
(14.470504018511264, 13.33119713077275, 11.89841963877458, 12.81810860339506, 10.482115503256427, 5.104166666666667, 5.473000383280885, 4.796746399176955, 5.57772890946502, 2.6003663694558763, 1.9293009830818477, 1.1100670629477218, 0.0, 14.175, 12.210737692424937, 9.646504915409238, 7.8010991083676275, 11.15545781893004, 6.715444958847738, 5.473000383280885, 3.6458333333333335, 5.2410577516282135, 4.272702867798355, 2.379683927754916, 1.211927011888432, 0.0), # 50
(14.476503629716676, 13.284472530864198, 11.885783950617286, 12.806857291666669, 10.484093274023598, 5.104166666666667, 5.455133551198258, 4.764083333333335, 5.572696111111112, 2.5913710493827167, 1.9276487654320995, 1.1083823045267494, 0.0, 14.175, 12.192205349794241, 9.638243827160496, 7.774113148148149, 11.145392222222224, 6.669716666666668, 5.455133551198258, 3.6458333333333335, 5.242046637011799, 4.268952430555557, 2.377156790123457, 1.2076793209876546, 0.0), # 51
(14.482159233146191, 13.237304309556471, 11.87297542295382, 12.795392091049385, 10.485956075423934, 5.104166666666667, 5.437110146857097, 4.731506687242798, 5.567583806584363, 2.582327334247829, 1.9259577898324816, 1.1066745008382872, 0.0, 14.175, 12.173419509221157, 9.629788949162407, 7.746982002743485, 11.135167613168726, 6.624109362139918, 5.437110146857097, 3.6458333333333335, 5.242978037711967, 4.265130697016462, 2.3745950845907644, 1.2033913008687704, 0.0), # 52
(14.487469714642183, 13.189776966163697, 11.860013260173757, 12.783725964506175, 10.487703736143693, 5.104166666666667, 5.418963382554669, 4.699102880658437, 5.5623989094650215, 2.573254044352996, 1.9242294529658732, 1.104946212467612, 0.0, 14.175, 12.15440833714373, 9.621147264829364, 7.719762133058986, 11.124797818930043, 6.578744032921811, 5.418963382554669, 3.6458333333333335, 5.243851868071847, 4.261241988168726, 2.3720026520347517, 1.199070633287609, 0.0), # 53
(14.492433960047004, 13.141975000000002, 11.846916666666667, 12.771871875000002, 10.489336084869135, 5.104166666666667, 5.400726470588236, 4.6669583333333335, 5.557148333333334, 2.5641700000000007, 1.9224651515151516, 1.1032000000000002, 0.0, 14.175, 12.1352, 9.612325757575757, 7.69251, 11.114296666666668, 6.533741666666667, 5.400726470588236, 3.6458333333333335, 5.244668042434568, 4.257290625000001, 2.369383333333334, 1.1947250000000003, 0.0), # 54
(14.497050855203032, 13.093982910379516, 11.833704846822133, 12.759842785493827, 10.490852950286511, 5.104166666666667, 5.382432623255064, 4.6351594650205765, 5.551838991769547, 2.555094021490627, 1.9206662821631961, 1.101438424020729, 0.0, 14.175, 12.115822664228014, 9.603331410815981, 7.66528206447188, 11.103677983539095, 6.4892232510288075, 5.382432623255064, 3.6458333333333335, 5.2454264751432556, 4.253280928497944, 2.3667409693644266, 1.1903620827617745, 0.0), # 55
(14.501319285952622, 13.045885196616371, 11.820397005029724, 12.74765165895062, 10.492254161082082, 5.104166666666667, 5.3641150528524175, 4.603792695473252, 5.5464777983539095, 2.5460449291266585, 1.918834241592884, 1.099664045115074, 0.0, 14.175, 12.096304496265812, 9.59417120796442, 7.638134787379974, 11.092955596707819, 6.445309773662553, 5.3641150528524175, 3.6458333333333335, 5.246127080541041, 4.249217219650207, 2.3640794010059447, 1.1859895633287612, 0.0), # 56
(14.505238138138138, 12.997766358024693, 11.807012345679016, 12.735311458333335, 10.493539545942102, 5.104166666666667, 5.34580697167756, 4.572944444444445, 5.541071666666667, 2.5370415432098774, 1.9169704264870937, 1.097879423868313, 0.0, 14.175, 12.076673662551439, 9.584852132435467, 7.61112462962963, 11.082143333333335, 6.402122222222224, 5.34580697167756, 3.6458333333333335, 5.246769772971051, 4.245103819444446, 2.3614024691358035, 1.1816151234567904, 0.0), # 57
(14.508806297601952, 12.949710893918612, 11.79357007315958, 12.72283514660494, 10.494708933552829, 5.104166666666667, 5.3275415920277585, 4.5427011316872425, 5.535627510288066, 2.5281026840420675, 1.9150762335287033, 1.096087120865722, 0.0, 14.175, 12.05695832952294, 9.575381167643515, 7.584308052126201, 11.071255020576132, 6.35978158436214, 5.3275415920277585, 3.6458333333333335, 5.2473544667764145, 4.240945048868314, 2.3587140146319165, 1.1772464449016922, 0.0), # 58
(14.51202265018642, 12.901803303612255, 11.780089391860999, 12.710235686728396, 10.495762152600523, 5.104166666666667, 5.309352126200275, 4.513149176954733, 5.530152242798355, 2.5192471719250125, 1.9131530594005905, 1.0942896966925775, 0.0, 14.175, 12.037186663618352, 9.565765297002951, 7.557741515775036, 11.06030448559671, 6.3184088477366265, 5.309352126200275, 3.6458333333333335, 5.247881076300262, 4.2367452289094665, 2.3560178783722, 1.172891209419296, 0.0), # 59
(14.51488608173391, 12.854128086419754, 11.76658950617284, 12.697526041666668, 10.496699031771435, 5.104166666666667, 5.291271786492374, 4.484375000000001, 5.524652777777779, 2.5104938271604946, 1.9112023007856345, 1.0924897119341568, 0.0, 14.175, 12.017386831275722, 9.556011503928172, 7.5314814814814826, 11.049305555555557, 6.278125000000001, 5.291271786492374, 3.6458333333333335, 5.248349515885717, 4.232508680555557, 2.353317901234568, 1.1685570987654323, 0.0), # 60
(14.517395478086781, 12.806769741655238, 11.753089620484685, 12.684719174382717, 10.497519399751823, 5.104166666666667, 5.273333785201324, 4.4564650205761325, 5.519136028806585, 2.501861470050298, 1.9092253543667126, 1.0906897271757356, 0.0, 14.175, 11.997586998933091, 9.546126771833563, 7.5055844101508935, 11.03827205761317, 6.2390510288065855, 5.273333785201324, 3.6458333333333335, 5.248759699875912, 4.22823972479424, 2.350617924096937, 1.1642517946959308, 0.0), # 61
(14.519549725087407, 12.759812768632832, 11.739608939186102, 12.671828047839508, 10.498223085227952, 5.104166666666667, 5.255571334624385, 4.429505658436215, 5.513608909465021, 2.4933689208962058, 1.9072236168267036, 1.0888923030025914, 0.0, 14.175, 11.977815333028504, 9.536118084133516, 7.4801067626886155, 11.027217818930042, 6.201307921810701, 5.255571334624385, 3.6458333333333335, 5.249111542613976, 4.2239426826131705, 2.3479217878372207, 1.1599829789666212, 0.0), # 62
(14.521347708578144, 12.713341666666667, 11.72616666666667, 12.658865625, 10.498809916886067, 5.104166666666667, 5.238017647058824, 4.4035833333333345, 5.508078333333334, 2.4850350000000003, 1.9051984848484853, 1.0871000000000002, 0.0, 14.175, 11.9581, 9.525992424242425, 7.455105, 11.016156666666667, 6.165016666666668, 5.238017647058824, 3.6458333333333335, 5.249404958443034, 4.219621875000001, 2.345233333333334, 1.1557583333333337, 0.0), # 63
(14.522788314401359, 12.667440935070873, 11.712782007315958, 12.645844868827162, 10.499279723412432, 5.104166666666667, 5.220705934801905, 4.378784465020577, 5.50255121399177, 2.4768785276634664, 1.9031513551149353, 1.0853153787532392, 0.0, 14.175, 11.938469166285628, 9.515756775574676, 7.430635582990398, 11.00510242798354, 6.130298251028808, 5.220705934801905, 3.6458333333333335, 5.249639861706216, 4.215281622942388, 2.342556401463192, 1.151585539551898, 0.0), # 64
(14.523870428399414, 12.62219507315958, 11.69947416552355, 12.63277874228395, 10.499632333493302, 5.104166666666667, 5.2036694101508925, 4.35519547325103, 5.497034465020577, 2.4689183241883863, 1.9010836243089335, 1.0835409998475842, 0.0, 14.175, 11.918950998323425, 9.505418121544666, 7.406754972565158, 10.994068930041154, 6.097273662551442, 5.2036694101508925, 3.6458333333333335, 5.249816166746651, 4.2109262474279845, 2.3398948331047102, 1.1474722793781438, 0.0), # 65
(14.524592936414676, 12.577688580246916, 11.686262345679015, 12.619680208333333, 10.499867575814935, 5.104166666666667, 5.1869412854030505, 4.332902777777779, 5.491535000000001, 2.4611732098765438, 1.898996689113356, 1.0817794238683132, 0.0, 14.175, 11.899573662551441, 9.49498344556678, 7.38351962962963, 10.983070000000001, 6.06606388888889, 5.1869412854030505, 3.6458333333333335, 5.249933787907468, 4.206560069444445, 2.337252469135803, 1.1434262345679016, 0.0), # 66
(14.524954724289511, 12.534005955647004, 11.673165752171926, 12.606562229938273, 10.499985279063587, 5.104166666666667, 5.1705547728556445, 4.311992798353911, 5.486059732510288, 2.453662005029722, 1.8968919462110825, 1.0800332114007012, 0.0, 14.175, 11.88036532540771, 9.484459731055413, 7.360986015089164, 10.972119465020576, 6.036789917695475, 5.1705547728556445, 3.6458333333333335, 5.2499926395317935, 4.202187409979425, 2.3346331504343856, 1.1394550868770006, 0.0), # 67
(14.524708260273156, 12.491002420461081, 11.660140274919984, 12.593323827495976, 10.499886091610856, 5.104071942793273, 5.154460636380753, 4.292367245846671, 5.480574329370524, 2.446367154576509, 1.894733397326088, 1.078295169221637, 0.0, 14.174825210048013, 11.861246861438005, 9.47366698663044, 7.339101463729525, 10.961148658741047, 6.009314144185339, 5.154460636380753, 3.6457656734237665, 5.249943045805428, 4.197774609165326, 2.3320280549839967, 1.135545674587371, 0.0), # 68
(14.522398389694043, 12.44736508363202, 11.646819830246914, 12.579297690217391, 10.498983297022512, 5.1033231138545965, 5.13818772694263, 4.272974279835392, 5.474838991769548, 2.439082236746551, 1.8923013290802768, 1.0765088802252547, 0.0, 14.17344039351852, 11.8415976824778, 9.461506645401384, 7.317246710239651, 10.949677983539097, 5.982163991769549, 5.13818772694263, 3.6452307956104257, 5.249491648511256, 4.193099230072464, 2.329363966049383, 1.1315786439665476, 0.0), # 69
(14.517840102582454, 12.402893656798973, 11.633146504915409, 12.564391480475042, 10.49719935985368, 5.101848358989992, 5.121662094192959, 4.253638926992837, 5.468821349641823, 2.4317718335619576, 1.8895680735227522, 1.0746659888174948, 0.0, 14.170705268347055, 11.82132587699244, 9.447840367613761, 7.295315500685872, 10.937642699283646, 5.955094497789972, 5.121662094192959, 3.6441773992785653, 5.24859967992684, 4.188130493491681, 2.326629300983082, 1.127535786981725, 0.0), # 70
(14.511097524900102, 12.357614716359132, 11.619125100022863, 12.548627178945251, 10.49455687350386, 5.0996715769953775, 5.104891161677292, 4.234367588782199, 5.462530365035819, 2.4244361257699243, 1.8865437198495683, 1.072767842674817, 0.0, 14.166655842764062, 11.800446269422984, 9.43271859924784, 7.273308377309771, 10.925060730071637, 5.928114624295079, 5.104891161677292, 3.642622554996698, 5.24727843675193, 4.182875726315085, 2.323825020004573, 1.1234195196690122, 0.0), # 71
(14.502234782608697, 12.311554838709677, 11.604760416666666, 12.532026766304348, 10.49107843137255, 5.096816666666667, 5.087882352941177, 4.215166666666667, 5.4559750000000005, 2.4170752941176477, 1.8832383572567788, 1.0708157894736845, 0.0, 14.161328125, 11.778973684210527, 9.416191786283894, 7.251225882352942, 10.911950000000001, 5.901233333333334, 5.087882352941177, 3.6405833333333337, 5.245539215686275, 4.177342255434784, 2.3209520833333337, 1.1192322580645162, 0.0), # 72
(14.491316001669949, 12.264740600247798, 11.590057255944217, 12.514612223228664, 10.486786626859248, 5.0933075267997765, 5.070643091530164, 4.196042562109436, 5.4491642165828384, 2.409689519352323, 1.8796620749404376, 1.0688111768905575, 0.0, 14.154758123285324, 11.75692294579613, 9.398310374702186, 7.229068558056968, 10.898328433165677, 5.8744595869532095, 5.070643091530164, 3.638076804856983, 5.243393313429624, 4.171537407742889, 2.3180114511888434, 1.1149764182043456, 0.0), # 73
(14.478405308045566, 12.21719857737068, 11.575020418952905, 12.496405530394526, 10.481704053363458, 5.089168056190623, 5.053180800989806, 4.177001676573693, 5.4421069768328, 2.402278982221147, 1.8758249620965999, 1.0667553526018982, 0.0, 14.146981845850483, 11.734308878620878, 9.379124810482999, 7.20683694666344, 10.8842139536656, 5.84780234720317, 5.053180800989806, 3.635120040136159, 5.240852026681729, 4.165468510131509, 2.315004083790581, 1.1106544161246077, 0.0), # 74
(14.463566827697262, 12.168955346475506, 11.559654706790123, 12.477428668478263, 10.475853304284678, 5.084422153635118, 5.03550290486565, 4.158050411522635, 5.434812242798353, 2.394843863471315, 1.8717371079213185, 1.0646496642841674, 0.0, 14.138035300925928, 11.711146307125839, 9.358685539606592, 7.184531590413944, 10.869624485596706, 5.821270576131688, 5.03550290486565, 3.63173010973937, 5.237926652142339, 4.159142889492755, 2.311930941358025, 1.10626866786141, 0.0), # 75
(14.44686468658675, 12.12003748395947, 11.543964920553272, 12.457703618156202, 10.469256973022405, 5.079093717929179, 5.017616826703247, 4.139195168419449, 5.427288976527969, 2.3873843438500235, 1.8674086016106486, 1.0624954596138265, 0.0, 14.127954496742113, 11.68745005575209, 9.337043008053241, 7.162153031550069, 10.854577953055937, 5.794873235787229, 5.017616826703247, 3.6279240842351275, 5.234628486511203, 4.152567872718735, 2.3087929841106543, 1.101821589450861, 0.0), # 76
(14.428363010675731, 12.070471566219748, 11.527955861339734, 12.43725236010467, 10.461937652976141, 5.07320664786872, 4.9995299900481465, 4.120442348727329, 5.4195461400701115, 2.3799006041044684, 1.8628495323606438, 1.0602940862673376, 0.0, 14.116775441529496, 11.663234948940712, 9.314247661803218, 7.139701812313404, 10.839092280140223, 5.768619288218261, 4.9995299900481465, 3.623719034191943, 5.230968826488071, 4.145750786701558, 2.305591172267947, 1.0973155969290682, 0.0), # 77
(14.408125925925928, 12.020284169653527, 11.511632330246915, 12.416096875000001, 10.45391793754539, 5.066784842249657, 4.981249818445898, 4.101798353909466, 5.41159269547325, 2.372392824981845, 1.8580699893673582, 1.0580468919211612, 0.0, 14.10453414351852, 11.638515811132772, 9.29034994683679, 7.1171784749455345, 10.8231853909465, 5.742517695473253, 4.981249818445898, 3.6191320301783265, 5.226958968772695, 4.138698958333334, 2.3023264660493834, 1.092753106332139, 0.0), # 78
(14.386217558299041, 11.969501870657995, 11.494999128372202, 12.394259143518521, 10.445220420129644, 5.0598521998679065, 4.962783735442051, 4.0832695854290515, 5.403437604785855, 2.3648611872293506, 1.8530800618268455, 1.0557552242517592, 0.0, 14.091266610939643, 11.613307466769347, 9.265400309134227, 7.094583561688051, 10.80687520957171, 5.716577419600672, 4.962783735442051, 3.61418014276279, 5.222610210064822, 4.131419714506174, 2.2989998256744406, 1.0881365336961817, 0.0), # 79
(14.362702033756786, 11.918151245630337, 11.478061056812987, 12.371761146336556, 10.435867694128408, 5.052432619519382, 4.9441391645821575, 4.064862444749277, 5.395089830056394, 2.35730587159418, 1.847889838935161, 1.0534204309355928, 0.0, 14.07700885202332, 11.587624740291517, 9.239449194675805, 7.071917614782539, 10.790179660112788, 5.690807422648988, 4.9441391645821575, 3.6088804425138443, 5.217933847064204, 4.123920382112186, 2.2956122113625974, 1.0834682950573036, 0.0), # 80
(14.337643478260873, 11.866258870967743, 11.460822916666668, 12.348624864130437, 10.425882352941176, 5.04455, 4.925323529411765, 4.046583333333334, 5.386558333333333, 2.34972705882353, 1.8425094098883579, 1.0510438596491232, 0.0, 14.061796875, 11.561482456140352, 9.212547049441788, 7.049181176470589, 10.773116666666667, 5.665216666666669, 4.925323529411765, 3.60325, 5.212941176470588, 4.11620828804348, 2.2921645833333337, 1.0787508064516131, 0.0), # 81
(14.311106017773009, 11.813851323067393, 11.443289509030638, 12.32487227757649, 10.415286989967456, 5.036228240105676, 4.906344253476426, 4.0284386526444145, 5.3778520766651425, 2.342124929664596, 1.83694886388249, 1.048626858068812, 0.0, 14.045666688100141, 11.53489543875693, 9.18474431941245, 7.026374788993786, 10.755704153330285, 5.63981411370218, 4.906344253476426, 3.5973058857897686, 5.207643494983728, 4.1082907591921645, 2.2886579018061277, 1.0739864839152178, 0.0), # 82
(14.283153778254908, 11.760955178326475, 11.425465635002288, 12.300525367351046, 10.40410419860674, 5.027491238632323, 4.887208760321688, 4.01043480414571, 5.368980022100289, 2.3344996648645746, 1.8312182901136123, 1.0461707738711208, 0.0, 14.028654299554185, 11.507878512582325, 9.156091450568061, 7.0034989945937225, 10.737960044200578, 5.614608725803994, 4.887208760321688, 3.5910651704516594, 5.20205209930337, 4.1001751224503495, 2.2850931270004575, 1.0691777434842251, 0.0), # 83
(14.253850885668278, 11.707597013142175, 11.407356095679013, 12.275606114130436, 10.392356572258533, 5.0183628943758585, 4.867924473493101, 3.9925781893004118, 5.359951131687243, 2.3268514451706617, 1.825327777777778, 1.0436769547325107, 0.0, 14.010795717592593, 11.480446502057614, 9.12663888888889, 6.980554335511984, 10.719902263374486, 5.589609465020577, 4.867924473493101, 3.5845449245541845, 5.196178286129267, 4.091868704710146, 2.281471219135803, 1.0643270011947434, 0.0), # 84
(14.223261465974833, 11.653803403911677, 11.388965692158209, 12.250136498590983, 10.380066704322333, 5.008867106132196, 4.8484988165362175, 3.974875209571713, 5.35077436747447, 2.3191804513300527, 1.8192874160710422, 1.041146748329443, 0.0, 13.992126950445819, 11.452614231623869, 9.09643708035521, 6.957541353990157, 10.70154873494894, 5.564825293400398, 4.8484988165362175, 3.577762218665854, 5.190033352161167, 4.083378832863662, 2.2777931384316417, 1.05943667308288, 0.0), # 85
(14.191449645136279, 11.59960092703217, 11.370299225537268, 12.224138501409021, 10.367257188197637, 4.999027772697253, 4.828939212996585, 3.9573322664228017, 5.341458691510441, 2.311486864089944, 1.8131072941894584, 1.0385815023383795, 0.0, 13.97268400634431, 11.424396525722173, 9.065536470947292, 6.934460592269831, 10.682917383020882, 5.540265172991923, 4.828939212996585, 3.57073412335518, 5.183628594098819, 4.074712833803008, 2.274059845107454, 1.0545091751847429, 0.0), # 86
(14.15847954911433, 11.545016158900838, 11.35136149691358, 12.19763410326087, 10.353950617283953, 4.988868792866941, 4.809253086419753, 3.939955761316873, 5.332013065843622, 2.3037708641975314, 1.8067975013290805, 1.035982564435781, 0.0, 13.95250289351852, 11.39580820879359, 9.033987506645403, 6.9113125925925925, 10.664026131687244, 5.515938065843622, 4.809253086419753, 3.563477709190672, 5.1769753086419765, 4.065878034420291, 2.2702722993827162, 1.0495469235364399, 0.0), # 87
(14.124415303870702, 11.490075675914863, 11.332157307384547, 12.170645284822868, 10.340169584980769, 4.97841406543718, 4.789447860351274, 3.9227520957171165, 5.322446452522482, 2.296032632400011, 1.8003681266859632, 1.0333512822981095, 0.0, 13.931619620198905, 11.366864105279202, 9.001840633429817, 6.888097897200032, 10.644892905044964, 5.491852934003963, 4.789447860351274, 3.556010046740843, 5.1700847924903846, 4.056881761607624, 2.2664314614769094, 1.0445523341740786, 0.0), # 88
(14.089321035367092, 11.434806054471437, 11.312691458047555, 12.143194026771337, 10.325936684687594, 4.967687489203883, 4.769530958336696, 3.905727671086725, 5.312767813595489, 2.2882723494445796, 1.7938292594561607, 1.030689003601826, 0.0, 13.910070194615912, 11.337579039620083, 8.969146297280803, 6.864817048333737, 10.625535627190978, 5.4680187395214155, 4.769530958336696, 3.548348206574202, 5.162968342343797, 4.047731342257113, 2.2625382916095114, 1.0395278231337672, 0.0), # 89
(14.053260869565218, 11.379233870967743, 11.292968750000002, 12.115302309782612, 10.311274509803923, 4.956712962962964, 4.749509803921569, 3.8888888888888893, 5.302986111111112, 2.280490196078432, 1.787190988835726, 1.027997076023392, 0.0, 13.887890625, 11.30796783625731, 8.93595494417863, 6.841470588235294, 10.605972222222224, 5.4444444444444455, 4.749509803921569, 3.54050925925926, 5.155637254901961, 4.0384341032608715, 2.2585937500000006, 1.0344758064516133, 0.0), # 90
(14.016298932426789, 11.323385701800964, 11.272993984339278, 12.086992114533015, 10.296205653729254, 4.945514385510339, 4.729391820651443, 3.8722421505868017, 5.293110307117818, 2.2726863530487647, 1.7804634040207143, 1.025276847239269, 0.0, 13.865116919581618, 11.278045319631957, 8.902317020103572, 6.818059059146293, 10.586220614235636, 5.4211390108215225, 4.729391820651443, 3.5325102753645283, 5.148102826864627, 4.0289973715110055, 2.254598796867856, 1.0293987001637241, 0.0), # 91
(13.978499349913523, 11.267288123368292, 11.252771962162782, 12.058285421698875, 10.280752709863094, 4.934115655641925, 4.709184432071869, 3.8557938576436523, 5.2831493636640765, 2.2648610011027737, 1.7736565942071794, 1.0225296649259181, 0.0, 13.841785086591221, 11.247826314185097, 8.868282971035896, 6.79458300330832, 10.566298727328153, 5.398111400701113, 4.709184432071869, 3.524368325458518, 5.140376354931547, 4.019428473899626, 2.2505543924325564, 1.0242989203062085, 0.0), # 92
(13.939926247987117, 11.210967712066907, 11.232307484567903, 12.029204211956525, 10.264938271604938, 4.9225406721536356, 4.688895061728395, 3.839550411522634, 5.273112242798354, 2.2570143209876545, 1.7667806485911755, 1.019756876759801, 0.0, 13.81793113425926, 11.217325644357809, 8.833903242955877, 6.771042962962962, 10.546224485596708, 5.375370576131688, 4.688895061728395, 3.5161004801097393, 5.132469135802469, 4.009734737318842, 2.246461496913581, 1.0191788829151736, 0.0), # 93
(13.900643752609293, 11.154451044293994, 11.211605352652038, 11.999770465982289, 10.248784932354287, 4.910813333841387, 4.6685311331665735, 3.8235182136869392, 5.263007906569121, 2.2491464934506045, 1.7598456563687561, 1.016959830417379, 0.0, 13.793591070816188, 11.186558134591166, 8.79922828184378, 6.747439480351812, 10.526015813138242, 5.3529254991617155, 4.6685311331665735, 3.5077238098867047, 5.124392466177143, 3.9999234886607637, 2.2423210705304077, 1.014041004026727, 0.0), # 94
(13.860715989741754, 11.097764696446747, 11.190670367512576, 11.970006164452498, 10.232315285510639, 4.898957539501094, 4.648100069931951, 3.807703665599757, 5.252845317024844, 2.241257699238818, 1.752861706735976, 1.014139873575113, 0.0, 13.768800904492457, 11.155538609326241, 8.764308533679879, 6.723773097716453, 10.505690634049689, 5.33078513183966, 4.648100069931951, 3.499255385357924, 5.1161576427553195, 3.9900020548175, 2.2381340735025153, 1.0088876996769771, 0.0), # 95
(13.820207085346219, 11.040935244922345, 11.169507330246915, 11.93993328804348, 10.215551924473493, 4.88699718792867, 4.62760929557008, 3.7921131687242804, 5.242633436213992, 2.2333481190994924, 1.7458388888888892, 1.0112983539094653, 0.0, 13.74359664351852, 11.124281893004117, 8.729194444444445, 6.700044357298475, 10.485266872427983, 5.3089584362139925, 4.62760929557008, 3.490712277091907, 5.1077759622367465, 3.9799777626811608, 2.2339014660493834, 1.0037213859020315, 0.0), # 96
(13.779181165384388, 10.983989266117973, 11.148121041952448, 11.909573817431562, 10.198517442642354, 4.8749561779200326, 4.60706623362651, 3.7767531245237014, 5.2323812261850335, 2.2254179337798226, 1.7387872920235496, 1.0084366190968967, 0.0, 13.718014296124831, 11.09280281006586, 8.693936460117747, 6.676253801339467, 10.464762452370067, 5.287454374333182, 4.60706623362651, 3.482111555657166, 5.099258721321177, 3.969857939143855, 2.2296242083904896, 0.9985444787379977, 0.0), # 97
(13.737702355817978, 10.926953336430817, 11.126516303726566, 11.878949733293078, 10.181234433416716, 4.862858408271099, 4.58647830764679, 3.7616299344612103, 5.222097648986434, 2.2174673240270053, 1.7317170053360116, 1.0055560168138682, 0.0, 13.69208987054184, 11.06111618495255, 8.658585026680058, 6.652401972081014, 10.444195297972868, 5.266281908245695, 4.58647830764679, 3.4734702916222133, 5.090617216708358, 3.9596499110976935, 2.2253032607453136, 0.9933593942209834, 0.0), # 98
(13.695834782608697, 10.869854032258065, 11.10469791666667, 11.848083016304349, 10.163725490196079, 4.850727777777779, 4.5658529411764714, 3.7467500000000005, 5.211791666666667, 2.2094964705882356, 1.724638118022329, 1.0026578947368423, 0.0, 13.665859375000002, 11.029236842105265, 8.623190590111644, 6.628489411764706, 10.423583333333333, 5.245450000000001, 4.5658529411764714, 3.4648055555555564, 5.081862745098039, 3.949361005434784, 2.220939583333334, 0.988168548387097, 0.0), # 99
(13.653642571718258, 10.8127179299969, 11.082670681870143, 11.816995647141708, 10.146013206379946, 4.8385881852359915, 4.545197557761102, 3.732119722603262, 5.201472241274196, 2.201505554210711, 1.717560719278556, 0.9997436005422796, 0.0, 13.639358817729768, 10.997179605965075, 8.58780359639278, 6.6045166626321326, 10.402944482548392, 5.224967611644567, 4.545197557761102, 3.456134418025708, 5.073006603189973, 3.938998549047237, 2.2165341363740287, 0.9829743572724456, 0.0), # 100
(13.611189849108369, 10.755571606044516, 11.060439400434387, 11.785709606481484, 10.128120175367815, 4.82646352944165, 4.524519580946234, 3.7177455037341867, 5.191148334857491, 2.1934947556416264, 1.7104948983007466, 0.9968144819066413, 0.0, 13.612624206961591, 10.964959300973053, 8.552474491503732, 6.580484266924878, 10.382296669714982, 5.204843705227861, 4.524519580946234, 3.4474739496011786, 5.064060087683908, 3.928569868827162, 2.2120878800868775, 0.977779236913138, 0.0), # 101
(13.568540740740744, 10.698441636798089, 11.038008873456791, 11.754246875000002, 10.110068990559187, 4.814377709190674, 4.503826434277415, 3.7036337448559675, 5.180828909465021, 2.1854642556281783, 1.7034507442849551, 0.9938718865063897, 0.0, 13.585691550925928, 10.932590751570284, 8.517253721424776, 6.556392766884533, 10.361657818930041, 5.185087242798355, 4.503826434277415, 3.438841220850481, 5.055034495279593, 3.918082291666668, 2.207601774691358, 0.972585603345281, 0.0), # 102
(13.525759372577088, 10.641354598654807, 11.015383902034753, 11.722629433373593, 10.09188224535356, 4.802354623278973, 4.483125541300197, 3.689790847431795, 5.170522927145252, 2.1774142349175616, 1.696438346427236, 0.9909171620179854, 0.0, 13.558596857853223, 10.900088782197837, 8.482191732136178, 6.532242704752683, 10.341045854290504, 5.1657071864045125, 4.483125541300197, 3.4302533023421233, 5.04594112267678, 3.907543144457865, 2.2030767804069504, 0.9673958726049827, 0.0), # 103
(13.482909870579116, 10.58433706801186, 10.992569287265662, 11.690879262278584, 10.073582533150434, 4.790418170502465, 4.462424325560129, 3.6762232129248593, 5.160239349946655, 2.1693448742569736, 1.689467793923642, 0.9879516561178898, 0.0, 13.53137613597394, 10.867468217296787, 8.447338969618208, 6.50803462277092, 10.32047869989331, 5.146712498094804, 4.462424325560129, 3.421727264644618, 5.036791266575217, 3.896959754092862, 2.1985138574531327, 0.9622124607283511, 0.0), # 104
(13.440056360708535, 10.527415621266428, 10.969569830246915, 11.659018342391304, 10.05519244734931, 4.778592249657065, 4.441730210602761, 3.662937242798354, 5.1499871399176955, 2.1612563543936103, 1.682549175970229, 0.9849767164825647, 0.0, 13.50406539351852, 10.83474388130821, 8.412745879851144, 6.48376906318083, 10.299974279835391, 5.128112139917696, 4.441730210602761, 3.4132801783264752, 5.027596223674655, 3.886339447463769, 2.1939139660493834, 0.9570377837514936, 0.0), # 105
(13.39726296892706, 10.470616834815702, 10.946390332075904, 11.627068654388085, 10.036734581349688, 4.766900759538689, 4.4210506199736415, 3.6499393385154706, 5.139775259106843, 2.153148856074666, 1.67569258176305, 0.9819936907884712, 0.0, 13.476700638717421, 10.801930598673183, 8.378462908815248, 6.459446568223997, 10.279550518213686, 5.109915073921659, 4.4210506199736415, 3.4049291139562063, 5.018367290674844, 3.875689551462696, 2.189278066415181, 0.9518742577105185, 0.0), # 106
(13.3545938211964, 10.413967285056863, 10.923035593850026, 11.59505217894525, 10.018231528551063, 4.755367598943252, 4.400392977218323, 3.6372359015394005, 5.129612669562567, 2.145022560047339, 1.6689081004981592, 0.9790039267120707, 0.0, 13.449317879801098, 10.769043193832776, 8.344540502490794, 6.435067680142016, 10.259225339125134, 5.092130262155161, 4.400392977218323, 3.3966911421023225, 5.009115764275531, 3.865017392981751, 2.1846071187700056, 0.9467242986415331, 0.0), # 107
|
(13.312113043478263, 10.357493548387097, 10.899510416666669, 11.562990896739132, 9.999705882352941, 4.744016666666668, 4.379764705882353, 3.6248333333333345, 5.119508333333334, 2.1368776470588244, 1.662205821371611, 0.9760087719298248, 0.0, 13.421953125000002, 10.736096491228071, 8.311029106858054, 6.4106329411764715, 10.239016666666668, 5.074766666666668, 4.379764705882353, 3.3885833333333344, 4.999852941176471, 3.854330298913045, 2.179902083333334, 0.9415903225806455, 0.0), # 108
(13.26988476173436, 10.301222201203595, 10.87581960162323, 11.530906788446053, 9.98118023615482, 4.732871861504853, 4.359173229511284, 3.612738035360464, 5.109471212467612, 2.1287142978563174, 1.6555958335794598, 0.9730095741181947, 0.0, 13.394642382544584, 10.70310531530014, 8.277979167897298, 6.386142893568951, 10.218942424935223, 5.05783324950465, 4.359173229511284, 3.3806227582177515, 4.99059011807741, 3.8436355961486854, 2.1751639203246462, 0.9364747455639633, 0.0), # 109
(13.227973101926404, 10.245179819903537, 10.851967949817103, 11.498821834742351, 9.962677183356197, 4.721957082253722, 4.3386259716506625, 3.6009564090839814, 5.099510269013869, 2.1205326931870148, 1.6490882263177586, 0.9700076809536419, 0.0, 13.367421660665297, 10.670084490490058, 8.245441131588793, 6.361598079561043, 10.199020538027739, 5.041338972717574, 4.3386259716506625, 3.372826487324087, 4.981338591678099, 3.832940611580785, 2.170393589963421, 0.9313799836275944, 0.0), # 110
(13.186442190016104, 10.189392980884113, 10.827960262345682, 11.46675801630435, 9.944219317356573, 4.711296227709192, 4.318130355846042, 3.5894948559670787, 5.089634465020577, 2.1123330137981124, 1.6426930887825626, 0.9670044401126275, 0.0, 13.340326967592594, 10.6370488412389, 8.213465443912813, 6.336999041394336, 10.179268930041154, 5.02529279835391, 4.318130355846042, 3.3652115912208513, 4.972109658678287, 3.8222526721014507, 2.1655920524691368, 0.9263084528076467, 0.0), # 111
(13.14535615196517, 10.133888260542502, 10.803801340306359, 11.434737313808373, 9.925829231555449, 4.700913196667176, 4.297693805642971, 3.5783597774729468, 5.079852762536198, 2.1041154404368063, 1.6364205101699256, 0.9640011992716131, 0.0, 13.313394311556928, 10.604013191987741, 8.182102550849628, 6.312346321310418, 10.159705525072397, 5.0097036884621255, 4.297693805642971, 3.357795140476554, 4.962914615777724, 3.8115791046027923, 2.160760268061272, 0.9212625691402275, 0.0), # 112
(13.104705913184263, 10.078784894108638, 10.779554132960747, 11.402825576616644, 9.907497301495457, 4.690826978191853, 4.277368174559739, 3.5675806651220205, 5.07019931192069, 2.095906657814456, 1.6302822447690024, 0.9610058425921835, 0.0, 13.286621461180511, 10.571064268514016, 8.151411223845011, 6.287719973443367, 10.14039862384138, 4.9946129311708285, 4.277368174559739, 3.3505906987084666, 4.953748650747729, 3.8009418588722155, 2.15591082659215, 0.9162531721916946, 0.0), # 113
(13.064073257060091, 10.024626385524439, 10.755553287525224, 11.371278892341204, 9.88903379759524, 4.681014596966087, 4.257412745887406, 3.557289901377987, 5.060822216666095, 2.0878603087694745, 1.6242903453264128, 0.9580564200798471, 0.0, 13.25978557982405, 10.538620620878318, 8.121451726632063, 6.263580926308422, 10.12164443333219, 4.980205861929182, 4.257412745887406, 3.3435818549757763, 4.94451689879762, 3.790426297447069, 2.1511106575050447, 0.9113296714113127, 0.0), # 114
(13.023338864205595, 9.97143223830991, 10.731813088158539, 11.340088730440868, 9.870380499362694, 4.671450535207326, 4.2378417551340934, 3.547484881662581, 5.051724990045435, 2.0799888647958276, 1.6184360526663222, 0.9551543846318662, 0.0, 13.232809284324528, 10.506698230950526, 8.09218026333161, 6.239966594387481, 10.10344998009087, 4.966478834327614, 4.2378417551340934, 3.336750382290947, 4.935190249681347, 3.780029576813624, 2.146362617631708, 0.9064938398463556, 0.0), # 115
(12.982451822532688, 9.919124960991017, 10.708287554981187, 11.309199457779725, 9.851509291291528, 4.662112249784464, 4.218623372269525, 3.5381385158577467, 5.042884624972988, 2.072277675457342, 1.6127080506300124, 0.9522943730401906, 0.0, 13.205650163658248, 10.475238103442095, 8.063540253150062, 6.216833026372026, 10.085769249945976, 4.953393922200846, 4.218623372269525, 3.330080178417474, 4.925754645645764, 3.7697331525932425, 2.1416575109962372, 0.9017386328173653, 0.0), # 116
(12.941361219953283, 9.867627062093726, 10.68493070811365, 11.278555441221856, 9.832392057875436, 4.652977197566394, 4.199725767263427, 3.529223713845425, 5.034278114363028, 2.0647120903178457, 1.6070950230587664, 0.949471022096771, 0.0, 13.178265806801516, 10.44418124306448, 8.035475115293831, 6.1941362709535355, 10.068556228726056, 4.940913199383595, 4.199725767263427, 3.3235551411188533, 4.916196028937718, 3.7595184804072863, 2.1369861416227303, 0.8970570056448843, 0.0), # 117
(12.900016144379297, 9.816861050144, 10.66169656767643, 11.248101047631351, 9.81300068360812, 4.644022835422014, 4.181117110085521, 3.5207133855075567, 5.025882451129837, 2.0572774589411664, 1.6015856537938657, 0.9466789685935577, 0.0, 13.150613802730636, 10.413468654529133, 8.007928268969328, 6.171832376823498, 10.051764902259674, 4.92899873971058, 4.181117110085521, 3.317159168158581, 4.90650034180406, 3.7493670158771177, 2.132339313535286, 0.8924419136494547, 0.0), # 118
(12.858365683722639, 9.766749433667803, 10.638539153790012, 11.217780643872292, 9.793307052983273, 4.635226620220214, 4.162765570705529, 3.512580440726085, 5.017674628187687, 2.0499591308911307, 1.5961686266765933, 0.9439128493225009, 0.0, 13.122651740421906, 10.383041342547507, 7.980843133382966, 6.149877392673391, 10.035349256375374, 4.91761261701652, 4.162765570705529, 3.310876157300153, 4.896653526491637, 3.7392602146240983, 2.1277078307580024, 0.8878863121516185, 0.0), # 119
(12.816358925895228, 9.717214721191104, 10.61541248657489, 11.187538596808764, 9.773283050494598, 4.626566008829889, 4.144639319093177, 3.5047977893829505, 5.009631638450861, 2.0427424557315677, 1.5908326255482306, 0.9411673010755515, 0.0, 13.094337208851638, 10.352840311831065, 7.954163127741153, 6.128227367194702, 10.019263276901722, 4.906716905136131, 4.144639319093177, 3.3046900063070637, 4.886641525247299, 3.729179532269589, 2.1230824973149782, 0.8833831564719186, 0.0), # 120
(12.773944958808976, 9.668179421239865, 10.592270586151553, 11.157319273304857, 9.75290056063579, 4.618018458119934, 4.126706525218187, 3.4973383413600962, 5.001730474833633, 2.035612783026304, 1.5855663342500608, 0.9384369606446594, 0.0, 13.065627796996127, 10.322806567091252, 7.927831671250303, 6.106838349078911, 10.003460949667266, 4.8962736779041345, 4.126706525218187, 3.29858461294281, 4.876450280317895, 3.719106424434953, 2.118454117230311, 0.878925401930897, 0.0), # 121
(12.731072870375797, 9.61956604234005, 10.569067472640498, 11.127067040224649, 9.732131467900551, 4.609561424959241, 4.108935359050283, 3.490175006539462, 4.993948130250281, 2.0285554623391677, 1.5803584366233656, 0.9357164648217753, 0.0, 13.036481093831679, 10.292881113039527, 7.901792183116827, 6.085666387017502, 9.987896260500563, 4.886245009155247, 4.108935359050283, 3.2925438749708866, 4.8660657339502755, 3.7090223467415506, 2.1138134945280997, 0.8745060038490956, 0.0), # 122
(12.687691748507607, 9.571297093017627, 10.54575716616221, 11.09672626443223, 9.71094765678258, 4.601172366216706, 4.091293990559188, 3.4832806948029904, 4.986261597615085, 2.021555843233986, 1.5751976165094272, 0.9330004503988493, 0.0, 13.0068546883346, 10.263004954387341, 7.875988082547136, 6.064667529701957, 9.97252319523017, 4.876592972724187, 4.091293990559188, 3.28655169015479, 4.85547382839129, 3.698908754810744, 2.109151433232442, 0.8701179175470571, 0.0), # 123
(12.643750681116316, 9.523295081798558, 10.522293686837184, 11.066241312791686, 9.689321011775569, 4.592828738761221, 4.073750589714624, 3.476628316032624, 4.97864786984232, 2.014599275274587, 1.5700725577495283, 0.9302835541678323, 0.0, 12.976706169481197, 10.233119095846153, 7.85036278874764, 6.04379782582376, 9.95729573968464, 4.8672796424456735, 4.073750589714624, 3.280591956258015, 4.844660505887784, 3.6887471042638964, 2.104458737367437, 0.8657540983453236, 0.0), # 124
(12.599198756113843, 9.475482517208812, 10.498631054785912, 11.0355565521671, 9.667223417373222, 4.584507999461682, 4.056273326486318, 3.4701907801103036, 4.971083939846263, 2.0076711080247973, 1.5649719441849508, 0.927560412920674, 0.0, 12.94599312624776, 10.203164542127412, 7.824859720924753, 6.023013324074391, 9.942167879692526, 4.858267092154425, 4.056273326486318, 3.2746485710440583, 4.833611708686611, 3.678518850722367, 2.0997262109571824, 0.8614075015644376, 0.0), # 125
(12.553985061412101, 9.427781907774351, 10.474723290128884, 11.004616349422557, 9.644626758069233, 4.5761876051869805, 4.038830370843989, 3.463940996917971, 4.963546800541195, 2.0007566910484456, 1.5598844596569765, 0.9248256634493257, 0.0, 12.91467314761061, 10.173082297942582, 7.799422298284883, 6.002270073145335, 9.92709360108239, 4.849517395685159, 4.038830370843989, 3.268705432276415, 4.822313379034616, 3.66820544980752, 2.094944658025777, 0.8570710825249411, 0.0), # 126
(12.508058684923006, 9.380115762021138, 10.450524412986589, 10.973365071422144, 9.621502918357304, 4.567845012806012, 4.021389892757366, 3.4578518763375685, 4.95601344484139, 1.993841373909359, 1.5547987880068885, 0.9220739425457369, 0.0, 12.88270382254604, 10.142813368003106, 7.773993940034442, 5.981524121728076, 9.91202688968278, 4.8409926268725965, 4.021389892757366, 3.26274643771858, 4.810751459178652, 3.6577883571407157, 2.090104882597318, 0.8527377965473764, 0.0), # 127
(12.461368714558466, 9.332406588475143, 10.425988443479525, 10.941747085029949, 9.597823782731137, 4.5594576791876715, 4.003920062196168, 3.451896328251037, 4.948460865661126, 1.986910506171365, 1.5497036130759692, 0.9192998870018588, 0.0, 12.850042740030352, 10.112298757020445, 7.748518065379845, 5.960731518514094, 9.896921731322252, 4.832654859551452, 4.003920062196168, 3.2567554851340508, 4.798911891365568, 3.6472490283433174, 2.085197688695905, 0.8484005989522859, 0.0), # 128
(12.413864238230394, 9.284576895662326, 10.401069401728181, 10.909706757110053, 9.573561235684425, 4.551003061200851, 3.9863890491301195, 3.446047262540319, 4.9408660559146815, 1.9799494373982915, 1.5445876187055003, 0.916498133609641, 0.0, 12.816647489039854, 10.08147946970605, 7.7229380935275005, 5.939848312194873, 9.881732111829363, 4.824466167556446, 3.9863890491301195, 3.250716472286322, 4.786780617842212, 3.636568919036685, 2.0802138803456365, 0.8440524450602116, 0.0), # 129
(12.365494343850713, 9.236549192108656, 10.375721307853043, 10.877188454526541, 9.548687161710866, 4.542458615714445, 3.968765023528944, 3.440277589087355, 4.933206008516334, 1.9729435171539655, 1.539439488736764, 0.9136633191610346, 0.0, 12.78247565855085, 10.050296510771378, 7.697197443683819, 5.9188305514618955, 9.866412017032667, 4.816388624722297, 3.968765023528944, 3.244613296938889, 4.774343580855433, 3.6257294848421813, 2.075144261570609, 0.8396862901916962, 0.0), # 130
(12.316208119331334, 9.188245986340096, 10.349898181974611, 10.8441365441435, 9.523173445304161, 4.533801799597346, 3.9510161553623666, 3.4345602177740875, 4.92545771638036, 1.9658780950022154, 1.5342479070110426, 0.9107900804479897, 0.0, 12.747484837539638, 10.018690884927885, 7.671239535055213, 5.897634285006645, 9.85091543276072, 4.808384304883723, 3.9510161553623666, 3.238429856855247, 4.761586722652081, 3.614712181381168, 2.0699796363949226, 0.8352950896672816, 0.0), # 131
(12.265954652584163, 9.139589786882611, 10.32355404421337, 10.810495392825016, 9.49699197095801, 4.525010069718451, 3.9331106146001082, 3.4288680584824593, 4.917598172421039, 1.9587385205068681, 1.5290015573696185, 0.9078730542624567, 0.0, 12.711632614982527, 9.986603596887022, 7.645007786848092, 5.876215561520603, 9.835196344842078, 4.800415281875443, 3.9331106146001082, 3.2321500497988938, 4.748495985479005, 3.6034984642750065, 2.0647108088426744, 0.8308717988075103, 0.0), # 132
(12.21468303152113, 9.090503102262165, 10.296642914689816, 10.776209367435175, 9.470114623166108, 4.516060882946651, 3.915016571211893, 3.4231740210944106, 4.909604369552646, 1.9515101432317519, 1.5236891236537742, 0.904906877396386, 0.0, 12.674876579855821, 9.953975651360244, 7.618445618268871, 5.854530429695254, 9.819208739105292, 4.792443629532175, 3.915016571211893, 3.2257577735333225, 4.735057311583054, 3.5920697891450595, 2.059328582937963, 0.8264093729329243, 0.0), # 133
(12.162342344054133, 9.040908441004726, 10.26911881352444, 10.741222834838059, 9.442513286422153, 4.5069316961508425, 3.896702195167445, 3.4174510154918845, 4.90145330068946, 1.9441783127406937, 1.518299289704792, 0.9018861866417278, 0.0, 12.637174321135817, 9.920748053059004, 7.5914964485239596, 5.83253493822208, 9.80290660137892, 4.784431421688638, 3.896702195167445, 3.21923692582203, 4.721256643211077, 3.5804076116126873, 2.053823762704888, 0.8219007673640661, 0.0), # 134
(12.108881678095097, 8.990728311636257, 10.24093576083773, 10.705480161897759, 9.414159845219846, 4.4975999661999175, 3.8781356564364877, 3.4116719515568206, 4.893121958745757, 1.9367283785975222, 1.5128207393639534, 0.898805618790433, 0.0, 12.59848342779883, 9.88686180669476, 7.5641036968197675, 5.810185135792565, 9.786243917491515, 4.776340732179549, 3.8781356564364877, 3.212571404428512, 4.707079922609923, 3.5684933872992537, 2.048187152167546, 0.817338937421478, 0.0), # 135
(12.05425012155593, 8.93988522268272, 10.212047776750177, 10.668925715478352, 9.385026184052883, 4.488043149962771, 3.8592851249887445, 3.4058097391711617, 4.884587336635816, 1.9291456903660635, 1.5072421564725416, 0.8956598106344515, 0.0, 12.558761488821151, 9.852257916978965, 7.536210782362707, 5.787437071098189, 9.769174673271632, 4.768133634839627, 3.8592851249887445, 3.205745107116265, 4.6925130920264415, 3.556308571826118, 2.042409555350036, 0.812716838425702, 0.0), # 136
(11.998396762348548, 8.888301682670086, 10.18240888138228, 10.631503862443932, 9.355084187414965, 4.478238704308296, 3.8401187707939393, 3.399837288216851, 4.875826427273916, 1.9214155976101461, 1.5015522248718383, 0.8924433989657341, 0.0, 12.517966093179089, 9.816877388623073, 7.507761124359191, 5.764246792830437, 9.751652854547832, 4.759772203503592, 3.8401187707939393, 3.1987419316487826, 4.6775420937074825, 3.543834620814645, 2.036481776276456, 0.8080274256972807, 0.0), # 137
(11.941270688384867, 8.835900200124316, 10.15197309485452, 10.593158969658578, 9.32430573979979, 4.4681640861053875, 3.8206047638217933, 3.393727508575828, 4.8668162235743315, 1.913523449893597, 1.4957396284031257, 0.889151020576231, 0.0, 12.476054829848946, 9.78066122633854, 7.478698142015627, 5.740570349680789, 9.733632447148663, 4.751218512006159, 3.8206047638217933, 3.1915457757895624, 4.662152869899895, 3.5310529898861933, 2.0303946189709046, 0.8032636545567561, 0.0), # 138
(11.882820987576796, 8.782603283571376, 10.120694437287398, 10.553835403986378, 9.292662725701055, 4.457796752222938, 3.800711274042032, 3.3874533101300353, 4.85753371845134, 1.9054545967802445, 1.4897930509076862, 0.8857773122578926, 0.0, 12.432985287807028, 9.743550434836816, 7.448965254538431, 5.716363790340733, 9.71506743690268, 4.742434634182049, 3.800711274042032, 3.184140537302099, 4.646331362850527, 3.517945134662127, 2.0241388874574797, 0.7984184803246707, 0.0), # 139
(11.822996747836257, 8.72833344153723, 10.088526928801404, 10.513477532291418, 9.26012702961246, 4.447114159529844, 3.780406471424378, 3.3809876027614147, 4.847955904819222, 1.8971943878339157, 1.4837011762268022, 0.8823169108026693, 0.0, 12.38871505602964, 9.70548601882936, 7.41850588113401, 5.691583163501746, 9.695911809638444, 4.733382643865981, 3.780406471424378, 3.176510113949888, 4.63006351480623, 3.5044925107638067, 2.017705385760281, 0.7934848583215663, 0.0), # 140
(11.761747057075162, 8.673013182547843, 10.055424589517022, 10.472029721437782, 9.226670536027703, 4.436093764894997, 3.7596585259385567, 3.374303296351908, 4.838059775592251, 1.8887281726184386, 1.477452688201756, 0.8787644530025115, 0.0, 12.34320172349308, 9.666408983027624, 7.38726344100878, 5.6661845178553145, 9.676119551184502, 4.724024614892672, 3.7596585259385567, 3.168638403496426, 4.613335268013851, 3.490676573812595, 2.0110849179034047, 0.7884557438679859, 0.0), # 141
(11.69902100320542, 8.616565015129181, 10.02134143955475, 10.429436338289557, 9.192265129440482, 4.424713025187291, 3.7384356075542886, 3.367373300783457, 4.827822323684707, 1.8800413006976404, 1.4710362706738296, 0.8751145756493696, 0.0, 12.296402879173653, 9.626260332143064, 7.355181353369148, 5.64012390209292, 9.655644647369414, 4.71432262109684, 3.7384356075542886, 3.160509303705208, 4.596132564720241, 3.4764787794298533, 2.0042682879109504, 0.7833240922844712, 0.0), # 142
(11.634767674138946, 8.558911447807208, 9.986231499035082, 10.385641749710825, 9.156882694344494, 4.412949397275621, 3.7167058862412983, 3.360170525938002, 4.817220542010869, 1.871119121635349, 1.4644406074843055, 0.8713619155351939, 0.0, 12.248276112047666, 9.584981070887132, 7.322203037421526, 5.6133573649060455, 9.634441084021738, 4.704238736313203, 3.7167058862412983, 3.1521067123397293, 4.578441347172247, 3.4618805832369426, 1.9972462998070164, 0.7780828588915646, 0.0), # 143
(11.56893615778766, 8.499974989107892, 9.950048788078501, 10.340590322565676, 9.12049511523344, 4.400780338028881, 3.6944375319693092, 3.3526678816974873, 4.806231423485011, 1.8619469849953916, 1.4576543824744654, 0.867501109451935, 0.0, 12.198779011091421, 9.542512203971285, 7.288271912372326, 5.585840954986173, 9.612462846970022, 4.693735034376482, 3.6944375319693092, 3.1434145271634857, 4.56024755761672, 3.446863440855226, 1.9900097576157, 0.7727249990098085, 0.0), # 144
(11.501475542063469, 8.439678147557194, 9.912747326805505, 10.294226423718191, 9.083074276601018, 4.388183304315964, 3.6715987147080456, 3.344838277943853, 4.794831961021412, 1.8525102403415963, 1.4506662794855925, 0.8635267941915434, 0.0, 12.14786916528122, 9.498794736106976, 7.253331397427962, 5.557530721024787, 9.589663922042824, 4.682773589121394, 3.6715987147080456, 3.1344166459399743, 4.541537138300509, 3.4314088079060645, 1.9825494653611013, 0.7672434679597451, 0.0), # 145
(11.432334914878291, 8.377943431681082, 9.874281135336586, 10.246494420032459, 9.044592062940927, 4.375135753005765, 3.6481576044272312, 3.336654624559041, 4.782999147534349, 1.8427942372377903, 1.4434649823589683, 0.8594336065459691, 0.0, 12.095504163593366, 9.453769672005658, 7.21732491179484, 5.52838271171337, 9.565998295068699, 4.671316474382658, 3.6481576044272312, 3.125096966432689, 4.522296031470463, 3.41549814001082, 1.9748562270673173, 0.7616312210619166, 0.0), # 146
(11.361463364144042, 8.314693350005518, 9.83460423379223, 10.19733867837256, 9.005020358746862, 4.361615140967176, 3.6240823710965873, 3.3280898314249927, 4.770709975938102, 1.8327843252478015, 1.4360391749358754, 0.855216183307163, 0.0, 12.041641595004167, 9.407378016378791, 7.180195874679377, 5.498352975743403, 9.541419951876204, 4.65932576399499, 3.6240823710965873, 3.1154393864051255, 4.502510179373431, 3.3991128927908543, 1.966920846758446, 0.7558812136368653, 0.0), # 147
(11.288809977772631, 8.24985041105647, 9.793670642292932, 10.146703565602587, 8.964331048512523, 4.347598925069094, 3.599341184685839, 3.3191168084236504, 4.757941439146947, 1.822465853935457, 1.428377541057596, 0.8508691612670749, 0.0, 11.986239048489919, 9.359560773937822, 7.141887705287981, 5.4673975618063695, 9.515882878293894, 4.646763531793111, 3.599341184685839, 3.105427803620781, 4.482165524256262, 3.38223452186753, 1.9587341284585866, 0.7499864010051337, 0.0), # 148
(11.214323843675977, 8.1833371233599, 9.751434380959186, 10.094533448586619, 8.922496016731612, 4.33306456218041, 3.573902215164709, 3.3097084654369557, 4.744670530075158, 1.8118241728645852, 1.4204687645654126, 0.8463871772176558, 0.0, 11.929254113026934, 9.310258949394212, 7.102343822827062, 5.4354725185937545, 9.489341060150316, 4.6335918516117385, 3.573902215164709, 3.09504611584315, 4.461248008365806, 3.3648444828622073, 1.950286876191837, 0.7439397384872637, 0.0), # 149
(11.137954049765991, 8.115075995441773, 9.707849469911476, 10.040772694188746, 8.879487147897825, 4.317989509170021, 3.5477336325029207, 3.29983771234685, 4.730874241637018, 1.8008446315990123, 1.412301529300607, 0.8417648679508558, 0.0, 11.870644377591507, 9.259413547459413, 7.061507646503035, 5.402533894797036, 9.461748483274036, 4.61977279728559, 3.5477336325029207, 3.084278220835729, 4.439743573948912, 3.3469242313962493, 1.9415698939822956, 0.7377341814037977, 0.0), # 150
(11.059649683954586, 8.044989535828057, 9.6628699292703, 9.985365669273047, 8.835276326504857, 4.302351222906816, 3.5208036066701984, 3.2894774590352758, 4.716529566746802, 1.789512579702568, 1.4038645191044614, 0.8369968702586252, 0.0, 11.810367431159946, 9.206965572844876, 7.019322595522306, 5.368537739107703, 9.433059133493604, 4.605268442649386, 3.5208036066701984, 3.0731080163620117, 4.417638163252429, 3.3284552230910167, 1.9325739858540603, 0.731362685075278, 0.0), # 151
(10.979359834153682, 7.973000253044715, 9.616449779156152, 9.928256740703617, 8.789835437046412, 4.286127160259694, 3.4930803076362653, 3.2786006153841747, 4.701613498318786, 1.7778133667390779, 1.3951464178182584, 0.8320778209329146, 0.0, 11.748380862708558, 9.15285603026206, 6.975732089091292, 5.333440100217232, 9.403226996637573, 4.590040861537845, 3.4930803076362653, 3.061519400185496, 4.394917718523206, 3.309418913567873, 1.9232899558312306, 0.7248182048222469, 0.0), # 152
(10.897033588275185, 7.899030655617714, 9.568543039689514, 9.86939027534453, 8.743136364016186, 4.269294778097547, 3.4645319053708437, 3.2671800912754865, 4.686103029267251, 1.7657323422723707, 1.3861359092832806, 0.8270023567656742, 0.0, 11.68464226121364, 9.097025924422415, 6.930679546416402, 5.297197026817111, 9.372206058534502, 4.574052127785681, 3.4645319053708437, 3.049496270069676, 4.371568182008093, 3.2897967584481775, 1.9137086079379029, 0.7180936959652467, 0.0), # 153
(10.81262003423102, 7.823003252073014, 9.519103730990887, 9.80871064005988, 8.695150991907875, 4.251831533289268, 3.43512656984366, 3.2551887965911552, 4.6699751525064706, 1.7532548558662742, 1.3768216773408095, 0.8217651145488547, 0.0, 11.6191092156515, 9.0394162600374, 6.884108386704048, 5.259764567598821, 9.339950305012941, 4.557264315227617, 3.43512656984366, 3.037022523778049, 4.347575495953937, 3.2695702133532945, 1.9038207461981775, 0.7111821138248196, 0.0), # 154
(10.72606825993309, 7.744840550936584, 9.468085873180756, 9.746162201713748, 8.645851205215184, 4.233714882703753, 3.404832471024433, 3.2425996412131215, 4.653206860950727, 1.7403662570846146, 1.3671924058321279, 0.8163607310744064, 0.0, 11.551739314998438, 8.97996804181847, 6.8359620291606396, 5.221098771253843, 9.306413721901453, 4.53963949769837, 3.404832471024433, 3.0240820590741087, 4.322925602607592, 3.2487207339045834, 1.8936171746361512, 0.7040764137215078, 0.0), # 155
(10.637327353293314, 7.664465060734389, 9.415443486379615, 9.68168932717022, 8.595208888431804, 4.214922283209894, 3.37361777888289, 3.2293855350233276, 4.635775147514292, 1.727051895491221, 1.357236778598518, 0.8107838431342794, 0.0, 11.48249014823076, 8.918622274477073, 6.7861838929925895, 5.181155686473662, 9.271550295028584, 4.521139749032659, 3.37361777888289, 3.0106587737213526, 4.297604444215902, 3.2272297757234076, 1.8830886972759233, 0.6967695509758537, 0.0), # 156
(10.546346402223609, 7.581799289992394, 9.361130590707957, 9.615236383293386, 8.543195926051439, 4.195431191676585, 3.3414506633887537, 3.215519387903715, 4.6176570051114485, 1.7132971206499201, 1.3469434794812618, 0.8050290875204243, 0.0, 11.411319304324769, 8.855319962724668, 6.734717397406309, 5.1398913619497595, 9.235314010222897, 4.501727143065201, 3.3414506633887537, 2.996736565483275, 4.2715979630257195, 3.205078794431129, 1.8722261181415913, 0.6892544809083996, 0.0), # 157
(10.450553324967336, 7.495248171657732, 9.302523946219415, 9.544258060733807, 8.48743569881293, 4.174003322325641, 3.3075747046495003, 3.200048222203801, 4.597442309412912, 1.698678070701901, 1.335972342259087, 0.7988866158226731, 0.0, 11.335080203181485, 8.787752774049402, 6.679861711295434, 5.096034212105701, 9.194884618825824, 4.480067511085322, 3.3075747046495003, 2.9814309445183147, 4.243717849406465, 3.1814193535779363, 1.8605047892438833, 0.6813861974234302, 0.0), # 158
(10.335201473769764, 7.395933826819331, 9.224527454803487, 9.454176016727876, 8.414178555796186, 4.143513212539135, 3.2677489343700015, 3.17754122744589, 4.566999388570334, 1.6807983479345614, 1.3223972849777657, 0.7911589610963629, 0.0, 11.235598705688274, 8.70274857205999, 6.611986424888827, 5.042395043803683, 9.133998777140668, 4.448557718424246, 3.2677489343700015, 2.9596522946708106, 4.207089277898093, 3.1513920055759597, 1.8449054909606977, 0.6723576206199392, 0.0), # 159
(10.198820932866035, 7.28304080162725, 9.125574450948537, 9.343506385929302, 8.321992122590341, 4.103212058438943, 3.221570623868649, 3.147432860557619, 4.525465106040038, 1.6594219781520132, 1.3060272186755595, 0.7817252273702489, 0.0, 11.110988852451014, 8.598977501072737, 6.530136093377798, 4.978265934456038, 9.050930212080075, 4.406406004780667, 3.221570623868649, 2.9308657560278157, 4.160996061295171, 3.114502128643102, 1.8251148901897079, 0.6620946183297501, 0.0), # 160
(10.042510876420344, 7.1573051140366015, 9.006721467228694, 9.213301128944565, 8.211833582663305, 4.053588080615757, 3.1693770122048135, 3.1101003109807053, 4.473387224599541, 1.6347303676098288, 1.2870063860732652, 0.77067287137255, 0.0, 10.962523662746737, 8.477401585098049, 6.435031930366326, 4.904191102829485, 8.946774449199083, 4.354140435372988, 3.1693770122048135, 2.8954200575826836, 4.105916791331652, 3.071100376314856, 1.801344293445739, 0.6506641012760548, 0.0), # 161
(9.8673704785969, 7.01946278200249, 8.86902503621808, 9.064612206380144, 8.08466011948299, 3.9951294996602726, 3.1115053384378664, 3.0659207681568685, 4.411313507026364, 1.6069049225635816, 1.2654790298916783, 0.7580893498314843, 0.0, 10.791476155852466, 8.338982848146326, 6.3273951494583915, 4.820714767690744, 8.822627014052728, 4.292289075419616, 3.1115053384378664, 2.8536639283287664, 4.042330059741495, 3.0215374021267154, 1.773805007243616, 0.6381329801820447, 0.0), # 162
(9.674498913559898, 6.870249823480022, 8.71354169049082, 8.898491578842531, 7.941428916517308, 3.928324536163185, 3.048292841627181, 3.015271421527823, 4.339791716098023, 1.5761270492688444, 1.2415893928515955, 0.7440621194752707, 0.0, 10.599119351045232, 8.184683314227977, 6.207946964257977, 4.728381147806532, 8.679583432196045, 4.221379990138953, 3.048292841627181, 2.8059460972594175, 3.970714458258654, 2.9661638596141775, 1.742708338098164, 0.6245681657709112, 0.0), # 163
(9.464995355473539, 6.710402256424303, 8.54132796262104, 8.71599120693821, 7.783097157234176, 3.853661410715189, 2.9800767608321266, 2.9585294605352903, 4.259369614592037, 1.5425781539811894, 1.2154817176738126, 0.7286786370321272, 0.0, 10.386726267602059, 8.015465007353399, 6.077408588369063, 4.627734461943566, 8.518739229184074, 4.141941244749407, 2.9800767608321266, 2.752615293367992, 3.891548578617088, 2.905330402312737, 1.7082655925242083, 0.6100365687658459, 0.0), # 164
(9.239958978502024, 6.5406560987904445, 8.353440385182864, 8.518163051273666, 7.610622025101502, 3.771628343906979, 2.9071943351120755, 2.8960720746209856, 4.1705949652859235, 1.5064396429561904, 1.1873002470791263, 0.7120263592302724, 0.0, 10.155569924799979, 7.832289951532995, 5.936501235395631, 4.51931892886857, 8.341189930571847, 4.05450090446938, 2.9071943351120755, 2.694020245647842, 3.805311012550751, 2.839387683757889, 1.670688077036573, 0.5946050998900405, 0.0), # 165
(9.000488956809557, 6.361747368533551, 8.150935490750417, 8.306059072455376, 7.4249607035872005, 3.682713556329251, 2.8299828035264003, 2.8282764532266285, 4.074015530957201, 1.4678929224494195, 1.157189223788332, 0.6941927427979253, 0.0, 9.906923341916015, 7.636120170777177, 5.78594611894166, 4.403678767348258, 8.148031061914402, 3.95958703451728, 2.8299828035264003, 2.630509683092322, 3.7124803517936003, 2.768686357485126, 1.6301870981500834, 0.5783406698666865, 0.0), # 166
(8.747684464560333, 6.174412083608727, 7.934869811897824, 8.080731231089835, 7.2270703761591815, 3.5874052685726983, 2.7487794051344725, 2.7555197857939366, 3.9701790743833865, 1.4271193987164503, 1.1252928905222266, 0.6752652444633036, 0.0, 9.642059538227196, 7.427917689096338, 5.626464452611132, 4.28135819614935, 7.940358148766773, 3.8577277001115116, 2.7487794051344725, 2.562432334694784, 3.6135351880795907, 2.693577077029946, 1.5869739623795647, 0.5613101894189753, 0.0), # 167
(8.482644675918554, 5.979386261971081, 7.706299881199207, 7.843231487783524, 7.017908226285359, 3.4861917012280164, 2.663921378995663, 2.6781792617646265, 3.8596333583419993, 1.3843004780128556, 1.0917554900016058, 0.6553313209546264, 0.0, 9.362251533010546, 7.20864453050089, 5.458777450008029, 4.152901434038566, 7.7192667166839986, 3.7494509664704774, 2.663921378995663, 2.490136929448583, 3.5089541131426794, 2.614410495927842, 1.5412599762398416, 0.5435805692700985, 0.0), # 168
(8.206468765048422, 5.777405921575724, 7.466282231228694, 7.594611803142927, 6.798431437433646, 3.3795610748859013, 2.5757459641693443, 2.5966320705804184, 3.7429261456105576, 1.339617566594208, 1.0567212649472661, 0.6344784290001119, 0.0, 9.0687723455431, 6.9792627190012295, 5.28360632473633, 4.018852699782624, 7.485852291221115, 3.635284898812586, 2.5757459641693443, 2.413972196347072, 3.399215718716823, 2.5315372677143095, 1.493256446245739, 0.5252187201432478, 0.0), # 169
(7.9202559061141375, 5.569207080377758, 7.215873394560408, 7.335924137774526, 6.569597193071951, 3.268001610137046, 2.4845903997148873, 2.5112554016830275, 3.620605198966578, 1.2932520707160806, 1.020334458080004, 0.6127940253279787, 0.0, 8.762894995101878, 6.740734278607764, 5.101672290400019, 3.879756212148241, 7.241210397933156, 3.5157575623562387, 2.4845903997148873, 2.3342868643836043, 3.2847985965359756, 2.4453080459248424, 1.4431746789120816, 0.5062915527616144, 0.0), # 170
(7.6251052732799005, 5.355525756332291, 6.956129903768475, 7.068220452284813, 6.3323626766681915, 3.152001527572146, 2.390791924691664, 2.4224264445141737, 3.4932182811875796, 1.2453853966340462, 0.9827393121206148, 0.5903655666664452, 0.0, 8.445892500963913, 6.494021233330896, 4.913696560603074, 3.736156189902138, 6.986436562375159, 3.3913970223198433, 2.390791924691664, 2.2514296625515327, 3.1661813383340958, 2.356073484094938, 1.391225980753695, 0.4868659778483902, 0.0), # 171
(7.322116040709912, 5.137097967394431, 6.688108291427019, 6.792552707280267, 6.087685071690277, 3.0320490477818964, 2.2946877781590462, 2.3305223885155746, 3.3613131550510804, 1.1961989506036783, 0.9440800697898953, 0.56728050974373, 0.0, 8.119037882406225, 6.24008560718103, 4.720400348949476, 3.588596851811034, 6.722626310102161, 3.2627313439218044, 2.2946877781590462, 2.165749319844212, 3.0438425358451386, 2.2641842357600894, 1.337621658285404, 0.4670089061267665, 0.0), # 172
(7.012387382568372, 4.914659731519285, 6.412865090110164, 6.509972863367375, 5.836521561606121, 2.9086323913569916, 2.196615199176405, 2.235920423128947, 3.225437583334597, 1.145874138880549, 0.9045009738086416, 0.5436263112880514, 0.0, 7.783604158705848, 5.979889424168563, 4.522504869043208, 3.437622416641646, 6.450875166669194, 3.130288592380526, 2.196615199176405, 2.077594565254994, 2.9182607808030605, 2.169990954455792, 1.282573018022033, 0.446787248319935, 0.0), # 173
(6.697018473019482, 4.6889470666619575, 6.131456832392036, 6.221532881152618, 5.579829329883635, 2.7822397788881266, 2.096911426803113, 2.1389977377960108, 3.08613932881565, 1.0945923677202316, 0.8641462668976501, 0.519490428027628, 0.0, 7.440864349139807, 5.7143947083039075, 4.32073133448825, 3.283777103160694, 6.1722786576313, 2.994596832914415, 2.096911426803113, 1.9873141277772333, 2.7899146649418176, 2.07384429371754, 1.2262913664784072, 0.42626791515108714, 0.0), # 174
(6.377108486227438, 4.460695990777558, 5.84494005084676, 5.928284721242486, 5.318565559990731, 2.653359430965997, 1.9959137000985407, 2.040131521958481, 2.943966154271756, 1.0425350433782987, 0.8231601917777163, 0.49496031669067847, 0.0, 7.092091472985131, 5.444563483597462, 4.115800958888581, 3.1276051301348957, 5.887932308543512, 2.8561841307418736, 1.9959137000985407, 1.8952567364042836, 2.6592827799953653, 1.9760949070808291, 1.1689880101693522, 0.40551781734341447, 0.0), # 175
(6.053756596356447, 4.230642521821194, 5.554371278048459, 5.631280344243462, 5.053687435395322, 2.5224795681812964, 1.8939592581220606, 1.9396989650580787, 2.7994658224804327, 0.9898835721103237, 0.781686991169637, 0.470123434005421, 0.0, 6.738558549518844, 5.17135777405963, 3.9084349558481852, 2.9696507163309707, 5.5989316449608655, 2.71557855108131, 1.8939592581220606, 1.8017711201294973, 2.526843717697661, 1.8770934480811543, 1.1108742556096918, 0.38460386562010856, 0.0), # 176
(5.7280619775707065, 3.9995226777479713, 5.260807046571258, 5.331571710762027, 4.786152139565322, 2.3900884111247205, 1.791385339933044, 1.8380772565365193, 2.6531860962191995, 0.9368193601718788, 0.7398709077942084, 0.4450672367000743, 0.0, 6.381538598017975, 4.895739603700816, 3.699354538971042, 2.8104580805156356, 5.306372192438399, 2.5733081591511273, 1.791385339933044, 1.707206007946229, 2.393076069782661, 1.7771905702540096, 1.0521614093142517, 0.3635929707043611, 0.0), # 177
(5.401123804034416, 3.7680724765129963, 4.9653038889892835, 5.030210781404673, 4.516916855968639, 2.2566741803869648, 1.6885291845908623, 1.7356435858355217, 2.505674738265573, 0.8835238138185378, 0.6978561843722264, 0.41987918150285664, 0.0, 6.022304637759553, 4.618670996531422, 3.489280921861132, 2.6505714414556127, 5.011349476531146, 2.4299010201697304, 1.6885291845908623, 1.611910128847832, 2.2584584279843196, 1.6767369271348913, 0.9930607777978567, 0.34255204331936334, 0.0), # 178
(0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0), # 179
)
passenger_allighting_rate = (
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 0
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 1
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 2
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 3
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 4
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 5
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 6
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 7
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 8
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 9
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 10
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 11
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 12
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 13
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 14
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 15
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 16
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 17
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 18
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 19
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 20
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 21
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 22
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 23
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 24
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 25
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 26
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 27
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 28
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 29
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 30
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 31
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 32
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 33
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 34
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 35
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 36
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 37
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 38
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 39
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 40
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 41
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 42
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 43
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 44
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 45
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 46
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 47
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 48
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 49
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 50
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 51
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 52
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 53
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 54
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 55
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 56
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 57
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 58
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 59
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 60
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 61
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 62
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 63
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 64
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 65
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 66
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 67
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 68
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 69
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 70
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 71
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 72
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 73
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 74
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 75
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 76
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 77
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 78
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 79
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 80
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 81
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 82
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 83
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 84
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 85
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 86
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 87
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 88
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 89
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 90
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 91
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 92
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 93
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 94
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 95
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 96
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 97
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 98
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 99
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 100
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 101
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 102
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 103
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 104
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 105
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 106
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 107
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 108
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 109
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 110
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 111
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 112
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 113
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 114
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 115
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 116
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 117
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 118
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 119
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 120
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 121
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 122
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 123
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 124
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 125
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 126
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 127
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 128
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 129
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 130
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 131
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 132
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 133
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 134
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 135
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 136
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 137
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 138
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 139
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 140
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 141
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 142
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 143
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 144
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 145
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 146
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 147
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 148
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 149
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 150
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 151
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 152
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 153
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 154
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 155
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 156
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 157
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 158
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 159
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 160
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 161
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 162
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 163
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 164
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 165
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 166
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 167
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 168
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 169
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 170
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 171
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 172
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 173
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 174
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 175
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 176
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 177
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 178
(0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1, 0, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 0.07692307692307693, 1), # 179
)
"""
parameters for reproducibiliy. More information: https://numpy.org/doc/stable/reference/random/parallel.html
"""
#initial entropy
entropy = 8991598675325360468762009371570610170
#index for seed sequence child
child_seed_index = (
1, # 0
47, # 1
)
| |
add_tp_seqs.py
|
import sys
import os
import pandas as pd
import csv
import argparse
from collections import OrderedDict
from io import StringIO
def main(argv):
args = parse_arguments(argv)
out = args.out_path
file1 = args.file1
file2 = args.file2
file3 = args.file3
ddf1 = addSeqs(file1,file2)
ddf2 = removeSeqs(ddf1,file3)
with open(out, 'w+') as output:
for row in ddf2:
print("\t".join(row))
output.write("\t".join(row)+"\n")
def parse_arguments(argv):
parser = argparse.ArgumentParser(
prog = 'mapping.py',
description = 'A program to map two files (csv of txt) to each other')
parser.add_argument(
'-f1', '--file1',
help = 'Enter first file.',
required = True
)
parser.add_argument(
'-f2', '--file2',
help = 'Enter fplist file.',
required = True
)
parser.add_argument(
'-f3', '--file3',
help = 'Enter nhlist file.',
required = True
)
parser.add_argument(
'-o', '-outpath',
dest = 'out_path',
help = 'Enter path to dropped seqs file'
)
return parser.parse_args()
def addSeqs(file1, file2):
df1 = pd.read_table(file1, sep="\t", names=['seq_name','dbID'])
df2 = pd.read_table(file2, sep="\t", skiprows=2, usecols=[0,2], names=['seq_name','dbID'])
ddf = pd.concat([df1,df2])
ddf = ddf.groupby('seq_name')['dbID'].apply(list).map(set).str.join('|')
ddf = ddf.reset_index()
print(ddf.head())
return ddf
def removeSeqs(ddf1, file3):
|
if __name__=="__main__":
main(sys.argv[1:])
|
data = ddf1.values.tolist()
nhlist = []
with open(file3) as f3:
reader = csv.reader(f3, delimiter='\t')
next(reader, None)
next(reader, None)
for row in reader:
# print(row)
nhlist.append(row)
ddf2 = []
for row in data:
if row[1] != None:
rfids = str(row[1]).split("|")
else:
rfids = []
for seq in nhlist:
id = seq[2]
if row[0] == seq[0]:
for rfid in rfids:
if id == rfid:
rfids.remove(id)
array = [row[0],"|".join(rfids)]
ddf2.append(array)
return ddf2
|
helpers.py
|
import requests
import lxml
from lxml.html.clean import Cleaner
from django.http import Http404, HttpResponseForbidden
from django.conf import settings
from django.urls import reverse
from django.template.loader import render_to_string
from django.utils.html import strip_tags
from django.core.mail import get_connection, EmailMultiAlternatives, EmailMessage
from django.utils.translation import ugettext as _
from django.contrib.auth.models import User
# Remove all moderated instances of Petition
def remove_user_moderated(petitions):
petitions = [p for p in petitions if not p.is_moderated]
return petitions
# Remove all javascripts from HTML code
def sanitize_html(unsecure_html_content):
cleaner = Cleaner(inline_style=False, scripts=True, javascript=True,
safe_attrs=lxml.html.defs.safe_attrs | set(['style']),
frames=False, embedded=False,
meta=True, links=True, page_structure=True, remove_tags=['body'])
try:
secure_html_content = lxml.html.tostring(cleaner.clean_html(lxml.html.fromstring(unsecure_html_content)), method="html")
except:
secure_html_content = b''
return secure_html_content.decode()
# Get the client IP address, considering proxies and RP
def get_client_ip(request):
x_forwarded_for = request.META.get('HTTP_X_FORWARDED_FOR')
if x_forwarded_for:
ip = x_forwarded_for.split(',')[0]
else:
ip = request.META.get('REMOTE_ADDR')
return ip
# Get the user of the current session
def get_session_user(request):
from .models import PytitionUser
try:
pytitionuser = PytitionUser.objects.get(user__username=request.user.username)
except User.DoesNotExist:
raise Http404(_("not found"))
return pytitionuser
# Check if an user is in an organization
# FIXME : move this as an org method ?
def check_user_in_orga(user, orga):
if orga not in user.organizations.all():
return HttpResponseForbidden(_("You are not part of this organization"))
return None
# Return a 404 if a petition does not exist
def petition_from_id(id):
from .models import Petition
petition = Petition.by_id(id)
if petition is None:
raise Http404(_("Petition does not exist"))
else:
return petition
# Check if a petition is publicly accessible
def check_petition_is_accessible(request, petition):
if petition.published and not petition.moderated:
return True
if request.user.is_authenticated:
user = get_session_user(request)
if petition.owner_type == "user" and user == petition.owner:
return True
if petition.owner_type == "orga" and user in petition.owner.members:
return True
if petition.moderated:
raise Http404(_("This Petition has been moderated!"))
if not petition.published:
raise Http404(_("This Petition is not published yet!"))
# Get settings
def settings_context_processor(request):
return {'settings': settings}
# Get footer content
def footer_content_processor(request):
|
# Send Confirmation email
def send_confirmation_email(request, signature):
petition = signature.petition
url = request.build_absolute_uri("/petition/{}/confirm/{}".format(petition.id, signature.confirmation_hash))
html_message = render_to_string("petition/confirmation_email.html", {'firstname': signature.first_name, 'url': url})
message = strip_tags(html_message)
with get_connection() as connection:
msg = EmailMultiAlternatives(_("Confirm your signature to our petition"),
message, to=[signature.email], connection=connection,
reply_to=[petition.confirmation_email_reply])
msg.attach_alternative(html_message, "text/html")
msg.send(fail_silently=False)
# Send welcome mail on account creation
def send_welcome_mail(user_infos):
html_message = render_to_string("registration/confirmation_email.html", user_infos)
message = strip_tags(html_message)
with get_connection() as connection:
msg = EmailMultiAlternatives(_("Account created !"),
message, to=[user_infos["email"]], connection=connection,
reply_to=[settings.DEFAULT_NOREPLY_MAIL])
msg.attach_alternative(html_message, "text/html")
msg.send(fail_silently=False)
# Generate a meta url for the HTML meta property
def petition_detail_meta(request, petition_id):
url = "{scheme}://{host}{petition_path}".format(
scheme=request.scheme,
host=request.get_host(),
petition_path=reverse('detail', args=[petition_id]))
return {'site_url': request.get_host(), 'petition_url': url}
def subscribe_to_newsletter(petition, email):
if petition.newsletter_subscribe_method in ["POST", "GET"]:
if petition.newsletter_subscribe_http_url == '':
return
data = petition.newsletter_subscribe_http_data
if data == '' or data is None:
data = {}
else:
import json
data = data.replace("'", "\"")
data = json.loads(data)
if petition.newsletter_subscribe_http_mailfield != '':
data[petition.newsletter_subscribe_http_mailfield] = email
if petition.newsletter_subscribe_method == "POST":
requests.post(petition.newsletter_subscribe_http_url, data)
elif petition.newsletter_subscribe_method == "GET":
requests.get(petition.newsletter_subscribe_http_url, data)
elif petition.newsletter_subscribe_method == "MAIL":
with get_connection(host=petition.newsletter_subscribe_mail_smtp_host,
port=petition.newsletter_subscribe_mail_smtp_port,
username=petition.newsletter_subscribe_mail_smtp_user,
password=petition.newsletter_subscribe_mail_smtp_password,
use_ssl=petition.newsletter_subscribe_mail_smtp_tls,
use_tls=petition.newsletter_subscribe_mail_smtp_starttls) as connection:
EmailMessage(petition.newsletter_subscribe_mail_subject.format(email), "",
petition.newsletter_subscribe_mail_from, [petition.newsletter_subscribe_mail_to],
connection=connection).send(fail_silently=True)
def get_update_form(user, data=None):
from .forms import UpdateInfoForm
if not data:
_data = {
'first_name': user.first_name,
'last_name': user.last_name,
'email': user.email
}
else:
_data = data
return UpdateInfoForm(user, _data)
|
footer_content = None
if settings.FOOTER_TEMPLATE:
footer_content = render_to_string(settings.FOOTER_TEMPLATE)
return {'footer_content': footer_content}
|
issue-28992-empty.rs
|
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Can't use constants as tuple struct patterns
#![feature(associated_consts)]
const C1: i32 = 0;
struct S;
impl S {
const C2: i32 = 0;
}
fn main() {
if let C1(..) = 0 {} //~ ERROR `C1` does not name a tuple variant or a tuple struct
if let S::C2(..) = 0
|
//~ ERROR `S::C2` does not name a tuple variant or a tuple struct
}
|
{}
|
cancel_upload_file.py
|
from ..utils import Object
|
Stops the uploading of a file. Supported only for files uploaded by using uploadFile. For other files the behavior is undefined
Attributes:
ID (:obj:`str`): ``CancelUploadFile``
Args:
file_id (:obj:`int`):
Identifier of the file to stop uploading
Returns:
Ok
Raises:
:class:`telegram.Error`
"""
ID = "cancelUploadFile"
def __init__(self, file_id, extra=None, **kwargs):
self.extra = extra
self.file_id = file_id # int
@staticmethod
def read(q: dict, *args) -> "CancelUploadFile":
file_id = q.get('file_id')
return CancelUploadFile(file_id)
|
class CancelUploadFile(Object):
"""
|
app.js
|
function
|
(textbox) {
var Precio = document.getElementByName('precioCompra').value;
var iva = document.getElementById('iva').value;
var gano = document.getElementById('ganancia').value ;
var pre = document.getElementById("garca").value = Precio * iva /100 ;
var renta = document.getElementById("garca").value = Precio * gano /100 ;
var final = Number(Precio) + Number(pre) + Number(renta);
document.getElementById("garca").value = final ;
}
|
checkInput
|
pretrain.py
|
import os, sys
import math
import hydra
import torch
import timm
from hydra.utils import instantiate
from timm.loss import LabelSmoothingCrossEntropy, SoftTargetCrossEntropy
from timm.utils import NativeScaler
import models
from data import create_dataloader
from utils import MetricLogger, SmoothedValue
from utils import fix_random_seed
@hydra.main(config_path='./configs', config_name='pretrain')
def
|
(cfg):
if cfg.seed is not None:
fix_random_seed(cfg.seed)
torch.backends.cudnn.benchmark = True
# dataloader
trainloader, num_classes = create_dataloader(cfg.data)
# additional data augmentation (mixup/cutmix)
mixup_fn = None
mixup_enable = (cfg.data.mixup.mixup_alpha > 0.) or (cfg.data.mixup.cutmix_alpha > 0.)
if mixup_enable:
mixup_fn = instantiate(cfg.data.mixup, num_classes=num_classes)
print(f'MixUp/Cutmix was enabled\n')
# create model
model = instantiate(cfg.model, num_classes=num_classes)
print(f'Model[{cfg.model.model_name}] was created')
# wrap model with DP
model = torch.nn.parallel.DataParallel(model)
model.cuda()
model_without_dp = model.module
# optimizer
scaled_lr = cfg.optim.args.lr * cfg.data.loader.batch_size / 512.0
cfg.optim.args.lr = scaled_lr
optimizer = instantiate(cfg.optim, model=model)
print(f'Optimizer: \n{optimizer}\n')
# scheduler
lr_scheduler, _ = instantiate(cfg.scheduler, optimizer=optimizer)
print(f'Scheduler: \n{lr_scheduler}\n')
# criterion
if cfg.data.mixup.mixup_alpha > 0.:
criterion = SoftTargetCrossEntropy().cuda()
print('SoftTargetCrossEntropy is used for criterion\n')
elif cfg.data.mixup.label_smoothing > 0.:
criterion = LabelSmoothingCrossEntropy(cfg.data.mixup.label_smoothing).cuda()
print('LabelSmoothingCrossEntropy is used for criterion\n')
else:
criterion = torch.nn.CrossEntropyLoss().cuda()
print('CrossEntropyLoss is used for criterion\n')
loss_scaler = NativeScaler()
# load resume
start_epoch = 1
if cfg.resume is not None:
checkpoint = torch.load(cfg.resume, map_location='cpu')
model_without_dp.load_state_dict(checkpoint['model'])
optimizer.load_state_dict(checkpoint['optimizer'])
lr_scheduler.load_state_dict(checkpoint['lr_scheduler'])
loss_scaler.load_state_dict(checkpoint['scaler'])
start_epoch = checkpoint['epoch'] + 1
print(f'Resume was loaded from {cfg.resume}\n')
print(f'Start training for {cfg.epochs} epochs')
for epoch in range(start_epoch, cfg.epochs + 1):
# train one epoch
model.train()
metric_logger = MetricLogger(delimiter=' ')
metric_logger.add_meter('lr', SmoothedValue(window_size=1, fmt='{value:.6f}'))
header = f'Epoch: [{epoch:03}/{cfg.epochs:03}]'
for data in metric_logger.log_every(trainloader, cfg.print_iter_freq, header):
images = data[0].cuda(non_blocking=True)
labels = data[1].cuda(non_blocking=True)
if mixup_fn is not None:
images, labels = mixup_fn(images, labels)
with torch.cuda.amp.autocast():
outputs = model(images)
loss = criterion(outputs, labels)
loss_value = loss.item()
if not math.isfinite(loss_value):
print(f'Loss is {loss_value}, stopping training')
sys.exit(1)
optimizer.zero_grad()
is_second_order = (hasattr(optimizer, 'is_second_order')) and (optimizer.is_second_order)
loss_scaler(
loss=loss,
optimizer=optimizer,
parameters=model.parameters(),
create_graph=is_second_order
)
torch.cuda.synchronize()
metric_logger.update(loss=loss_value)
metric_logger.update(lr=optimizer.param_groups[0]['lr'])
# gather the stats from all process
metric_logger.synchronize_between_processes()
print(f'Averaged stats: {metric_logger}')
lr_scheduler.step(epoch)
if epoch % cfg.save_epoch_freq == 0:
save_path = f'{os.getcwd()}/{cfg.model.model_name}_{cfg.data.name}_{epoch:03}ep.pth'
torch.save({
'model': model_without_dp.state_dict(),
'optimizer': optimizer.state_dict(),
'lr_scheduler': lr_scheduler.state_dict(),
'scaler': loss_scaler.state_dict(),
'epoch': epoch
}, save_path)
save_path = f'{os.getcwd()}/{cfg.model.model_name}_{cfg.data.name}_{epoch:03}ep.pth'
torch.save({
'model': model_without_dp.state_dict(),
'optimizer': optimizer.state_dict(),
'lr_scheduler': lr_scheduler.state_dict(),
'scaler': loss_scaler.state_dict(),
'epoch': epoch
}, save_path)
if __name__ == '__main__':
main()
|
main
|
test_rpcapi.py
|
# Copyright 2012, Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Unit Tests for patron.cert.rpcapi
"""
import contextlib
import mock
from oslo_config import cfg
from patron.cert import rpcapi as cert_rpcapi
from patron import context
from patron import test
CONF = cfg.CONF
class CertRpcAPITestCase(test.NoDBTestCase):
def _test_cert_api(self, method, **kwargs):
ctxt = context.RequestContext('fake_user', 'fake_project')
rpcapi = cert_rpcapi.CertAPI()
self.assertIsNotNone(rpcapi.client)
self.assertEqual(rpcapi.client.target.topic, CONF.cert_topic)
orig_prepare = rpcapi.client.prepare
with contextlib.nested(
mock.patch.object(rpcapi.client, 'call'),
mock.patch.object(rpcapi.client, 'prepare'),
mock.patch.object(rpcapi.client, 'can_send_version'),
) as (
rpc_mock, prepare_mock, csv_mock
):
prepare_mock.return_value = rpcapi.client
rpc_mock.return_value = 'foo'
csv_mock.side_effect = (
lambda v: orig_prepare().can_send_version())
retval = getattr(rpcapi, method)(ctxt, **kwargs)
self.assertEqual(retval, rpc_mock.return_value)
prepare_mock.assert_called_once_with()
rpc_mock.assert_called_once_with(ctxt, method, **kwargs)
|
def test_revoke_certs_by_project(self):
self._test_cert_api('revoke_certs_by_project',
project_id='fake_project_id')
def test_revoke_certs_by_user_and_project(self):
self._test_cert_api('revoke_certs_by_user_and_project',
user_id='fake_user_id',
project_id='fake_project_id')
def test_generate_x509_cert(self):
self._test_cert_api('generate_x509_cert',
user_id='fake_user_id',
project_id='fake_project_id')
def test_fetch_ca(self):
self._test_cert_api('fetch_ca', project_id='fake_project_id')
def test_fetch_crl(self):
self._test_cert_api('fetch_crl', project_id='fake_project_id')
def test_decrypt_text(self):
self._test_cert_api('decrypt_text',
project_id='fake_project_id', text='blah')
|
def test_revoke_certs_by_user(self):
self._test_cert_api('revoke_certs_by_user', user_id='fake_user_id')
|
subsystem.py
|
# Copyright 2019 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from typing import Optional, Tuple, cast
from pants.backend.python.subsystems.python_tool_base import PythonToolBase
from pants.option.custom_types import file_option, shell_str
class Black(PythonToolBase):
"""The Black Python code formatter (https://black.readthedocs.io/)."""
options_scope = "black"
default_version = "black==20.8b1"
default_extra_requirements = ["setuptools"]
default_entry_point = "black:patched_main"
default_interpreter_constraints = ["CPython>=3.6"]
@classmethod
def register_options(cls, register):
super().register_options(register)
register(
"--skip",
type=bool,
default=False,
help=(
f"Don't use Black when running `{register.bootstrap.pants_bin_name} fmt` and "
f"`{register.bootstrap.pants_bin_name} lint`"
),
)
register(
"--args",
type=list,
member_type=shell_str,
help=(
"Arguments to pass directly to Black, e.g. "
f'`--{cls.options_scope}-args="--target-version=py37 --quiet"`'
),
)
register(
"--config",
type=file_option,
default=None,
advanced=True,
help="Path to Black's pyproject.toml config file",
)
@property
def skip(self) -> bool:
return cast(bool, self.options.skip)
@property
def
|
(self) -> Tuple[str, ...]:
return tuple(self.options.args)
@property
def config(self) -> Optional[str]:
return cast(Optional[str], self.options.config)
|
args
|
datetimes.py
|
from datetime import date, datetime, time, timedelta, tzinfo
import operator
from typing import Optional
import warnings
import numpy as np
from pandas._libs import NaT, Period, Timestamp, index as libindex, lib
from pandas._libs.tslibs import (
Resolution,
ints_to_pydatetime,
parsing,
timezones,
to_offset,
)
from pandas._libs.tslibs.offsets import prefix_mapping
from pandas._typing import DtypeObj, Label
from pandas.errors import InvalidIndexError
from pandas.util._decorators import cache_readonly, doc
from pandas.core.dtypes.common import (
DT64NS_DTYPE,
is_datetime64_any_dtype,
is_datetime64_dtype,
is_datetime64tz_dtype,
is_float,
is_integer,
is_scalar,
)
from pandas.core.dtypes.missing import is_valid_nat_for_dtype
from pandas.core.arrays.datetimes import DatetimeArray, tz_to_dtype
import pandas.core.common as com
from pandas.core.indexes.base import Index, maybe_extract_name
from pandas.core.indexes.datetimelike import DatetimeTimedeltaMixin
from pandas.core.indexes.extension import inherit_names
from pandas.core.tools.times import to_time
def _new_DatetimeIndex(cls, d):
"""
This is called upon unpickling, rather than the default which doesn't
have arguments and breaks __new__
"""
if "data" in d and not isinstance(d["data"], DatetimeIndex):
# Avoid need to verify integrity by calling simple_new directly
data = d.pop("data")
if not isinstance(data, DatetimeArray):
# For backward compat with older pickles, we may need to construct
# a DatetimeArray to adapt to the newer _simple_new signature
tz = d.pop("tz")
freq = d.pop("freq")
dta = DatetimeArray._simple_new(data, dtype=tz_to_dtype(tz), freq=freq)
else:
dta = data
for key in ["tz", "freq"]:
# These are already stored in our DatetimeArray; if they are
# also in the pickle and don't match, we have a problem.
if key in d:
assert d.pop(key) == getattr(dta, key)
result = cls._simple_new(dta, **d)
else:
with warnings.catch_warnings():
# TODO: If we knew what was going in to **d, we might be able to
# go through _simple_new instead
warnings.simplefilter("ignore")
result = cls.__new__(cls, **d)
return result
# Pin DatetimeArray methods onto the index, wrapped so their results are
# re-boxed as DatetimeIndex (tz_localize is excluded: it is defined
# explicitly on the class below).
@inherit_names(
    ["to_perioddelta", "to_julian_date", "strftime", "isocalendar"]
    + DatetimeArray._field_ops
    + [
        method
        for method in DatetimeArray._datetimelike_methods
        if method not in ("tz_localize",)
    ],
    DatetimeArray,
    wrap=True,
)
# Properties cached on the index after first access.
@inherit_names(["is_normalized", "_resolution_obj"], DatetimeArray, cache=True)
# Attributes/properties passed straight through to the underlying array.
@inherit_names(
    [
        "_bool_ops",
        "_object_ops",
        "_field_ops",
        "_datetimelike_ops",
        "_datetimelike_methods",
        "tz",
        "tzinfo",
        "dtype",
        "to_pydatetime",
        "_has_same_tz",
        "_format_native_types",
        "date",
        "time",
        "timetz",
    ]
    + DatetimeArray._bool_ops,
    DatetimeArray,
)
class DatetimeIndex(DatetimeTimedeltaMixin):
    """
    Immutable ndarray-like of datetime64 data.

    Represented internally as int64, and which can be boxed to Timestamp objects
    that are subclasses of datetime and carry metadata.

    Parameters
    ----------
    data : array-like (1-dimensional), optional
        Optional datetime-like data to construct index with.
    freq : str or pandas offset object, optional
        One of pandas date offset strings or corresponding objects. The string
        'infer' can be passed in order to set the frequency of the index as the
        inferred frequency upon creation.
    tz : pytz.timezone or dateutil.tz.tzfile or datetime.tzinfo or str
        Set the Timezone of the data.
    normalize : bool, default False
        Normalize start/end dates to midnight before generating date range.
    closed : {'left', 'right'}, optional
        Set whether to include `start` and `end` that are on the
        boundary. The default includes boundary points on either end.
    ambiguous : 'infer', bool-ndarray, 'NaT', default 'raise'
        When clocks moved backward due to DST, ambiguous times may arise.
        For example in Central European Time (UTC+01), when going from 03:00
        DST to 02:00 non-DST, 02:30:00 local time occurs both at 00:30:00 UTC
        and at 01:30:00 UTC. In such a situation, the `ambiguous` parameter
        dictates how ambiguous times should be handled.

        - 'infer' will attempt to infer fall dst-transition hours based on
          order
        - bool-ndarray where True signifies a DST time, False signifies a
          non-DST time (note that this flag is only applicable for ambiguous
          times)
        - 'NaT' will return NaT where there are ambiguous times
        - 'raise' will raise an AmbiguousTimeError if there are ambiguous times.
    dayfirst : bool, default False
        If True, parse dates in `data` with the day first order.
    yearfirst : bool, default False
        If True parse dates in `data` with the year first order.
    dtype : numpy.dtype or DatetimeTZDtype or str, default None
        Note that the only NumPy dtype allowed is 'datetime64[ns]'.
    copy : bool, default False
        Make a copy of input ndarray.
    name : label, default None
        Name to be stored in the index.

    Attributes
    ----------
    year
    month
    day
    hour
    minute
    second
    microsecond
    nanosecond
    date
    time
    timetz
    dayofyear
    weekofyear
    week
    dayofweek
    weekday
    quarter
    tz
    freq
    freqstr
    is_month_start
    is_month_end
    is_quarter_start
    is_quarter_end
    is_year_start
    is_year_end
    is_leap_year
    inferred_freq

    Methods
    -------
    normalize
    strftime
    snap
    tz_convert
    tz_localize
    round
    floor
    ceil
    to_period
    to_perioddelta
    to_pydatetime
    to_series
    to_frame
    month_name
    day_name
    mean

    See Also
    --------
    Index : The base pandas Index type.
    TimedeltaIndex : Index of timedelta64 data.
    PeriodIndex : Index of Period data.
    to_datetime : Convert argument to datetime.
    date_range : Create a fixed-frequency DatetimeIndex.

    Notes
    -----
    To learn more about the frequency strings, please see `this link
    <https://pandas.pydata.org/pandas-docs/stable/user_guide/timeseries.html#offset-aliases>`__.
    """

    _typ = "datetimeindex"

    # Cython engine used for indexing/lookup.
    _engine_type = libindex.DatetimeEngine
    _supports_partial_string_indexing = True

    _comparables = ["name", "freqstr", "tz"]
    _attributes = ["name", "tz", "freq"]

    _is_numeric_dtype = False

    _data: DatetimeArray
    tz: Optional[tzinfo]
# --------------------------------------------------------------------
# methods that dispatch to array and wrap result in DatetimeIndex
@doc(DatetimeArray.tz_localize)
def tz_localize(
self, tz, ambiguous="raise", nonexistent="raise"
) -> "DatetimeIndex":
arr = self._data.tz_localize(tz, ambiguous, nonexistent)
return type(self)._simple_new(arr, name=self.name)
    @doc(DatetimeArray.to_period)
    def to_period(self, freq=None) -> "DatetimeIndex":
        # NOTE(review): DatetimeArray.to_period returns a period-typed array,
        # while _simple_new below asserts a DatetimeArray, and the return
        # annotation says DatetimeIndex — confirm this wrapping is intended.
        arr = self._data.to_period(freq)
        return type(self)._simple_new(arr, name=self.name)
    # --------------------------------------------------------------------
    # Constructors

    def __new__(
        cls,
        data=None,
        freq=lib.no_default,
        tz=None,
        normalize=False,
        closed=None,
        ambiguous="raise",
        dayfirst=False,
        yearfirst=False,
        dtype=None,
        copy=False,
        name=None,
    ):
        # A DatetimeIndex must be built from a collection, never a scalar.
        if is_scalar(data):
            raise TypeError(
                f"{cls.__name__}() must be called with a "
                f"collection of some kind, {repr(data)} was passed"
            )

        # - Cases checked above all return/raise before reaching here - #

        name = maybe_extract_name(name, data, cls)

        # All validation and coercion (tz/freq/dtype/parsing order) is
        # delegated to the DatetimeArray constructor.
        dtarr = DatetimeArray._from_sequence(
            data,
            dtype=dtype,
            copy=copy,
            tz=tz,
            freq=freq,
            dayfirst=dayfirst,
            yearfirst=yearfirst,
            ambiguous=ambiguous,
        )

        subarr = cls._simple_new(dtarr, name=name)
        return subarr
    @classmethod
    def _simple_new(cls, values: DatetimeArray, name: Label = None):
        # Fastpath constructor: performs no validation beyond the assert;
        # callers are responsible for providing a well-formed DatetimeArray.
        assert isinstance(values, DatetimeArray), type(values)

        result = object.__new__(cls)
        result._data = values
        result.name = name
        result._cache = {}
        result._no_setting_name = False
        # For groupby perf. See note in indexes/base about _index_data
        result._index_data = values._data
        result._reset_identity()
        return result
    # --------------------------------------------------------------------

    @cache_readonly
    def _is_dates_only(self) -> bool:
        """
        Return a boolean if we are only dates (and don't have a timezone)

        Returns
        -------
        bool
        """
        # Deferred import — presumably to avoid an import cycle with
        # pandas.io.formats; confirm before moving to module level.
        from pandas.io.formats.format import _is_dates_only

        return self.tz is None and _is_dates_only(self._values)
    def __reduce__(self):
        """
        Pickle support: reconstruct via the module-level _new_DatetimeIndex
        helper so tz is set directly rather than re-interpreted.
        """
        # we use a special reduce here because we need
        # to simply set the .tz (and not reinterpret it)

        d = dict(data=self._data)
        d.update(self._get_attributes_dict())
        return _new_DatetimeIndex, (type(self), d), None
    def _convert_for_op(self, value):
        """
        Convert value to be insertable to ndarray.

        Returns
        -------
        numpy.datetime64
            The value as an M8[ns] scalar.

        Raises
        ------
        ValueError
            If `value`'s timezone does not match this index's timezone.
        """
        if self._has_same_tz(value):
            return Timestamp(value).asm8
        raise ValueError("Passed item and index have different timezone")
def _is_comparable_dtype(self, dtype: DtypeObj) -> bool:
"""
Can we compare values of the given dtype to our own?
"""
if not is_datetime64_any_dtype(dtype):
return False
if self.tz is not None:
# If we have tz, we can compare to tzaware
return is_datetime64tz_dtype(dtype)
# if we dont have tz, we can only compare to tznaive
return is_datetime64_dtype(dtype)
    # --------------------------------------------------------------------
    # Rendering Methods

    def _mpl_repr(self):
        # how to represent ourselves to matplotlib:
        # an array of datetime.datetime objects built from the i8 values.
        return ints_to_pydatetime(self.asi8, self.tz)
    @property
    def _formatter_func(self):
        # Deferred import — presumably to avoid an import cycle with
        # pandas.io.formats; confirm before moving to module level.
        from pandas.io.formats.format import _get_format_datetime64

        formatter = _get_format_datetime64(is_dates_only=self._is_dates_only)
        # Each value is rendered single-quoted, e.g. '2020-01-01'.
        return lambda x: f"'{formatter(x, tz=self.tz)}'"
    # --------------------------------------------------------------------
    # Set Operation Methods

    def union_many(self, others):
        """
        A bit of a hack to accelerate unioning a collection of indexes.
        """
        this = self

        for other in others:
            if not isinstance(this, DatetimeIndex):
                # A previous iteration fell back to a generic Index union;
                # stay on the generic path for the rest of the inputs.
                this = Index.union(this, other)
                continue

            if not isinstance(other, DatetimeIndex):
                try:
                    other = DatetimeIndex(other)
                except TypeError:
                    pass

            # Align timezones before attempting the fast path.
            this, other = this._maybe_utc_convert(other)

            if this._can_fast_union(other):
                this = this._fast_union(other)
            else:
                this = Index.union(this, other)
        return this
    # --------------------------------------------------------------------

    def _get_time_micros(self):
        """
        Return the number of microseconds since midnight.

        Returns
        -------
        ndarray[int64_t]
            Microseconds into the day for each entry; NaT entries are -1.
        """
        values = self.asi8
        if self.tz is not None and not timezones.is_utc(self.tz):
            # Use wall-clock values in the local timezone.
            values = self._data._local_timestamps()

        # Nanoseconds into the current day, truncated to microseconds.
        nanos = values % (24 * 3600 * 1_000_000_000)
        micros = nanos // 1000

        # Flag NaT positions with the sentinel -1.
        micros[self._isnan] = -1
        return micros
    def to_series(self, keep_tz=lib.no_default, index=None, name=None):
        """
        Create a Series with both index and values equal to the index keys
        useful with map for returning an indexer based on an index.

        Parameters
        ----------
        keep_tz : optional, defaults True
            Return the data keeping the timezone.

            If keep_tz is True:

              If the timezone is not set, the resulting
              Series will have a datetime64[ns] dtype.

              Otherwise the Series will have an datetime64[ns, tz] dtype; the
              tz will be preserved.

            If keep_tz is False:

              Series will have a datetime64[ns] dtype. TZ aware
              objects will have the tz removed.

            .. versionchanged:: 1.0.0
                The default value is now True.  In a future version,
                this keyword will be removed entirely.  Stop passing the
                argument to obtain the future behavior and silence the warning.

        index : Index, optional
            Index of resulting Series. If None, defaults to original index.
        name : str, optional
            Name of resulting Series. If None, defaults to name of original
            index.

        Returns
        -------
        Series
        """
        from pandas import Series

        if index is None:
            index = self._shallow_copy()
        if name is None:
            name = self.name

        # keep_tz is deprecated: warn whenever it is passed explicitly,
        # with a message that depends on the value passed.
        if keep_tz is not lib.no_default:
            if keep_tz:
                warnings.warn(
                    "The 'keep_tz' keyword in DatetimeIndex.to_series "
                    "is deprecated and will be removed in a future version. "
                    "You can stop passing 'keep_tz' to silence this warning.",
                    FutureWarning,
                    stacklevel=2,
                )
            else:
                warnings.warn(
                    "Specifying 'keep_tz=False' is deprecated and this "
                    "option will be removed in a future release. If "
                    "you want to remove the timezone information, you "
                    "can do 'idx.tz_convert(None)' before calling "
                    "'to_series'.",
                    FutureWarning,
                    stacklevel=2,
                )
        else:
            keep_tz = True

        if keep_tz and self.tz is not None:
            # preserve the tz & copy
            values = self.copy(deep=True)
        else:
            # Drop the timezone by reinterpreting as naive M8[ns].
            values = self._values.view("M8[ns]").copy()

        return Series(values, index=index, name=name)
    def snap(self, freq="S"):
        """
        Snap time stamps to nearest occurring frequency.

        Parameters
        ----------
        freq : DateOffset or str, default 'S'
            Frequency each timestamp is snapped to.

        Returns
        -------
        DatetimeIndex
        """
        # Superdumb, punting on any optimizing
        freq = to_offset(freq)

        snapped = np.empty(len(self), dtype=DT64NS_DTYPE)

        for i, v in enumerate(self):
            s = v
            if not freq.is_on_offset(s):
                # Snap to whichever neighbouring offset is closer;
                # exact ties go forward (rollforward wins).
                t0 = freq.rollback(s)
                t1 = freq.rollforward(s)
                if abs(s - t0) < abs(t1 - s):
                    s = t0
                else:
                    s = t1
            snapped[i] = s

        dta = DatetimeArray(snapped, dtype=self.dtype)
        return DatetimeIndex._simple_new(dta, name=self.name)
def _parsed_string_to_bounds(self, reso: Resolution, parsed: datetime):
"""
Calculate datetime bounds for parsed time string and its resolution.
Parameters
----------
reso : str
Resolution provided by parsed string.
parsed : datetime
Datetime from parsed string.
Returns
-------
lower, upper: pd.Timestamp
"""
assert isinstance(reso, Resolution), (type(reso), reso)
valid_resos = {
"year",
"month",
"quarter",
"day",
"hour",
"minute",
"second",
"minute",
"second",
"microsecond",
}
if reso.attrname not in valid_resos:
raise KeyError
grp = reso.freq_group
per = Period(parsed, freq=grp)
start, end = per.start_time, per.end_time
# GH 24076
# If an incoming date string contained a UTC offset, need to localize
# the parsed date to this offset first before aligning with the index's
# timezone
if parsed.tzinfo is not None:
if self.tz is None:
raise ValueError(
"The index must be timezone aware when indexing "
"with a date string with a UTC offset"
)
start = start.tz_localize(parsed.tzinfo).tz_convert(self.tz)
end = end.tz_localize(parsed.tzinfo).tz_convert(self.tz)
elif self.tz is not None:
start = start.tz_localize(self.tz)
end = end.tz_localize(self.tz)
return start, end
def _validate_partial_date_slice(self, reso: Resolution):
assert isinstance(reso, Resolution), (type(reso), reso)
if (
self.is_monotonic
and reso.attrname in ["day", "hour", "minute", "second"]
and self._resolution_obj >= reso
):
# These resolution/monotonicity validations came from GH3931,
# GH3452 and GH2369.
# See also GH14826
raise KeyError
if reso == "microsecond":
# _partial_date_slice doesn't allow microsecond resolution, but
# _parsed_string_to_bounds allows it.
raise KeyError
    def get_loc(self, key, method=None, tolerance=None):
        """
        Get integer location for requested label

        Returns
        -------
        loc : int
        """
        if not is_scalar(key):
            raise InvalidIndexError(key)

        # Remember the key as passed so error messages show it unmodified.
        orig_key = key
        if is_valid_nat_for_dtype(key, self.dtype):
            key = NaT

        if isinstance(key, self._data._recognized_scalars):
            # needed to localize naive datetimes
            key = self._maybe_cast_for_get_loc(key)

        elif isinstance(key, str):
            # First try partial-string indexing (e.g. "2020-01"); on failure
            # fall back to treating the string as a single timestamp.
            try:
                return self._get_string_slice(key)
            except (TypeError, KeyError, ValueError, OverflowError):
                pass

            try:
                key = self._maybe_cast_for_get_loc(key)
            except ValueError as err:
                raise KeyError(key) from err

        elif isinstance(key, timedelta):
            # GH#20464
            raise TypeError(
                f"Cannot index {type(self).__name__} with {type(key).__name__}"
            )

        elif isinstance(key, time):
            if method is not None:
                raise NotImplementedError(
                    "cannot yet lookup inexact labels when key is a time object"
                )
            return self.indexer_at_time(key)

        else:
            # unrecognized type
            raise KeyError(key)

        try:
            return Index.get_loc(self, key, method, tolerance)
        except KeyError as err:
            raise KeyError(orig_key) from err
def _maybe_cast_for_get_loc(self, key) -> Timestamp:
# needed to localize naive datetimes
key = Timestamp(key)
if key.tzinfo is None:
key = key.tz_localize(self.tz)
else:
key = key.tz_convert(self.tz)
return key
    def _maybe_cast_slice_bound(self, label, side: str, kind):
        """
        If label is a string, cast it to datetime according to resolution.

        Parameters
        ----------
        label : object
        side : {'left', 'right'}
        kind : {'loc', 'getitem'} or None

        Returns
        -------
        label : object

        Notes
        -----
        Value of `side` parameter should be validated in caller.
        """
        assert kind in ["loc", "getitem", None]

        # Numeric and time-of-day labels are not valid slice bounds here.
        if is_float(label) or isinstance(label, time) or is_integer(label):
            self._invalid_indexer("slice", label)

        if isinstance(label, str):
            freq = getattr(self, "freqstr", getattr(self, "inferred_freq", None))
            parsed, reso = parsing.parse_time_string(label, freq)
            reso = Resolution.from_attrname(reso)
            lower, upper = self._parsed_string_to_bounds(reso, parsed)
            # lower, upper form the half-open interval:
            #   [parsed, parsed + 1 freq)
            # because label may be passed to searchsorted
            # the bounds need swapped if index is reverse sorted and has a
            # length > 1 (is_monotonic_decreasing gives True for empty
            # and length 1 index)
            if self._is_strictly_monotonic_decreasing and len(self) > 1:
                return upper if side == "left" else lower
            return lower if side == "left" else upper
        else:
            return label
def _get_string_slice(self, key: str, use_lhs: bool = True, use_rhs: bool = True):
freq = getattr(self, "freqstr", getattr(self, "inferred_freq", None))
parsed, reso = parsing.parse_time_string(key, freq)
reso = Resolution.from_attrname(reso)
loc = self._partial_date_slice(reso, parsed, use_lhs=use_lhs, use_rhs=use_rhs)
return loc
    def slice_indexer(self, start=None, end=None, step=None, kind=None):
        """
        Return indexer for specified label slice.
        Index.slice_indexer, customized to handle time slicing.

        In addition to functionality provided by Index.slice_indexer, does the
        following:

        - if both `start` and `end` are instances of `datetime.time`, it
          invokes `indexer_between_time`
        - if `start` and `end` are both either string or None perform
          value-based selection in non-monotonic cases.
        """
        # For historical reasons DatetimeIndex supports slices between two
        # instances of datetime.time as if it were applying a slice mask to
        # an array of (self.hour, self.minute, self.seconds, self.microsecond).
        if isinstance(start, time) and isinstance(end, time):
            if step is not None and step != 1:
                raise ValueError("Must have step size of 1 with time slices")
            return self.indexer_between_time(start, end)

        if isinstance(start, time) or isinstance(end, time):
            raise KeyError("Cannot mix time and non-time slice keys")

        # Pandas supports slicing with dates, treated as datetimes at midnight.
        # https://github.com/pandas-dev/pandas/issues/31501
        if isinstance(start, date) and not isinstance(start, datetime):
            start = datetime.combine(start, time(0, 0))
        if isinstance(end, date) and not isinstance(end, datetime):
            end = datetime.combine(end, time(0, 0))

        try:
            return Index.slice_indexer(self, start, end, step, kind=kind)
        except KeyError:
            # For historical reasons DatetimeIndex by default supports
            # value-based partial (aka string) slices on non-monotonic arrays,
            # let's try that.
            if (start is None or isinstance(start, str)) and (
                end is None or isinstance(end, str)
            ):
                # Build a boolean mask bounded on each side that was given.
                mask = True
                if start is not None:
                    start_casted = self._maybe_cast_slice_bound(start, "left", kind)
                    mask = start_casted <= self

                if end is not None:
                    end_casted = self._maybe_cast_slice_bound(end, "right", kind)
                    mask = (self <= end_casted) & mask

                indexer = mask.nonzero()[0][::step]
                if len(indexer) == len(self):
                    return slice(None)
                else:
                    return indexer
            else:
                raise
# --------------------------------------------------------------------
def is_type_compatible(self, typ) -> bool:
return typ == self.inferred_type or typ == "datetime"
    @property
    def inferred_type(self) -> str:
        # Always "datetime64" for a DatetimeIndex.
        # b/c datetime is represented as microseconds since the epoch, make
        # sure we can't have ambiguous indexing
        return "datetime64"
    def indexer_at_time(self, time, asof=False):
        """
        Return index locations of values at particular time of day
        (e.g. 9:30AM).

        Parameters
        ----------
        time : datetime.time or str
            Time passed in either as object (datetime.time) or as string in
            appropriate format ("%H:%M", "%H%M", "%I:%M%p", "%I%M%p",
            "%H:%M:%S", "%H%M%S", "%I:%M:%S%p", "%I%M%S%p").

        Returns
        -------
        values_at_time : array of integers

        See Also
        --------
        indexer_between_time : Get index locations of values between particular
            times of day.
        DataFrame.at_time : Select values at particular time of day.
        """
        if asof:
            raise NotImplementedError("'asof' argument is not supported")

        if isinstance(time, str):
            from dateutil.parser import parse

            time = parse(time).time()

        if time.tzinfo:
            # A tz-aware time requires a tz-aware index; compare in the
            # time's timezone.
            if self.tz is None:
                raise ValueError("Index must be timezone aware.")
            time_micros = self.tz_convert(time.tzinfo)._get_time_micros()
        else:
            time_micros = self._get_time_micros()

        micros = _time_to_micros(time)
        return (micros == time_micros).nonzero()[0]
def indexer_between_time(
self, start_time, end_time, include_start=True, include_end=True
):
"""
Return index locations of values between particular times of day
(e.g., 9:00-9:30AM).
Parameters
----------
start_time, end_time : datetime.time, str
Time passed either as object (datetime.time) or as string in
appropriate format ("%H:%M", "%H%M", "%I:%M%p", "%I%M%p",
"%H:%M:%S", "%H%M%S", "%I:%M:%S%p","%I%M%S%p").
include_start : bool, default True
include_end : bool, default True
Returns
-------
values_between_time : array of integers
See Also
--------
indexer_at_time : Get index locations of values at particular time of day.
DataFrame.between_time : Select values between particular times of day.
"""
start_time = to_time(start_time)
end_time = to_time(end_time)
time_micros = self._get_time_micros()
start_micros = _time_to_micros(start_time)
end_micros = _time_to_micros(end_time)
if include_start and include_end:
lop = rop = operator.le
elif include_start:
lop = operator.le
rop = operator.lt
elif include_end:
lop = operator.lt
rop = operator.le
else:
lop = rop = operator.lt
if start_time <= end_time:
join_op = operator.and_
else:
join_op = operator.or_
mask = join_op(lop(start_micros, time_micros), rop(time_micros, end_micros))
return mask.nonzero()[0]
DatetimeIndex._add_logical_methods_disabled()
def date_range(
    start=None,
    end=None,
    periods=None,
    freq=None,
    tz=None,
    normalize=False,
    name=None,
    closed=None,
    **kwargs,
) -> DatetimeIndex:
    """
    Return a fixed frequency DatetimeIndex.

    Parameters
    ----------
    start : str or datetime-like, optional
        Left bound for generating dates.
    end : str or datetime-like, optional
        Right bound for generating dates.
    periods : int, optional
        Number of periods to generate.
    freq : str or DateOffset, default 'D'
        Frequency strings can have multiples, e.g. '5H'. See
        :ref:`here <timeseries.offset_aliases>` for a list of
        frequency aliases.
    tz : str or tzinfo, optional
        Time zone name for returning localized DatetimeIndex, for example
        'Asia/Hong_Kong'. By default, the resulting DatetimeIndex is
        timezone-naive.
    normalize : bool, default False
        Normalize start/end dates to midnight before generating date range.
    name : str, default None
        Name of the resulting DatetimeIndex.
    closed : {None, 'left', 'right'}, optional
        Make the interval closed with respect to the given frequency to
        the 'left', 'right', or both sides (None, the default).
    **kwargs
        For compatibility. Has no effect on the result.

    Returns
    -------
    rng : DatetimeIndex

    See Also
    --------
    DatetimeIndex : An immutable container for datetimes.
    timedelta_range : Return a fixed frequency TimedeltaIndex.
    period_range : Return a fixed frequency PeriodIndex.
    interval_range : Return a fixed frequency IntervalIndex.

    Notes
    -----
    Of the four parameters ``start``, ``end``, ``periods``, and ``freq``,
    exactly three must be specified. If ``freq`` is omitted, the resulting
    ``DatetimeIndex`` will have ``periods`` linearly spaced elements between
    ``start`` and ``end`` (closed on both sides).

    To learn more about the frequency strings, please see `this link
    <https://pandas.pydata.org/pandas-docs/stable/user_guide/timeseries.html#offset-aliases>`__.

    Examples
    --------
    **Specifying the values**

    The next four examples generate the same `DatetimeIndex`, but vary
    the combination of `start`, `end` and `periods`.

    Specify `start` and `end`, with the default daily frequency.

    >>> pd.date_range(start='1/1/2018', end='1/08/2018')
    DatetimeIndex(['2018-01-01', '2018-01-02', '2018-01-03', '2018-01-04',
                   '2018-01-05', '2018-01-06', '2018-01-07', '2018-01-08'],
                  dtype='datetime64[ns]', freq='D')

    Specify `start` and `periods`, the number of periods (days).

    >>> pd.date_range(start='1/1/2018', periods=8)
    DatetimeIndex(['2018-01-01', '2018-01-02', '2018-01-03', '2018-01-04',
                   '2018-01-05', '2018-01-06', '2018-01-07', '2018-01-08'],
                  dtype='datetime64[ns]', freq='D')

    Specify `end` and `periods`, the number of periods (days).

    >>> pd.date_range(end='1/1/2018', periods=8)
    DatetimeIndex(['2017-12-25', '2017-12-26', '2017-12-27', '2017-12-28',
                   '2017-12-29', '2017-12-30', '2017-12-31', '2018-01-01'],
                  dtype='datetime64[ns]', freq='D')

    Specify `start`, `end`, and `periods`; the frequency is generated
    automatically (linearly spaced).

    >>> pd.date_range(start='2018-04-24', end='2018-04-27', periods=3)
    DatetimeIndex(['2018-04-24 00:00:00', '2018-04-25 12:00:00',
                   '2018-04-27 00:00:00'],
                  dtype='datetime64[ns]', freq=None)

    **Other Parameters**

    Changed the `freq` (frequency) to ``'M'`` (month end frequency).

    >>> pd.date_range(start='1/1/2018', periods=5, freq='M')
    DatetimeIndex(['2018-01-31', '2018-02-28', '2018-03-31', '2018-04-30',
                   '2018-05-31'],
                  dtype='datetime64[ns]', freq='M')

    Multiples are allowed

    >>> pd.date_range(start='1/1/2018', periods=5, freq='3M')
    DatetimeIndex(['2018-01-31', '2018-04-30', '2018-07-31', '2018-10-31',
                   '2019-01-31'],
                  dtype='datetime64[ns]', freq='3M')

    `freq` can also be specified as an Offset object.

    >>> pd.date_range(start='1/1/2018', periods=5, freq=pd.offsets.MonthEnd(3))
    DatetimeIndex(['2018-01-31', '2018-04-30', '2018-07-31', '2018-10-31',
                   '2019-01-31'],
                  dtype='datetime64[ns]', freq='3M')

    Specify `tz` to set the timezone.

    >>> pd.date_range(start='1/1/2018', periods=5, tz='Asia/Tokyo')
    DatetimeIndex(['2018-01-01 00:00:00+09:00', '2018-01-02 00:00:00+09:00',
                   '2018-01-03 00:00:00+09:00', '2018-01-04 00:00:00+09:00',
                   '2018-01-05 00:00:00+09:00'],
                  dtype='datetime64[ns, Asia/Tokyo]', freq='D')

    `closed` controls whether to include `start` and `end` that are on the
    boundary. The default includes boundary points on either end.

    >>> pd.date_range(start='2017-01-01', end='2017-01-04', closed=None)
    DatetimeIndex(['2017-01-01', '2017-01-02', '2017-01-03', '2017-01-04'],
                  dtype='datetime64[ns]', freq='D')

    Use ``closed='left'`` to exclude `end` if it falls on the boundary.

    >>> pd.date_range(start='2017-01-01', end='2017-01-04', closed='left')
    DatetimeIndex(['2017-01-01', '2017-01-02', '2017-01-03'],
                  dtype='datetime64[ns]', freq='D')

    Use ``closed='right'`` to exclude `start` if it falls on the boundary.

    >>> pd.date_range(start='2017-01-01', end='2017-01-04', closed='right')
    DatetimeIndex(['2017-01-02', '2017-01-03', '2017-01-04'],
                  dtype='datetime64[ns]', freq='D')
    """
    # Default to daily frequency only when fewer than three of
    # start/end/periods were supplied.
    if freq is None and com.any_none(periods, start, end):
        freq = "D"

    dtarr = DatetimeArray._generate_range(
        start=start,
        end=end,
        periods=periods,
        freq=freq,
        tz=tz,
        normalize=normalize,
        closed=closed,
        **kwargs,
    )
    return DatetimeIndex._simple_new(dtarr, name=name)
def bdate_range(
    start=None,
    end=None,
    periods=None,
    freq="B",
    tz=None,
    normalize=True,
    name=None,
    weekmask=None,
    holidays=None,
    closed=None,
    **kwargs,
) -> DatetimeIndex:
    """
    Return a fixed frequency DatetimeIndex, with business day as the default
    frequency.

    Parameters
    ----------
    start : str or datetime-like, default None
        Left bound for generating dates.
    end : str or datetime-like, default None
        Right bound for generating dates.
    periods : int, default None
        Number of periods to generate.
    freq : str or DateOffset, default 'B' (business daily)
        Frequency strings can have multiples, e.g. '5H'.
    tz : str or None
        Time zone name for returning localized DatetimeIndex, for example
        Asia/Beijing.
    normalize : bool, default True
        Normalize start/end dates to midnight before generating date range.
    name : str, default None
        Name of the resulting DatetimeIndex.
    weekmask : str or None, default None
        Weekmask of valid business days, passed to ``numpy.busdaycalendar``,
        only used when custom frequency strings are passed.  The default
        value None is equivalent to 'Mon Tue Wed Thu Fri'.
    holidays : list-like or None, default None
        Dates to exclude from the set of valid business days, passed to
        ``numpy.busdaycalendar``, only used when custom frequency strings
        are passed.
    closed : str, default None
        Make the interval closed with respect to the given frequency to
        the 'left', 'right', or both sides (None).
    **kwargs
        For compatibility. Has no effect on the result.

    Returns
    -------
    DatetimeIndex

    Notes
    -----
    Of the four parameters: ``start``, ``end``, ``periods``, and ``freq``,
    exactly three must be specified.  Specifying ``freq`` is a requirement
    for ``bdate_range``.  Use ``date_range`` if specifying ``freq`` is not
    desired.

    To learn more about the frequency strings, please see `this link
    <https://pandas.pydata.org/pandas-docs/stable/user_guide/timeseries.html#offset-aliases>`__.

    Examples
    --------
    Note how the two weekend days are skipped in the result.

    >>> pd.bdate_range(start='1/1/2018', end='1/08/2018')
    DatetimeIndex(['2018-01-01', '2018-01-02', '2018-01-03', '2018-01-04',
               '2018-01-05', '2018-01-08'],
              dtype='datetime64[ns]', freq='B')
    """
    if freq is None:
        msg = "freq must be specified for bdate_range; use date_range instead"
        raise TypeError(msg)

    # Custom business-day frequencies (prefix "C") accept weekmask/holidays;
    # any other frequency must not be combined with them.
    if isinstance(freq, str) and freq.startswith("C"):
        try:
            weekmask = weekmask or "Mon Tue Wed Thu Fri"
            freq = prefix_mapping[freq](holidays=holidays, weekmask=weekmask)
        except (KeyError, TypeError) as err:
            msg = f"invalid custom frequency string: {freq}"
            raise ValueError(msg) from err
    elif holidays or weekmask:
        msg = (
            "a custom frequency string is required when holidays or "
            f"weekmask are passed, got frequency {freq}"
        )
        raise ValueError(msg)

    return date_range(
        start=start,
        end=end,
        periods=periods,
        freq=freq,
        tz=tz,
        normalize=normalize,
        name=name,
        closed=closed,
        **kwargs,
    )
def _time_to_micros(time_obj: time) -> int:
seconds = time_obj.hour * 60 * 60 + 60 * time_obj.minute + time_obj.second
return 1_000_000 * seconds + time_obj.microsecond
|
Returns
-------
|
NewRoom_20211201102508.tsx
|
import { Link, useHistory } from 'react-router-dom'
import { FormEvent, useState} from 'react'
import illustrationImg from '../assets/images/illustration.svg'
import logoImg from '../assets/images/logo.svg'
import {Button} from '../components/Button'
import { useAuth } from '../hooks/useAuth'
import '../styles/auth.scss'
import { database } from '../services/firebase'
// Page component for creating a new Q&A room; the function name was lost in
// the extracted source and is restored here as NewRoom (exported for routing).
export function NewRoom(){
    const { user } = useAuth();
    const history = useHistory();
    const [newRoom, setNewRoom] = useState('');

    // Persists the new room under "rooms" in Firebase, then navigates to it.
    async function handleCreateRoom(event: FormEvent){
        event.preventDefault();

        // Ignore submissions with a blank/whitespace-only room name.
        if (newRoom.trim() === ''){
            return;
        }

        const roomRef = database.ref('rooms');

        const firebaseRoom = await roomRef.push({
            title: newRoom,
            authorId: user?.id,
        })

        history.push(`/rooms/${firebaseRoom.key}`)
    }

    return(
        <div id="page-auth">
            <aside>
                <img src={illustrationImg} alt="Illustração simbolizando perguntas e respostas" />
                <strong>Crie salas de Q&A ao-vivo</strong>
                <p>Tire as dúvidas da sua audiência em tempo-real</p>
            </aside>
            <main>
                <div className="main-content">
                    <a href="https://www.rocketseat.com.br/"><img src={logoImg} alt="Letmeask" /></a>
                    <h2>Criar uma nova sala</h2>
                    <form onSubmit={handleCreateRoom}>
                        <input
                            type="text"
                            placeholder="Nome da sala"
                            onChange={event => setNewRoom(event.target.value)}
                            value={newRoom}
                        />
                        <Button type="submit">
                            Criar sala
                        </Button>
                        <p>
                            Quer entrar em uma sala já existente? <Link to="/">clique aqui</Link>
                        </p>
                    </form>
                </div>
            </main>
        </div>
    )
}
|
NewRoom
|
ForexRateEndDateOption.test.js
|
import React from 'react';
import { render, fireEvent } from '@testing-library/react';
import { DateTime } from 'luxon';
import { dateToStr } from '../components/DatePicker';
import ForexRateEndDateOption from '../components/ForexRateEndDateOption';
describe('<ForexRateEndDateOption />', () => {
it('should render without crashing', () => {
const rendered = render(<ForexRateEndDateOption initialDate={DateTime.utc()} />);
expect(rendered).toBeTruthy();
});
it('should have a commit button', async () => {
const { findByText } = render(<ForexRateEndDateOption initialDate={DateTime.utc()} />);
const element = await findByText('Commit');
expect(element).toBeDefined();
});
it('should disable the date picker input given the 1 Day option', async () => {
const { findByLabelText } = render(<ForexRateEndDateOption />);
const endDateForOneDay = await findByLabelText('End Date');
const expected = true;
const actual = endDateForOneDay.disabled;
|
});
it('should display the correct title, "1 Day", given the 1 Day option', async () => {
const { findByText } = render(<ForexRateEndDateOption />);
const element = await findByText('1 Day');
expect(element).toBeDefined();
});
it('should display the right end date given the 1 Day option', async () => {
const tomorrow830amUTC = DateTime.utc()
.plus({ days: 1 })
.set({
hour: 8,
minute: 30,
second: 0,
millisecond: 0,
});
const { findByLabelText } = render(<ForexRateEndDateOption />);
const endDateForOneDay = await findByLabelText('End Date');
const expected = dateToStr(tomorrow830amUTC);
const actual = endDateForOneDay.value;
expect(actual).toEqual(expected);
});
it('should enable the date picker input given the Weekend option', async () => {
const { findByLabelText } = render(<ForexRateEndDateOption weekend />);
const endDateForWeekend = await findByLabelText('End Date');
const expected = false;
const actual = endDateForWeekend.disabled;
expect(actual).toEqual(expected);
});
it('should display the correct title, "Weekend", given the Weekend option', async () => {
const { findByText } = render(<ForexRateEndDateOption weekend />);
const element = await findByText('Weekend');
expect(element).toBeDefined();
});
it('should display the right initial end date given the Weekend option', async () => {
const monday830amUTC = DateTime.utc()
.plus({ days: 3 })
.set({
hour: 8,
minute: 30,
second: 0,
millisecond: 0,
});
const { findByLabelText } = render(<ForexRateEndDateOption weekend />);
const endDateForOneDay = await findByLabelText('End Date');
const expected = dateToStr(monday830amUTC);
const actual = endDateForOneDay.value;
expect(actual).toEqual(expected);
});
it('should run the onCommit function when the commit button is pressed', async () => {
const onCommitMock = jest.fn();
const { findByText } = render(<ForexRateEndDateOption onCommit={onCommitMock} />);
const commitButton = await findByText('Commit');
fireEvent.click(commitButton);
expect(onCommitMock).toHaveBeenCalled();
});
});
|
expect(actual).toEqual(expected);
|
fbm_webhooks.go
|
package fbm
import (
"bytes"
"context"
"fmt"
"github.com/julienschmidt/httprouter"
"github.com/pkg/errors"
"github.com/pquerna/ffjson/ffjson"
"github.com/strongo/bots-api-fbm"
"github.com/strongo/bots-framework/core"
"github.com/strongo/log"
"google.golang.org/appengine"
"io/ioutil"
"net/http"
"strings"
)
// NewFbmWebhookHandler returns handler that handles FBM messages
func NewFbmWebhookHandler(botsBy bots.SettingsProvider, translatorProvider bots.TranslatorProvider) bots.WebhookHandler
|
// webhookHandler handles FBM messages
type webhookHandler struct {
	bots.BaseHandler                       // common bots-framework handler behavior
	bots             bots.SettingsProvider // resolves bot settings per request context
}

// Compile-time check that webhookHandler satisfies bots.WebhookHandler.
var _ bots.WebhookHandler = (*webhookHandler)(nil)
// HandleUnmatched is called for webhook updates no command matched.
// It returns the zero MessageFromBot, i.e. no reply is sent.
func (h webhookHandler) HandleUnmatched(whc bots.WebhookContext) (m bots.MessageFromBot) {
	return
}
// RegisterHttpHandlers registers the HTTP routes for handling FBM messages:
// the webhook endpoint plus helper endpoints for subscribing the app and
// whitelisting the serving domain.
func (handler webhookHandler) RegisterHttpHandlers(driver bots.WebhookDriver, host bots.BotHost, router *httprouter.Router, pathPrefix string) {
	if router == nil {
		panic("router == nil")
	}
	handler.BaseHandler.Register(driver, host)
	router.POST(pathPrefix+"/fbm/webhook", handler.HandleWebhookRequest)
	router.POST(pathPrefix+"/fbm/subscribe", handler.Subscribe)
	router.POST(pathPrefix+"/fbm/whitelist", handler.Whitelist)
}
// Whitelist adds this service's domain to the bot's whitelisted domains via
// the Facebook Graph API thread_settings endpoint.
//
// The bot is selected by the "bot" query parameter; an unknown bot code gets
// HTTP 403. The Graph API response body is relayed back to the caller.
func (handler webhookHandler) Whitelist(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
	c := handler.Context(r)
	httpClient := handler.GetHTTPClient(c)
	botCode := r.URL.Query().Get("bot")
	fbmBots := handler.bots(c)
	botSettings, ok := fbmBots.ByCode[botCode]
	if !ok {
		w.WriteHeader(http.StatusForbidden)
		return
	}
	// r.Host (not r.URL.Host) carries the host for server-side requests;
	// r.URL.Host is empty unless the client sent an absolute request URI.
	message := fbmbotapi.NewRequestWhitelistDomain("add", "https://"+r.Host)
	requestBody, err := ffjson.MarshalFast(message)
	if err != nil {
		w.WriteHeader(http.StatusInternalServerError)
		w.Write([]byte(err.Error()))
		return
	}
	log.Debugf(appengine.NewContext(r), "Posting to FB: %v", string(requestBody))
	res, err := httpClient.Post(fmt.Sprintf("https://graph.facebook.com/v2.6/me/thread_settings?access_token=%v", botSettings.Token), "application/json", bytes.NewReader(requestBody))
	if err != nil {
		w.WriteHeader(http.StatusInternalServerError)
		w.Write([]byte(fmt.Sprintf("Error: %v", err)))
		// Without this return the code fell through and dereferenced a nil res.
		return
	}
	defer res.Body.Close()
	body, err := ioutil.ReadAll(res.Body)
	if err != nil {
		w.Write([]byte(fmt.Sprintf("Error reading response body: %v", err)))
		return
	}
	w.Write(body)
}
// Subscribe subscribes the bot's Facebook app for webhook updates from FBM
// via the Graph API subscribed_apps endpoint.
//
// The bot is selected by the "bot" query parameter; an unknown bot code gets
// HTTP 403. The Graph API response body is relayed back to the caller.
func (handler webhookHandler) Subscribe(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
	c := handler.Context(r)
	httpClient := handler.GetHTTPClient(c)
	botCode := r.URL.Query().Get("bot")
	fbmBots := handler.bots(c)
	botSettings, ok := fbmBots.ByCode[botCode]
	if !ok {
		w.WriteHeader(http.StatusForbidden)
		return
	}
	res, err := httpClient.Post(fmt.Sprintf("https://graph.facebook.com/v2.6/me/subscribed_apps?access_token=%v", botSettings.Token), "", strings.NewReader(""))
	if err != nil {
		w.WriteHeader(http.StatusInternalServerError)
		w.Write([]byte(fmt.Sprintf("Error: %v", err)))
		// Without this return the code fell through and dereferenced a nil res.
		return
	}
	defer res.Body.Close()
	body, err := ioutil.ReadAll(res.Body)
	if err != nil {
		w.Write([]byte(fmt.Sprintf("Error reading response body: %v", err)))
		return
	}
	w.Write(body)
}
// HandleWebhookRequest handles a webhook request from FBM.
//
// GET implements Facebook's subscription verification handshake: when the
// "hub.verify_token" query value matches the bot's configured verify token,
// the "hub.challenge" value is echoed back; otherwise HTTP 401 is returned.
// POST delegates the update to the framework's HandleWebhook. Any other
// method yields HTTP 405.
func (handler webhookHandler) HandleWebhookRequest(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
	c := appengine.NewContext(r)
	log.Debugf(c, "webhookHandler.HandleWebhookRequest()")
	switch r.Method {
	case http.MethodGet:
		q := r.URL.Query()
		botCode := r.URL.Query().Get("bot")
		fbmBots := handler.bots(c)
		if botSettings, ok := fbmBots.ByCode[botCode]; ok {
			var responseText string
			verifyToken := q.Get("hub.verify_token")
			if verifyToken == botSettings.VerifyToken {
				// Token matches: echo the challenge to confirm the subscription.
				responseText = q.Get("hub.challenge")
			} else {
				w.WriteHeader(http.StatusUnauthorized)
				responseText = "Wrong verify_token"
				log.Debugf(c, responseText+fmt.Sprintf(". Got: '%v', expected[bot=%v]: '%v'.", verifyToken, botCode, botSettings.VerifyToken))
			}
			w.Write([]byte(responseText))
		} else {
			// Unknown bot code in the query string.
			log.Debugf(c, "Unknown bot '%v'", botCode)
			w.WriteHeader(http.StatusForbidden)
		}
	case http.MethodPost:
		handler.HandleWebhook(w, r, handler)
	default:
		w.WriteHeader(http.StatusMethodNotAllowed)
	}
}
// GetBotContextAndInputs maps an incoming FBM webhook request to
// bots-framework structures.
//
// It deserializes the request body, wraps every messaging item of every
// entry in a webhook input, and resolves bot settings by the recipient page
// ID of the first messaging item (assumes all items of one request target
// the same page — only the first recipient ID is consulted).
func (handler webhookHandler) GetBotContextAndInputs(c context.Context, r *http.Request) (botContext *bots.BotContext, entriesWithInputs []bots.EntryInputs, err error) {
	var (
		receivedMessage fbmbotapi.ReceivedMessage
		bodyBytes       []byte
	)
	defer r.Body.Close()
	if bodyBytes, err = ioutil.ReadAll(r.Body); err != nil {
		// The wrapped error was previously discarded, so callers saw err == nil.
		err = errors.Wrap(err, "Failed to read request body")
		return
	}
	log.Infof(c, "Request.Body: %v", string(bodyBytes))
	if err = ffjson.UnmarshalFast(bodyBytes, &receivedMessage); err != nil {
		err = errors.Wrap(err, "Failed to deserialize FB json message")
		return
	}
	log.Infof(c, "Unmarshaled JSON to a struct with %v entries: %v", len(receivedMessage.Entries), receivedMessage)
	// Guard the Entries[0].Messaging[0] access below against empty payloads,
	// which previously caused an index-out-of-range panic.
	if len(receivedMessage.Entries) == 0 || len(receivedMessage.Entries[0].Messaging) == 0 {
		err = errors.New("FBM request contains no messaging entries")
		return
	}
	entriesWithInputs = make([]bots.EntryInputs, len(receivedMessage.Entries))
	for i, entry := range receivedMessage.Entries {
		entryWithInputs := bots.EntryInputs{
			Entry:  entry,
			Inputs: make([]bots.WebhookInput, len(entry.Messaging)),
		}
		for j, messaging := range entry.Messaging {
			entryWithInputs.Inputs[j] = NewFbmWebhookInput(messaging)
		}
		entriesWithInputs[i] = entryWithInputs
	}
	pageID := receivedMessage.Entries[0].Messaging[0].Recipient.ID
	fbmBots := handler.bots(c)
	if botSettings, ok := fbmBots.ByID[pageID]; ok {
		botContext = bots.NewBotContext(handler.BotHost, botSettings)
	} else {
		err = fmt.Errorf("bot settings not found by ID=[%v]", pageID)
	}
	return
}
// CreateWebhookContext creates the per-update context for handling FBM
// webhook requests by delegating to the FBM-specific constructor.
func (webhookHandler) CreateWebhookContext(appContext bots.BotAppContext, r *http.Request, botContext bots.BotContext, webhookInput bots.WebhookInput, botCoreStores bots.BotCoreStores, gaMeasurement bots.GaQueuer) bots.WebhookContext {
	return newFbmWebhookContext(appContext, r, botContext, webhookInput, botCoreStores, gaMeasurement)
}
// GetResponder creates a responder able to send messages back to FBM.
// The supplied context must be the FBM-specific implementation; any other
// concrete type indicates a programming error and panics.
func (webhookHandler) GetResponder(w http.ResponseWriter, whc bots.WebhookContext) bots.WebhookResponder {
	switch ctx := whc.(type) {
	case *fbmWebhookContext:
		return newFbmWebhookResponder(ctx)
	default:
		panic(fmt.Sprintf("Expected fbmWebhookContext, got: %T", whc))
	}
}
// CreateBotCoreStores creates the data-access layer (bot core stores) for
// the framework, scoped to the FBM platform.
func (handler webhookHandler) CreateBotCoreStores(appContext bots.BotAppContext, r *http.Request) bots.BotCoreStores {
	return handler.BotHost.GetBotCoreStores(PlatformID, appContext, r)
}
|
{
if translatorProvider == nil {
panic("translatorProvider == nil")
}
return webhookHandler{
BaseHandler: bots.BaseHandler{
BotPlatform: Platform{},
TranslatorProvider: translatorProvider,
},
bots: botsBy,
}
}
|
encode.go
|
// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package json
import (
"strconv"
"strings"
"github.com/golang/protobuf/v2/internal/errors"
)
// Encoder provides methods to write out JSON constructs and values. The user is
// responsible for producing valid sequences of JSON constructs and values.
type Encoder struct {
	indent   string // one indentation level; empty string means compact output
	lastType Type   // kind of the last construct written; drives comma/indent insertion
	indents  []byte // accumulated indentation prefix for the current nesting depth
	out      []byte // the encoded JSON output buffer
}
// NewEncoder returns an Encoder.
//
// If indent is a non-empty string, it causes every entry for an Array or Object
// to be preceded by the indent and trailed by a newline.
func NewEncoder(indent string) (*Encoder, error)
|
// Bytes returns the content of the written bytes.
// The returned slice aliases the encoder's internal buffer; it is not a copy.
func (e *Encoder) Bytes() []byte {
	return e.out
}
// WriteNull writes out the null value.
func (e *Encoder) WriteNull() {
	e.prepareNext(Null)
	e.out = append(e.out, "null"...)
}
// WriteBool writes out the given boolean value.
func (e *Encoder) WriteBool(b bool) {
	e.prepareNext(Bool)
	// strconv.AppendBool emits exactly "true" or "false".
	e.out = strconv.AppendBool(e.out, b)
}
// WriteString writes out the given string in JSON string value.
// Any error from appendString is passed through to the caller.
func (e *Encoder) WriteString(s string) error {
	e.prepareNext(String)
	var err error
	e.out, err = appendString(e.out, s)
	return err
}
// WriteFloat writes out the given float and bitSize in JSON number value.
// bitSize selects float32 vs float64 formatting in appendFloat.
func (e *Encoder) WriteFloat(n float64, bitSize int) {
	e.prepareNext(Number)
	e.out = appendFloat(e.out, n, bitSize)
}
// WriteInt writes out the given signed integer in JSON number value.
func (e *Encoder) WriteInt(n int64) {
	e.prepareNext(Number)
	// Append the base-10 representation directly into the output buffer.
	e.out = strconv.AppendInt(e.out, n, 10)
}
// WriteUint writes out the given unsigned integer in JSON number value.
func (e *Encoder) WriteUint(n uint64) {
	e.prepareNext(Number)
	// Append the base-10 representation directly into the output buffer.
	e.out = strconv.AppendUint(e.out, n, 10)
}
// StartObject writes out the '{' symbol.
func (e *Encoder) StartObject() {
	e.prepareNext(StartObject)
	e.out = append(e.out, '{')
}
// EndObject writes out the '}' symbol.
func (e *Encoder) EndObject() {
	e.prepareNext(EndObject)
	e.out = append(e.out, '}')
}
// WriteName writes out the given string in JSON string value and the name
// separator ':'.
func (e *Encoder) WriteName(s string) error {
	e.prepareNext(Name)
	// Errors returned by appendString() are non-fatal: the ':' separator is
	// still emitted and whatever appendString produced stays in the output.
	var err error
	e.out, err = appendString(e.out, s)
	e.out = append(e.out, ':')
	return err
}
// StartArray writes out the '[' symbol.
func (e *Encoder) StartArray() {
	e.prepareNext(StartArray)
	e.out = append(e.out, '[')
}
// EndArray writes out the ']' symbol.
func (e *Encoder) EndArray() {
	e.prepareNext(EndArray)
	e.out = append(e.out, ']')
}
// prepareNext adds possible comma and indentation for the next value based
// on last type and indent option. It also updates lastType to next.
//
// The Type values are bit flags, so the &-masks below test membership in a
// set of construct kinds in a single comparison.
func (e *Encoder) prepareNext(next Type) {
	defer func() {
		// Set lastType to next.
		e.lastType = next
	}()
	if len(e.indent) == 0 {
		// Compact mode: no newlines or indentation, only separating commas.
		// Need to add comma on the following condition.
		if e.lastType&(Null|Bool|Number|String|EndObject|EndArray) != 0 &&
			next&(Name|Null|Bool|Number|String|StartObject|StartArray) != 0 {
			e.out = append(e.out, ',')
		}
		return
	}
	switch {
	case e.lastType&(StartObject|StartArray) != 0:
		// If next type is NOT closing, add indent and newline.
		if next&(EndObject|EndArray) == 0 {
			e.indents = append(e.indents, e.indent...)
			e.out = append(e.out, '\n')
			e.out = append(e.out, e.indents...)
		}
	case e.lastType&(Null|Bool|Number|String|EndObject|EndArray) != 0:
		switch {
		// If next type is either a value or name, add comma and newline.
		case next&(Name|Null|Bool|Number|String|StartObject|StartArray) != 0:
			e.out = append(e.out, ',', '\n')
		// If next type is a closing object or array, adjust indentation.
		case next&(EndObject|EndArray) != 0:
			e.indents = e.indents[:len(e.indents)-len(e.indent)]
			e.out = append(e.out, '\n')
		}
		e.out = append(e.out, e.indents...)
	case e.lastType&Name != 0:
		// After an object member name, separate the value with one space.
		e.out = append(e.out, ' ')
	}
}
|
{
e := &Encoder{}
if len(indent) > 0 {
if strings.Trim(indent, " \t") != "" {
return nil, errors.New("indent may only be composed of space or tab characters")
}
e.indent = indent
}
return e, nil
}
|
__init__.py
|
# -*- mode: python; coding: utf-8; -*-
"""Common services code"""
import os
AVAILABLE_SERVICES = ['habit_list', 'idonethis', 'nikeplus', 'pedometerpp',
|
def get_service_module(service_name):
    """Import and return the named service module from dayonetools.services."""
    from importlib import import_module
    return import_module('dayonetools.services.{0}'.format(service_name))
def convert_to_dayone_date_string(day_str, hour=10, minute=0, second=0):
    """Translate a 'yyyy-mm-dd' date string into Day One's ISO-8601 form.

    The optional hour, minute, and second are folded into the timestamp and a
    trailing 'Z' is appended; Day One silently drops entries without it.
    """
    from datetime import datetime

    year, month, day = day_str.split('-')
    # Day One mishandles timezone data, so a daytime hour (default 10:00) is
    # used instead of midnight to keep entries from shifting by a day.
    stamp = datetime.utcnow().replace(
        year=int(year),
        month=int(month),
        day=int(day),
        hour=int(hour),
        minute=int(minute),
        second=int(second),
        microsecond=0,
    )
    return stamp.isoformat() + 'Z'
def get_outfolder_names(service, outfolderseed, verbose=False):
    """Resolve the output folder and temp folder for a service.

    Ensures a per-service temp folder exists (creating it if needed), then
    picks the destination folder from ``outfolderseed``:

    * ``'test'`` -- force output into the temp folder
    * ``'auto'`` -- look for the Day One journal at the usual Dropbox and
      iCloud locations
    * any existing path -- use that path as given

    Falls back to the temp folder when nothing matched.

    :param service: Name of the calling service (names the temp subfolder)
    :param outfolderseed: command line seed for the outfolder
    :param verbose: enables progress output (not yet implemented, see FIXME)
    :returns: tuple ``(d1folder, tempfolder)``
    """
    # FIXME: Add progress output for --verbose
    tempfolder = os.path.abspath(os.path.join('..', 'temp', service))
    if not os.path.exists(tempfolder):
        os.makedirs(tempfolder)
    d1folder = ''
    if outfolderseed == 'test':
        # force it into the tempfolder
        d1folder = tempfolder
    elif outfolderseed == 'auto':
        # Try to find the Day One journal at the usual suspect places
        # FIXME: What is the correct name, I'm not using DropBox
        #candidate = os.path.expanduser('~/Dropbox/Apps/Day One/Journal.dayone/entries')
        candidate = os.path.expanduser('~/Dropbox/Applications/Day One/Journal.dayone/entries')
        if os.path.exists(candidate):
            d1folder = candidate
        else:
            # Fall back to the iCloud container used by the Day One app.
            candidate = os.path.expanduser(
                '~/Library/Mobile Documents/5U8NS4GX82~com~dayoneapp~dayone/Documents/Journal_dayone/entries'
            )
            if os.path.exists(candidate):
                d1folder = candidate
    elif os.path.exists(outfolderseed):
        # We try to use the given folder
        d1folder = outfolderseed
    if d1folder == '':
        # TODO: issue a warning or flag an error
        d1folder = tempfolder
    return d1folder, tempfolder
# Make all services available from this level
# NOTE(review): `service` is rebound each iteration, so after the loop only
# the last module stays referenced here — presumably the import side effects
# are the real intent; confirm.
for service_name in AVAILABLE_SERVICES:
    service = get_service_module(service_name)
|
'sleep_cycle']
|
info_test.go
|
// Unless explicitly stated otherwise all files in this repository are licensed
// under the Apache License Version 2.0.
// This product includes software developed at Datadog (https://www.datadoghq.com/).
// Copyright 2016-present Datadog, Inc.
package api
import (
"log"
"net/http/httptest"
"net/url"
"testing"
"time"
"github.com/DataDog/datadog-agent/pkg/trace/config"
"github.com/DataDog/datadog-agent/pkg/trace/info"
"github.com/DataDog/datadog-agent/pkg/trace/test/testutil"
"github.com/stretchr/testify/assert"
)
// TestInfoHandler ensures that the keys returned by the /info handler do not
// change from one release to another to ensure consistency. Tracing clients
// depend on these keys to be the same. The chances of them changing are quite
// high if anyone ever modifies a field name in the (*AgentConfig).Config structure.
//
// * In case a field name gets modified, the `json:""` struct field tag
// should be used to ensure the old key is marshalled for this endpoint.
func TestInfoHandler(t *testing.T)
|
{
u, err := url.Parse("http://localhost:8888/proxy")
if err != nil {
log.Fatal(err)
}
jsonObfCfg := config.JSONObfuscationConfig{
Enabled: true,
KeepValues: []string{"a", "b", "c"},
ObfuscateSQLValues: []string{"x", "y"},
}
obfCfg := &config.ObfuscationConfig{
ES: jsonObfCfg,
Mongo: jsonObfCfg,
SQLExecPlan: jsonObfCfg,
SQLExecPlanNormalize: jsonObfCfg,
HTTP: config.HTTPObfuscationConfig{
RemoveQueryString: true,
RemovePathDigits: true,
},
RemoveStackTraces: false,
Redis: config.Enablable{Enabled: true},
Memcached: config.Enablable{Enabled: false},
}
conf := &config.AgentConfig{
Enabled: true,
Hostname: "test.host.name",
DefaultEnv: "prod",
ConfigPath: "/path/to/config",
Endpoints: []*config.Endpoint{{
APIKey: "123",
Host: "https://target-intake.datadoghq.com",
NoProxy: true,
}},
BucketInterval: time.Second,
ExtraAggregators: []string{"agg:val"},
ExtraSampleRate: 2.4,
TargetTPS: 11,
MaxEPS: 12,
ReceiverHost: "localhost",
ReceiverPort: 8111,
ReceiverSocket: "/sock/path",
ConnectionLimit: 12,
ReceiverTimeout: 100,
MaxRequestBytes: 123,
StatsWriter: &config.WriterConfig{
ConnectionLimit: 20,
QueueSize: 12,
FlushPeriodSeconds: 14.4,
},
TraceWriter: &config.WriterConfig{
ConnectionLimit: 21,
QueueSize: 13,
FlushPeriodSeconds: 15.4,
},
StatsdHost: "stastd.localhost",
StatsdPort: 123,
LogLevel: "WARN",
LogFilePath: "/path/to/logfile",
LogThrottling: false,
MaxMemory: 1000000,
MaxCPU: 12345,
WatchdogInterval: time.Minute,
ProxyURL: u,
SkipSSLValidation: false,
Ignore: map[string][]string{"K": {"1", "2"}},
ReplaceTags: []*config.ReplaceRule{{Name: "a", Pattern: "*", Repl: "b"}},
AnalyzedRateByServiceLegacy: map[string]float64{"X": 1.2},
AnalyzedSpansByService: map[string]map[string]float64{"X": {"Y": 2.4}},
DDAgentBin: "/path/to/core/agent",
Obfuscation: obfCfg,
}
var testCases = []struct {
name string
expected string
enableConfigEndpoint bool
}{
{
name: "default",
expected: `{
"version": "0.99.0",
"git_commit": "fab047e10",
"build_date": "2020-12-04 15:57:06.74187 +0200 EET m=+0.029001792",
"endpoints": [
"/v0.3/traces",
"/v0.3/services",
"/v0.4/traces",
"/v0.4/services",
"/v0.5/traces",
"/profiling/v1/input",
"/v0.6/stats",
"/appsec/proxy/",
"/debugger/v1/input"
],
"feature_flags": [
"feature_flag"
],
"client_drop_p0s": true,
"config": {
"default_env": "prod",
"target_tps": 11,
"max_eps": 12,
"receiver_port": 8111,
"receiver_socket": "/sock/path",
"connection_limit": 12,
"receiver_timeout": 100,
"max_request_bytes": 123,
"statsd_port": 123,
"max_memory": 1000000,
"max_cpu": 12345,
"analyzed_spans_by_service": {
"X": {
"Y": 2.4
}
},
"obfuscation": {
"elastic_search": true,
"mongo": true,
"sql_exec_plan": true,
"sql_exec_plan_normalize": true,
"http": {
"remove_query_string": true,
"remove_path_digits": true
},
"remove_stack_traces": false,
"redis": true,
"memcached": false
}
}
}`,
},
{
name: "debug",
enableConfigEndpoint: true,
expected: `{
"version": "0.99.0",
"git_commit": "fab047e10",
"build_date": "2020-12-04 15:57:06.74187 +0200 EET m=+0.029001792",
"endpoints": [
"/v0.3/traces",
"/v0.3/services",
"/v0.4/traces",
"/v0.4/services",
"/v0.5/traces",
"/profiling/v1/input",
"/v0.6/stats",
"/appsec/proxy/",
"/debugger/v1/input",
"/v0.6/config"
],
"feature_flags": [
"config_endpoint"
],
"client_drop_p0s": true,
"config": {
"default_env": "prod",
"target_tps": 11,
"max_eps": 12,
"receiver_port": 8111,
"receiver_socket": "/sock/path",
"connection_limit": 12,
"receiver_timeout": 100,
"max_request_bytes": 123,
"statsd_port": 123,
"max_memory": 1000000,
"max_cpu": 12345,
"analyzed_spans_by_service": {
"X": {
"Y": 2.4
}
},
"obfuscation": {
"elastic_search": true,
"mongo": true,
"sql_exec_plan": true,
"sql_exec_plan_normalize": true,
"http": {
"remove_query_string": true,
"remove_path_digits": true
},
"remove_stack_traces": false,
"redis": true,
"memcached": false
}
}
}`,
},
}
for _, tt := range testCases {
t.Run(tt.name, func(t *testing.T) {
rcv := newTestReceiverFromConfig(conf)
if tt.enableConfigEndpoint {
defer testutil.WithFeatures("config_endpoint")()
} else {
defer testutil.WithFeatures("feature_flag")()
}
defer func(old string) { info.Version = old }(info.Version)
defer func(old string) { info.GitCommit = old }(info.GitCommit)
defer func(old string) { info.BuildDate = old }(info.BuildDate)
info.Version = "0.99.0"
info.GitCommit = "fab047e10"
info.BuildDate = "2020-12-04 15:57:06.74187 +0200 EET m=+0.029001792"
_, h := rcv.makeInfoHandler()
rec := httptest.NewRecorder()
req := httptest.NewRequest("GET", "/info", nil)
h.ServeHTTP(rec, req)
assert.Equal(t, rec.Body.String(), tt.expected)
if rec.Body.String() != tt.expected {
t.Fatal("Output of /info has changed. Changing the keys "+
"is not allowed because the client rely on them and "+
"is considered a breaking change:\n\n%f", rec.Body.String())
}
})
}
}
|
|
transaction_pool.rs
|
// Copyright 2018 The Epic Developers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
pub mod common;
use self::core::core::verifier_cache::LruVerifierCache;
use self::core::core::{transaction, Block, BlockHeader};
use self::core::libtx;
use self::core::pow::Difficulty;
use self::keychain::{ExtKeychain, Keychain};
use self::util::RwLock;
use crate::common::*;
use epic_core as core;
use epic_keychain as keychain;
use epic_util as util;
use std::sync::Arc;
/// Test we can add some txs to the pool (both stempool and txpool).
#[test]
fn
|
() {
let keychain: ExtKeychain = Keychain::from_random_seed(false).unwrap();
let db_root = ".epic_transaction_pool".to_string();
clean_output_dir(db_root.clone());
let chain = Arc::new(ChainAdapter::init(db_root.clone()).unwrap());
let verifier_cache = Arc::new(RwLock::new(LruVerifierCache::new()));
// Initialize a new pool with our chain adapter.
let pool = RwLock::new(test_setup(chain.clone(), verifier_cache.clone()));
let header = {
let height = 1;
let key_id = ExtKeychain::derive_key_id(1, height as u32, 0, 0, 0);
let reward = libtx::reward::output(&keychain, &key_id, 0).unwrap();
let block = Block::new(&BlockHeader::default(), vec![], Difficulty::min(), reward).unwrap();
chain.update_db_for_block(&block);
block.header
};
// Now create tx to spend a coinbase, giving us some useful outputs for testing
// with.
let initial_tx = {
test_transaction_spending_coinbase(
&keychain,
&header,
vec![500, 600, 700, 800, 900, 1000, 1100, 1200, 1300, 1400],
)
};
// Add this tx to the pool (stem=false, direct to txpool).
{
let mut write_pool = pool.write();
write_pool
.add_to_pool(test_source(), initial_tx, false, &header)
.unwrap();
assert_eq!(write_pool.total_size(), 1);
}
// Test adding a tx that "double spends" an output currently spent by a tx
// already in the txpool. In this case we attempt to spend the original coinbase twice.
{
let tx = test_transaction_spending_coinbase(&keychain, &header, vec![501]);
let mut write_pool = pool.write();
assert!(write_pool
.add_to_pool(test_source(), tx, false, &header)
.is_err());
}
// tx1 spends some outputs from the initial test tx.
let tx1 = test_transaction(&keychain, vec![500, 600], vec![499, 599]);
// tx2 spends some outputs from both tx1 and the initial test tx.
let tx2 = test_transaction(&keychain, vec![499, 700], vec![498]);
// Take a write lock and add a couple of tx entries to the pool.
{
let mut write_pool = pool.write();
// Check we have a single initial tx in the pool.
assert_eq!(write_pool.total_size(), 1);
// First, add a simple tx directly to the txpool (stem = false).
write_pool
.add_to_pool(test_source(), tx1.clone(), false, &header)
.unwrap();
assert_eq!(write_pool.total_size(), 2);
// Add another tx spending outputs from the previous tx.
write_pool
.add_to_pool(test_source(), tx2.clone(), false, &header)
.unwrap();
assert_eq!(write_pool.total_size(), 3);
}
// Test adding the exact same tx multiple times (same kernel signature).
// This will fail for stem=false during tx aggregation due to duplicate
// outputs and duplicate kernels.
{
let mut write_pool = pool.write();
assert!(write_pool
.add_to_pool(test_source(), tx1.clone(), false, &header)
.is_err());
}
// Test adding a duplicate tx with the same input and outputs.
// Note: not the *same* tx, just same underlying inputs/outputs.
{
let tx1a = test_transaction(&keychain, vec![500, 600], vec![499, 599]);
let mut write_pool = pool.write();
assert!(write_pool
.add_to_pool(test_source(), tx1a, false, &header)
.is_err());
}
// Test adding a tx attempting to spend a non-existent output.
{
let bad_tx = test_transaction(&keychain, vec![10_001], vec![10_000]);
let mut write_pool = pool.write();
assert!(write_pool
.add_to_pool(test_source(), bad_tx, false, &header)
.is_err());
}
// Test adding a tx that would result in a duplicate output (conflicts with
// output from tx2). For reasons of security all outputs in the UTXO set must
// be unique. Otherwise spending one will almost certainly cause the other
// to be immediately stolen via a "replay" tx.
{
let tx = test_transaction(&keychain, vec![900], vec![498]);
let mut write_pool = pool.write();
assert!(write_pool
.add_to_pool(test_source(), tx, false, &header)
.is_err());
}
// Confirm the tx pool correctly identifies an invalid tx (already spent).
{
let mut write_pool = pool.write();
let tx3 = test_transaction(&keychain, vec![500], vec![497]);
assert!(write_pool
.add_to_pool(test_source(), tx3, false, &header)
.is_err());
assert_eq!(write_pool.total_size(), 3);
}
// Now add a couple of txs to the stempool (stem = true).
{
let mut write_pool = pool.write();
let tx = test_transaction(&keychain, vec![599], vec![598]);
write_pool
.add_to_pool(test_source(), tx, true, &header)
.unwrap();
let tx2 = test_transaction(&keychain, vec![598], vec![597]);
write_pool
.add_to_pool(test_source(), tx2, true, &header)
.unwrap();
assert_eq!(write_pool.total_size(), 3);
assert_eq!(write_pool.stempool.size(), 2);
}
// Check we can take some entries from the stempool and "fluff" them into the
// txpool. This also exercises multi-kernel txs.
{
let mut write_pool = pool.write();
let agg_tx = write_pool
.stempool
.aggregate_transaction()
.unwrap()
.unwrap();
assert_eq!(agg_tx.kernels().len(), 2);
write_pool
.add_to_pool(test_source(), agg_tx, false, &header)
.unwrap();
assert_eq!(write_pool.total_size(), 4);
assert!(write_pool.stempool.is_empty());
}
// Adding a duplicate tx to the stempool will result in it being fluffed.
// This handles the case of the stem path having a cycle in it.
{
let mut write_pool = pool.write();
let tx = test_transaction(&keychain, vec![597], vec![596]);
write_pool
.add_to_pool(test_source(), tx.clone(), true, &header)
.unwrap();
assert_eq!(write_pool.total_size(), 4);
assert_eq!(write_pool.stempool.size(), 1);
// Duplicate stem tx so fluff, adding it to txpool and removing it from stempool.
write_pool
.add_to_pool(test_source(), tx.clone(), true, &header)
.unwrap();
assert_eq!(write_pool.total_size(), 5);
assert!(write_pool.stempool.is_empty());
}
// Now check we can correctly deaggregate a multi-kernel tx based on current
// contents of the txpool.
// We will do this be adding a new tx to the pool
// that is a superset of a tx already in the pool.
{
let mut write_pool = pool.write();
let tx4 = test_transaction(&keychain, vec![800], vec![799]);
// tx1 and tx2 are already in the txpool (in aggregated form)
// tx4 is the "new" part of this aggregated tx that we care about
let agg_tx = transaction::aggregate(vec![tx1.clone(), tx2.clone(), tx4]).unwrap();
agg_tx.validate(verifier_cache.clone()).unwrap();
write_pool
.add_to_pool(test_source(), agg_tx, false, &header)
.unwrap();
assert_eq!(write_pool.total_size(), 6);
let entry = write_pool.txpool.entries.last().unwrap();
assert_eq!(entry.tx.kernels().len(), 1);
assert_eq!(entry.src.debug_name, "deagg");
}
// Check we cannot "double spend" an output spent in a previous block.
// We use the initial coinbase output here for convenience.
{
let mut write_pool = pool.write();
let double_spend_tx =
{ test_transaction_spending_coinbase(&keychain, &header, vec![1000]) };
// check we cannot add a double spend to the stempool
assert!(write_pool
.add_to_pool(test_source(), double_spend_tx.clone(), true, &header)
.is_err());
// check we cannot add a double spend to the txpool
assert!(write_pool
.add_to_pool(test_source(), double_spend_tx.clone(), false, &header)
.is_err());
}
}
|
test_the_transaction_pool
|
regions-simple.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
pub fn main()
|
{
let mut x: int = 3;
let y: &mut int = &mut x;
*y = 5;
debug!(*y);
}
|
|
get_virtual_network_gateway_learned_routes.py
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
__all__ = [
'GetVirtualNetworkGatewayLearnedRoutesResult',
'AwaitableGetVirtualNetworkGatewayLearnedRoutesResult',
'get_virtual_network_gateway_learned_routes',
]
@pulumi.output_type
class
|
:
"""
List of virtual network gateway routes
"""
def __init__(__self__, value=None):
    # Generated result-object constructor: validates that the raw 'value'
    # payload returned by the provider is a list, then stores it.
    if value and not isinstance(value, list):
        raise TypeError("Expected argument 'value' to be a list")
    pulumi.set(__self__, "value", value)
@property
@pulumi.getter
def value(self) -> Optional[Sequence['outputs.GatewayRouteResponse']]:
    """
    List of gateway routes
    """
    return pulumi.get(self, "value")
class AwaitableGetVirtualNetworkGatewayLearnedRoutesResult(GetVirtualNetworkGatewayLearnedRoutesResult):
    # Awaitable wrapper so callers may `await` the (already resolved) result.
    # The `if False: yield` makes __await__ a generator without ever yielding.
    # pylint: disable=using-constant-test
    def __await__(self):
        if False:
            yield self
        return GetVirtualNetworkGatewayLearnedRoutesResult(
            value=self.value)
def get_virtual_network_gateway_learned_routes(resource_group_name: Optional[str] = None,
                                               virtual_network_gateway_name: Optional[str] = None,
                                               opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetVirtualNetworkGatewayLearnedRoutesResult:
    """
    List of virtual network gateway routes

    :param str resource_group_name: The name of the resource group.
    :param str virtual_network_gateway_name: The name of the virtual network gateway.
    :param pulumi.InvokeOptions opts: Options controlling how the invoke runs.
    """
    __args__ = dict()
    __args__['resourceGroupName'] = resource_group_name
    __args__['virtualNetworkGatewayName'] = virtual_network_gateway_name
    if opts is None:
        opts = pulumi.InvokeOptions()
    if opts.version is None:
        # Default to this SDK's version so the matching provider plugin is used.
        opts.version = _utilities.get_version()
    # Invoke the provider function for this specific API version (2018-06-01).
    __ret__ = pulumi.runtime.invoke('azure-native:network/v20180601:getVirtualNetworkGatewayLearnedRoutes', __args__, opts=opts, typ=GetVirtualNetworkGatewayLearnedRoutesResult).value
    return AwaitableGetVirtualNetworkGatewayLearnedRoutesResult(
        value=__ret__.value)
|
GetVirtualNetworkGatewayLearnedRoutesResult
|
Keyboard.js
|
import React, { Component } from 'react';
// Keyboard layout: the alphabet split over three rows.
const letters = [
    ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i'],
    ['j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r'],
    ['s', 't', 'u', 'v', 'w', 'x', 'y', 'z']
];
// Centers each row of keys.
const rowStyle = {
    justifyContent: 'center',
    alignItems: 'center'
};
// Fixed-size square styling shared by every key button.
const letterStyle = {
    fontSize: '20px',
    fontWeight:'bold',
    height: '50px',
    margin: '2px',
    width: '50px'
};
class Keyboard extends Component {
getButtonClass(letter) {
if(this.props.isLetterInWordMap[letter] === true) {
return 'btn btn-success';
}
if(this.props.isLetterInWordMap[letter] === false) {
return 'btn btn-danger';
}
return 'btn btn-default';
}
// A key is disabled once it has been used (truthy lettersStatus entry)
// or once the game has ended.
isButtonDisabled(letter) {
    return this.props.lettersStatus[letter] || this.props.isGameEnded;
}
createButton(letter) {
return <button
key={`input_${letter}`}
type='button'
className={this.getButtonClass(letter)}
style={letterStyle}
disabled={this.isButtonDisabled(letter)}
onClick={() => this.props.onClick(letter)}
>{letter}</button>;
|
}
// Renders the three keyboard rows; each letter becomes one button.
render() {
    return <div>
        {letters.map(row =>
            // The row array stringifies to a comma-joined list, giving a
            // stable unique key per row.
            <div className='row' key={`row_${row}`} style={rowStyle}>
                {row.map(letter => this.createButton(letter))}
            </div>
        )}
    </div>;
}
}
export default Keyboard;
| |
index.tsx
|
import React from 'react';
import { categories } from '../../utils/categories';
import {
Container,
Title,
Amount,
Footer,
Category,
Icon,
CategoryName,
Date
} from './styles';
// Shape of a single transaction as rendered by the card component.
export interface TransactionCardProps {
    type: 'positive' | 'negative';  // income vs. outgoing entry
    name: string;                   // transaction title
    amount: string;                 // amount text, rendered as-is
    category: string;               // key matched against utils/categories
    date: string;                   // date text, rendered as-is
}

// Component props: the transaction to display.
interface Props {
    data: TransactionCardProps;
}
export function
|
({
data
}: Props) {
const [category] = categories.filter(
item => item.key === data.category
);
return (
<Container >
<Title>
{data.name}
</Title>
<Amount type={data.type}>
{
data.type === 'negative' && '- '
}
{data.amount}
</Amount>
<Footer>
<Category>
<Icon name={category.icon} />
<CategoryName>
{category.name}
</CategoryName>
</Category>
<Date>
{data.date}
</Date>
</Footer>
</Container >
);
}
|
TransactionCard
|
leetcode94.rs
|
// https://leetcode-cn.com/problems/binary-tree-inorder-traversal/
use std::{cell::RefCell, rc::Rc};
/// Iterative in-order traversal (left subtree, node, right subtree) of a
/// binary tree, returning the visited values in order.
pub fn inorder_traversal(root: Option<Rc<RefCell<TreeNode>>>) -> Vec<i32> {
    let mut result = Vec::new();
    let mut stack: Vec<Rc<RefCell<TreeNode>>> = Vec::new();
    let mut node = root;
    // Descend left pushing ancestors; pop to visit, then move right.
    while node.is_some() || !stack.is_empty() {
        while let Some(n) = node {
            node = n.borrow().left.clone();
            stack.push(n);
        }
        // Stack is non-empty here: the outer loop guard guarantees it.
        let n = stack.pop().unwrap();
        result.push(n.borrow().val);
        node = n.borrow().right.clone();
    }
    result
}
// Definition for a binary tree node.
#[derive(Debug, PartialEq, Eq)]
pub struct TreeNode {
    pub val: i32,
    pub left: Option<Rc<RefCell<Self>>>,
    pub right: Option<Rc<RefCell<Self>>>,
}
impl TreeNode {
    /// Creates a leaf node holding `val`.
    #[inline]
    pub fn new(val: i32) -> Self {
        Self {
            val,
            left: None,
            right: None,
        }
    }
}
// tree hash_table stack
#[test]
#[ignore]
fn test2_94() {
use leetcode_prelude::btree;
assert_eq!(inorder_traversal(btree![1, null, 2, 3]), vec![1, 3, 2]);
assert_eq!(inorder_traversal(btree![]), vec![]);
assert_eq!(inorder_traversal(btree![1]), vec![1]);
assert_eq!(inorder_traversal(btree![1, 2]), vec![2, 1]);
assert_eq!(inorder_traversal(btree![1, null, 2]), vec![1, 2]);
}
|
TreeNode
|
timer.rs
|
//! # Timer
use crate::hal::timer::{CountDown, Periodic};
use crate::pac::{TIM1, TIM2, TIM3, TIM4};
use cast::{u16, u32};
use cortex_m::peripheral::syst::SystClkSource;
use cortex_m::peripheral::SYST;
use nb;
use void::Void;
use crate::rcc::{Clocks, APB1, APB2};
use crate::time::Hertz;
/// Associated clocks with timers
/// Associated clocks with timers
pub trait PclkSrc {
    /// Returns the peripheral-bus clock frequency that drives this timer.
    fn get_clk(clocks: &Clocks) -> Hertz;
}
/// Implements [`PclkSrc`] for a timer peripheral by mapping it to the
/// `Clocks` accessor for the APB bus it hangs off.
macro_rules! impl_pclk {
    ($TIMX:ident, $pclkX:ident) => {
        impl PclkSrc for $TIMX {
            fn get_clk(clocks: &Clocks) -> Hertz {
                clocks.$pclkX()
            }
        }
    };
}
// TIM1 is clocked from APB2; TIM2..TIM4 are clocked from APB1.
impl_pclk! {TIM1, pclk2_tim}
impl_pclk! {TIM2, pclk1_tim}
impl_pclk! {TIM3, pclk1_tim}
impl_pclk! {TIM4, pclk1_tim}
/// Interrupt events
pub enum Event {
    /// Timer timed out / count down ended
    Update,
}
/// Hardware timer wrapper, owning the peripheral plus a copy of the
/// clock configuration used to translate timeout rates into tick counts.
pub struct Timer<TIM> {
    tim: TIM,       // the underlying timer peripheral
    clocks: Clocks, // clock frequencies frozen at RCC configuration time
}
impl Timer<SYST> {
pub fn syst<T>(mut syst: SYST, timeout: T, clocks: Clocks) -> Self
where
T: Into<Hertz>,
|
syst.set_clock_source(SystClkSource::Core);
let mut timer = Timer { tim: syst, clocks };
timer.start(timeout);
timer
}
/// Starts listening for an `event`
pub fn listen(&mut self, event: Event) {
match event {
Event::Update => self.tim.enable_interrupt(),
}
}
/// Stops listening for an `event`
pub fn unlisten(&mut self, event: Event) {
match event {
Event::Update => self.tim.disable_interrupt(),
}
}
}
impl CountDown for Timer<SYST> {
    type Time = Hertz;
    fn start<T>(&mut self, timeout: T)
    where
        T: Into<Hertz>,
    {
        // Reload value for the requested rate; SysTick counts rvr + 1 ticks.
        let rvr = self.clocks.sysclk().0 / timeout.into().0 - 1;
        // SysTick's reload register is only 24 bits wide.
        assert!(rvr < (1 << 24));
        self.tim.set_reload(rvr);
        self.tim.clear_current();
        self.tim.enable_counter();
    }
    // Non-blocking poll: Ok once the counter has wrapped since last check.
    fn wait(&mut self) -> nb::Result<(), Void> {
        if self.tim.has_wrapped() {
            Ok(())
        } else {
            Err(nb::Error::WouldBlock)
        }
    }
}
impl Periodic for Timer<SYST> {}
/// Generates the `Timer` constructor, interrupt control and `CountDown`
/// implementation for each general-purpose timer peripheral, given its
/// RCC enable/reset bit names and the APB bus it lives on.
macro_rules! hal {
    ($($TIMX:ident: ($timX:ident, $timXen:ident, $timXrst:ident, $apbX:ident),)+) => {
        $(
            impl Timer<$TIMX> {
                /// Configures the timer as periodic count-down and starts it.
                pub fn $timX<T>(tim: $TIMX, timeout: T, clocks: Clocks, apb1: &mut $apbX) -> Self
                where
                    T: Into<Hertz>,
                {
                    // enable and reset peripheral to a clean slate state
                    apb1.enr().modify(|_, w| w.$timXen().set_bit());
                    apb1.rstr().modify(|_, w| w.$timXrst().set_bit());
                    apb1.rstr().modify(|_, w| w.$timXrst().clear_bit());
                    let mut timer = Timer { clocks, tim };
                    timer.start(timeout);
                    timer
                }
                /// Starts listening for an `event`
                pub fn listen(&mut self, event: Event) {
                    match event {
                        // Update-interrupt enable bit
                        Event::Update => self.tim.dier.write(|w| w.uie().set_bit()),
                    }
                }
                /// Stops listening for an `event`
                pub fn unlisten(&mut self, event: Event) {
                    match event {
                        Event::Update => self.tim.dier.write(|w| w.uie().clear_bit()),
                    }
                }
            }
            impl CountDown for Timer<$TIMX> {
                type Time = Hertz;
                fn start<T>(&mut self, timeout: T)
                where
                    T: Into<Hertz>,
                {
                    // pause
                    self.tim.cr1.modify(|_, w| w.cen().clear_bit());
                    let frequency = timeout.into().0;
                    let timer_clock = $TIMX::get_clk(&self.clocks);
                    // Split total ticks into prescaler + auto-reload so the
                    // 16-bit counter can represent the period.
                    let ticks = timer_clock.0 / frequency;
                    let psc = u16((ticks - 1) / (1 << 16)).unwrap();
                    self.tim.psc.write(|w| unsafe { w.psc().bits(psc) });
                    let arr = u16(ticks / u32(psc + 1)).unwrap();
                    self.tim.arr.write(|w| unsafe { w.bits(u32(arr)) });
                    // Trigger an update event to load the prescaler value to the clock
                    self.tim.egr.write(|w| w.ug().set_bit());
                    // The above line raises an update event which will indicate
                    // that the timer is already finished. Since this is not the case,
                    // it should be cleared
                    self.tim.sr.modify(|_, w| w.uif().clear_bit());
                    // start counter
                    self.tim.cr1.modify(|_, w| w.cen().set_bit());
                }
                fn wait(&mut self) -> nb::Result<(), Void> {
                    // UIF set means the period elapsed; clear it and report Ok.
                    if self.tim.sr.read().uif().bit_is_clear() {
                        Err(nb::Error::WouldBlock)
                    } else {
                        self.tim.sr.modify(|_, w| w.uif().clear_bit());
                        Ok(())
                    }
                }
            }
            impl Periodic for Timer<$TIMX> {}
        )+
    }
}
hal! {
    TIM1: (tim1, tim1en, tim1rst, APB2),
    TIM2: (tim2, tim2en, tim2rst, APB1),
    TIM3: (tim3, tim3en, tim3rst, APB1),
    TIM4: (tim4, tim4en, tim4rst, APB1),
}
|
{
|
daemon.py
|
import os
from datetime import datetime
from .. import auth, types, utils
class DaemonApiMixin(object):
    """System/daemon-level endpoints of the Docker Engine API: data
    usage, event stream, info, registry login, ping and version.
    """

    @utils.minimum_version('1.25')
    def df(self):
        """
        Get data usage information.

        Returns:
            (dict): A dictionary representing different resource categories
            and their respective data usage.

        Raises:
            :py:class:`docker.errors.APIError`
                If the server returns an error.
        """
        url = self._url('/system/df')
        return self._result(self._get(url), True)

    def events(self, since=None, until=None, filters=None, decode=None):
        """
        Get real-time events from the server. Similar to the ``docker events``
        command.

        Args:
            since (UTC datetime or int): Get events from this point
            until (UTC datetime or int): Get events until this point
            filters (dict): Filter the events by event time, container or image
            decode (bool): If set to true, stream will be decoded into dicts on
                the fly. False by default.

        Returns:
            A :py:class:`docker.types.daemon.CancellableStream` generator

        Raises:
            :py:class:`docker.errors.APIError`
                If the server returns an error.

        Example:

            >>> for event in client.events(decode=True)
            ...   print(event)
            {u'from': u'image/with:tag',
             u'id': u'container-id',
             u'status': u'start',
             u'time': 1423339459}
            ...

            or

            >>> events = client.events()
            >>> for event in events:
            ...   print(event)
            >>> # and cancel from another thread
            >>> events.close()
        """
        # The API expects Unix timestamps; accept datetimes for convenience.
        if isinstance(since, datetime):
            since = utils.datetime_to_timestamp(since)
        if isinstance(until, datetime):
            until = utils.datetime_to_timestamp(until)
        if filters:
            filters = utils.convert_filters(filters)
        params = {
            'since': since,
            'until': until,
            'filters': filters
        }
        url = self._url('/events')
        # Stream with no timeout: the event feed is long-lived by design.
        response = self._get(url, params=params, stream=True, timeout=None)
        stream = self._stream_helper(response, decode=decode)
        return types.CancellableStream(stream, response)

    def info(self):
        """
        Display system-wide information. Identical to the ``docker info``
        command.

        Returns:
            (dict): The info as a dict

        Raises:
            :py:class:`docker.errors.APIError`
                If the server returns an error.
        """
        return self._result(self._get(self._url("/info")), True)

    def login(self, username, password=None, email=None, registry=None,
              reauth=False, dockercfg_path=None):
        """
        Authenticate with a registry. Similar to the ``docker login`` command.

        Args:
            username (str): The registry username
            password (str): The plaintext password
            email (str): The email for the registry account
            registry (str): URL to the registry.  E.g.
                ``https://index.docker.io/v1/``
            reauth (bool): Whether or not to refresh existing authentication on
                the Docker server.
            dockercfg_path (str): Use a custom path for the Docker config file
                (default ``$HOME/.docker/config.json`` if present,
                otherwise ``$HOME/.dockercfg``)

        Returns:
            (dict): The response from the login request

        Raises:
            :py:class:`docker.errors.APIError`
                If the server returns an error.
        """
        # If we don't have any auth data so far, try reloading the config file
        # one more time in case anything showed up in there.
        # If dockercfg_path is passed check to see if the config file exists,
        # if so load that config.
        if dockercfg_path and os.path.exists(dockercfg_path):
            self._auth_configs = auth.load_config(
                dockercfg_path, credstore_env=self.credstore_env
            )
        elif not self._auth_configs or self._auth_configs.is_empty:
            self._auth_configs = auth.load_config(
                credstore_env=self.credstore_env
            )
        authcfg = self._auth_configs.resolve_authconfig(registry)
        # If we found an existing auth config for this registry and username
        # combination, we can return it immediately unless reauth is requested.
        if authcfg and authcfg.get('username', None) == username \
                and not reauth:
            return authcfg
        req_data = {
            'username': username,
            'password': password,
            'email': email,
            'serveraddress': registry,
        }
        response = self._post_json(self._url('/auth'), data=req_data)
        if response.status_code == 200:
            # Cache the successful credentials for subsequent requests.
            self._auth_configs.add_auth(registry or auth.INDEX_NAME, req_data)
        return self._result(response, json=True)

    def ping(self):
        """
        Checks the server is responsive. An exception will be raised if it
        isn't responding.

        Returns:
            (bool) The response from the server.

        Raises:
            :py:class:`docker.errors.APIError`
                If the server returns an error.
        """
        return self._result(self._get(self._url('/_ping'))) == 'OK'

    def version(self, api_version=True):
        """
        Returns version information from the server. Similar to the ``docker
        version`` command.

        Returns:
            (dict): The server version information

        Raises:
            :py:class:`docker.errors.APIError`
                If the server returns an error.
        """
        url = self._url("/version", versioned_api=api_version)
        return self._result(self._get(url), json=True)
|
self._auth_configs = auth.load_config(
|
train.py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# File : train.py
# Author: Alvin(Xinyao) Sun <[email protected]>
# Date : 02.05.2021
import logging
import os
import sys
import hydra
import pytorch_lightning as pl
from omegaconf import DictConfig, OmegaConf
sys.path.append(os.path.dirname(os.path.realpath(__file__)))
log = logging.getLogger(__name__)
@hydra.main(config_path='config', config_name='train_config')
def main(cfg: DictConfig):
    """Entry point: instantiates the datamodule, model and trainer from
    the Hydra config, then runs training.
    """
    print(OmegaConf.to_yaml(cfg))
    # Seed everything for reproducibility.
    pl.seed_everything(cfg.seed)
    # ------------
    # data
    # ------------
    data_module = hydra.utils.instantiate(cfg.data)
    # ------------
    # model
    # ------------
    model = hydra.utils.instantiate(cfg.model)
    # ------------
    # training
    # ------------
    trainer = pl.Trainer(**(cfg.pl_trainer), checkpoint_callback=True)
    log.info('run training...')
    train_dataloader = data_module.train_dataloader()
    val_dataloader = data_module.val_dataloader()
    trainer.fit(model,
                train_dataloaders=train_dataloader,
                val_dataloaders=[val_dataloader])
if __name__ == '__main__':
    try:
        main()
    except Exception as e:
        # log.exception records the full traceback, not just the message,
        # which `log.error(e)` would discard.
        log.exception(e)
        # sys.exit over the interactive-only `exit()` builtin.
        sys.exit(1)
|
# ------------
data_module = hydra.utils.instantiate(cfg.data)
# ------------
|
program_run.rs
|
use std::io::{BufRead, BufReader, Read, Stdin};
use crate::{
context::{MutableContext, VariableStore, Variables},
parse_args,
printable::Printable,
program::Program,
value::{NumericValue, Value},
};
/// A single execution of a parsed `Program`, holding the program and its
/// mutable variable store (NR, FNR, FS, user variables, ...).
pub struct ProgramRun {
    program: Program,
    variables: Variables,
}
/// Result of a line read: number of bytes read (0 at EOF) or an I/O error.
type IOResult = std::io::Result<usize>;
/// Abstraction over line-readable inputs so the runner can treat stdin
/// and buffered files uniformly.
pub trait LineReadable {
    fn trait_read_line(&mut self, buffer: &mut String) -> IOResult;
}
// Delegate to `Stdin::read_line` (locks stdin per call).
impl LineReadable for Stdin {
    fn trait_read_line(&mut self, buffer: &mut String) -> IOResult {
        self.read_line(buffer)
    }
}
// Delegate to `BufRead::read_line` for any buffered reader.
impl<T: Read> LineReadable for BufReader<T> {
    fn trait_read_line(&mut self, buffer: &mut String) -> IOResult {
        self.read_line(buffer)
    }
}
impl ProgramRun {
    /// Wraps a parsed `Program` with a fresh, empty variable store.
    pub(crate) fn new_for_program(program: Program) -> ProgramRun {
        ProgramRun {
            program,
            variables: Variables::empty(),
        }
    }
    /// Feeds every line of `reader` through the program's main items and
    /// returns all produced output lines. Resets `FNR` to 0 first (it is
    /// a per-file counter, unlike `NR`).
    pub fn process_file<LR: LineReadable>(&mut self, reader: &mut LR) -> Vec<String> {
        self.variables
            .assign_variable("FNR", Value::Numeric(NumericValue::Integer(0)));
        let mut buffer = String::new();
        let mut output = vec![];
        loop {
            match reader.trait_read_line(&mut buffer) {
                Ok(n) => {
                    if n == 0 {
                        // EOF reached.
                        break;
                    }
                    // Strip the trailing newline. The buffer is cleared each
                    // iteration, so its length equals `n` bytes here.
                    if buffer.chars().last().unwrap() == '\n' {
                        buffer.truncate(n - 1);
                    }
                    output.append(&mut self.output_for_line(&buffer));
                    buffer.clear();
                }
                Err(error) => {
                    // Best-effort: report and stop processing this file.
                    eprintln!("Error encountered: {}", error);
                    break;
                }
            }
        }
        output
    }
    /// Runs the program's main items against one input record.
    fn output_for_line(&mut self, line: &str) -> Vec<String> {
        // Need explicit borrow of the variables to avoid borrowing `self` later
        let functions = &self.program.functions;
        self.variables.increment_variable("NR");
        self.variables.increment_variable("FNR");
        let mut context = MutableContext::for_variables(&mut self.variables);
        context.set_record_with_line(line);
        self.program
            .items
            .iter()
            .fold(Printable::wrap(()), |result, item| {
                result.and_then(|_| item.output_for_line(functions, &mut context))
            })
            .output
    }
    /// Runs all `BEGIN` items, returning any output they produce.
    pub fn output_for_begin_items(&mut self) -> Vec<String> {
        let variables = &mut self.variables;
        let functions = &self.program.functions;
        let mut context = MutableContext::for_variables(variables);
        self.program
            .items
            .iter()
            .fold(Printable::wrap(()), |result, item| {
                result.and_then(|_| item.output_for_begin(functions, &mut context))
            })
            .output
    }
    /// Seeds the variable store from command-line arguments:
    /// field separator, ARGC, and `-v`-style variable assignments.
    pub(super) fn apply_args(&mut self, args: &parse_args::Args) {
        self.variables
            .assign_variable("FS", Value::String(args.field_separator.clone()));
        self.variables.assign_variable(
            "ARGC",
            Value::Numeric(NumericValue::Integer(args.filepaths_to_parse.len() as i64)),
        );
        for (name, value) in args.variables.iter() {
            self.variables
                .assign_variable(name, Value::String(value.to_string()));
        }
    }
}
| |
parse.rs
|
// Copyright (c) 2021 Brendan Molloy <[email protected]>,
// Ilya Solovyiov <[email protected]>,
// Kai Ren <[email protected]>
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! [Cucumber Expressions][1] [AST] parser.
//!
//! See details in the [grammar spec][0].
//!
//! [0]: crate#grammar
//! [1]: https://github.com/cucumber/cucumber-expressions#readme
//! [AST]: https://en.wikipedia.org/wiki/Abstract_syntax_tree
use std::{fmt::Display, ops::RangeFrom};
use derive_more::{Display, Error};
use nom::{
branch::alt,
bytes::complete::{tag, take_while, take_while1},
character::complete::one_of,
combinator::{map, peek, verify},
error::{ErrorKind, ParseError},
multi::{many0, many1, separated_list1},
sequence::tuple,
AsChar, Compare, Err, FindToken, IResult, InputIter, InputLength,
InputTake, InputTakeAtPosition, Needed, Offset, Parser, Slice,
};
use crate::{
ast::{
Alternation, Alternative, Expression, Optional, Parameter,
SingleExpression,
},
combinator,
};
/// Reserved characters requiring a special handling.
pub const RESERVED_CHARS: &str = r#"{}()\/ "#;
/// Matches `normal` and [`RESERVED_CHARS`] escaped with `\`.
///
/// Uses [`combinator::escaped0`] under the hood.
///
/// # Errors
///
/// ## Recoverable [`Error`]
///
/// - If `normal` parser errors.
///
/// ## Irrecoverable [`Failure`]
///
/// - If `normal` parser fails.
/// - [`EscapedEndOfLine`].
/// - [`EscapedNonReservedCharacter`].
///
/// [`Error`]: Err::Error
/// [`EscapedEndOfLine`]: Error::EscapedEndOfLine
/// [`EscapedNonReservedCharacter`]: Error::EscapedNonReservedCharacter
/// [`Failure`]: Err::Failure
fn escaped_reserved_chars0<'a, Input: 'a, F, O1>(
    normal: F,
) -> impl FnMut(Input) -> IResult<Input, Input, Error<Input>>
where
    Input: Clone
        + Display
        + Offset
        + InputLength
        + InputTake
        + InputTakeAtPosition
        + Slice<RangeFrom<usize>>
        + InputIter,
    <Input as InputIter>::Item: AsChar + Copy,
    F: Parser<Input, O1, Error<Input>>,
    Error<Input>: ParseError<Input>,
    for<'s> &'s str: FindToken<<Input as InputIter>::Item>,
{
    combinator::map_err(
        combinator::escaped0(normal, '\\', one_of(RESERVED_CHARS)),
        |e| {
            // Translate nom's generic `Escaped` error into a precise one,
            // based on how much input remains after the backslash.
            if let Err::Error(Error::Other(span, ErrorKind::Escaped)) = e {
                match span.input_len() {
                    // Lone trailing `\`: there is nothing left to escape.
                    1 => Error::EscapedEndOfLine(span),
                    // `\x` where `x` is not reserved: report both chars.
                    n if n > 1 => {
                        Error::EscapedNonReservedCharacter(span.take(2))
                    }
                    _ => Error::EscapedNonReservedCharacter(span),
                }
                .failure()
            } else {
                e
            }
        },
    )
}
/// Parses a `parameter` as defined in the [grammar spec][0].
///
/// # Grammar
///
/// ```ebnf
/// parameter = '{', name*, '}'
/// name = (- name-to-escape) | ('\', name-to-escape)
/// name-to-escape = '{' | '}' | '(' | '/' | '\'
/// ```
///
/// # Example
///
/// ```text
/// {}
/// {name}
/// {with spaces}
/// {escaped \/\{\(}
/// {no need to escape )}
/// {🦀}
/// ```
///
/// # Errors
///
/// ## Recoverable [`Error`]
///
/// - If `input` doesn't start with `{`.
///
/// ## Irrecoverable [`Failure`].
///
/// - [`EscapedNonReservedCharacter`].
/// - [`NestedParameter`].
/// - [`OptionalInParameter`].
/// - [`UnescapedReservedCharacter`].
/// - [`UnfinishedParameter`].
///
/// [`Error`]: Err::Error
/// [`Failure`]: Err::Failure
/// [`EscapedNonReservedCharacter`]: Error::EscapedNonReservedCharacter
/// [`NestedParameter`]: Error::NestedParameter
/// [`OptionalInParameter`]: Error::OptionalInParameter
/// [`UnescapedReservedCharacter`]: Error::UnescapedReservedCharacter
/// [`UnfinishedParameter`]: Error::UnfinishedParameter
/// [0]: crate#grammar
pub fn parameter<'a, Input: 'a>(
    input: Input,
) -> IResult<Input, Parameter<Input>, Error<Input>>
where
    Input: Clone
        + Display
        + Offset
        + InputLength
        + InputTake
        + InputTakeAtPosition<Item = char>
        + Slice<RangeFrom<usize>>
        + InputIter
        + for<'s> Compare<&'s str>,
    <Input as InputIter>::Item: AsChar + Copy,
    Error<Input>: ParseError<Input>,
    for<'s> &'s str: FindToken<<Input as InputIter>::Item>,
{
    // Characters allowed unescaped inside a parameter name.
    let is_name = |c| !"{}(\\/".contains(c);
    // Turns a failed closing-`}` match into the most specific diagnostic,
    // by peeking at the character that interrupted the name.
    let fail = |input: Input, opening_brace| {
        match input.iter_elements().next().map(AsChar::as_char) {
            Some('{') => {
                // A complete nested `{...}` follows: report it with its span.
                if let Ok((_, (par, ..))) = peek(tuple((
                    parameter,
                    escaped_reserved_chars0(take_while(is_name)),
                    tag("}"),
                )))(input.clone())
                {
                    return Error::NestedParameter(
                        input.take(par.0.input_len() + 2),
                    )
                    .failure();
                }
                return Error::UnescapedReservedCharacter(input.take(1))
                    .failure();
            }
            Some('(') => {
                // A complete `(...)` optional inside the parameter.
                if let Ok((_, opt)) = peek(optional)(input.clone()) {
                    return Error::OptionalInParameter(
                        input.take(opt.0.input_len() + 2),
                    )
                    .failure();
                }
                return Error::UnescapedReservedCharacter(input.take(1))
                    .failure();
            }
            Some(c) if RESERVED_CHARS.contains(c) => {
                return Error::UnescapedReservedCharacter(input.take(1))
                    .failure();
            }
            _ => {}
        }
        // Nothing recognizable follows: the `{` was never closed.
        Error::UnfinishedParameter(opening_brace).failure()
    };
    let (input, opening_brace) = tag("{")(input)?;
    let (input, par_name) =
        escaped_reserved_chars0(take_while(is_name))(input)?;
    let (input, _) = combinator::map_err(tag("}"), |_| {
        fail(input.clone(), opening_brace.clone())
    })(input.clone())?;
    Ok((input, Parameter(par_name)))
}
/// Parses an `optional` as defined in the [grammar spec][0].
///
/// # Grammar
///
/// ```ebnf
/// optional = '(' text-in-optional+ ')'
/// text-in-optional = (- optional-to-escape) | ('\', optional-to-escape)
/// optional-to-escape = '(' | ')' | '{' | '/' | '\'
/// ```
///
/// # Example
///
/// ```text
/// (name)
/// (with spaces)
/// (escaped \/\{\()
/// (no need to escape })
/// (🦀)
/// ```
///
/// # Errors
///
/// ## Recoverable [`Error`]
///
/// - If `input` doesn't start with `(`.
///
/// ## Irrecoverable [`Failure`]
///
/// - [`AlternationInOptional`].
/// - [`EmptyOptional`].
/// - [`EscapedEndOfLine`].
/// - [`EscapedNonReservedCharacter`].
/// - [`NestedOptional`].
/// - [`ParameterInOptional`].
/// - [`UnescapedReservedCharacter`].
/// - [`UnfinishedOptional`].
///
/// [`Error`]: Err::Error
/// [`Failure`]: Err::Failure
/// [`AlternationInOptional`]: Error::AlternationInOptional
/// [`EmptyOptional`]: Error::EmptyOptional
/// [`EscapedEndOfLine`]: Error::EscapedEndOfLine
/// [`EscapedNonReservedCharacter`]: Error::EscapedNonReservedCharacter
/// [`NestedOptional`]: Error::NestedOptional
/// [`ParameterInOptional`]: Error::ParameterInOptional
/// [`UnescapedReservedCharacter`]: Error::UnescapedReservedCharacter
/// [`UnfinishedOptional`]: Error::UnfinishedOptional
/// [0]: crate#grammar
pub fn optional<'a, Input: 'a>(
    input: Input,
) -> IResult<Input, Optional<Input>, Error<Input>>
where
    Input: Clone
        + Display
        + Offset
        + InputLength
        + InputTake
        + InputTakeAtPosition<Item = char>
        + Slice<RangeFrom<usize>>
        + InputIter
        + for<'s> Compare<&'s str>,
    <Input as InputIter>::Item: AsChar + Copy,
    Error<Input>: ParseError<Input>,
    for<'s> &'s str: FindToken<<Input as InputIter>::Item>,
{
    // Characters allowed unescaped inside an optional's text.
    let is_in_optional = |c| !"(){\\/".contains(c);
    // Turns a failed closing-`)` match into the most specific diagnostic,
    // by peeking at the character that interrupted the optional's text.
    let fail = |input: Input, opening_brace| {
        match input.iter_elements().next().map(AsChar::as_char) {
            Some('(') => {
                // A complete nested `(...)` follows: report it with its span.
                if let Ok((_, (opt, ..))) = peek(tuple((
                    optional,
                    escaped_reserved_chars0(take_while(is_in_optional)),
                    tag(")"),
                )))(input.clone())
                {
                    return Error::NestedOptional(
                        input.take(opt.0.input_len() + 2),
                    )
                    .failure();
                }
                return Error::UnescapedReservedCharacter(input.take(1))
                    .failure();
            }
            Some('{') => {
                // A complete `{...}` parameter inside the optional.
                if let Ok((_, par)) = peek(parameter)(input.clone()) {
                    return Error::ParameterInOptional(
                        input.take(par.0.input_len() + 2),
                    )
                    .failure();
                }
                return Error::UnescapedReservedCharacter(input.take(1))
                    .failure();
            }
            Some('/') => {
                return Error::AlternationInOptional(input.take(1)).failure();
            }
            Some(c) if RESERVED_CHARS.contains(c) => {
                return Error::UnescapedReservedCharacter(input.take(1))
                    .failure();
            }
            _ => {}
        }
        Error::UnfinishedOptional(opening_brace).failure()
    };
    // Kept so the empty-optional error can span the full `()`.
    let original_input = input.clone();
    let (input, opening_paren) = tag("(")(input)?;
    let (input, opt) =
        escaped_reserved_chars0(take_while(is_in_optional))(input)?;
    let (input, _) = combinator::map_err(tag(")"), |_| {
        fail(input.clone(), opening_paren.clone())
    })(input.clone())?;
    // `()` is explicitly rejected: an optional must contain some text.
    if opt.input_len() == 0 {
        return Err(Err::Failure(Error::EmptyOptional(original_input.take(2))));
    }
    Ok((input, Optional(opt)))
}
/// Parses an `alternative` as defined in the [grammar spec][0].
///
/// # Grammar
///
/// ```ebnf
/// alternative = optional | (text-in-alternative+)
/// text-in-alternative = (- alternative-to-escape)
/// | ('\', alternative-to-escape)
/// alternative-to-escape = ' ' | '(' | '{' | '/' | '\'
/// ```
///
/// # Example
///
/// ```text
/// text
/// escaped\ whitespace
/// no-need-to-escape)}
/// 🦀
/// (optional)
/// ```
///
/// # Errors
///
/// ## Irrecoverable [`Failure`]
///
/// Any [`Failure`] of [`optional()`].
///
/// [`Failure`]: Err::Failure
/// [0]: crate#grammar
pub fn alternative<'a, Input: 'a>(
    input: Input,
) -> IResult<Input, Alternative<Input>, Error<Input>>
where
    Input: Clone
        + Display
        + Offset
        + InputLength
        + InputTake
        + InputTakeAtPosition<Item = char>
        + Slice<RangeFrom<usize>>
        + InputIter
        + for<'s> Compare<&'s str>,
    <Input as InputIter>::Item: AsChar + Copy,
    Error<Input>: ParseError<Input>,
    for<'s> &'s str: FindToken<<Input as InputIter>::Item>,
{
    // An alternative is either an optional, or a non-empty run of
    // (possibly escaped) text stopping at whitespace/reserved chars.
    let is_without_whitespace = |c| !" ({\\/".contains(c);
    alt((
        map(optional, Alternative::Optional),
        map(
            verify(
                escaped_reserved_chars0(take_while(is_without_whitespace)),
                |p| p.input_len() > 0,
            ),
            Alternative::Text,
        ),
    ))(input)
}
/// Parses an `alternation` as defined in the [grammar spec][0].
///
/// # Grammar
///
/// ```ebnf
/// alternation = single-alternation, (`/`, single-alternation)+
/// single-alternation = ((text-in-alternative+, optional*)
/// | (optional+, text-in-alternative+))+
/// ```
///
/// # Example
///
/// ```text
/// left/right
/// left(opt)/(opt)right
/// escaped\ /text
/// no-need-to-escape)}/text
/// 🦀/⚙️
/// ```
///
/// # Errors
///
/// ## Recoverable [`Error`]
///
/// - If `input` doesn't have `/`.
///
/// ## Irrecoverable [`Failure`]
///
/// - Any [`Failure`] of [`optional()`].
/// - [`EmptyAlternation`].
/// - [`OnlyOptionalInAlternation`].
///
/// [`Error`]: Err::Error
/// [`Failure`]: Err::Failure
/// [`EmptyAlternation`]: Error::EmptyAlternation
/// [`OnlyOptionalInAlternation`]: Error::OnlyOptionalInAlternation
/// [0]: crate#grammar
pub fn alternation<Input>(
    input: Input,
) -> IResult<Input, Alternation<Input>, Error<Input>>
where
    Input: Clone
        + Display
        + Offset
        + InputLength
        + InputTake
        + InputTakeAtPosition<Item = char>
        + Slice<RangeFrom<usize>>
        + InputIter
        + for<'s> Compare<&'s str>,
    <Input as InputIter>::Item: AsChar + Copy,
    Error<Input>: ParseError<Input>,
    for<'s> &'s str: FindToken<<Input as InputIter>::Item>,
{
    // Kept so the only-optionals error can span the whole alternation.
    let original_input = input.clone();
    let (rest, alt) = match separated_list1(tag("/"), many1(alternative))(input)
    {
        Ok((rest, alt)) => {
            // A `/` still ahead means the trailing branch was empty.
            if let Ok((_, slash)) =
                peek::<_, _, Error<Input>, _>(tag("/"))(rest.clone())
            {
                Err(Error::EmptyAlternation(slash).failure())
            } else if alt.len() == 1 {
                // No `/` consumed: not an alternation — recoverable error.
                Err(Err::Error(Error::Other(rest, ErrorKind::Tag)))
            } else {
                Ok((rest, Alternation(alt)))
            }
        }
        // `many1` failed right before a `/`: the leading branch was empty.
        Err(Err::Error(Error::Other(sp, ErrorKind::Many1)))
            if peek::<_, _, Error<Input>, _>(tag("/"))(sp.clone()).is_ok() =>
        {
            Err(Error::EmptyAlternation(sp.take(1)).failure())
        }
        Err(e) => Err(e),
    }?;
    // Branches consisting purely of optionals are forbidden by the grammar.
    alt.contains_only_optional()
        .then(|| {
            Err(Error::OnlyOptionalInAlternation(
                original_input.take(alt.span_len()),
            )
            .failure())
        })
        .unwrap_or(Ok((rest, alt)))
}
/// Parses a `single-expression` as defined in the [grammar spec][0].
///
/// # Grammar
///
/// ```ebnf
/// single-expression = alternation
/// | optional
/// | parameter
/// | text-without-whitespace+
/// | whitespace+
/// text-without-whitespace = (- (text-to-escape | whitespace))
/// | ('\', text-to-escape)
/// text-to-escape = '(' | '{' | '/' | '\'
/// ```
///
/// # Example
///
/// ```text
/// text(opt)/text
/// (opt)
/// {string}
/// text
/// ```
///
/// # Errors
///
/// ## Irrecoverable [`Failure`]
///
/// Any [`Failure`] of [`alternation()`], [`optional()`] or [`parameter()`].
///
/// [`Failure`]: Err::Failure
/// [0]: crate#grammar
pub fn single_expression<'a, Input: 'a>(
    input: Input,
) -> IResult<Input, SingleExpression<Input>, Error<Input>>
where
    Input: Clone
        + Display
        + Offset
        + InputLength
        + InputTake
        + InputTakeAtPosition<Item = char>
        + Slice<RangeFrom<usize>>
        + InputIter
        + for<'s> Compare<&'s str>,
    <Input as InputIter>::Item: AsChar + Copy,
    Error<Input>: ParseError<Input>,
    for<'s> &'s str: FindToken<<Input as InputIter>::Item>,
{
    let is_without_whitespace = |c| !" ({\\/".contains(c);
    let is_whitespace = |c| c == ' ';
    // Order matters: alternation subsumes optionals/text, so it is tried
    // first; bare text and whitespace are the fallbacks.
    alt((
        map(alternation, SingleExpression::Alternation),
        map(optional, SingleExpression::Optional),
        map(parameter, SingleExpression::Parameter),
        map(
            verify(
                escaped_reserved_chars0(take_while(is_without_whitespace)),
                |s| s.input_len() > 0,
            ),
            SingleExpression::Text,
        ),
        map(take_while1(is_whitespace), SingleExpression::Whitespaces),
    ))(input)
}
/// Parses an `expression` as defined in the [grammar spec][0].
///
/// # Grammar
///
/// ```ebnf
/// expression = single-expression*
/// ```
///
/// # Example
///
/// ```text
/// text(opt)/text
/// (opt)
/// {string}
/// text
/// ```
///
/// > __NOTE:__ Empty string is matched too.
///
/// # Errors
///
/// ## Irrecoverable [`Failure`]
///
/// Any [`Failure`] of [`alternation()`], [`optional()`] or [`parameter()`].
///
/// [`Failure`]: Err::Failure
/// [0]: crate#grammar
pub fn expression<'a, Input: 'a>(
    input: Input,
) -> IResult<Input, Expression<Input>, Error<Input>>
where
    Input: Clone
        + Display
        + Offset
        + InputLength
        + InputTake
        + InputTakeAtPosition<Item = char>
        + Slice<RangeFrom<usize>>
        + InputIter
        + for<'s> Compare<&'s str>,
    <Input as InputIter>::Item: AsChar + Copy,
    Error<Input>: ParseError<Input>,
    for<'s> &'s str: FindToken<<Input as InputIter>::Item>,
{
    // Zero or more single expressions; empty input yields an empty AST.
    map(many0(single_expression), Expression)(input)
}
/// Possible parsing errors.
//
// NOTE: the `Display` strings below are user-facing diagnostics; only
// clear grammatical typos were corrected here.
#[derive(Clone, Copy, Debug, Display, Error, Eq, PartialEq)]
pub enum Error<Input>
where
    Input: Display,
{
    /// Nested [`Parameter`]s.
    #[display(
        fmt = "{}\n\
               A parameter may not contain another parameter.\n\
               If you did not mean to use an optional type you can use '\\{{' \
               to escape the '{{'. For more complicated expressions consider \
               using a regular expression instead.",
        _0
    )]
    NestedParameter(#[error(not(source))] Input),
    /// [`Optional`] inside a [`Parameter`].
    #[display(
        fmt = "{}\n\
               A parameter may not contain an optional.\n\
               If you did not mean to use a parameter type you can use '\\(' \
               to escape the '('.",
        _0
    )]
    OptionalInParameter(#[error(not(source))] Input),
    /// Unfinished [`Parameter`].
    #[display(
        fmt = "{}\n\
               The '{{' does not have a matching '}}'.\n\
               If you did not intend to use a parameter you can use '\\{{' to \
               escape the '{{'.",
        _0
    )]
    UnfinishedParameter(#[error(not(source))] Input),
    /// Nested [`Optional`].
    #[display(
        fmt = "{}\n\
               An optional may not contain another optional.\n\
               If you did not mean to use an optional type you can use '\\(' \
               to escape the '('. For more complicated expressions consider \
               using a regular expression instead.",
        _0
    )]
    NestedOptional(#[error(not(source))] Input),
    /// [`Parameter`] inside an [`Optional`].
    #[display(
        fmt = "{}\n\
               An optional may not contain a parameter.\n\
               If you did not mean to use a parameter type you can use \
               '\\{{' to escape the '{{'.",
        _0
    )]
    ParameterInOptional(#[error(not(source))] Input),
    /// Empty [`Optional`].
    #[display(
        fmt = "{}\n\
               An optional must contain some text.\n\
               If you did not mean to use an optional you can use '\\(' to \
               escape the '('.",
        _0
    )]
    EmptyOptional(#[error(not(source))] Input),
    /// [`Alternation`] inside an [`Optional`].
    #[display(
        fmt = "{}\n\
               An alternation can not be used inside an optional.\n\
               You can use '\\/' to escape the '/'.",
        _0
    )]
    AlternationInOptional(#[error(not(source))] Input),
    /// Unfinished [`Optional`].
    #[display(
        fmt = "{}\n\
               The '(' does not have a matching ')'.\n\
               If you did not intend to use an optional you can use '\\(' to \
               escape the '('.",
        _0
    )]
    UnfinishedOptional(#[error(not(source))] Input),
    /// Empty [`Alternation`].
    #[display(
        fmt = "{}\n\
               An alternation can not be empty.\n\
               If you did not mean to use an alternative you can use '\\/' to \
               escape the '/'.",
        _0
    )]
    EmptyAlternation(#[error(not(source))] Input),
    /// Only [`Optional`] inside [`Alternation`].
    #[display(
        fmt = "{}\n\
               An alternation may not exclusively contain optionals.\n\
               If you did not mean to use an optional you can use '\\(' to \
               escape the '('.",
        _0
    )]
    OnlyOptionalInAlternation(#[error(not(source))] Input),
    /// Unescaped [`RESERVED_CHARS`].
    #[display(
        fmt = "{}\n\
               Unescaped reserved character.\n\
               You can use an '\\' to escape it.",
        _0
    )]
    UnescapedReservedCharacter(#[error(not(source))] Input),
    /// Escaped non-[`RESERVED_CHARS`].
    #[display(
        fmt = "{}\n\
               Only the characters '{{', '}}', '(', ')', '\\', '/' and \
               whitespace can be escaped.\n\
               If you did mean to use an '\\' you can use '\\\\' to escape it.",
        _0
    )]
    EscapedNonReservedCharacter(#[error(not(source))] Input),
    /// Escaped EOL.
    #[display(
        fmt = "{}\n\
               The end of line can not be escaped.\n\
               You can use '\\' to escape the '\'.",
        _0
    )]
    EscapedEndOfLine(#[error(not(source))] Input),
    /// Unknown error.
    #[display(
        fmt = "{}\n\
               Unknown parsing error.",
        _0
    )]
    Other(#[error(not(source))] Input, ErrorKind),
    /// Parsing requires more data.
    #[display(
        fmt = "{}",
        "match _0 {\
            Needed::Size(n) => format!(\"Parsing requires {} bytes/chars\", n),\
            Needed::Unknown => \"Parsing requires more data\".to_owned(),\
        }"
    )]
    Needed(#[error(not(source))] Needed),
}
impl<Input: Display> Error<Input> {
    /// Converts this [`Error`] into a [`Failure`].
    ///
    /// [`Error`]: enum@Error
    /// [`Failure`]: Err::Failure
    fn failure(self) -> Err<Self> {
        // Failures are irrecoverable: `alt` won't try further branches.
        Err::Failure(self)
    }
}
impl<Input: Display> ParseError<Input> for Error<Input> {
    // Generic nom errors are wrapped as the catch-all `Other` variant.
    fn from_error_kind(input: Input, kind: ErrorKind) -> Self {
        Self::Other(input, kind)
    }
    fn append(input: Input, kind: ErrorKind, other: Self) -> Self {
        // Keep the most specific error: only a generic `Other` is replaced.
        if let Self::Other(..) = other {
            Self::from_error_kind(input, kind)
        } else {
            other
        }
    }
}
#[cfg(test)]
mod spec {
use std::fmt;
use nom::{error::ErrorKind, Err, IResult};
use crate::{
parse::{
alternation, alternative, expression, optional, parameter, Error,
},
Alternative, Spanned,
};
/// Asserts two given text representations of [AST] to be equal.
///
/// [AST]: https://en.wikipedia.org/wiki/Abstract_syntax_tree
fn assert_ast_eq(actual: impl fmt::Debug, expected: impl AsRef<str>) {
assert_eq!(
format!("{:#?}", actual)
.lines()
.map(|line| line.trim_start().trim_end_matches('\n'))
.collect::<String>(),
expected
.as_ref()
.lines()
.map(|line| line.trim_end_matches('\n').trim())
.collect::<String>(),
);
}
/// Unwraps the given `parser` result asserting it has finished and succeed.
fn unwrap_parser<'s, T>(
parser: IResult<Spanned<'s>, T, Error<Spanned<'s>>>,
) -> T {
let (rest, par) =
parser.unwrap_or_else(|e| panic!("Expected Ok, found Err: {}", e));
assert_eq!(*rest, "");
par
}
/// Tests for the `parameter` parser (`{name}` syntax).
mod parameter {
    use super::{parameter, unwrap_parser, Err, Error, ErrorKind, Spanned};

    // Happy path: the parser yields the name between the braces.

    #[test]
    fn empty() {
        assert_eq!(**unwrap_parser(parameter(Spanned::new("{}"))), "");
    }

    #[test]
    fn named() {
        assert_eq!(
            **unwrap_parser(parameter(Spanned::new("{string}"))),
            "string",
        );
    }

    #[test]
    fn named_with_spaces() {
        assert_eq!(
            **unwrap_parser(parameter(Spanned::new("{with space}"))),
            "with space",
        );
    }

    #[test]
    fn named_with_escaped() {
        // Escaped reserved characters are kept verbatim, escape included.
        assert_eq!(
            **unwrap_parser(parameter(Spanned::new("{with \\{}"))),
            "with \\{",
        );
    }

    #[test]
    fn named_with_closing_paren() {
        // `)` is not reserved inside a parameter.
        assert_eq!(
            **unwrap_parser(parameter(Spanned::new("{with )}"))),
            "with )",
        );
    }

    #[allow(clippy::non_ascii_literal)]
    #[test]
    fn named_with_emoji() {
        assert_eq!(**unwrap_parser(parameter(Spanned::new("{🦀}"))), "🦀");
    }

    // Error paths: a recoverable `Err::Error` when nothing matched at all,
    // and an unrecoverable `Err::Failure` once a `{` has been committed to.

    #[test]
    fn errors_on_empty() {
        let span = Spanned::new("");
        assert_eq!(
            parameter(span),
            Err(Err::Error(Error::Other(span, ErrorKind::Tag))),
        );
    }

    #[test]
    fn fails_on_escaped_non_reserved() {
        let err = parameter(Spanned::new("{\\r}")).unwrap_err();
        match err {
            Err::Failure(Error::EscapedNonReservedCharacter(e)) => {
                assert_eq!(*e, "\\r");
            }
            Err::Incomplete(_) | Err::Error(_) | Err::Failure(_) => {
                panic!("wrong error: {:?}", err);
            }
        }
    }

    #[test]
    fn fails_on_nested() {
        for input in [
            "{{nest}}",
            "{before{nest}}",
            "{{nest}after}",
            "{bef{nest}aft}",
        ] {
            match parameter(Spanned::new(input)).expect_err("error") {
                Err::Failure(Error::NestedParameter(e)) => {
                    // The reported span is the inner parameter only.
                    assert_eq!(*e, "{nest}", "on input: {}", input);
                }
                e => panic!("wrong error: {:?}", e),
            }
        }
    }

    #[test]
    fn fails_on_optional() {
        for input in [
            "{(nest)}",
            "{before(nest)}",
            "{(nest)after}",
            "{bef(nest)aft}",
        ] {
            match parameter(Spanned::new(input)).expect_err("error") {
                Err::Failure(Error::OptionalInParameter(e)) => {
                    assert_eq!(*e, "(nest)", "on input: {}", input);
                }
                e => panic!("wrong error: {:?}", e),
            }
        }
    }

    #[test]
    fn fails_on_unescaped_reserved_char() {
        for (input, expected) in [
            ("{(opt}", "("),
            ("{(n(e)st)}", "("),
            ("{{nest}", "{"),
            ("{l/r}", "/"),
        ] {
            match parameter(Spanned::new(input)).expect_err("error") {
                Err::Failure(Error::UnescapedReservedCharacter(e)) => {
                    // Only the first offending character is reported.
                    assert_eq!(*e, expected, "on input: {}", input);
                }
                e => panic!("wrong error: {:?}", e),
            }
        }
    }

    #[test]
    fn fails_on_unfinished() {
        for input in ["{", "{name "] {
            match parameter(Spanned::new(input)).expect_err("error") {
                Err::Failure(Error::UnfinishedParameter(e)) => {
                    assert_eq!(*e, "{", "on input: {}", input);
                }
                e => panic!("wrong error: {:?}", e),
            }
        }
    }
}
/// Tests for the `optional` parser (`(text)` syntax).
mod optional {
    use super::{optional, unwrap_parser, Err, Error, ErrorKind, Spanned};

    // Happy path: the parser yields the text between the parentheses.

    #[test]
    fn basic() {
        assert_eq!(
            **unwrap_parser(optional(Spanned::new("(string)"))),
            "string",
        );
    }

    #[test]
    fn with_spaces() {
        assert_eq!(
            **unwrap_parser(optional(Spanned::new("(with space)"))),
            "with space",
        );
    }

    #[test]
    fn with_escaped() {
        // Escaped reserved characters are kept verbatim, escape included.
        assert_eq!(
            **unwrap_parser(optional(Spanned::new("(with \\{)"))),
            "with \\{",
        );
    }

    #[test]
    fn with_closing_brace() {
        // `}` is not reserved inside an optional.
        assert_eq!(
            **unwrap_parser(optional(Spanned::new("(with })"))),
            "with }",
        );
    }

    #[allow(clippy::non_ascii_literal)]
    #[test]
    fn with_emoji() {
        assert_eq!(**unwrap_parser(optional(Spanned::new("(🦀)"))), "🦀");
    }

    // Error paths: a recoverable `Err::Error` when nothing matched at all,
    // and an unrecoverable `Err::Failure` once a `(` has been committed to.

    #[test]
    fn errors_on_empty() {
        let span = Spanned::new("");
        assert_eq!(
            optional(span),
            Err(Err::Error(Error::Other(span, ErrorKind::Tag))),
        );
    }

    #[test]
    fn fails_on_empty() {
        let err = optional(Spanned::new("()")).unwrap_err();
        match err {
            Err::Failure(Error::EmptyOptional(e)) => {
                assert_eq!(*e, "()");
            }
            Err::Incomplete(_) | Err::Error(_) | Err::Failure(_) => {
                panic!("wrong error: {:?}", err)
            }
        }
    }

    #[test]
    fn fails_on_escaped_non_reserved() {
        let err = optional(Spanned::new("(\\r)")).unwrap_err();
        match err {
            Err::Failure(Error::EscapedNonReservedCharacter(e)) => {
                assert_eq!(*e, "\\r");
            }
            Err::Incomplete(_) | Err::Error(_) | Err::Failure(_) => {
                panic!("wrong error: {:?}", err)
            }
        }
    }

    #[test]
    fn fails_on_nested() {
        for input in [
            "((nest))",
            "(before(nest))",
            "((nest)after)",
            "(bef(nest)aft)",
        ] {
            match optional(Spanned::new(input)).expect_err("error") {
                Err::Failure(Error::NestedOptional(e)) => {
                    // The reported span is the inner optional only.
                    assert_eq!(*e, "(nest)", "on input: {}", input);
                }
                e => panic!("wrong error: {:?}", e),
            }
        }
    }

    #[test]
    fn fails_on_parameter() {
        for input in [
            "({nest})",
            "(before{nest})",
            "({nest}after)",
            "(bef{nest}aft)",
        ] {
            match optional(Spanned::new(input)).expect_err("error") {
                Err::Failure(Error::ParameterInOptional(e)) => {
                    assert_eq!(*e, "{nest}", "on input: {}", input);
                }
                e => panic!("wrong error: {:?}", e),
            }
        }
    }

    #[test]
    fn fails_on_alternation() {
        for input in ["(/)", "(bef/)", "(/aft)", "(bef/aft)"] {
            match optional(Spanned::new(input)).expect_err("error") {
                Err::Failure(Error::AlternationInOptional(e)) => {
                    assert_eq!(*e, "/", "on input: {}", input);
                }
                e => panic!("wrong error: {:?}", e),
            }
        }
    }

    #[test]
    fn fails_on_unescaped_reserved_char() {
        for (input, expected) in
            [("({opt)", "{"), ("({n{e}st})", "{"), ("((nest)", "(")]
        {
            match optional(Spanned::new(input)).expect_err("error") {
                Err::Failure(Error::UnescapedReservedCharacter(e)) => {
                    // Only the first offending character is reported.
                    assert_eq!(*e, expected, "on input: {}", input);
                }
                e => panic!("wrong error: {:?}", e),
            }
        }
    }

    #[test]
    fn fails_on_unfinished() {
        for input in ["(", "(name "] {
            match optional(Spanned::new(input)).expect_err("error") {
                Err::Failure(Error::UnfinishedOptional(e)) => {
                    assert_eq!(*e, "(", "on input: {}", input);
                }
                e => panic!("wrong error: {:?}", e),
            }
        }
    }
}
/// Tests for the `alternative` parser (a single branch of an alternation:
/// either plain text or an optional).
mod alternative {
    use super::{
        alternative, unwrap_parser, Alternative, Err, Error, ErrorKind,
        Spanned,
    };

    #[allow(clippy::non_ascii_literal)]
    #[test]
    fn text() {
        for input in ["string", "🦀"] {
            match unwrap_parser(alternative(Spanned::new(input))) {
                Alternative::Text(t) => {
                    assert_eq!(*t, input, "on input: {}", input);
                }
                _ => panic!("expected Alternative::Text"),
            }
        }
    }

    #[test]
    fn escaped_spaces() {
        // Escaped spaces belong to the text and do not end the alternative.
        for input in ["bef\\ ", "\\ aft", "bef\\ aft"] {
            match unwrap_parser(alternative(Spanned::new(input))) {
                Alternative::Text(t) => {
                    assert_eq!(*t, input, "on input: {}", input);
                }
                _ => panic!("expected Alternative::Text"),
            }
        }
    }

    #[test]
    fn optional() {
        match unwrap_parser(alternative(Spanned::new("(opt)"))) {
            Alternative::Optional(t) => {
                assert_eq!(**t, "opt");
            }
            Alternative::Text(_) => {
                panic!("expected Alternative::Optional");
            }
        }
    }

    #[test]
    fn not_captures_unescaped_whitespace() {
        // An unescaped space terminates the alternative and stays in `rest`.
        match alternative(Spanned::new("text ")) {
            Ok((rest, matched)) => {
                assert_eq!(*rest, " ");
                match matched {
                    Alternative::Text(t) => assert_eq!(*t, "text"),
                    Alternative::Optional(_) => {
                        panic!("expected Alternative::Text");
                    }
                }
            }
            Err(..) => panic!("expected ok"),
        }
    }

    #[test]
    fn errors_on_empty() {
        match alternative(Spanned::new("")).unwrap_err() {
            Err::Error(Error::Other(_, ErrorKind::Alt)) => {}
            e @ (Err::Incomplete(_) | Err::Error(_) | Err::Failure(_)) => {
                panic!("wrong error: {:?}", e);
            }
        }
    }

    #[test]
    fn fails_on_unfinished_optional() {
        for input in ["(", "(opt"] {
            match alternative(Spanned::new(input)).unwrap_err() {
                Err::Failure(Error::UnfinishedOptional(e)) => {
                    assert_eq!(*e, "(", "on input: {}", input);
                }
                e => panic!("wrong error: {:?}", e),
            }
        }
    }

    #[test]
    fn fails_on_escaped_non_reserved() {
        for input in ["(\\r)", "\\r"] {
            match alternative(Spanned::new(input)).unwrap_err() {
                Err::Failure(Error::EscapedNonReservedCharacter(e)) => {
                    assert_eq!(*e, "\\r", "on input: {}", input);
                }
                e => panic!("wrong error: {:?}", e),
            }
        }
    }
}
mod alternation {
use super::{
alternation, assert_ast_eq, unwrap_parser, Err, Error, ErrorKind,
Spanned,
};
#[allow(clippy::non_ascii_literal)]
#[test]
fn basic() {
assert_ast_eq(
unwrap_parser(alternation(Spanned::new("l/🦀"))),
r#"Alternation(
[
[
Text(
LocatedSpan {
offset: 0,
line: 1,
fragment: "l",
extra: (),
},
),
],
[
Text(
LocatedSpan {
offset: 2,
line: 1,
fragment: "🦀",
extra: (),
},
),
],
],
)"#,
);
}
#[test]
fn with_optionals() {
assert_ast_eq(
unwrap_parser(alternation(Spanned::new(
"l(opt)/(opt)r/l(opt)r",
))),
r#"Alternation(
[
[
Text(
LocatedSpan {
offset: 0,
line: 1,
fragment: "l",
extra: (),
},
),
Optional(
Optional(
LocatedSpan {
offset: 2,
line: 1,
fragment: "opt",
extra: (),
},
),
),
],
[
Optional(
Optional(
LocatedSpan {
offset: 8,
line: 1,
fragment: "opt",
extra: (),
},
),
),
Text(
LocatedSpan {
offset: 12,
line: 1,
fragment: "r",
extra: (),
},
),
],
[
Text(
LocatedSpan {
offset: 14,
line: 1,
fragment: "l",
extra: (),
},
),
Optional(
Optional(
LocatedSpan {
offset: 16,
line: 1,
fragment: "opt",
extra: (),
},
),
),
Text(
LocatedSpan {
offset: 20,
line: 1,
fragment: "r",
extra: (),
},
),
],
],
)"#,
);
}
#[test]
fn with_more_optionals() {
assert_ast_eq(
unwrap_parser(alternation(Spanned::new(
"l(opt)(opt)/(opt)(opt)r/(opt)m(opt)",
))),
r#"Alternation(
[
[
Text(
LocatedSpan {
offset: 0,
line: 1,
fragment: "l",
extra: (),
},
),
Optional(
Optional(
LocatedSpan {
offset: 2,
line: 1,
fragment: "opt",
extra: (),
},
),
),
Optional(
Optional(
LocatedSpan {
offset: 7,
line: 1,
fragment: "opt",
extra: (),
},
),
),
],
[
Optional(
Optional(
LocatedSpan {
offset: 13,
line: 1,
fragment: "opt",
extra: (),
},
),
),
Optional(
Optional(
LocatedSpan {
offset: 18,
line: 1,
fragment: "opt",
extra: (),
},
),
),
|
line: 1,
fragment: "r",
extra: (),
},
),
],
[
Optional(
Optional(
LocatedSpan {
offset: 25,
line: 1,
fragment: "opt",
extra: (),
},
),
),
Text(
LocatedSpan {
offset: 29,
line: 1,
fragment: "m",
extra: (),
},
),
Optional(
Optional(
LocatedSpan {
offset: 31,
line: 1,
fragment: "opt",
extra: (),
},
),
),
],
],
)"#,
);
}
#[test]
fn errors_without_slash() {
for (input, expected) in [
("", ErrorKind::Many1),
("{par}", ErrorKind::Many1),
("text", ErrorKind::Tag),
("(opt)", ErrorKind::Tag),
] {
match alternation(Spanned::new(input)).unwrap_err() {
Err::Error(Error::Other(_, kind)) => {
assert_eq!(kind, expected, "on input: {}", input);
}
e => panic!("wrong error: {:?}", e),
}
}
}
#[test]
fn fails_on_empty_alternation() {
for input in ["/", "l/", "/r", "l/m/", "l//r", "/m/r"] {
match alternation(Spanned::new(input)).unwrap_err() {
Err::Failure(Error::EmptyAlternation(e)) => {
assert_eq!(*e, "/", "on input: {}", input);
}
e => panic!("wrong error: {:?}", e),
}
}
}
#[test]
fn fails_on_only_optional() {
for input in
["text/(opt)", "text/(opt)(opt)", "(opt)/text", "(opt)/(opt)"]
{
match alternation(Spanned::new(input)).unwrap_err() {
Err::Failure(Error::OnlyOptionalInAlternation(e)) => {
assert_eq!(*e, input, "on input: {}", input);
}
e => panic!("wrong error: {:?}", e),
}
}
}
}
// All test examples from: <https://git.io/J159C>
// Naming of test cases is preserved.
mod expression {
use super::{
assert_ast_eq, expression, unwrap_parser, Err, Error, Spanned,
};
#[test]
fn allows_escaped_optional_parameter_types() {
assert_ast_eq(
unwrap_parser(expression(Spanned::new("\\({int})"))),
r#"Expression(
[
Text(
LocatedSpan {
offset: 0,
line: 1,
fragment: "\\(",
extra: (),
},
),
Parameter(
Parameter(
LocatedSpan {
offset: 3,
line: 1,
fragment: "int",
extra: (),
},
),
),
Text(
LocatedSpan {
offset: 7,
line: 1,
fragment: ")",
extra: (),
},
),
],
)"#,
);
}
#[test]
fn allows_parameter_type_in_alternation() {
assert_ast_eq(
unwrap_parser(expression(Spanned::new("a/i{int}n/y"))),
r#"Expression(
[
Alternation(
Alternation(
[
[
Text(
LocatedSpan {
offset: 0,
line: 1,
fragment: "a",
extra: (),
},
),
],
[
Text(
LocatedSpan {
offset: 2,
line: 1,
fragment: "i",
extra: (),
},
),
],
],
),
),
Parameter(
Parameter(
LocatedSpan {
offset: 4,
line: 1,
fragment: "int",
extra: (),
},
),
),
Alternation(
Alternation(
[
[
Text(
LocatedSpan {
offset: 8,
line: 1,
fragment: "n",
extra: (),
},
),
],
[
Text(
LocatedSpan {
offset: 10,
line: 1,
fragment: "y",
extra: (),
},
),
],
],
),
),
],
)"#,
);
}
#[test]
fn does_allow_parameter_adjacent_to_alternation() {
assert_ast_eq(
unwrap_parser(expression(Spanned::new("{int}st/nd/rd/th"))),
r#"Expression(
[
Parameter(
Parameter(
LocatedSpan {
offset: 1,
line: 1,
fragment: "int",
extra: (),
},
),
),
Alternation(
Alternation(
[
[
Text(
LocatedSpan {
offset: 5,
line: 1,
fragment: "st",
extra: (),
},
),
],
[
Text(
LocatedSpan {
offset: 8,
line: 1,
fragment: "nd",
extra: (),
},
),
],
[
Text(
LocatedSpan {
offset: 11,
line: 1,
fragment: "rd",
extra: (),
},
),
],
[
Text(
LocatedSpan {
offset: 14,
line: 1,
fragment: "th",
extra: (),
},
),
],
],
),
),
],
)"#,
);
}
#[test]
fn does_not_allow_alternation_in_optional() {
match expression(Spanned::new("three( brown/black) mice"))
.unwrap_err()
{
Err::Failure(Error::AlternationInOptional(s)) => {
assert_eq!(*s, "/");
}
e @ (Err::Incomplete(_) | Err::Error(_) | Err::Failure(_)) => {
panic!("wrong error: {:?}", e);
}
}
}
#[rustfmt::skip]
#[test]
fn does_not_allow_alternation_with_empty_alternative_by_adjacent_left_parameter(
) {
match expression(Spanned::new("{int}/x")).unwrap_err() {
Err::Failure(Error::EmptyAlternation(s)) => {
assert_eq!(*s, "/");
}
e @ (Err::Incomplete(_) | Err::Error(_) | Err::Failure(_)) => {
panic!("wrong error: {:?}", e);
}
}
}
#[rustfmt::skip]
#[test]
fn does_not_allow_alternation_with_empty_alternative_by_adjacent_optional(
) {
match expression(Spanned::new("three (brown)/black mice"))
.unwrap_err()
{
Err::Failure(Error::OnlyOptionalInAlternation(s)) => {
assert_eq!(*s, "(brown)/black");
}
e @ (Err::Incomplete(_) | Err::Error(_) | Err::Failure(_)) => {
panic!("wrong error: {:?}", e);
}
}
}
#[rustfmt::skip]
#[test]
fn does_not_allow_alternation_with_empty_alternative_by_adjacent_right_parameter(
) {
match expression(Spanned::new("x/{int}")).unwrap_err() {
Err::Failure(Error::EmptyAlternation(s)) => {
assert_eq!(*s, "/");
}
e @ (Err::Incomplete(_) | Err::Error(_) | Err::Failure(_)) => {
panic!("wrong error: {:?}", e);
}
}
}
#[test]
fn does_not_allow_alternation_with_empty_alternative() {
match expression(Spanned::new("three brown//black mice"))
.unwrap_err()
{
Err::Failure(Error::EmptyAlternation(s)) => {
assert_eq!(*s, "/");
}
e @ (Err::Incomplete(_) | Err::Error(_) | Err::Failure(_)) => {
panic!("wrong error: {:?}", e);
}
}
}
#[test]
fn does_not_allow_empty_optional() {
match expression(Spanned::new("three () mice")).unwrap_err() {
Err::Failure(Error::EmptyOptional(s)) => {
assert_eq!(*s, "()");
}
e @ (Err::Incomplete(_) | Err::Error(_) | Err::Failure(_)) => {
panic!("wrong error: {:?}", e);
}
}
}
#[test]
fn does_not_allow_nested_optional() {
match expression(Spanned::new("(a(b))")).unwrap_err() {
Err::Failure(Error::NestedOptional(s)) => {
assert_eq!(*s, "(b)");
}
e @ (Err::Incomplete(_) | Err::Error(_) | Err::Failure(_)) => {
panic!("wrong error: {:?}", e);
}
}
}
#[test]
fn does_not_allow_optional_parameter_types() {
match expression(Spanned::new("({int})")).unwrap_err() {
Err::Failure(Error::ParameterInOptional(s)) => {
assert_eq!(*s, "{int}");
}
e @ (Err::Incomplete(_) | Err::Error(_) | Err::Failure(_)) => {
panic!("wrong error: {:?}", e);
}
}
}
#[test]
fn does_not_allow_parameter_name_with_reserved_characters() {
match expression(Spanned::new("{(string)}")).unwrap_err() {
Err::Failure(Error::OptionalInParameter(s)) => {
assert_eq!(*s, "(string)");
}
e @ (Err::Incomplete(_) | Err::Error(_) | Err::Failure(_)) => {
panic!("wrong error: {:?}", e);
}
}
}
#[test]
fn does_not_allow_unfinished_parenthesis_1() {
match expression(Spanned::new(
"three (exceptionally\\) {string\\} mice",
))
.unwrap_err()
{
Err::Failure(Error::UnescapedReservedCharacter(s)) => {
assert_eq!(*s, "{");
}
e @ (Err::Incomplete(_) | Err::Error(_) | Err::Failure(_)) => {
panic!("wrong error: {:?}", e);
}
}
}
#[test]
fn does_not_allow_unfinished_parenthesis_2() {
match expression(Spanned::new(
"three (exceptionally\\) {string} mice",
))
.unwrap_err()
{
Err::Failure(Error::ParameterInOptional(s)) => {
assert_eq!(*s, "{string}");
}
e @ (Err::Incomplete(_) | Err::Error(_) | Err::Failure(_)) => {
panic!("wrong error: {:?}", e);
}
}
}
#[test]
fn does_not_allow_unfinished_parenthesis_3() {
match expression(Spanned::new(
"three ((exceptionally\\) strong) mice",
))
.unwrap_err()
{
Err::Failure(Error::UnescapedReservedCharacter(s)) => {
assert_eq!(*s, "(");
}
e @ (Err::Incomplete(_) | Err::Error(_) | Err::Failure(_)) => {
panic!("wrong error: {:?}", e);
}
}
}
#[test]
fn matches_alternation() {
assert_ast_eq(
unwrap_parser(expression(Spanned::new(
"mice/rats and rats\\/mice",
))),
r#"Expression(
[
Alternation(
Alternation(
[
[
Text(
LocatedSpan {
offset: 0,
line: 1,
fragment: "mice",
extra: (),
},
),
],
[
Text(
LocatedSpan {
offset: 5,
line: 1,
fragment: "rats",
extra: (),
},
),
],
],
),
),
Whitespaces(
LocatedSpan {
offset: 9,
line: 1,
fragment: " ",
extra: (),
},
),
Text(
LocatedSpan {
offset: 10,
line: 1,
fragment: "and",
extra: (),
},
),
Whitespaces(
LocatedSpan {
offset: 13,
line: 1,
fragment: " ",
extra: (),
},
),
Text(
LocatedSpan {
offset: 14,
line: 1,
fragment: "rats\\/mice",
extra: (),
},
),
],
)"#,
);
}
#[test]
fn matches_anonymous_parameter_type() {
assert_ast_eq(
unwrap_parser(expression(Spanned::new("{}"))),
r#"Expression(
[
Parameter(
Parameter(
LocatedSpan {
offset: 1,
line: 1,
fragment: "",
extra: (),
},
),
),
],
)"#,
);
}
#[test]
fn matches_doubly_escaped_parenthesis() {
assert_ast_eq(
unwrap_parser(expression(Spanned::new(
"three \\(exceptionally) \\{string} mice",
))),
r#"Expression(
[
Text(
LocatedSpan {
offset: 0,
line: 1,
fragment: "three",
extra: (),
},
),
Whitespaces(
LocatedSpan {
offset: 5,
line: 1,
fragment: " ",
extra: (),
},
),
Text(
LocatedSpan {
offset: 6,
line: 1,
fragment: "\\(exceptionally)",
extra: (),
},
),
Whitespaces(
LocatedSpan {
offset: 22,
line: 1,
fragment: " ",
extra: (),
},
),
Text(
LocatedSpan {
offset: 23,
line: 1,
fragment: "\\{string}",
extra: (),
},
),
Whitespaces(
LocatedSpan {
offset: 32,
line: 1,
fragment: " ",
extra: (),
},
),
Text(
LocatedSpan {
offset: 33,
line: 1,
fragment: "mice",
extra: (),
},
),
],
)"#,
);
}
#[test]
fn matches_doubly_escaped_slash() {
assert_ast_eq(
unwrap_parser(expression(Spanned::new("12\\\\/2020"))),
r#"Expression(
[
Alternation(
Alternation(
[
[
Text(
LocatedSpan {
offset: 0,
line: 1,
fragment: "12\\\\",
extra: (),
},
),
],
[
Text(
LocatedSpan {
offset: 5,
line: 1,
fragment: "2020",
extra: (),
},
),
],
],
),
),
],
)"#,
);
}
#[test]
fn matches_optional_before_alternation() {
assert_ast_eq(
unwrap_parser(expression(Spanned::new(
"three (brown )mice/rats",
))),
r#"Expression(
[
Text(
LocatedSpan {
offset: 0,
line: 1,
fragment: "three",
extra: (),
},
),
Whitespaces(
LocatedSpan {
offset: 5,
line: 1,
fragment: " ",
extra: (),
},
),
Alternation(
Alternation(
[
[
Optional(
Optional(
LocatedSpan {
offset: 7,
line: 1,
fragment: "brown ",
extra: (),
},
),
),
Text(
LocatedSpan {
offset: 14,
line: 1,
fragment: "mice",
extra: (),
},
),
],
[
Text(
LocatedSpan {
offset: 19,
line: 1,
fragment: "rats",
extra: (),
},
),
],
],
),
),
],
)"#,
);
}
#[test]
fn matches_optional_in_alternation() {
assert_ast_eq(
unwrap_parser(expression(Spanned::new(
"{int} rat(s)/mouse/mice",
))),
r#"Expression(
[
Parameter(
Parameter(
LocatedSpan {
offset: 1,
line: 1,
fragment: "int",
extra: (),
},
),
),
Whitespaces(
LocatedSpan {
offset: 5,
line: 1,
fragment: " ",
extra: (),
},
),
Alternation(
Alternation(
[
[
Text(
LocatedSpan {
offset: 6,
line: 1,
fragment: "rat",
extra: (),
},
),
Optional(
Optional(
LocatedSpan {
offset: 10,
line: 1,
fragment: "s",
extra: (),
},
),
),
],
[
Text(
LocatedSpan {
offset: 13,
line: 1,
fragment: "mouse",
extra: (),
},
),
],
[
Text(
LocatedSpan {
offset: 19,
line: 1,
fragment: "mice",
extra: (),
},
),
],
],
),
),
],
)"#,
);
}
#[test]
fn err_on_escaped_end_of_line() {
match expression(Spanned::new("\\")).unwrap_err() {
Err::Failure(Error::EscapedEndOfLine(_)) => {}
e @ (Err::Incomplete(_) | Err::Error(_) | Err::Failure(_)) => {
panic!("wrong err: {}", e);
}
}
}
#[test]
fn empty() {
assert_ast_eq(
unwrap_parser(expression(Spanned::new(""))),
r#"Expression([],)"#,
);
}
}
}
|
Text(
LocatedSpan {
offset: 22,
|
controller.go
|
/*
Copyright 2021 Jetstack Ltd.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package controller
import (
"fmt"
"github.com/aws/aws-sdk-go/aws/session"
"github.com/go-logr/logr"
corev1 "k8s.io/api/core/v1"
"k8s.io/client-go/kubernetes"
"sigs.k8s.io/controller-runtime/pkg/builder"
"sigs.k8s.io/controller-runtime/pkg/client"
"sigs.k8s.io/controller-runtime/pkg/client/config"
"sigs.k8s.io/controller-runtime/pkg/manager"
"sigs.k8s.io/controller-runtime/pkg/predicate"
)
const (
	// SyncAnnotationKey marks a Secret for ACM syncing when its value equals
	// SyncAnnotationValue (see the predicate in New below).
	SyncAnnotationKey   = "experimental.cert-manager.io/acm-sync"
	SyncAnnotationValue = "true"
	// ARNAnnotationKey names the annotation holding the ACM certificate ARN.
	// NOTE(review): not referenced in this chunk — presumably read/written by
	// ACMSyncer; confirm there.
	ARNAnnotationKey = "experimental.cert-manager.io/acm-arn"
)
func New(log logr.Logger) (manager.Manager, error) {
// construct Kube config and interface
cfg := config.GetConfigOrDie()
kubeClient := kubernetes.NewForConfigOrDie(cfg)
// check for AWS credentials
awsSession, err := session.NewSession()
if err != nil {
return nil, fmt.Errorf("couldn't construct AWS session: %w", err)
}
mgr, err := manager.New(cfg, manager.Options{
Logger: log.WithName("manager"),
})
if err != nil {
return nil, err
}
err = builder.ControllerManagedBy(mgr).
For(&corev1.Secret{},
builder.OnlyMetadata,
builder.WithPredicates(
predicate.NewPredicateFuncs(func(object client.Object) bool {
value, found := object.GetAnnotations()[SyncAnnotationKey]
if found && value == SyncAnnotationValue {
return true
}
return false
}),
),
).
Complete(&ACMSyncer{
log: log.WithName("acmsyncer"),
client: kubeClient,
recorder: mgr.GetEventRecorderFor("acmsyncer"),
|
awsSession: awsSession,
})
if err != nil {
return nil, err
}
return mgr, nil
}
| |
test_client.rs
|
use actix_http::{http, HttpService, Request, Response};
use actix_http_test::test_server;
use actix_service::ServiceFactoryExt;
use bytes::Bytes;
use futures_util::future::{self, ok};
// Large repeated-"Hello World" payload, used as the response body in the
// tests below so that body reads exercise more than a single small chunk.
const STR: &str = "Hello World Hello World Hello World Hello World Hello World \
    Hello World Hello World Hello World Hello World Hello World \
    Hello World Hello World Hello World Hello World Hello World \
    Hello World Hello World Hello World Hello World Hello World \
    Hello World Hello World Hello World Hello World Hello World \
    Hello World Hello World Hello World Hello World Hello World \
    Hello World Hello World Hello World Hello World Hello World \
    Hello World Hello World Hello World Hello World Hello World \
    Hello World Hello World Hello World Hello World Hello World \
    Hello World Hello World Hello World Hello World Hello World \
    Hello World Hello World Hello World Hello World Hello World \
    Hello World Hello World Hello World Hello World Hello World \
    Hello World Hello World Hello World Hello World Hello World \
    Hello World Hello World Hello World Hello World Hello World \
    Hello World Hello World Hello World Hello World Hello World \
    Hello World Hello World Hello World Hello World Hello World \
    Hello World Hello World Hello World Hello World Hello World \
    Hello World Hello World Hello World Hello World Hello World \
    Hello World Hello World Hello World Hello World Hello World \
    Hello World Hello World Hello World Hello World Hello World \
    Hello World Hello World Hello World Hello World Hello World";
/// Spins up an HTTP/1 test server and checks GET (with and without an extra
/// header) and POST all succeed and return the full `STR` body.
#[actix_rt::test]
async fn test_h1_v2() {
    let srv = test_server(move || {
        HttpService::build()
            .finish(|_| future::ok::<_, ()>(Response::Ok().body(STR)))
            .tcp()
    })
    .await;

    // Plain GET.
    let res = srv.get("/").send().await.unwrap();
    assert!(res.status().is_success());

    // GET with an extra request header; read the body back.
    let mut res = srv.get("/").header("x-test", "111").send().await.unwrap();
    assert!(res.status().is_success());
    assert_eq!(res.body().await.unwrap(), Bytes::from_static(STR.as_ref()));

    // POST; same handler, same body.
    let mut res = srv.post("/").send().await.unwrap();
    assert!(res.status().is_success());
    assert_eq!(res.body().await.unwrap(), Bytes::from_static(STR.as_ref()));
}
/// Verifies a request issued with `force_close` (Connection: close) still
/// completes successfully.
#[actix_rt::test]
async fn test_connection_close() {
    let srv = test_server(move || {
        HttpService::build()
            .finish(|_| ok::<_, ()>(Response::Ok().body(STR)))
            .tcp()
            .map(|_| ())
    })
    .await;

    let res = srv.get("/").force_close().send().await.unwrap();
    assert!(res.status().is_success());
}
#[actix_rt::test]
async fn test_with_query_parameter() {
let srv = test_server(move || {
HttpService::build()
.finish(|req: Request| {
if req.uri().query().unwrap().contains("qp=") {
ok::<_, ()>(Response::Ok().finish())
} else
|
})
.tcp()
.map(|_| ())
})
.await;
let request = srv.request(http::Method::GET, srv.url("/?qp=5"));
let response = request.send().await.unwrap();
assert!(response.status().is_success());
}
|
{
ok::<_, ()>(Response::BadRequest().finish())
}
|
ConduServPage.js
|
import React from 'react';
import './Up&BottomMenu.css';
import UpMenu from '../components/SMenuBar';
import Mapp from '../components/CConduServPage';
import BottomMenu from '../components/BottomMenuBar';
import { withRouter } from 'react-router-dom';
class
|
extends React.Component{
render(){
return(
<div className='main-containerBlack'>
<div className='main-container'>
<UpMenu />
<Mapp />
</div>
<BottomMenu />
</div>
)
}
}
export default withRouter(InService);
|
InService
|
name_def.py
|
import pyeccodes.accessors as _
def
|
(h):
def wrapped(h):
discipline = h.get_l('discipline')
parameterCategory = h.get_l('parameterCategory')
parameterNumber = h.get_l('parameterNumber')
if discipline == 0 and parameterCategory == 1 and parameterNumber == 10:
return 'Total convective Precipitation'
return wrapped
|
load
|
duplicated-external-mods.rs
|
// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// aux-build:anon-extern-mod-cross-crate-1.rs
// aux-build:anon-extern-mod-cross-crate-1.rs
extern crate anonexternmod;
pub fn main()
|
{ }
|
|
index.js
|
/**
* Helper to manage className attribute on node or nodelist
* eg.
* removeClass(el, 'myClass'); or
* addClass(document.querySelectorAll('li'), 'list-item'); or
* hasClass(el, 'is-open');
*/
/**
 * Adds one or more space-separated class names to a node, or to every node
 * of an array-like collection (NodeList, array).
 *
 * Fix: the original called `Array.prototype.forEach(node, cb)`, which
 * invokes `forEach` on `Array.prototype` itself with `node` as the callback
 * and throws a TypeError for any array-like argument. It must be
 * `Array.prototype.forEach.call(node, cb)`.
 *
 * @param {Element|NodeList|Element[]} node - target node, or array-like of nodes
 * @param {string} classString - space-separated class names to add
 */
export function addClass(node, classString) {
  if (node.length) {
    // Array-like: recurse over each element.
    return Array.prototype.forEach.call(node, (el) => {
      addClass(el, classString);
    });
  }
  const classList = node.className.split(' ');
  const classNames = classString.split(' ');
  classNames.forEach((className) => {
    // Only append classes that are not already present.
    if (classList.indexOf(className) < 0) {
      classList.push(className);
    }
  });
  node.setAttribute('class', classList.join(' '));
}
/**
 * Removes one or more space-separated class names from a node, or from
 * every node of an array-like collection (NodeList, array).
 *
 * Fix: the original called `Array.prototype.forEach(node, cb)`, which
 * invokes `forEach` on `Array.prototype` itself with `node` as the callback
 * and throws a TypeError for any array-like argument. It must be
 * `Array.prototype.forEach.call(node, cb)`.
 *
 * @param {Element|NodeList|Element[]} node - target node, or array-like of nodes
 * @param {string} classString - space-separated class names to remove
 */
export function removeClass(node, classString) {
  if (node.length) {
    // Array-like: recurse over each element.
    return Array.prototype.forEach.call(node, (el) => {
      removeClass(el, classString);
    });
  }
  const classNames = classString.split(' ');
  // Keep only the classes that were not asked to be removed.
  const classList = node.className.split(' ').filter((className) => {
    const match = (classNames.indexOf(className) > -1);
    return !match;
  });
  node.setAttribute('class', classList.join(' '));
}
/**
 * Reports whether a node carries every class named in the given
 * space-separated class string.
 *
 * @param {Element} node - node whose className is inspected
 * @param {string} classString - space-separated class names to look for
 * @returns {boolean} true iff all names are present
 */
export function hasClass(node, classString) {
  const classList = node.className.split(' ');
  return classString
    .split(' ')
    .every((className) => classList.indexOf(className) > -1);
}
export default {
addClass,
removeClass,
hasClass,
add: addClass,
|
remove: removeClass,
has: hasClass,
};
|
|
client.go
|
package redisearch
import (
"errors"
"log"
"reflect"
"strconv"
"strings"
"github.com/gomodule/redigo/redis"
)
// Client is an interface to redisearch's redis commands
type Client struct {
	pool ConnPool // connection pool (single- or multi-host, see NewClient)
	name string   // index name all commands of this client operate on
}

// NOTE(review): maxConns is not referenced anywhere in this chunk —
// presumably consumed by the ConnPool constructors; confirm before removing.
var maxConns = 500
// NewClient creates a new client connecting to the redis host, and using the given name as key prefix.
// Addr can be a single host:port pair, or a comma separated list of host:port,host:port...
// In the case of multiple hosts we create a multi-pool and select connections at random
func NewClient(addr, name string) *Client {
	hosts := strings.Split(addr, ",")
	var pool ConnPool
	switch len(hosts) {
	case 1:
		pool = NewSingleHostPool(hosts[0])
	default:
		pool = NewMultiHostPool(hosts)
	}
	return &Client{pool: pool, name: name}
}
// NewClientFromPool creates a new Client with the given pool and index name
func NewClientFromPool(pool *redis.Pool, name string) *Client {
	return &Client{pool: pool, name: name}
}
// CreateIndex configures the index and creates it on redis
// (delegates with a nil IndexDefinition, i.e. a plain FT.CREATE).
func (i *Client) CreateIndex(schema *Schema) (err error) {
	return i.indexWithDefinition(i.name, schema, nil)
}
// CreateIndexWithIndexDefinition configures the index and creates it on redis
// IndexDefinition is used to define a index definition for automatic indexing on Hash update
func (i *Client) CreateIndexWithIndexDefinition(schema *Schema, definition *IndexDefinition) (err error) {
	return i.indexWithDefinition(i.name, schema, definition)
}
// indexWithDefinition is the internal method backing CreateIndex and
// CreateIndexWithIndexDefinition: it serializes the optional definition and
// the schema into FT.CREATE arguments and issues the command.
func (i *Client) indexWithDefinition(indexName string, schema *Schema, definition *IndexDefinition) (err error) {
	args := redis.Args{indexName}
	// The definition (if any) must precede the schema in the argument list.
	if definition != nil {
		args = definition.Serialize(args)
	}
	// Set flags based on options
	args, err = SerializeSchema(schema, args)
	if err != nil {
		return
	}
	conn := i.pool.Get()
	defer conn.Close()
	_, err = conn.Do("FT.CREATE", args...)
	return
}
// AddField Adds a new field to the index.
func (i *Client) AddField(f Field) error {
	// FT.ALTER <index> SCHEMA ADD <field spec...>
	args, err := serializeField(f, redis.Args{i.name, "SCHEMA", "ADD"})
	if err != nil {
		return err
	}
	conn := i.pool.Get()
	defer conn.Close()
	_, err = conn.Do("FT.ALTER", args...)
	return err
}
// Index indexes a list of documents with the default options
// (thin wrapper over IndexOptions with DefaultIndexingOptions).
func (i *Client) Index(docs ...Document) error {
	return i.IndexOptions(DefaultIndexingOptions, docs...)
}
// Search searches the index for the given query, and returns documents,
// the total number of results, or an error if something went wrong
func (i *Client) Search(q *Query) (docs []Document, total int, err error) {
	conn := i.pool.Get()
	defer conn.Close()
	args := redis.Args{i.name}
	args = append(args, q.serialize()...)
	res, err := redis.Values(conn.Do("FT.SEARCH", args...))
	if err != nil {
		return
	}
	// First reply element is the total number of matching documents.
	if total, err = redis.Int(res[0], nil); err != nil {
		return
	}
	docs = make([]Document, 0, len(res)-1)
	// After res[0], each document occupies `skip` consecutive reply entries:
	// the key, plus optional score / payload / fields entries depending on
	// the query flags. The *Idx values record each part's offset within a
	// document's group (-1 = absent).
	skip := 1
	scoreIdx := -1
	fieldsIdx := -1
	payloadIdx := -1
	if q.Flags&QueryWithScores != 0 {
		scoreIdx = 1
		skip++
	}
	if q.Flags&QueryWithPayloads != 0 {
		payloadIdx = skip
		skip++
	}
	if q.Flags&QueryNoContent == 0 {
		fieldsIdx = skip
		skip++
	}
	if len(res) > skip {
		// NOTE: the loop variable shadows the receiver `i`, which is not
		// used past this point.
		for i := 1; i < len(res); i += skip {
			if d, e := loadDocument(res, i, scoreIdx, payloadIdx, fieldsIdx); e == nil {
				docs = append(docs, d)
			} else {
				// Best-effort: skip unparsable documents rather than failing
				// the whole search.
				log.Print("Error parsing doc: ", e)
			}
		}
	}
	return
}
// AliasAdd adds an alias to an index.
// Indexes can have more than one alias, though an alias cannot refer to another alias.
func (i *Client) AliasAdd(name string) (err error) {
	conn := i.pool.Get()
	defer conn.Close()
	// FT.ALIASADD <alias> <index>
	_, err = redis.String(conn.Do("FT.ALIASADD", redis.Args{name}.Add(i.name)...))
	return
}
// AliasDel deletes an alias from index.
func (i *Client) AliasDel(name string) (err error) {
	conn := i.pool.Get()
	defer conn.Close()
	_, err = redis.String(conn.Do("FT.ALIASDEL", redis.Args{name}...))
	return
}
// AliasUpdate differs from the AliasAdd in that it will remove the alias association with
// a previous index, if any. AliasAdd will fail, on the other hand, if the alias is already
// associated with another index.
func (i *Client) AliasUpdate(name string) (err error) {
	conn := i.pool.Get()
	defer conn.Close()
	// FT.ALIASUPDATE <alias> <index>
	_, err = redis.String(conn.Do("FT.ALIASUPDATE", redis.Args{name}.Add(i.name)...))
	return
}
// DictAdd adds terms to a dictionary, returning the number of terms that
// were actually new.
func (i *Client) DictAdd(dictionaryName string, terms []string) (newTerms int, err error) {
	conn := i.pool.Get()
	defer conn.Close()
	// Named return already starts at zero; no explicit reset needed.
	newTerms, err = redis.Int(conn.Do("FT.DICTADD", redis.Args{dictionaryName}.AddFlat(terms)...))
	return
}
// DictDel removes terms from a dictionary, returning the number of terms
// that were actually deleted.
func (i *Client) DictDel(dictionaryName string, terms []string) (deletedTerms int, err error) {
	conn := i.pool.Get()
	defer conn.Close()
	deletedTerms, err = redis.Int(conn.Do("FT.DICTDEL", redis.Args{dictionaryName}.AddFlat(terms)...))
	return
}
// DictDump returns every term stored in the given dictionary.
func (i *Client) DictDump(dictionaryName string) (terms []string, err error) {
	conn := i.pool.Get()
	defer conn.Close()
	terms, err = redis.Strings(conn.Do("FT.DICTDUMP", redis.Args{dictionaryName}...))
	return
}
// SpellCheck performs spelling correction on a query, returning suggestions
// for misspelled terms, the count of terms that had at least one suggestion,
// or an error if something went wrong.
func (i *Client) SpellCheck(q *Query, s *SpellCheckOptions) (suggs []MisspelledTerm, total int, err error) {
	conn := i.pool.Get()
	defer conn.Close()
	args := append(redis.Args{i.name}, q.serialize()...)
	args = append(args, s.serialize()...)
	var res []interface{}
	res, err = redis.Values(conn.Do("FT.SPELLCHECK", args...))
	if err != nil {
		return
	}
	suggs = make([]MisspelledTerm, 0)
	// Every reply entry is a 3-element array:
	//   [0] the constant string "TERM" (ignored)
	//   [1] the misspelled term itself
	//   [2] the list of spelling-correction suggestions
	const termIdx, suggIdx = 1, 2
	// Note: loop variable renamed from `i` to avoid shadowing the receiver.
	for pos := range res {
		var termArray []interface{}
		termArray, err = redis.Values(res[pos], nil)
		if err != nil {
			return
		}
		if d, e := loadMisspelledTerm(termArray, termIdx, suggIdx); e == nil {
			suggs = append(suggs, d)
			if d.Len() > 0 {
				total++
			}
		} else {
			log.Print("Error parsing misspelled suggestion: ", e)
		}
	}
	return
}
// Aggregate executes an FT.AGGREGATE query, or — when the query's cursor
// already has pending results — reads the next chunk via FT.CURSOR READ.
// Returns the rows, the total count reported by the server, and any error.
func (i *Client) Aggregate(q *AggregateQuery) (aggregateReply [][]string, total int, err error) {
	conn := i.pool.Get()
	defer conn.Close()
	hasCursor := q.WithCursor
	validCursor := q.CursorHasResults()
	var res []interface{}
	if !validCursor {
		args := redis.Args{i.name}
		args = append(args, q.Serialize()...)
		res, err = redis.Values(conn.Do("FT.AGGREGATE", args...))
	} else {
		args := redis.Args{"READ", i.name, q.Cursor.Id}
		res, err = redis.Values(conn.Do("FT.CURSOR", args...))
	}
	if err != nil {
		return
	}
	if !hasCursor {
		// Plain reply: the whole payload is the aggregation result.
		total, aggregateReply, err = processAggReply(res)
	} else {
		// Cursor reply: [partial results, cursor id].
		// BUG FIX: the original declared `var partialResults, err = ...`,
		// shadowing the named return `err`; the error from processAggReply
		// below was then silently dropped by the naked return.
		var partialResults []interface{}
		partialResults, err = redis.Values(res[0], nil)
		if err != nil {
			return
		}
		q.Cursor.Id, err = redis.Int(res[1], nil)
		if err != nil {
			return
		}
		total, aggregateReply, err = processAggReply(partialResults)
	}
	return
}
// Get returns the full contents of a single document, or a nil *Document
// when the id does not exist in the index.
func (i *Client) Get(docId string) (doc *Document, err error) {
	conn := i.pool.Get()
	defer conn.Close()
	reply, err := conn.Do("FT.GET", redis.Args{i.name, docId}...)
	if reply == nil {
		return
	}
	var fields []interface{}
	fields, err = redis.Values(reply, err)
	if err != nil {
		return
	}
	if len(fields) > 0 {
		document := NewDocument(docId, 1)
		document.loadFields(fields)
		doc = &document
	}
	return
}
// MultiGet - Returns the full contents of multiple documents.
// Returns an array with exactly the same number of elements as the number of
// keys sent to the command. Each element is either a *Document or nil if the
// corresponding id was not found.
func (i *Client) MultiGet(documentIds []string) (docs []*Document, err error) {
	docs = make([]*Document, len(documentIds))
	conn := i.pool.Get()
	defer conn.Close()
	var reply interface{}
	args := redis.Args{i.name}.AddFlat(documentIds)
	reply, err = conn.Do("FT.MGET", args...)
	if reply != nil {
		var array_reply []interface{}
		array_reply, err = redis.Values(reply, err)
		if err != nil {
			return
		}
		for i := 0; i < len(array_reply); i++ {
			if array_reply[i] != nil {
				var innerArray []interface{}
				innerArray, err = redis.Values(array_reply[i], nil)
				if err != nil {
					return
				}
				// BUG FIX: previously this checked len(array_reply) (the
				// outer reply), so a document with an empty field array was
				// still materialized; check the per-document array instead.
				if len(innerArray) > 0 {
					document := NewDocument(documentIds[i], 1)
					document.loadFields(innerArray)
					docs[i] = &document
				}
			} else {
				// Not found: keep the nil placeholder at this position.
				docs[i] = nil
			}
		}
	}
	return
}
// Explain returns a textual description of the query's execution plan.
func (i *Client) Explain(q *Query) (string, error) {
	conn := i.pool.Get()
	defer conn.Close()
	args := append(redis.Args{i.name}, q.serialize()...)
	return redis.String(conn.Do("FT.EXPLAIN", args...))
}
// Drop deletes the index together with every key associated with it.
func (i *Client) Drop() (err error) {
	conn := i.pool.Get()
	defer conn.Close()
	_, err = conn.Do("FT.DROP", i.name)
	return
}
// DropIndex deletes the secondary index and, optionally, the associated hashes.
//
// Available since RediSearch 2.0.
//
// DropIndex wraps RediSearch FT.DROPINDEX, which by default keeps the document
// hashes the index covers. Pass deleteDocuments=true to delete those hashes too
// (the "DD" flag).
func (i *Client) DropIndex(deleteDocuments bool) error {
	conn := i.pool.Get()
	defer conn.Close()
	args := redis.Args{i.name}
	if deleteDocuments {
		args = args.Add("DD")
	}
	_, err := conn.Do("FT.DROPINDEX", args...)
	return err
}
// Delete removes the document from the index; deleteDocument selects whether
// the stored document itself is also removed.
// WARNING: As of RediSearch 2.0 and above, FT.DEL always deletes the underlying document.
// Deprecated: deprecated on RediSearch 2.0 and above, use DeleteDocument() instead.
func (i *Client) Delete(docId string, deleteDocument bool) (err error) {
	return i.delDoc(docId, deleteDocument)
}
// DeleteDocument removes the document from the index and also deletes the
// HASH key in which the document is stored.
func (i *Client) DeleteDocument(docId string) (err error) {
	return i.delDoc(docId, true)
}
// delDoc is the shared implementation behind Delete() and DeleteDocument();
// the "DD" flag asks the server to drop the stored document as well.
func (i *Client) delDoc(docId string, deleteDocument bool) (err error) {
	conn := i.pool.Get()
	defer conn.Close()
	args := redis.Args{i.name, docId}
	if deleteDocument {
		args = args.Add("DD")
	}
	_, err = conn.Do("FT.DEL", args...)
	return
}
// Internal method to be used by Info().
// setTarget assigns `value` to the IndexInfo field whose `redis` struct tag
// equals `key`, converting the raw reply according to the field's kind
// (string, uint64, float64 or bool). Conversion errors are ignored, leaving
// the field at its zero value. Returns an error when no field carries a
// matching tag, so the caller can fall back to explicit handling.
func (info *IndexInfo) setTarget(key string, value interface{}) error {
	v := reflect.ValueOf(info).Elem()
	for i := 0; i < v.NumField(); i++ {
		tag := v.Type().Field(i).Tag.Get("redis")
		if tag == key {
			targetInfo := v.Field(i)
			switch targetInfo.Kind() {
			case reflect.String:
				s, _ := redis.String(value, nil)
				targetInfo.SetString(s)
			case reflect.Uint64:
				u, _ := redis.Uint64(value, nil)
				targetInfo.SetUint(u)
			case reflect.Float64:
				f, _ := redis.Float64(value, nil)
				targetInfo.SetFloat(f)
			case reflect.Bool:
				// Booleans arrive as integers; any nonzero value is true.
				f, _ := redis.Uint64(value, nil)
				if f == 0 {
					targetInfo.SetBool(false)
				} else {
					targetInfo.SetBool(true)
				}
			default:
				// A `redis` tag on an unsupported field kind is a
				// programming error in IndexInfo itself — fail loudly.
				panic("Tag set without handler")
			}
			return nil
		}
	}
	return errors.New("setTarget: No handler defined for :" + key)
}
// sliceIndex returns the position of needle in haystack, or -1 when absent.
// (Reconstructed: the function name was garbled in the source; call sites in
// loadSchema reference it as sliceIndex.)
func sliceIndex(haystack []string, needle string) int {
	for pos, elem := range haystack {
		if elem == needle {
			return pos
		}
	}
	return -1
}
// loadSchema rebuilds info.Schema from the raw FT.INFO "fields" reply and
// derives index-level flags from the "index_options" strings. Malformed
// entries are logged and skipped rather than aborting the whole schema.
func (info *IndexInfo) loadSchema(values []interface{}, options []string) {
	// Index-level options.
	scOptions := Options{}
	for _, opt := range options {
		switch strings.ToUpper(opt) {
		case "NOFIELDS":
			scOptions.NoFieldFlags = true
		case "NOFREQS":
			scOptions.NoFrequencies = true
		case "NOOFFSETS":
			scOptions.NoOffsetVectors = true
		}
	}
	sc := NewSchema(scOptions)
	// Each value is itself an array describing one field:
	// [name, <unused>, type, option...].
	for _, specTmp := range values {
		rawSpec, err := redis.Values(specTmp, nil)
		if err != nil {
			log.Printf("Warning: Couldn't read schema. %s\n", err.Error())
			continue
		}
		spec := make([]string, 0)
		// Convert all to string, if not already string.
		for _, elem := range rawSpec {
			s, isString := elem.(string)
			if !isString {
				s, err = redis.String(elem, err)
				if err != nil {
					log.Printf("Warning: Couldn't read schema. %s\n", err.Error())
					continue
				}
			}
			spec = append(spec, s)
		}
		// Need at least Name and Type.
		if len(spec) < 3 {
			log.Printf("Invalid spec")
			continue
		}
		var options []string
		if len(spec) > 3 {
			options = spec[3:]
		} else {
			options = []string{}
		}
		f := Field{Name: spec[0]}
		switch strings.ToUpper(spec[2]) {
		case "TAG":
			f.Type = TagField
			tfOptions := TagFieldOptions{}
			// BUG FIX: bounds-check before reading the SEPARATOR argument so
			// a trailing "SEPARATOR" with no value cannot panic.
			if wIdx := sliceIndex(options, "SEPARATOR"); wIdx != -1 && wIdx+1 < len(options) {
				tfOptions.Separator = options[wIdx+1][0]
			}
			f.Options = tfOptions
		case "GEO":
			f.Type = GeoField
		case "NUMERIC":
			f.Type = NumericField
			nfOptions := NumericFieldOptions{}
			if sliceIndex(options, "SORTABLE") != -1 {
				nfOptions.Sortable = true
			}
			f.Options = nfOptions
		case "TEXT":
			f.Type = TextField
			tfOptions := TextFieldOptions{}
			if sliceIndex(options, "SORTABLE") != -1 {
				tfOptions.Sortable = true
			}
			// BUG FIX: the bound was checked against len(spec), but wIdx
			// indexes into options (= spec[3:]), so the old check could read
			// past the end of options; compare against len(options) instead.
			if wIdx := sliceIndex(options, "WEIGHT"); wIdx != -1 && wIdx+1 < len(options) {
				weightString := options[wIdx+1]
				weight64, _ := strconv.ParseFloat(weightString, 32)
				tfOptions.Weight = float32(weight64)
			}
			f.Options = tfOptions
		}
		sc = sc.AddField(f)
	}
	info.Schema = *sc
}
// Info - Get information about the index. This can also be used to check if
// the index exists.
func (i *Client) Info() (*IndexInfo, error) {
	conn := i.pool.Get()
	defer conn.Close()
	res, err := redis.Values(conn.Do("FT.INFO", i.name))
	if err != nil {
		return nil, err
	}
	ret := IndexInfo{}
	var schemaFields []interface{}
	var indexOptions []string
	// The reply is a flat key/value list. ROBUSTNESS FIX: require ii+1 to be
	// in range so a malformed odd-length reply cannot panic on res[ii+1].
	for ii := 0; ii+1 < len(res); ii += 2 {
		key, _ := redis.String(res[ii], nil)
		// First try to map the pair onto an IndexInfo field via its tag.
		if err := ret.setTarget(key, res[ii+1]); err == nil {
			continue
		}
		// Keys with no direct field mapping are handled explicitly.
		switch key {
		case "index_options":
			indexOptions, _ = redis.Strings(res[ii+1], nil)
		case "fields":
			schemaFields, _ = redis.Values(res[ii+1], nil)
		}
	}
	if schemaFields != nil {
		ret.loadSchema(schemaFields, indexOptions)
	}
	return &ret, nil
}
// SetConfig sets a runtime configuration option on the server and returns
// the server's reply string.
func (i *Client) SetConfig(option string, value string) (string, error) {
	conn := i.pool.Get()
	defer conn.Close()
	return redis.String(conn.Do("FT.CONFIG", "SET", option, value))
}
// GetConfig fetches runtime configuration as an option-name -> value map
// (FT.CONFIG GET may report several pairs, e.g. for the "*" pattern).
func (i *Client) GetConfig(option string) (map[string]string, error) {
	conn := i.pool.Get()
	defer conn.Close()
	values, err := redis.Values(conn.Do("FT.CONFIG", "GET", option))
	if err != nil {
		return nil, err
	}
	m := make(map[string]string)
	for _, entry := range values {
		// Each entry should itself be a [name, value] pair; anything else
		// is silently skipped, matching the server's best-effort contract.
		kv, _ := redis.Strings(entry, nil)
		if len(kv) == 2 {
			m[kv[0]] = kv[1]
		}
	}
	return m, nil
}
// GetTagVals returns the distinct tags indexed in a Tag field.
func (i *Client) GetTagVals(index string, filedName string) ([]string, error) {
	conn := i.pool.Get()
	defer conn.Close()
	return redis.Strings(conn.Do("FT.TAGVALS", index, filedName))
}
// SynAdd adds a synonym group and returns its id.
// Deprecated: no longer supported on RediSearch 2.0 and above, use SynUpdate instead.
func (i *Client) SynAdd(indexName string, terms []string) (int64, error) {
	conn := i.pool.Get()
	defer conn.Close()
	return redis.Int64(conn.Do("FT.SYNADD", redis.Args{indexName}.AddFlat(terms)...))
}
// SynUpdate extends an existing synonym group with additional terms.
func (i *Client) SynUpdate(indexName string, synonymGroupId int64, terms []string) (string, error) {
	conn := i.pool.Get()
	defer conn.Close()
	return redis.String(conn.Do("FT.SYNUPDATE", redis.Args{indexName, synonymGroupId}.AddFlat(terms)...))
}
// SynDump dumps the synonym mapping as term -> ids of the synonym groups the
// term belongs to.
func (i *Client) SynDump(indexName string) (map[string][]int64, error) {
	conn := i.pool.Get()
	defer conn.Close()
	args := redis.Args{indexName}
	values, err := redis.Values(conn.Do("FT.SYNDUMP", args...))
	if err != nil {
		return nil, err
	}
	// The reply alternates term / group-id-list.
	valLen := len(values)
	if valLen%2 != 0 {
		return nil, errors.New("SynDump: expects even number of values result")
	}
	m := make(map[string][]int64, valLen/2)
	for i := 0; i < valLen; i += 2 {
		// ROBUSTNESS FIX: convert via redis.String instead of the previous
		// unchecked values[i].([]byte) assertion, which panicked whenever
		// the reply element was not a bulk string.
		key, err := redis.String(values[i], nil)
		if err != nil {
			return nil, err
		}
		gids, err := redis.Int64s(values[i+1], nil)
		if err != nil {
			return nil, err
		}
		m[key] = gids
	}
	return m, nil
}
// AddHash adds a document to the index from an existing HASH key in Redis.
// Deprecated: no longer supported on RediSearch 2.0 and above, use HSET instead.
// See the example ExampleClient_CreateIndexWithIndexDefinition for a deeper understanding on how to move towards using hashes on your application
func (i *Client) AddHash(docId string, score float32, language string, replace bool) (string, error) {
	conn := i.pool.Get()
	defer conn.Close()
	args := redis.Args{i.name, docId, score}
	// Optional flags are only appended when requested.
	if language != "" {
		args = args.Add("LANGUAGE", language)
	}
	if replace {
		args = args.Add("REPLACE")
	}
	return redis.String(conn.Do("FT.ADDHASH", args...))
}
// List returns the names of all existing indexes (FT._LIST).
func (i *Client) List() ([]string, error) {
	conn := i.pool.Get()
	defer conn.Close()
	res, err := redis.Values(conn.Do("FT._LIST"))
	if err != nil {
		return nil, err
	}
	var indexes []string
	for _, v := range res {
		// Conversion failures yield an empty name rather than an error,
		// preserving the best-effort behavior of the original loop.
		name, _ := redis.String(v, nil)
		indexes = append(indexes, name)
	}
	return indexes, nil
}
|
sliceIndex
|
basic.rs
|
use conrod;
use elements::{*, action::*};
/// When true, element setup/build paths print verbose tracing to stdout.
const DEBUG: bool = false;
/*
d88888b .88b d88. d8888b. d888888b db db
88' 88'YbdP`88 88 `8D `~~88~~' `8b d8'
88ooooo 88 88 88 88oodD' 88 `8bd8'
88~~~~~ 88 88 88 88~~~ 88 88
88. 88 88 88 88 88 88
Y88888P YP YP YP 88 YP YP
*/
/// A placeholder element that renders nothing but still participates in
/// layout: it tracks its assigned frame and min/max size constraints.
pub struct Empty {
    parent: Option<conrod::widget::id::Id>, // enclosing conrod widget, if any
    is_setup: bool,                         // true once setup() has run
    frame: Frame<i32>,                      // assigned screen rectangle
    window_center: Vec2<i32>,               // window center used for positioning
    min_size: Vec2<i32>,
    max_size: Vec2<i32>,
}
impl Empty {
    /// Creates a boxed `Empty` with a zeroed frame, zero minimum size and an
    /// unbounded maximum size.
    pub fn new() -> Box<Self> {
        Box::new(Empty{
            parent: None,
            is_setup: false,
            frame: Frame::new(),
            window_center: Vec2::zero(),
            min_size: Vec2::zero(),
            max_size: Vec2 {x: i32::MAX, y: i32::MAX},
        })
    }
}
/// `Element` impl for `Empty`: stores layout state but draws nothing.
impl Element for Empty {
    // No widget ids to allocate; just mark the element ready.
    fn setup(&mut self, _ui: &mut conrod::Ui) { self.is_setup = true }
    fn is_setup(&self) -> bool { self.is_setup }
    fn set_parent_widget(&mut self, parent: conrod::widget::id::Id) {
        self.parent = Some(parent);
    }
    fn set_floating(&mut self, _floating: bool) {}
    // Intentionally a no-op: an Empty element has no visual output.
    fn build_window(&self, _ui: &mut conrod::UiCell, _ressources: &WindowRessources) {}
    fn get_frame(&self) -> Frame<i32> { self.frame }
    fn set_frame(&mut self, frame: Frame<i32>, window_center: Vec2<i32>) {
        self.frame = frame;
        self.window_center = window_center;
    }
    fn set_min_size(&mut self, size: Vec2<i32>) {
        self.min_size = size;
    }
    fn get_min_size(&self) -> Vec2<i32> {
        self.min_size
    }
    fn set_max_size(&mut self, size: Vec2<i32>) {
        self.max_size = size;
    }
    fn get_max_size(&self) -> Vec2<i32> {
        self.max_size
    }
    // Action messages are ignored.
    fn transmit_msg(&mut self, _msg: ActionMsg, _stop: bool) {}
}
/*
d8888b. db .d8b. d8b db d88888b
88 `8D 88 d8' `8b 888o 88 88'
88oodD' 88 88ooo88 88V8o 88 88ooooo
88~~~ 88 88~~~88 88 V8o88 88~~~~~
88 88booo. 88 88 88 V888 88.
88 Y88888P YP YP VP V8P Y88888P
*/
// Generates the PlaneIds id-struct used to register the plane's single
// widget with conrod's widget graph.
widget_ids!(
    #[derive(Clone)]
    struct PlaneIds {
        plane,
    }
);
/// A rectangular element filled either with a texture or a solid color
/// (selected by `graphic`).
#[derive(Clone)]
pub struct Plane {
    ids: Option<PlaneIds>,                  // conrod widget ids, set in setup()
    parent: Option<conrod::widget::id::Id>, // enclosing conrod widget, if any
    floating: bool,                         // render as a floating widget
    is_setup: bool,
    global_center: Vec2<i32>,               // window center used for positioning
    frame: Frame<i32>,                      // assigned screen rectangle
    graphic: Graphic,                       // texture / color / none
    min_size: Vec2<i32>,
    max_size: Vec2<i32>,
}
impl Plane {
    /// Creates a boxed `Plane` with the given graphic, a zeroed frame and an
    /// unbounded maximum size.
    pub fn new(graphic: Graphic) -> Box<Self> {
        use std::i32;
        Box::new(Plane {
            ids: None,
            parent: None,
            floating: false,
            is_setup: false,
            global_center: Vec2::zero(),
            frame: Frame::new(),
            graphic,
            min_size: Vec2::zero(),
            max_size: Vec2 {x: i32::MAX, y: i32::MAX},
        })
    }
    /// Builds the plane as a conrod Image widget.
    /// `texture` is (width, height, image id); `texture_properties` selects
    /// the source rectangle cut out of the texture for this frame size.
    /// NOTE(review): the `_ui` parameter is underscore-named but actually
    /// used below for `img.set(...)`.
    fn build_textured(
        &self,
        _ui: &mut conrod::UiCell,
        _ressources: &WindowRessources,
        texture: (u32,u32,conrod::image::Id),
        texture_properties: &Texture
    ) {
        use conrod::{Widget, widget, Positionable, Sizeable};
        if DEBUG { println!("building textured plane with image id {:?}", texture);}
        // Position relative to the window center.
        let c = self.frame.center()-self.global_center;
        if DEBUG { println!("creating plane image...");}
        let mut img = widget::primitive::image::Image::new(texture.2)
            .source_rectangle(texture_properties.get_cut(
                self.frame.width() as u32, self.frame.height() as u32, texture.0, texture.1
            ))
            .floating(self.floating)
            .x_y(c.x as f64, c.y as f64)
            .w_h(self.frame.width() as f64,self.frame.height() as f64);
        if let Some(parent) = self.parent {
            img = img.parent(parent);
        }
        // Only drawable once setup() has allocated widget ids.
        if let Some(ref ids) = self.ids {
            img.set(ids.plane, _ui);
        }
        if DEBUG { println!("Plane build.");}
    }
    /// Builds the plane as a solid-color conrod Rectangle widget.
    fn build_flat(&self, ui: &mut conrod::UiCell, _ressources: &WindowRessources, color: conrod::Color) {
        use conrod::{Positionable, Widget};
        if DEBUG { println!("building flat plane");}
        if let Some(ref ids) = self.ids {
            // Position relative to the window center.
            let c = self.frame.center()-self.global_center;
            if DEBUG { println!("creating plane color...");}
            let mut rect = conrod::widget::Rectangle::fill_with(
                [self.frame.width() as f64, self.frame.height() as f64],
                color
            ).x_y(c.x as f64, c.y as f64);
            if let Some(parent) = self.parent {
                rect = rect.parent(parent);
            }
            rect.set(ids.plane, ui);
        }
        if DEBUG { println!("Plane build.");}
    }
}
/// Allows the plane's graphic (texture/color) to be replaced after creation.
impl Graphicable for Plane {
    fn with_graphic(mut self, fg: Graphic) -> Box<Self> {
        self.graphic = fg;
        Box::new(self)
    }
    fn set_graphic(&mut self, fg: Graphic) {
        self.graphic = fg;
    }
}
impl Element for Plane {
    /// Allocates the conrod widget ids and marks the element ready.
    fn setup(&mut self, ui: &mut conrod::Ui) {
        self.ids = Some(PlaneIds::new(ui.widget_id_generator()));
        self.is_setup = true;
    }
    fn is_setup(&self) -> bool { self.is_setup }
    fn set_parent_widget(&mut self, parent: conrod::widget::id::Id) {
        self.parent = Some(parent);
    }
    fn set_floating(&mut self, floating: bool) {
        self.floating = floating;
    }
    /// Dispatches to the textured or flat build path depending on `graphic`.
    /// A texture graphic whose image is not loaded in the window ressources
    /// falls through and draws nothing.
    fn build_window(&self, ui: &mut conrod::UiCell, ressources: &WindowRessources) {
        match self.graphic {
            Graphic::Texture(ref texture) => {
                if let Some(tex) = ressources.image(&texture.get_id()) {
                    self.build_textured(ui, ressources, *tex, &texture);
                    return;
                };
            },
            Graphic::Color(color) => {
                self.build_flat(ui, ressources, color)
            },
            Graphic::None => ()
        }
    }
    fn get_frame(&self) -> Frame<i32> {
        self.frame
    }
    fn set_frame(&mut self, frame: Frame<i32>, window_center: Vec2<i32>) {
        self.global_center = window_center;
        self.frame = frame;
    }
    fn set_min_size(&mut self, size: Vec2<i32>) {
        self.min_size = size;
    }
    fn get_min_size(&self) -> Vec2<i32> {
        self.min_size
    }
    fn set_max_size(&mut self, size: Vec2<i32>) {
        self.max_size = size;
    }
    fn get_max_size(&self) -> Vec2<i32> {
        self.max_size
    }
    // Action messages are ignored.
    fn transmit_msg(&mut self, _msg: ActionMsg, _stop: bool) { }
}
/*
d888888b d88888b db db d888888b
`~~88~~' 88' `8b d8' `~~88~~'
88 88ooooo `8bd8' 88
88 88~~~~~ .dPYb. 88
88 88. .8P Y8. 88
YP Y88888P YP YP YP
*/
// Generates the LabelIds id-struct used to register the text widget with
// conrod's widget graph.
widget_ids!(
    #[derive(Clone)]
    struct LabelIds {
        text,
    }
);
/// A text label element; its content, size and color come from `font`.
#[derive(Clone)]
pub struct Text {
    font: Font,                             // text content + styling
    is_setup: bool,
    frame: Frame<i32>,                      // assigned screen rectangle
    global_center: Vec2<i32>,               // window center used for positioning
    min_size: Vec2<i32>,
    max_size: Vec2<i32>,
    ids: Option<LabelIds>,                  // conrod widget ids, set in setup()
    parent: Option<conrod::widget::id::Id>,
    floating: bool,
}
impl Text {
    /// Creates a boxed `Text` with the given font, a zeroed frame and an
    /// unbounded maximum size.
    pub fn new(font: Font) -> Box<Self> {
        Box::new(Text {
            font,
            is_setup: false,
            frame: Frame::new(),
            global_center: Vec2::zero(),
            min_size: Vec2::zero(),
            max_size: Vec2 {x: i32::MAX, y: i32::MAX},
            ids: None,
            parent: None,
            floating: false,
        })
    }
}
impl Element for Text {
    /// Allocates the conrod widget id for the label and marks setup done.
    fn setup(&mut self, ui: &mut conrod::Ui) {
        self.ids = Some(LabelIds::new(ui.widget_id_generator()));
        self.is_setup = true;
        if DEBUG { println!("Label --- setup()"); }
    }
    fn is_setup(&self) -> bool { self.is_setup }
    fn set_parent_widget(&mut self, parent: conrod::widget::id::Id) {
        self.parent = Some(parent);
    }
    fn set_floating(&mut self, floating: bool) {
        self.floating = floating;
    }
    /// Builds the conrod Text widget for this label.
    /// NOTE(review): the original source was garbled here — the statements
    /// creating `text`/`label` were displaced elsewhere in the file; they
    /// have been spliced back into place.
    fn build_window(&self, ui: &mut conrod::UiCell, ressources: &WindowRessources) {
        use conrod::{widget, Positionable, Colorable, Widget};
        if let Some(ref ids) = self.ids {
            // Position relative to the window center.
            let c = self.frame.center() - self.global_center;
            let text = self.font.get_text();
            let mut label = widget::Text::new(&text)
                .x_y(c.x as f64, c.y as f64)
                .color(self.font.get_color())
                .font_size(self.font.get_size())
                .floating(self.floating);
            // Apply the requested font when the window has it loaded.
            let fnt = ressources.font(&self.font.get_font_id());
            if let Some(fnt) = fnt {
                label = label.font_id(*fnt);
            }
            if let Some(parent) = self.parent {
                label = label.parent(parent);
            }
            label.set(ids.text, ui);
        }
    }
    fn get_frame(&self) -> Frame<i32> {
        self.frame
    }
    fn set_frame(&mut self, frame: Frame<i32>, window_center: Vec2<i32>) {
        self.frame = frame;
        self.global_center = window_center;
    }
    fn set_min_size(&mut self, size: Vec2<i32>) {
        self.min_size = size;
    }
    fn get_min_size(&self) -> Vec2<i32> {
        self.min_size
    }
    fn set_max_size(&mut self, size: Vec2<i32>) {
        self.max_size = size;
    }
    fn get_max_size(&self) -> Vec2<i32> {
        self.max_size
    }
    // Action messages are ignored.
    fn transmit_msg(&mut self, _msg: ActionMsg, _stop: bool){}
}
/// Allows the label's font to be replaced after creation.
impl Labelable for Text {
    fn with_font(mut self, font: Font) -> Box<Self> {
        self.font = font;
        Box::new(self)
    }
    fn set_font(&mut self, font: Font) {
        self.font = font;
        // Force a re-setup so the widget reflects the new font.
        self.is_setup = false;
    }
}
/// Allows the label's text color to be changed after creation.
impl Colorable for Text {
    fn with_color(mut self, color: conrod::Color) -> Box<Self> {
        self.font.set_color(color);
        Box::new(self)
    }
    fn set_color(&mut self, color: conrod::Color) {
        self.font.set_color(color);
    }
}
|
let text = self.font.get_text();
let mut label = widget::Text::new(&text)
.x_y(c.x as f64, c.y as f64)
|
report.rs
|
// Copyright (c) Facebook, Inc. and its affiliates.
use anyhow::{anyhow, Result};
use chrono::prelude::*;
use log::trace;
use serde::{Deserialize, Serialize};
use std::collections::BTreeMap;
use std::ops;
use std::time::UNIX_EPOCH;
use super::RunnerState;
use rd_util::*;
/// Header prepended to the generated summary-report file; documents every
/// field of the report. Keep the key names in sync with the struct fields
/// below (e.g. `SideloaderReport`).
const REPORT_DOC: &str = "\
//
// rd-agent summary report
//
// svc.name is an empty string if the service doesn't exist. svc.state
// is either Running, Exited, Failed or Other.
//
// timestamp: When this report was generated
// seq: Incremented on each execution, used for temporary settings
// state: Idle, Running, BenchHashd or BenchIoCost
// oomd.svc.name: OOMD systemd service name
// oomd.svc.state: OOMD systemd service state
// oomd.work_mem_pressure: Memory pressure based kill enabled in workload.slice
// oomd.work_senpai: Senpai enabled on workload.slice
// oomd.sys_mem_pressure: Memory pressure based kill enabled in system.slice
// oomd.sys_senpai: Senpai enabled on system.slice
// sideloader.svc.name: sideloader systemd service name
// sideloader.svc.state: sideloader systemd service state
// sideloader.sysconf_warnings: sideloader system configuration warnings
// sideloader.overload: sideloader is in overloaded state
// sideloader.overload_why: the reason for overloaded state
// sideloader.critical: sideloader is in critical state
// sideloader.critical_why: the reason for critical state
// bench.hashd.svc.name: rd-hashd benchmark systemd service name
// bench.hashd.svc.state: rd-hashd benchmark systemd service state
// bench.hashd.phase: rd-hashd benchmark phase
// bench.hashd.mem_probe_size: memory size rd-hashd benchmark is probing
// bench.hashd.mem_probe_at: the timestamp this memory probing started at
// bench.iocost.svc.name: iocost benchmark systemd service name
// bench.iocost.svc.state: iocost benchmark systemd service state
// hashd[].svc.name: rd-hashd systemd service name
// hashd[].svc.state: rd-hashd systemd service state
// hashd[].load: Current rps / rps_max
// hashd[].rps: Current rps
// hashd[].lat_pct: Current control percentile
// hashd[].lat: Current control percentile latency
// sysloads{}.svc.name: Sysload systemd service name
// sysloads{}.svc.state: Sysload systemd service state
// sideloads{}.svc.name: Sideload systemd service name
// sideloads{}.svc.state: Sideload systemd service state
// iocost.model: iocost model parameters currently in effect
// iocost.qos: iocost QoS parameters currently in effect
// iolat.{read|write|discard|flush}.p*: IO latency distributions
// iolat_cum.{read|write|discard|flush}.p*: Cumulative IO latency distributions
// swappiness: vm.swappiness
// zswap_enabled: zswap enabled
//
//
";
/// Coarse systemd service state as reported in the summary report.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub enum SvcStateReport {
    Running,
    Exited,
    Failed,
    /// Any state other than the three above (or service missing).
    Other,
}
// Unknown/missing services default to Other.
impl Default for SvcStateReport {
    fn default() -> Self {
        Self::Other
    }
}
/// One systemd service: its unit name (empty if absent) and coarse state.
#[derive(Clone, Serialize, Deserialize, Default)]
pub struct SvcReport {
    pub name: String,
    pub state: SvcStateReport,
}
/// Which resource-control domains (cpu/mem/io) are currently enabled.
#[derive(Clone, Serialize, Deserialize, Default)]
pub struct ResCtlReport {
    pub cpu: bool,
    pub mem: bool,
    pub io: bool,
}
/// OOMD service state and which kill/senpai policies are active per slice.
#[derive(Clone, Serialize, Deserialize, Default)]
pub struct OomdReport {
    pub svc: SvcReport,
    pub work_mem_pressure: bool, // pressure-based kill in workload.slice
    pub work_senpai: bool,       // senpai on workload.slice
    pub sys_mem_pressure: bool,  // pressure-based kill in system.slice
    pub sys_senpai: bool,        // senpai on system.slice
}
/// Progress of the rd-hashd benchmark run.
#[derive(Clone, Serialize, Deserialize)]
pub struct BenchHashdReport {
    pub svc: SvcReport,
    pub phase: rd_hashd_intf::Phase,
    pub mem_probe_size: usize,          // memory size currently being probed
    pub mem_probe_at: DateTime<Local>,  // when the current probe started
}
// Manual impl because DateTime<Local> has no Default; the epoch is used as
// the "never probed" sentinel.
impl Default for BenchHashdReport {
    fn default() -> Self {
        Self {
            svc: Default::default(),
            phase: Default::default(),
            mem_probe_size: 0,
            mem_probe_at: DateTime::from(UNIX_EPOCH),
        }
    }
}
/// Service state of the iocost benchmark.
#[derive(Clone, Serialize, Deserialize, Default)]
pub struct BenchIoCostReport {
    pub svc: SvcReport,
}
/// Sideloader service state plus its overload/critical flags and reasons.
#[derive(Clone, Serialize, Deserialize, Default)]
pub struct SideloaderReport {
    pub svc: SvcReport,
    pub sysconf_warnings: Vec<String>,
    pub overload: bool,
    pub overload_why: String,
    pub critical: bool,
    pub critical_why: String,
}
/// Runtime metrics of one rd-hashd instance.
#[derive(Clone, Serialize, Deserialize)]
pub struct HashdReport {
    pub svc: SvcReport,
    pub phase: rd_hashd_intf::Phase,
    pub load: f64,      // rps / rps_max
    pub rps: f64,
    pub lat_pct: f64,   // control percentile
    pub lat: rd_hashd_intf::Latencies,
    pub nr_in_flight: u32,
    pub nr_done: u64,
    pub nr_workers: usize,
    pub nr_idle_workers: usize,
    pub mem_probe_size: usize,
    pub mem_probe_at: DateTime<Local>,
}
// Manual impl because DateTime<Local> has no Default; epoch = "never".
impl Default for HashdReport {
    fn default() -> Self {
        Self {
            svc: Default::default(),
            phase: Default::default(),
            load: 0.0,
            rps: 0.0,
            lat_pct: 0.0,
            lat: Default::default(),
            nr_in_flight: 0,
            nr_done: 0,
            nr_workers: 0,
            nr_idle_workers: 0,
            mem_probe_size: 0,
            mem_probe_at: DateTime::from(UNIX_EPOCH),
        }
    }
}
/// Element-wise accumulation of numeric metrics, used together with
/// DivAssign below to average reports over a window.
/// NOTE(review): svc, phase and the mem_probe_* fields are deliberately not
/// accumulated here (they are not averaged quantities) — confirm before
/// extending.
impl ops::AddAssign<&HashdReport> for HashdReport {
    fn add_assign(&mut self, rhs: &HashdReport) {
        self.load += rhs.load;
        self.rps += rhs.rps;
        self.lat_pct += rhs.lat_pct;
        self.lat += &rhs.lat;
        self.nr_in_flight += rhs.nr_in_flight;
        self.nr_done += rhs.nr_done;
        self.nr_workers += rhs.nr_workers;
        self.nr_idle_workers += rhs.nr_idle_workers;
    }
}
/// Scalar division of the accumulated metrics; integer counters are divided
/// in f64 space and rounded back.
impl<T: Into<f64>> ops::DivAssign<T> for HashdReport {
    fn div_assign(&mut self, rhs: T) {
        let div = rhs.into();
        self.load /= div;
        self.rps /= div;
        self.lat_pct /= div;
        self.lat /= div;
        self.nr_in_flight = ((self.nr_in_flight as f64) / div).round() as u32;
        self.nr_done = ((self.nr_done as f64) / div).round() as u64;
        self.nr_workers = ((self.nr_workers as f64) / div).round() as usize;
        self.nr_idle_workers = ((self.nr_idle_workers as f64) / div).round() as usize;
    }
}
/// One sysload job: its service and the path of the script it runs.
#[derive(Clone, Serialize, Deserialize)]
pub struct SysloadReport {
    pub svc: SvcReport,
    pub scr_path: String,
}
/// One sideload job: its service and the path of the script it runs.
#[derive(Clone, Serialize, Deserialize)]
pub struct SideloadReport {
    pub svc: SvcReport,
    pub scr_path: String,
}
/// Per-cgroup resource usage snapshot. The (f64, f64) pairs hold the
/// (some, full) variants of PSI stall/pressure metrics.
#[derive(Debug, Default, Clone, Serialize, Deserialize)]
pub struct UsageReport {
    pub cpu_util: f64,
    pub cpu_sys: f64,
    pub cpu_usage: f64,
    pub cpu_usage_sys: f64,
    pub cpu_usage_base: f64,
    pub mem_bytes: u64,
    pub swap_bytes: u64,
    pub swap_free: u64,
    pub io_rbytes: u64,
    pub io_wbytes: u64,
    pub io_rbps: u64,
    pub io_wbps: u64,
    pub io_usage: f64,
    pub io_util: f64,
    pub cpu_stalls: (f64, f64),
    pub mem_stalls: (f64, f64),
    pub io_stalls: (f64, f64),
    pub cpu_pressures: (f64, f64),
    pub mem_pressures: (f64, f64),
    pub io_pressures: (f64, f64),
}
/// Element-wise accumulation for averaging usage over a window.
/// NOTE(review): cpu_usage_base is not accumulated here — presumably it is a
/// baseline rather than an averaged quantity; confirm before adding it.
impl ops::AddAssign<&UsageReport> for UsageReport {
    fn add_assign(&mut self, rhs: &UsageReport) {
        self.cpu_util += rhs.cpu_util;
        self.cpu_sys += rhs.cpu_sys;
        self.cpu_usage += rhs.cpu_usage;
        self.cpu_usage_sys += rhs.cpu_usage_sys;
        self.mem_bytes += rhs.mem_bytes;
        self.swap_bytes += rhs.swap_bytes;
        self.swap_free += rhs.swap_free;
        self.io_rbytes += rhs.io_rbytes;
        self.io_wbytes += rhs.io_wbytes;
        self.io_rbps += rhs.io_rbps;
        self.io_wbps += rhs.io_wbps;
        self.io_usage += rhs.io_usage;
        self.io_util += rhs.io_util;
        self.cpu_stalls.0 += rhs.cpu_stalls.0;
        self.cpu_stalls.1 += rhs.cpu_stalls.1;
        self.mem_stalls.0 += rhs.mem_stalls.0;
        self.mem_stalls.1 += rhs.mem_stalls.1;
        self.io_stalls.0 += rhs.io_stalls.0;
        self.io_stalls.1 += rhs.io_stalls.1;
        self.cpu_pressures.0 += rhs.cpu_pressures.0;
        self.cpu_pressures.1 += rhs.cpu_pressures.1;
        self.mem_pressures.0 += rhs.mem_pressures.0;
        self.mem_pressures.1 += rhs.mem_pressures.1;
        self.io_pressures.0 += rhs.io_pressures.0;
        self.io_pressures.1 += rhs.io_pressures.1;
    }
}
/// Scalar division of accumulated usage; u64 counters divide in f64 space
/// and round back via the div_u64 helper closure.
impl<T: Into<f64>> ops::DivAssign<T> for UsageReport {
    fn div_assign(&mut self, rhs: T) {
        let div = rhs.into();
        let div_u64 = |v: &mut u64| *v = (*v as f64 / div).round() as u64;
        self.cpu_util /= div;
        self.cpu_sys /= div;
        self.cpu_usage /= div;
        self.cpu_usage_sys /= div;
        div_u64(&mut self.mem_bytes);
        div_u64(&mut self.swap_bytes);
        div_u64(&mut self.swap_free);
        div_u64(&mut self.io_rbytes);
        div_u64(&mut self.io_wbytes);
        div_u64(&mut self.io_rbps);
        div_u64(&mut self.io_wbps);
        self.io_usage /= div;
        self.io_util /= div;
        self.cpu_stalls.0 /= div;
        self.cpu_stalls.1 /= div;
        self.mem_stalls.0 /= div;
        self.mem_stalls.1 /= div;
        self.io_stalls.0 /= div;
        self.io_stalls.1 /= div;
        self.cpu_pressures.0 /= div;
        self.cpu_pressures.1 /= div;
        self.mem_pressures.0 /= div;
        self.mem_pressures.1 /= div;
        self.io_pressures.0 /= div;
        self.io_pressures.1 /= div;
    }
}
/// IO latency distributions: operation type ("read"/"write"/"discard"/
/// "flush") -> percentile label -> latency value.
#[derive(Clone, Serialize, Deserialize)]
pub struct IoLatReport {
    #[serde(flatten)]
    pub map: BTreeMap<String, BTreeMap<String, f64>>,
}
impl IoLatReport {
    /// Percentile labels tracked per operation type; these are the keys of
    /// the inner maps and are always fully populated by Default.
    pub const PCTS: &'static [&'static str] = &[
        "00", "01", "05", "10", "25", "50", "75", "90", "95", "99", "99.9", "99.99", "99.999",
        "100",
    ];
}
impl IoLatReport {
    /// Merges `rhs` into `self` by taking the per-percentile maximum for
    /// each operation type.
    ///
    /// The unwrap()s/indexing require both reports to contain all four
    /// operation keys and every PCTS entry; `Default` guarantees that.
    pub fn accumulate(&mut self, rhs: &IoLatReport) {
        for key in &["read", "write", "discard", "flush"] {
            let key = key.to_string();
            let lpcts = self.map.get_mut(&key).unwrap();
            let rpcts = &rhs.map[&key];
            for pct in Self::PCTS.iter() {
                let pct = pct.to_string();
                let lv = lpcts.get_mut(&pct).unwrap();
                *lv = lv.max(rpcts[&pct]);
            }
        }
    }
}
// Pre-populates every operation type with all percentile keys at 0.0 so
// accumulate() can safely unwrap lookups.
impl Default for IoLatReport {
    fn default() -> Self {
        let mut map = BTreeMap::new();
        for key in &["read", "write", "discard", "flush"] {
            let mut pcts = BTreeMap::new();
            for pct in Self::PCTS.iter() {
                pcts.insert(pct.to_string(), 0.0);
            }
            map.insert(key.to_string(), pcts);
        }
        Self { map }
    }
}
/// iocost model parameters as read from /sys/fs/cgroup/io.cost.model.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct IoCostModelReport {
    pub ctrl: String,   // "auto" vs "user" controller mode string
    pub model: String,
    #[serde(flatten)]
    pub knobs: IoCostModelParams,
}
// Empty strings signal "no entry found for the device".
impl Default for IoCostModelReport {
    fn default() -> Self {
        Self {
            ctrl: "".into(),
            model: "".into(),
            knobs: Default::default(),
        }
    }
}
impl IoCostModelReport {
    /// Parses io.cost.model for the `devnr` (major, minor) device.
    /// Returns Default (empty strings / zero knobs) when the device has no
    /// entry; errors on unreadable file, missing keys or unparsable values.
    pub fn read(devnr: (u32, u32)) -> Result<Self> {
        let kf = read_cgroup_nested_keyed_file("/sys/fs/cgroup/io.cost.model")?;
        let map = match kf.get(&format!("{}:{}", devnr.0, devnr.1)) {
            Some(v) => v,
            None => return Ok(Default::default()),
        };
        let kerr = "missing key in io.cost.model";
        Ok(Self {
            ctrl: map.get("ctrl").ok_or(anyhow!(kerr))?.clone(),
            model: map.get("model").ok_or(anyhow!(kerr))?.clone(),
            knobs: IoCostModelParams {
                rbps: map.get("rbps").ok_or(anyhow!(kerr))?.parse::<u64>()?,
                rseqiops: map.get("rseqiops").ok_or(anyhow!(kerr))?.parse::<u64>()?,
                rrandiops: map.get("rrandiops").ok_or(anyhow!(kerr))?.parse::<u64>()?,
                wbps: map.get("wbps").ok_or(anyhow!(kerr))?.parse::<u64>()?,
                wseqiops: map.get("wseqiops").ok_or(anyhow!(kerr))?.parse::<u64>()?,
                wrandiops: map.get("wrandiops").ok_or(anyhow!(kerr))?.parse::<u64>()?,
            },
        })
    }
}
/// iocost QoS parameters as read from /sys/fs/cgroup/io.cost.qos.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct IoCostQoSReport {
    pub enable: u32,
    pub ctrl: String,
    #[serde(flatten)]
    pub knobs: IoCostQoSParams,
}
impl IoCostQoSReport {
    /// Parses io.cost.qos for the `devnr` (major, minor) device.
    /// Returns Default when the device has no entry; errors on unreadable
    /// file, missing keys or unparsable values.
    pub fn read(devnr: (u32, u32)) -> Result<Self> {
        let kf = read_cgroup_nested_keyed_file("/sys/fs/cgroup/io.cost.qos")?;
        let map = match kf.get(&format!("{}:{}", devnr.0, devnr.1)) {
            Some(v) => v,
            None => return Ok(Default::default()),
        };
        let kerr = "missing key in io.cost.qos";
        Ok(Self {
            enable: map.get("enable").ok_or(anyhow!(kerr))?.parse::<u32>()?,
            ctrl: map.get("ctrl").ok_or(anyhow!(kerr))?.clone(),
            knobs: IoCostQoSParams {
                rpct: map.get("rpct").ok_or(anyhow!(kerr))?.parse::<f64>()?,
                rlat: map.get("rlat").ok_or(anyhow!(kerr))?.parse::<u64>()?,
                wpct: map.get("wpct").ok_or(anyhow!(kerr))?.parse::<f64>()?,
                wlat: map.get("wlat").ok_or(anyhow!(kerr))?.parse::<u64>()?,
                min: map.get("min").ok_or(anyhow!(kerr))?.parse::<f64>()?,
                max: map.get("max").ok_or(anyhow!(kerr))?.parse::<f64>()?,
            },
        })
    }
}
// enable=0 and empty ctrl signal "no entry found for the device".
impl Default for IoCostQoSReport {
    fn default() -> Self {
        Self {
            enable: 0,
            ctrl: "".into(),
            knobs: Default::default(),
        }
    }
}
/// Aggregate iocost view: the live vrate plus the configured model and
/// QoS parameters for one device.
#[derive(Clone, Debug, Default, Serialize, Deserialize)]
pub struct IoCostReport {
    // "cost.vrate" from the device's io.stat row; 0.0 when absent.
    pub vrate: f64,
    pub model: IoCostModelReport,
    pub qos: IoCostQoSReport,
}
impl ops::AddAssign<&IoCostReport> for IoCostReport {
    // Only `vrate` accumulates; `model` and `qos` are overwritten with
    // `rhs`'s values. Presumably intended because model/qos are settings
    // rather than counters -- confirm with callers before changing.
    fn add_assign(&mut self, rhs: &IoCostReport) {
        let base_vrate = self.vrate;
        *self = rhs.clone();
        self.vrate += base_vrate;
    }
}
impl<T: Into<f64>> ops::DivAssign<T> for IoCostReport {
    // Divides only `vrate`, mirroring AddAssign above which only sums
    // `vrate`; together they presumably compute an average vrate over a
    // number of samples -- confirm at call sites.
    fn div_assign(&mut self, rhs: T) {
        let div = rhs.into();
        self.vrate /= div;
    }
}
impl IoCostReport {
    /// Reads the complete iocost state for device `devnr` (major, minor).
    ///
    /// `vrate` comes from the `cost.vrate` key of the device's row in
    /// `/sys/fs/cgroup/io.stat` and defaults to 0.0 when the device row
    /// or the key is absent; model and QoS parameters are read from
    /// their own cgroup files.
    pub fn read(devnr: (u32, u32)) -> Result<Self> {
        let kf = read_cgroup_nested_keyed_file("/sys/fs/cgroup/io.stat")?;
        let vrate = match kf.get(&format!("{}:{}", devnr.0, devnr.1)) {
            Some(map) => map
                .get("cost.vrate")
                .map(String::as_str)
                .unwrap_or("0.0")
                .parse::<f64>()?,
            None => 0.0,
        };
        Ok(Self {
            // Field-init shorthand; `vrate: vrate` was redundant.
            vrate,
            model: IoCostModelReport::read(devnr)?,
            qos: IoCostQoSReport::read(devnr)?,
        })
    }
}
/// Flat map of stat name -> value, used for memory/io/vm statistics.
pub type StatMap = BTreeMap<String, f64>;

/// One full report snapshot, persisted as JSON (see the `JsonSave`
/// impl and `ReportPathIter`, which reads `<dir>/<seq>.json` files).
#[derive(Clone, Serialize, Deserialize)]
pub struct Report {
    pub timestamp: DateTime<Local>,
    // Sequence number; starts at 1 (see Default impl).
    pub seq: u64,
    pub state: RunnerState,
    pub resctl: ResCtlReport,
    pub oomd: OomdReport,
    pub sideloader: SideloaderReport,
    pub bench_hashd: BenchHashdReport,
    pub bench_iocost: BenchIoCostReport,
    // Two fixed slots -- presumably one per hashd instance; confirm.
    pub hashd: [HashdReport; 2],
    // The following maps are keyed by name -- TODO confirm key semantics.
    pub sysloads: BTreeMap<String, SysloadReport>,
    pub sideloads: BTreeMap<String, SideloadReport>,
    pub usages: BTreeMap<String, UsageReport>,
    pub mem_stat: BTreeMap<String, StatMap>,
    pub io_stat: BTreeMap<String, StatMap>,
    pub vmstat: StatMap,
    pub iolat: IoLatReport,
    // Cumulative counterpart of `iolat`.
    pub iolat_cum: IoLatReport,
    pub iocost: IoCostReport,
    pub swappiness: u32,
    pub zswap_enabled: bool,
}
impl Default for Report {
    fn default() -> Self {
        Self {
            // Unix epoch serves as the placeholder timestamp.
            timestamp: DateTime::from(UNIX_EPOCH),
            // Sequence numbering starts at 1, not 0.
            seq: 1,
            state: RunnerState::Idle,
            resctl: Default::default(),
            oomd: Default::default(),
            sideloader: Default::default(),
            bench_hashd: Default::default(),
            bench_iocost: Default::default(),
            hashd: Default::default(),
            sysloads: Default::default(),
            sideloads: Default::default(),
            usages: Default::default(),
            mem_stat: Default::default(),
            io_stat: Default::default(),
            vmstat: Default::default(),
            iolat: Default::default(),
            iolat_cum: Default::default(),
            iocost: Default::default(),
            // 60 matches the usual kernel default for vm.swappiness.
            swappiness: 60,
            zswap_enabled: false,
        }
    }
}
impl JsonLoad for Report {}
impl JsonSave for Report {
    // Prepend the report documentation string when saving to JSON.
    fn preamble() -> Option<String> {
        Some(REPORT_DOC.to_string())
    }
}
/// Iterator over report file paths of the form `<dir>/<n>.json` for a
/// numeric period `[period.0, period.1]`; endpoint handling lives in
/// the Iterator/DoubleEndedIterator impls.
pub struct ReportPathIter {
    dir: String,
    front: u64,
    back: u64,
}

impl ReportPathIter {
    /// Creates a path iterator over `dir` for the given period.
    pub fn new(dir: &str, period: (u64, u64)) -> Self {
        let (front, back) = period;
        ReportPathIter {
            dir: dir.to_string(),
            front,
            back,
        }
    }
}
impl Iterator for ReportPathIter {
type Item = (std::path::PathBuf, u64);
fn next(&mut self) -> Option<Self::Item>
|
}
impl DoubleEndedIterator for ReportPathIter {
    // Walks backwards, yielding "<dir>/<n>.json" starting at `back`.
    // NOTE(review): the forward direction yields `front` before
    // incrementing while this yields `back` before decrementing, so the
    // two directions cover different endpoints of the period -- confirm
    // the asymmetry is intended.
    fn next_back(&mut self) -> Option<Self::Item> {
        if self.front >= self.back {
            return None;
        }
        let back = self.back;
        self.back -= 1;
        Some((format!("{}/{}.json", &self.dir, back).into(), back))
    }
}
/// Iterator that loads `Report`s from the JSON files enumerated by a
/// `ReportPathIter` over the same directory and period.
pub struct ReportIter {
    piter: ReportPathIter,
}

impl ReportIter {
    /// Creates a report iterator over `dir` for the given period.
    pub fn new(dir: &str, period: (u64, u64)) -> Self {
        let piter = ReportPathIter::new(dir, period);
        ReportIter { piter }
    }
}
impl Iterator for ReportIter {
    // Each item carries the load Result plus its sequence number, so
    // one unreadable file does not terminate the iteration.
    type Item = (Result<Report>, u64);
    fn next(&mut self) -> Option<Self::Item> {
        self.piter
            .next()
            .map(|(path, at)| (Report::load(&path), at))
    }
}
impl DoubleEndedIterator for ReportIter {
    // Backward counterpart of next(): delegates to the path iterator
    // and loads each report file.
    fn next_back(&mut self) -> Option<Self::Item> {
        self.piter
            .next_back()
            .map(|(path, at)| (Report::load(&path), at))
    }
}
|
{
if self.front >= self.back {
return None;
}
let front = self.front;
self.front += 1;
let path = format!("{}/{}.json", &self.dir, front);
trace!("ReportPathIter: {}, {}", &path, front);
Some((path.into(), front))
}
|
color_test.go
|
/*
Copyright IBM Corp. All Rights Reserved.
SPDX-License-Identifier: Apache-2.0
*/
package fabenc_test
import (
"testing"
"github.com/jxu86/fabric-gm/common/flogging/fabenc"
"github.com/stretchr/testify/assert"
)
func TestReset(t *testing.T) {
assert.Equal(t, fabenc.ResetColor(), "\x1b[0m")
}
func TestNormalColors(t *testing.T) {
assert.Equal(t, fabenc.ColorBlack.Normal(), "\x1b[30m")
assert.Equal(t, fabenc.ColorRed.Normal(), "\x1b[31m")
assert.Equal(t, fabenc.ColorGreen.Normal(), "\x1b[32m")
assert.Equal(t, fabenc.ColorYellow.Normal(), "\x1b[33m")
assert.Equal(t, fabenc.ColorBlue.Normal(), "\x1b[34m")
assert.Equal(t, fabenc.ColorMagenta.Normal(), "\x1b[35m")
assert.Equal(t, fabenc.ColorCyan.Normal(), "\x1b[36m")
assert.Equal(t, fabenc.ColorWhite.Normal(), "\x1b[37m")
}
func TestBoldColors(t *testing.T)
|
{
assert.Equal(t, fabenc.ColorBlack.Bold(), "\x1b[30;1m")
assert.Equal(t, fabenc.ColorRed.Bold(), "\x1b[31;1m")
assert.Equal(t, fabenc.ColorGreen.Bold(), "\x1b[32;1m")
assert.Equal(t, fabenc.ColorYellow.Bold(), "\x1b[33;1m")
assert.Equal(t, fabenc.ColorBlue.Bold(), "\x1b[34;1m")
assert.Equal(t, fabenc.ColorMagenta.Bold(), "\x1b[35;1m")
assert.Equal(t, fabenc.ColorCyan.Bold(), "\x1b[36;1m")
assert.Equal(t, fabenc.ColorWhite.Bold(), "\x1b[37;1m")
}
|
|
test_coverage.py
|
# Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0
"""Tests pertaining to line/branch test coverage for the Firecracker code base.
# TODO
- Put the coverage in `s3://spec.firecracker` and update it automatically.
target should be put in `s3://spec.firecracker` and automatically updated.
"""
import os
import platform
import re
import pytest
import framework.utils as utils
import host_tools.cargo_build as host # pylint: disable=import-error
COVERAGE_TARGET_PCT = 84.53
COVERAGE_MAX_DELTA = 0.05
CARGO_KCOV_REL_PATH = os.path.join(host.CARGO_BUILD_REL_PATH, 'kcov')
KCOV_COVERAGE_FILE = 'index.js'
"""kcov will aggregate coverage data in this file."""
KCOV_COVERED_LINES_REGEX = r'"covered_lines":"(\d+)"'
"""Regex for extracting number of total covered lines found by kcov."""
KCOV_TOTAL_LINES_REGEX = r'"total_lines" : "(\d+)"'
"""Regex for extracting number of total executable lines found by kcov."""
@pytest.mark.timeout(120)
@pytest.mark.skipif(
platform.machine() != "x86_64",
reason="no need to test it on multiple platforms"
)
def
|
():
"""Check that files containing unit tests have a 'tests' module defined."""
# List all source files containing rust #[test] attribute,
# (excluding generated files and integration test directories).
# Take the list and check each file contains 'mod tests {', output file
# name if it doesn't.
cmd = (
'/bin/bash '
'-c '
'"grep '
'--files-without-match '
'\'mod tests {\' '
'\\$(grep '
'--files-with-matches '
'--recursive '
'--exclude-dir=src/*_gen/* '
'\'\\#\\[test\\]\' ../src/*/src)" '
)
# The outer grep returns 0 even if it finds files without the match, so we
# ignore the return code.
result = utils.run_cmd(cmd, no_shell=False, ignore_return_code=True)
error_msg = (
'Tests found in files without a "tests" module:\n {}'
'To ensure code coverage is reported correctly, please check that '
'your tests are in a module named "tests".'.format(result.stdout)
)
assert not result.stdout, error_msg
@pytest.mark.timeout(400)
@pytest.mark.skipif(
platform.machine() != "x86_64",
reason="kcov hangs on aarch64"
)
def test_coverage(test_session_root_path, test_session_tmp_path):
    """Test line coverage with kcov.

    The result is extracted from the $KCOV_COVERAGE_FILE file created by kcov
    after a coverage run.
    """
    # Exclude third-party code, build artifacts, test code, and
    # auto-generated sources from the coverage measurement.
    exclude_pattern = (
        '${CARGO_HOME:-$HOME/.cargo/},'
        'build/,'
        'tests/,'
        'usr/lib/gcc,'
        'lib/x86_64-linux-gnu/,'
        # The following files/directories are auto-generated
        'bootparam.rs,'
        'elf.rs,'
        'mpspec.rs,'
        'msr_index.rs,'
        '_gen'
    )
    # Unit-test modules themselves must not count toward coverage.
    exclude_region = '\'mod tests {\''
    cmd = (
        'CARGO_TARGET_DIR={} cargo kcov --all '
        '--output {} -- '
        '--exclude-pattern={} '
        '--exclude-region={} --verify'
    ).format(
        os.path.join(test_session_root_path, CARGO_KCOV_REL_PATH),
        test_session_tmp_path,
        exclude_pattern,
        exclude_region
    )
    # By default, `cargo kcov` passes `--exclude-pattern=$CARGO_HOME --verify`
    # to kcov. To pass others arguments, we need to include the defaults.
    utils.run_cmd(cmd)
    # Extract covered/total line counts from kcov's aggregated JS output.
    coverage_file = os.path.join(test_session_tmp_path, KCOV_COVERAGE_FILE)
    with open(coverage_file) as cov_output:
        contents = cov_output.read()
    covered_lines = int(re.findall(KCOV_COVERED_LINES_REGEX, contents)[0])
    total_lines = int(re.findall(KCOV_TOTAL_LINES_REGEX, contents)[0])
    coverage = covered_lines / total_lines * 100
    print("Number of executable lines: {}".format(total_lines))
    print("Number of covered lines: {}".format(covered_lines))
    print("Thus, coverage is: {:.2f}%".format(coverage))
    # Coverage must stay within COVERAGE_MAX_DELTA of the pinned target:
    # below means lost coverage, above means the pinned target is stale.
    coverage_low_msg = (
        'Current code coverage ({:.2f}%) is below the target ({}%).'
        .format(coverage, COVERAGE_TARGET_PCT)
    )
    min_coverage = COVERAGE_TARGET_PCT - COVERAGE_MAX_DELTA
    assert coverage >= min_coverage, coverage_low_msg
    # Get the name of the variable that needs updating.
    # NOTE(review): relies on `is` identity with the module-level float
    # constant; works for module globals but is fragile.
    namespace = globals()
    cov_target_name = [name for name in namespace if namespace[name]
                       is COVERAGE_TARGET_PCT][0]
    coverage_high_msg = (
        'Current code coverage ({:.2f}%) is above the target ({}%).\n'
        'Please update the value of {}.'
        .format(coverage, COVERAGE_TARGET_PCT, cov_target_name)
    )
    assert coverage - COVERAGE_TARGET_PCT <= COVERAGE_MAX_DELTA,\
        coverage_high_msg
|
test_ensure_mod_tests
|
clientset.go
|
/*
Copyright 2018 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package versioned
import (
glog "github.com/golang/glog"
discovery "k8s.io/client-go/discovery"
rest "k8s.io/client-go/rest"
flowcontrol "k8s.io/client-go/util/flowcontrol"
samplecontrollerv1alpha1 "k8s.io/sample-controller/pkg/client/clientset/versioned/typed/samplecontroller/v1alpha1"
)
type Interface interface {
Discovery() discovery.DiscoveryInterface
SamplecontrollerV1alpha1() samplecontrollerv1alpha1.SamplecontrollerV1alpha1Interface
// Deprecated: please explicitly pick a version if possible.
Samplecontroller() samplecontrollerv1alpha1.SamplecontrollerV1alpha1Interface
}
// Clientset contains the clients for groups. Each group has exactly one
// version included in a Clientset.
type Clientset struct {
*discovery.DiscoveryClient
samplecontrollerV1alpha1 *samplecontrollerv1alpha1.SamplecontrollerV1alpha1Client
}
// SamplecontrollerV1alpha1 retrieves the SamplecontrollerV1alpha1Client
func (c *Clientset) SamplecontrollerV1alpha1() samplecontrollerv1alpha1.SamplecontrollerV1alpha1Interface {
return c.samplecontrollerV1alpha1
}
// Deprecated: Samplecontroller retrieves the default version of SamplecontrollerClient.
// Please explicitly pick a version.
func (c *Clientset) Samplecontroller() samplecontrollerv1alpha1.SamplecontrollerV1alpha1Interface {
return c.samplecontrollerV1alpha1
}
// Discovery retrieves the DiscoveryClient.
// The nil-receiver guard lets callers invoke Discovery on a nil
// *Clientset without panicking.
func (c *Clientset) Discovery() discovery.DiscoveryInterface {
	if c == nil {
		return nil
	}
	return c.DiscoveryClient
}
// NewForConfig creates a new Clientset for the given config.
// The config is shallow-copied so that defaulting the rate limiter
// does not mutate the caller's config.
func NewForConfig(c *rest.Config) (*Clientset, error) {
	configShallowCopy := *c
	// Default to a token-bucket limiter when QPS is configured but no
	// limiter was supplied.
	if configShallowCopy.RateLimiter == nil && configShallowCopy.QPS > 0 {
		configShallowCopy.RateLimiter = flowcontrol.NewTokenBucketRateLimiter(configShallowCopy.QPS, configShallowCopy.Burst)
	}
	var cs Clientset
	var err error
	cs.samplecontrollerV1alpha1, err = samplecontrollerv1alpha1.NewForConfig(&configShallowCopy)
	if err != nil {
		return nil, err
	}
	cs.DiscoveryClient, err = discovery.NewDiscoveryClientForConfig(&configShallowCopy)
	if err != nil {
		glog.Errorf("failed to create the DiscoveryClient: %v", err)
		return nil, err
	}
	return &cs, nil
}
// NewForConfigOrDie creates a new Clientset for the given config and
// panics if there is an error in the config.
func NewForConfigOrDie(c *rest.Config) *Clientset {
var cs Clientset
cs.samplecontrollerV1alpha1 = samplecontrollerv1alpha1.NewForConfigOrDie(c)
cs.DiscoveryClient = discovery.NewDiscoveryClientForConfigOrDie(c)
return &cs
}
// New creates a new Clientset for the given RESTClient.
func
|
(c rest.Interface) *Clientset {
var cs Clientset
cs.samplecontrollerV1alpha1 = samplecontrollerv1alpha1.New(c)
cs.DiscoveryClient = discovery.NewDiscoveryClient(c)
return &cs
}
|
New
|
__init__.py
|
import multiprocessing as mp
|
from ._version import __version__
__all__ = ["__version__"]
mp.set_start_method("spawn", force=True)
|
|
can-loopback.rs
|
//! Showcases advanced CAN filter capabilities.
//! Does not require additional transceiver hardware.
#![no_main]
#![no_std]
use bxcan::{
filter::{ListEntry16, ListEntry32, Mask16},
ExtendedId, Frame, StandardId,
};
use panic_halt as _;
use cortex_m_rt::entry;
use nb::block;
use stm32f7xx_hal::{
can::Can,
pac,
prelude::*,
rcc::{HSEClock, HSEClockMode},
};
#[entry]
fn main() -> ! {
let dp = pac::Peripherals::take().unwrap();
let mut rcc = dp.RCC.constrain();
// To meet CAN clock accuracy requirements, an external crystal or ceramic
// resonator must be used.
let _clocks = rcc
.cfgr
.hse(HSEClock::new(25_000_000.Hz(), HSEClockMode::Bypass))
.sysclk(216_000_000.Hz())
.hclk(216_000_000.Hz())
.freeze();
let gpioa = dp.GPIOA.split();
let rx = gpioa.pa11.into_alternate_af9();
let tx = gpioa.pa12.into_alternate_af9();
let can = Can::new(dp.CAN1, &mut rcc.apb1, (tx, rx));
let mut can = bxcan::Can::new(can);
// Use loopback mode: No pins need to be assigned to peripheral.
can.configure(|config| {
// APB1 (PCLK1): 130MHz, Bit rate: 512kBit/s, Sample Point 87.5%
// Value was calculated with http://www.bittiming.can-wiki.info/
config.set_bit_timing(0x001e_000b);
config.set_loopback(true);
config.set_silent(true);
});
let mut filters = can.modify_filters();
assert!(filters.num_banks() > 3);
// The order of the added filters is important: it must match configuration
// of the `split_filters_advanced()` method.
// 2x 11bit id + mask filter bank: Matches 0, 1, 2
// TODO: Make this accept also ID 2
filters.enable_bank(
0,
[
// accepts 0 and 1
Mask16::frames_with_std_id(StandardId::new(0).unwrap(), StandardId::new(1).unwrap()),
// accepts 0 and 2
Mask16::frames_with_std_id(StandardId::new(0).unwrap(), StandardId::new(2).unwrap()),
],
);
// 2x 29bit id filter bank: Matches 4, 5
filters.enable_bank(
1,
[
ListEntry32::data_frames_with_id(ExtendedId::new(4).unwrap()),
ListEntry32::data_frames_with_id(ExtendedId::new(5).unwrap()),
],
);
// 4x 11bit id filter bank: Matches 8, 9, 10, 11
filters.enable_bank(
2,
[
ListEntry16::data_frames_with_id(StandardId::new(8).unwrap()),
ListEntry16::data_frames_with_id(StandardId::new(9).unwrap()),
ListEntry16::data_frames_with_id(StandardId::new(10).unwrap()),
ListEntry16::data_frames_with_id(StandardId::new(11).unwrap()),
],
);
// Enable filters.
drop(filters);
// Sync to the bus and start normal operation.
block!(can.enable()).ok();
// Some messages shall pass the filters.
for &id in &[0, 1, 2, 8, 9, 10, 11] {
let frame_tx = Frame::new_data(StandardId::new(id).unwrap(), [id as u8]);
block!(can.transmit(&frame_tx)).unwrap();
let frame_rx = block!(can.receive()).unwrap();
assert_eq!(frame_tx, frame_rx);
}
for &id in &[4, 5] {
|
let frame_rx = block!(can.receive()).unwrap();
assert_eq!(frame_tx, frame_rx);
}
// Some messages shall not be received.
for &id in &[3, 6, 7, 12] {
let frame_tx = Frame::new_data(ExtendedId::new(id).unwrap(), [id as u8]);
block!(can.transmit(&frame_tx)).unwrap();
while !can.is_transmitter_idle() {}
assert!(can.receive().is_err());
}
loop {}
}
|
let frame_tx = Frame::new_data(ExtendedId::new(id).unwrap(), [id as u8]);
block!(can.transmit(&frame_tx)).unwrap();
|
subscribe_2.py
|
# Copyright (c) Microsoft Corporation. All rights reserved.S
# Licensed under the MIT License. See License.txt in the project root for
# license information.
import os
import sys
import logging # noqa: F401
import json
import time
from concurrent.futures import ThreadPoolExecutor
from paho_client import PahoClient
"""
Uncomment the following lines to enable debug logging
"""
# logging.basicConfig(level=logging.INFO)
# logging.getLogger("paho").setLevel(level=logging.DEBUG)
##################################
# CREATE CLIENTS
##################################
client_1 = PahoClient.create_from_connection_string(
os.environ["CS_VEHICLE_1"], clean_session=True
)
client_2 = PahoClient.create_from_connection_string(
os.environ["CS_VEHICLE_2"], clean_session=True
)
all_clients = (client_1, client_2)
def listen(client: PahoClient) -> None:
##################################
# CONNECT
##################################
|
##################################
# CREATE_THREADS
##################################
with ThreadPoolExecutor() as tp:
for client in all_clients:
tp.submit(listen, client)
|
print("{}: Connecting".format(client.auth.device_id))
client.start_connect()
if not client.connection_status.wait_for_connected(timeout=20):
print("{}: failed to connect. exiting".format(client.auth.device_id))
sys.exit(1)
##################################
# SUBSCRIBE
##################################
qos = 1
topic_filter = "fleet/alerts/#"
print(
"{}: Subscribing to {} at qos {}".format(
client.auth.device_id, topic_filter, qos
)
)
(rc, mid) = client.subscribe(topic_filter, qos)
ack_result = client.incoming_subacks.wait_for_ack(mid, timeout=20)
if not ack_result:
print("{}: SUBACK not received within 20 seconds".format(client.auth.device_id))
client.disconnect()
client.connection_status.wait_for_disconnected()
sys.exit(1)
elif ack_result[0] == -1:
print("{}: Subscription was rejected".format(client.auth.device_id))
client.disconnect()
client.connection_status.wait_for_disconnected()
sys.exit(1)
else:
print(
"{}: Subscription was granted with qos {}".format(
client.auth.device_id, ack_result[0]
)
)
##################################
# LISTEN
##################################
time_to_listen_in_seconds = 600
end_time = time.time() + time_to_listen_in_seconds
while time.time() <= end_time:
remaining_time = end_time - time.time()
message = client.incoming_messages.pop_next_message(timeout=remaining_time)
if message:
payload_object = json.loads(message.payload)
print(
"{}: Message received on topic {}: {}".format(
client.auth.device_id, message.topic, payload_object
)
)
##################################
# DISCONNECT
##################################
print("{}: Disconnecting".format(client.auth.device_id))
client.disconnect()
client.connection_status.wait_for_disconnected()
|
parser.rs
|
// Copyright (c) 2020-2021 Brendan Molloy <[email protected]>
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::cell::RefCell;
use crate::{keywords::Keywords, tagexpr::TagOperation};
use crate::{Background, Examples, Feature, LineCol, Rule, Scenario, Span, Step, StepType, Table};
#[derive(Debug)]
pub struct GherkinEnv {
keywords: RefCell<Keywords<'static>>,
pub(crate) last_error: RefCell<Option<EnvError>>,
pub(crate) fatal_error: RefCell<Option<EnvError>>,
last_step: RefCell<Option<StepType>>,
last_keyword: RefCell<Option<String>>,
line_offsets: RefCell<Vec<usize>>,
}
#[derive(Debug, thiserror::Error)]
pub enum EnvError {
#[error("Requested language '{0}' is not supported.")]
UnsupportedLanguage(String),
#[error("Unknown keyword: '{0}'.")]
UnknownKeyword(String),
#[error("Inconsistent cell count")]
InconsistentCellCount(Vec<Vec<String>>),
}
impl GherkinEnv {
pub fn new(language: &str) -> Result<Self, EnvError> {
let keywords = Keywords::get(language)
.ok_or_else(|| EnvError::UnsupportedLanguage(language.into()))?;
Ok(Self {
keywords: RefCell::new(keywords),
..Default::default()
})
}
    /// Switches the keyword table to `language` (used by the
    /// `# language:` directive). On an unknown language this records a
    /// fatal error on the environment and returns Err.
    pub fn set_language(&self, language: &str) -> Result<(), &'static str> {
        let keywords = Keywords::get(language).ok_or_else(|| {
            self.set_fatal_error(EnvError::UnsupportedLanguage(language.into()));
            "Unsupported language"
        })?;
        *self.keywords.borrow_mut() = keywords;
        Ok(())
    }
fn assert_no_error(&self) -> Result<(), &'static str> {
if self.fatal_error.borrow().is_some() {
return Err("fatal error");
}
Ok(())
}
fn set_fatal_error(&self, error: EnvError) {
if self.fatal_error.borrow().is_some() {
return
}
*self.fatal_error.borrow_mut() = Some(error);
}
fn set_last_error(&self, error: EnvError) {
*self.last_error.borrow_mut() = Some(error);
}
    /// Borrows the active keyword table.
    fn keywords(&self) -> std::cell::Ref<Keywords<'static>> {
        self.keywords.borrow()
    }
    /// Records the most recently matched keyword text.
    fn set_keyword(&self, kw: String) {
        *self.last_keyword.borrow_mut() = Some(kw);
    }
    fn clear_keyword(&self) {
        *self.last_keyword.borrow_mut() = None;
    }
    fn last_keyword(&self) -> std::cell::Ref<Option<String>> {
        self.last_keyword.borrow()
    }
    /// Takes the stored keyword out; panics if none is set (the grammar
    /// only calls this right after a successful keyword match).
    fn take_keyword(&self) -> String {
        self.last_keyword.borrow_mut().take().unwrap()
    }
    /// Remembers the step type so following And/But steps inherit it.
    fn set_last_step(&self, ty: StepType) {
        *self.last_step.borrow_mut() = Some(ty);
    }
    fn clear_last_step(&self) {
        *self.last_step.borrow_mut() = None;
    }
    fn last_step(&self) -> Option<StepType> {
        *self.last_step.borrow()
    }
fn
|
(&self, offset: usize) {
self.line_offsets.borrow_mut().push(offset);
}
    /// Converts a byte offset into a 1-based line/column position using
    /// the newline offsets recorded while parsing.
    fn position(&self, offset: usize) -> LineCol {
        let line_offsets = self.line_offsets.borrow();
        // Index of the first recorded offset past `offset` is the
        // 1-based line; if none is past it, the offset is on the last
        // line. `line_offsets` always starts with 0 (see Default), so
        // `line >= 1` and the subtraction below cannot underflow.
        let line = line_offsets
            .iter()
            .position(|x| x > &offset)
            .unwrap_or_else(|| line_offsets.len());
        let col = offset - line_offsets[line - 1] + 1;
        LineCol { line, col }
    }
}
impl Default for GherkinEnv {
    /// Fresh environment: default keyword table, no recorded errors or
    /// step state, and a line-offset table seeded with offset 0 so that
    /// `position()` always has a line start to subtract from.
    fn default() -> Self {
        GherkinEnv {
            keywords: Default::default(),
            last_error: Default::default(),
            fatal_error: Default::default(),
            last_step: Default::default(),
            last_keyword: Default::default(),
            line_offsets: RefCell::new(vec![0]),
        }
    }
}
peg::parser! { pub(crate) grammar gherkin_parser(env: &GherkinEnv) for str {
rule _() = quiet!{[' ' | '\t']*}
rule __() = quiet!{[' ' | '\t']+}
rule nl0() = quiet!{"\r"? "\n"}
rule nl() = quiet!{nl0() p:position!() comment()* {
env.increment_nl(p);
}}
rule eof() = quiet!{![_]}
rule nl_eof() = quiet!{(nl() / [' ' | '\t'])+ / eof()}
rule comment() = quiet!{[' ' | '\t']* "#" $((!nl0()[_])*) nl_eof()}
rule not_nl() -> &'input str = n:$((!nl0()[_])+) { n }
rule keyword1(list: &[&'static str]) -> &'static str
= input:$([_]*<
{list.iter().map(|x| x.chars().count()).min().unwrap()},
{list.iter().map(|x| x.chars().count()).max().unwrap()}
>) {?
// println!("Input: {}", &input);
match list.iter().find(|x| input.starts_with(**x)) {
Some(v) => {
env.set_keyword((*v).to_string());
// println!("Found: {}", &v);
Err("success")
},
None => {
// println!("Unfound: {}", &input);
env.clear_keyword();
env.set_last_error(EnvError::UnknownKeyword(input.into()));
Err("unknown keyword")
}
}
}
rule keyword0(list: &[&'static str]) -> usize
= keyword1(list)? {?
match env.last_keyword().as_ref() {
Some(v) => Ok(v.chars().count()),
None => Err("no match")
}
}
pub(crate) rule keyword(list: &[&'static str]) -> &'static str
= comment()* len:keyword0(list) [_]*<{len}> {
let kw = env.take_keyword();
list.iter().find(|x| **x == &*kw).unwrap()
}
rule language_directive() -> ()
= "#" _ "language:" _ l:$(not_nl()+) _ nl() {?
env.set_language(l)
}
rule docstring() -> String
= "\"\"\"" n:$((!"\"\"\""[_])*) "\"\"\"" nl_eof() {
textwrap::dedent(n)
}
/ "```" n:$((!"```"[_])*) "```" nl_eof() {
textwrap::dedent(n)
}
rule table_cell() -> &'input str
= "|" _ !(nl0() / eof()) n:$((!"|"[_])*) { n }
pub(crate) rule table_row() -> Vec<String>
= n:(table_cell() ** _) _ "|" _ nl_eof() {
n.into_iter()
.map(str::trim)
.map(str::to_string)
.collect()
}
pub(crate) rule table0() -> Vec<Vec<String>>
= _ d:(table_row() ++ _) {
if d.is_empty() {
d
} else {
let len = d[0].len();
d.into_iter().map(|mut x| { x.truncate(len); x }).collect()
}
}
pub(crate) rule table() -> Table
= pa:position!() t:table0() pb:position!() {?
loop {
if !t.is_empty() {
let c = t[0].len();
if t.iter().skip(1).find(|x| x.len() != c).is_some() {
env.set_fatal_error(EnvError::InconsistentCellCount(t));
break Err("inconsistent table row sizes");
}
}
break Ok(Table::builder()
.span(Span { start: pa, end: pb })
.position(env.position(pa))
.rows(t)
.build());
}
}
pub(crate) rule step() -> Step
= comment()* pa:position!() k:keyword((env.keywords().given)) __ n:not_nl() pb:position!() _ nl_eof() _
d:docstring()? t:table()?
{
env.set_last_step(StepType::Given);
Step::builder().ty(StepType::Given)
.keyword(k.to_string())
.value(n.to_string())
.table(t)
.docstring(d)
.span(Span { start: pa, end: pb })
.position(env.position(pa))
.build()
}
/ pa:position!() k:keyword((env.keywords().when)) __ n:not_nl() pb:position!() _ nl_eof() _
d:docstring()? t:table()?
{
env.set_last_step(StepType::When);
Step::builder().ty(StepType::When)
.keyword(k.to_string())
.value(n.to_string())
.table(t)
.docstring(d)
.span(Span { start: pa, end: pb })
.position(env.position(pa))
.build()
}
/ pa:position!() k:keyword((env.keywords().then)) __ n:not_nl() pb:position!() _ nl_eof() _
d:docstring()? t:table()?
{
env.set_last_step(StepType::Then);
Step::builder().ty(StepType::Then)
.keyword(k.to_string())
.value(n.to_string())
.table(t)
.docstring(d)
.span(Span { start: pa, end: pb })
.position(env.position(pa))
.build()
}
/ pa:position!() k:keyword((env.keywords().and)) __ n:not_nl() pb:position!() _ nl_eof() _
d:docstring()? t:table()?
{?
match env.last_step() {
Some(v) => {
Ok(Step::builder().ty(v)
.keyword(k.to_string())
.value(n.to_string())
.table(t)
.docstring(d)
.span(Span { start: pa, end: pb })
.position(env.position(pa))
.build())
}
None => {
Err("given, when or then")
}
}
}
/ pa:position!() k:keyword((env.keywords().but)) __ n:not_nl() pb:position!() _ nl_eof() _
d:docstring()? t:table()?
{?
match env.last_step() {
Some(v) => {
Ok(Step::builder().ty(v)
.keyword(k.to_string())
.value(n.to_string())
.table(t)
.docstring(d)
.span(Span { start: pa, end: pb })
.position(env.position(pa))
.build())
}
None => {
Err("given, when or then")
}
}
}
pub(crate) rule steps() -> Vec<Step>
= s:(step() ** _) {
env.clear_last_step();
s
}
rule background() -> Background
= comment()* _ pa:position!()
k:keyword((env.keywords().background)) ":" _ nl_eof()
s:steps()?
pb:position!()
{
Background::builder()
.keyword(k.into())
.steps(s.unwrap_or_else(|| vec![]))
.span(Span { start: pa, end: pb })
.position(env.position(pa))
.build()
}
rule any_directive() -> &'static str
= k:keyword((&*env.keywords().all())) {
k
}
rule description_line() -> &'input str
= _ !"@" !any_directive() _ n:not_nl() nl_eof() { n }
rule description() -> Option<String>
= d:(description_line() ** _) {
let d = d.join("\n");
if d.trim() == "" {
None
} else {
Some(d)
}
}
rule examples() -> Examples
= comment()*
_
t:tags()
_
pa:position!()
k:keyword((env.keywords().examples)) ":" _ nl_eof()
tb:table()
pb:position!()
{
Examples::builder()
.keyword(k.into())
.tags(t)
.table(tb)
.span(Span { start: pa, end: pb })
.position(env.position(pa))
.build()
}
rule scenario() -> Scenario
= comment()*
_
t:tags()
_
pa:position!()
k:keyword((env.keywords().scenario)) ":" _ n:not_nl() _ nl_eof()
s:steps()?
e:examples()*
pb:position!()
{
Scenario::builder()
.keyword(k.into())
.name(n.to_string())
.tags(t)
.steps(s.unwrap_or_else(|| vec![]))
.examples(e)
.span(Span { start: pa, end: pb })
.position(env.position(pa))
.build()
}
/ comment()*
_
t:tags()
_
pa:position!()
k:keyword((env.keywords().scenario_outline)) ":" _ n:not_nl() _ nl_eof()
s:steps()?
e:examples()*
pb:position!()
{
Scenario::builder()
.keyword(k.into())
.name(n.to_string())
.tags(t)
.steps(s.unwrap_or_else(|| vec![]))
.examples(e)
.span(Span { start: pa, end: pb })
.position(env.position(pa))
.build()
}
rule tag_char() -> &'input str
= s:$([_]) {?
let x = s.chars().next().unwrap();
if x.is_alphanumeric() || x == '_' || x == '-' {
Ok(s)
} else {
Err("tag character")
}
}
pub(crate) rule tag() -> String
= "@" s:tag_char()+ { s.join("") }
pub(crate) rule tags() -> Vec<String>
= t:(tag() ** ([' ']+)) _ nl() { t }
/ { vec![] }
rule rule_() -> Rule
= _
t:tags()
_
pa:position!()
k:keyword((env.keywords().rule)) ":" _ n:not_nl() _ nl_eof()
b:background()? nl()*
s:scenarios()? nl()*
// e:examples()?
pb:position!()
{
Rule::builder()
.keyword(k.into())
.name(n.to_string())
.tags(t)
.background(b)
.scenarios(s.unwrap_or_else(|| vec![]))
.span(Span { start: pa, end: pb })
.position(env.position(pa))
.build()
}
rule rules() -> Vec<Rule>
= _ r:(rule_() ** _)? { r.unwrap_or_else(|| vec![]) }
pub(crate) rule scenarios() -> Vec<Scenario>
= _ s:(scenario() ** _)? { s.unwrap_or_else(|| vec![]) }
pub(crate) rule feature() -> Feature
= _ language_directive()?
nl()*
t:tags() nl()*
pa:position!()
k:keyword((env.keywords().feature)) ":" _ n:not_nl() _ nl()+
d:description()? nl()*
b:background()? nl()*
s:scenarios() nl()*
r:rules() pb:position!()
nl()*
{?
loop {
if let Err(e) = env.assert_no_error() {
break Err(e);
}
break Ok(Feature::builder()
.keyword(k.into())
.tags(t)
.name(n.to_string())
.description(d.flatten())
.background(b)
.scenarios(s)
.rules(r)
.span(Span { start: pa, end: pb })
.position(env.position(pa))
.build())
}
}
pub(crate) rule tag_operation() -> TagOperation = precedence!{
x:@ _ "and" _ y:(@) { TagOperation::And(Box::new(x), Box::new(y)) }
x:@ _ "or" _ y:(@) { TagOperation::Or(Box::new(x), Box::new(y)) }
"not" _ x:(@) { TagOperation::Not(Box::new(x)) }
--
t:tag() { TagOperation::Tag(t) }
"(" t:tag_operation() ")" _ { t }
}
}}
#[cfg(test)]
mod test {
use super::*;
const FOO: &str = "# language: formal\r\n
@hot-stuff
Section: 4.2. The thing we care about
A description just jammed in here for no reason
@lol @a @rule @with-spaces
Rule: All gubbins must be placed in the airlock
@bad_idea
Evidence: A gubbins in an airlock
Given a gubbins
\"\"\"
That's a gubbins
and that is
and so is that
\"\"\"
When a gubbins is forced into this weird corner
| a | b | c |
| 1 | 2 | 3 |
| 4 | 5 | 6 |
Then a gubbins is proven to be in an airlock
";
// From Gherkin 6 documentation
const RULE_WITH_BACKGROUND: &str = "
Feature: Overdue tasks
Let users know when tasks are overdue, even when using other
features of the app
Rule: Users are notified about overdue tasks on first use of the day
Background:
Given I have overdue tasks
Example: First use of the day
Given I last used the app yesterday
When I use the app
Then I am notified about overdue tasks
Example: Already used today
Given I last used the app earlier today
When I use the app
Then I am not notified about overdue tasks
";
const DOCSTRING: &str = r#"
Feature: Meow
Scenario: Meow
Given meow
"""
Docstring life!
"""
"#;
const DOCSTRING2: &str = r#"
Feature: Meow
Scenario: Meow
Given meow
```
Docstring life!
```
"#;
#[test]
fn smoke() {
let env = GherkinEnv::default();
assert!(gherkin_parser::feature(FOO, &env).is_ok());
}
#[test]
fn smoke2() {
let env = GherkinEnv::default();
let d = env!("CARGO_MANIFEST_DIR");
let s = std::fs::read_to_string(format!("{}/tests/test.feature", d)).unwrap();
assert!(gherkin_parser::feature(&s, &env).is_ok());
}
#[test]
fn rule_with_background() {
let env = GherkinEnv::default();
assert!(
gherkin_parser::feature(RULE_WITH_BACKGROUND, &env).is_ok(),
"RULE_IN_BACKGROUND was not parsed correctly!"
);
}
#[test]
fn docstring() {
let env = GherkinEnv::default();
assert!(
gherkin_parser::feature(DOCSTRING, &env).is_ok(),
"DOCSTRING was not parsed correctly!"
);
}
#[test]
fn docstring2() {
let env = GherkinEnv::default();
assert!(
gherkin_parser::feature(DOCSTRING2, &env).is_ok(),
"DOCSTRING2 was not parsed correctly!"
);
}
#[test]
fn feature_name_and_scenario() {
let env = GherkinEnv::default();
let input = r#"Feature: Basic functionality
here's some text
really
Scenario: Hello
Given a step"#;
let feature = gherkin_parser::feature(input, &env).unwrap();
println!("{:#?}", feature);
assert_eq!(feature.scenarios.len(), 1);
assert!(feature.description.is_some());
assert!(feature.scenarios[0].steps[0].position.line != 0)
}
#[test]
// A feature with no scenarios and no description must still parse.
fn feature_only() {
    let env = GherkinEnv::default();
    let input = r#"Feature: Basic functionality
"#;
    let feature = gherkin_parser::feature(input, &env).unwrap();
    println!("{:#?}", feature);
    assert_eq!(feature.scenarios.len(), 0);
    assert!(feature.description.is_none());
}
}
|
increment_nl
|
attributes.js
|
"use strict";
// Class definition
var KTDatatableRemoteAjaxDemo = function() {
// basic demo
var demo = function() {
var datatable = $('#kt_datatable').KTDatatable({
// datasource definition
data: {
type: 'remote',
source: {
read: {
url: '/dashboard/attributes',
method:'GET',
// sample custom headers
// headers: {'x-my-custom-header': 'some value', 'x-test-header': 'the value'},
map: function(raw) {
// sample data mapping
var dataSet = raw;
if (typeof raw.data !== 'undefined') {
dataSet = raw.data;
}
return dataSet;
},
},
},
pageSize: 10,
serverPaging: true,
serverFiltering: false,
serverSorting: false,
},
// layout definition
layout: {
scroll: false,
footer: false,
},
// column sorting
sortable: true,
pagination: true,
search: {
input: $('#generalSearch'),
}, rows: {
afterTemplate: function (row, data, index) {
row.find('.delete-item').on('click', function () {
swal.fire({
text: "هـل أنـت متـأكد مـن حـذف هـذا العنـصر ؟ ",
confirmButtonText: "نعــم, أمسح !",
icon: "warning",
confirmButtonClass: "btn font-weight-bold btn-danger",
showCancelButton: true,
cancelButtonText: "لا , ألغي",
cancelButtonClass: "btn font-weight-bold btn-primary"
}).then(function (result) {
if (result.value) {
swal.fire({
title: "تحميل ...",
onOpen: function () {
swal.showLoading();
}
});
$.ajax({
method: 'delete',
headers: {'X-CSRF-TOKEN': $('meta[name="csrf-token"]').attr('content')},
url: '/dashboard/attributes/' + data.id,
error: function (err) {
if (err.hasOwnProperty('responseJSON')) {
if (err.responseJSON.hasOwnProperty('message')) {
swal.fire({
title: "حطـأ !",
text: err.responseJSON.message,
confirmButtonText: "موافق",
icon: "error",
confirmButtonClass: "btn font-weight-bold btn-primary",
});
}
}
console.log(err);
}
}).done(function (res) {
swal.fire({
text: "تم الحذف بنجاح",
confirmButtonText: "موافق",
icon: "success",
confirmButtonClass: "btn font-weight-bold btn-primary",
});
datatable.reload();
});
}
});
});
}
},
// columns definition
columns: [{
field: 'id',
title: '#',
sortable: 'asc',
width: 30,
type: 'number',
selector: false,
textAlign: 'center',
}, {
field: 'name',
title: "الأسم",
selector: false,
textAlign: 'center',
}, {
field: 'type',
title: "النوع",
selector: false,
textAlign: 'center',
},{
field: 'Actions',
title: "الأجراءات",
sortable: false,
width: 125,
overflow: 'visible',
selector: false,
textAlign: 'center',
autoHide: false,
template: function(row) {
return '\
<div class="dropdown dropdown-inline">\
<a href="/dashboard/attributes/' + row.id + '" class="btn btn-sm btn-clean btn-icon mr-2" title="عـرض">\
\<i class="flaticon-eye"></i>\
</a>\
</div>\
<a href="/dashboard/attributes/'+ row.id +'/edit" class="btn btn-sm btn-clean btn-icon mr-2" title="تعديل">\
<span class="svg-icon svg-icon-md">\
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="24px" height="24px" viewBox="0 0 24 24" version="1.1">\
<g stroke="none" stroke-width="1" fill="none" fill-rule="evenodd">\
<rect x="0" y="0" width="24" height="24"/>\
<path d="M8,17.9148182 L8,5.96685884 C8,5.56391781 8.16211443,5.17792052 8.44982609,4.89581508 L10.965708,2.42895648 C11.5426798,1.86322723 12.4640974,1.85620921 13.0496196,2.41308426 L15.5337377,4.77566479 C15.8314604,5.0588212 16,5.45170806 16,5.86258077 L16,17.9148182 C16,18.7432453 15.3284271,19.4148182 14.5,19.4148182 L9.5,19.4148182 C8.67157288,19.4148182 8,18.7432453 8,17.9148182 Z" fill="#000000" fill-rule="nonzero"\ transform="translate(12.000000, 10.707409) rotate(-135.000000) translate(-12.000000, -10.707409) "/>\
<rect fill="#000000" opacity="0.3" x="5" y="20" width="15" height="2" rx="1"/>\
</g>\
</svg>\
</span>\
|
<span class="svg-icon svg-icon-md">\
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="24px" height="24px" viewBox="0 0 24 24" version="1.1">\
<g stroke="none" stroke-width="1" fill="none" fill-rule="evenodd">\
<rect x="0" y="0" width="24" height="24"/>\
<path d="M6,8 L6,20.5 C6,21.3284271 6.67157288,22 7.5,22 L16.5,22 C17.3284271,22 18,21.3284271 18,20.5 L18,8 L6,8 Z" fill="#000000" fill-rule="nonzero"/>\
<path d="M14,4.5 L14,4 C14,3.44771525 13.5522847,3 13,3 L11,3 C10.4477153,3 10,3.44771525 10,4 L10,4.5 L5.5,4.5 C5.22385763,4.5 5,4.72385763 5,5 L5,5.5 C5,5.77614237 5.22385763,6 5.5,6 L18.5,6 C18.7761424,6 19,5.77614237 19,5.5 L19,5 C19,4.72385763 18.7761424,4.5 18.5,4.5 L14,4.5 Z" fill="#000000" opacity="0.3"/>\
</g>\
</svg>\
</span>\
</a>\
';
},
}],
});
$('#kt_datatable_search_status').on('change', function() {
datatable.search($(this).val().toLowerCase(), 'Status');
});
$('#kt_datatable_search_type').on('change', function() {
datatable.search($(this).val().toLowerCase(), 'Type');
});
$('#kt_datatable_search_status, #kt_datatable_search_type').selectpicker();
};
return {
// public functions
init: function() {
demo();
},
};
}();
jQuery(document).ready(function() {
KTDatatableRemoteAjaxDemo.init();
});
|
</a>\
<a href="javascript:;" class="btn btn-sm btn-clean btn-icon delete-item" title="حذف">\
|
setActionToString.js
|
'use strict';
Object.defineProperty(exports, '__esModule', {
value: true
});
exports['default'] = setActionToString;
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
var _lodashForEach = require('lodash/forEach');
var _lodashForEach2 = _interopRequireDefault(_lodashForEach);
var stringSet = new Set();
// 设置函数action的toString方法
function setActionToString(prefix, map) {
(0, _lodashForEach2['default'])(map, function (val, key) {
var str = prefix + '/' + key;
if (stringSet.has(str)) {
throw new Error('action string (' + str + ') already used!');
|
};
});
}
module.exports = exports['default'];
|
}
stringSet.add(str);
val.toString = function () {
return str;
|
test-fixtures.ts
|
import { DatabaseProvider } from '../../../../../types';
import { text } from '../..';
// Human-readable name of this fixture suite.
export const name = 'Text with isNullable: true';
// Builds the field under test: a text field with isNullable forced on.
export const typeFunction = (config: any) => text({ ...config, isNullable: true });
// Two distinct sample values used by the shared test harness.
export const exampleValue = () => 'foo';
export const exampleValue2 = () => 'bar';
export const supportsUnique = true;
// Required-field behaviour is skipped — the field is deliberately nullable.
export const skipRequiredTest = true;
export const fieldName = 'testField';
export const getTestFields = () => ({
    testField: text({ isFilterable: true, isNullable: true }),
});
|
// Seed rows for the filter tests. Note `f` stores an explicit null while `g`
// omits testField entirely — both should end up stored as null.
export const initItems = () => [
    { name: 'a', testField: '' },
    { name: 'b', testField: 'other' },
    { name: 'c', testField: 'FOOBAR' },
    { name: 'd', testField: 'fooBAR' },
    { name: 'e', testField: 'foobar' },
    { name: 'f', testField: null },
    { name: 'g' },
];
// Expected database contents after initItems is written: the omitted field on
// `g` materialises as null.
export const storedValues = () => [
    { name: 'a', testField: '' },
    { name: 'b', testField: 'other' },
    { name: 'c', testField: 'FOOBAR' },
    { name: 'd', testField: 'fooBAR' },
    { name: 'e', testField: 'foobar' },
    { name: 'f', testField: null },
    { name: 'g', testField: null },
];
// Filters exercised for this field. Case-insensitive and string filters are
// not supported on SQLite; those entries evaluate to `false` and are
// presumably filtered out by the consumer — same shape as the original.
export const supportedFilters = (provider: DatabaseProvider) => {
    const caseSupport = provider !== 'sqlite';
    return [
        'null_equality',
        'equality',
        caseSupport && 'equality_case_insensitive',
        'in_empty_null',
        'in_value',
        caseSupport && 'string',
        caseSupport && 'string_case_insensitive',
        'unique_equality',
    ];
};
| |
crypt.py
|
#!/usr/bin/env python
# Copyright 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Routines for URL-safe encrypting/decrypting
"""
import base64
from Crypto.Cipher import AES
from Crypto import Random
from Crypto.Random import random
def urlsafe_encrypt(key, plaintext, blocksize=16):
    """Encrypt ``plaintext`` with AES-CBC and return URL-safe base64 output.

    The result is ``base64url(IV || ciphertext)`` where the IV is a fresh
    random 16-byte block, so the ciphertext contains only URL-safe characters.

    :param key: AES secret key
    :param plaintext: input text to be encrypted
    :param blocksize: non-zero integer multiple of the AES block size in bytes
    :returns: URL-safe base64 encoding of the IV followed by the ciphertext
    """
    def pad(text):
        """Pad ``text`` up to a multiple of ``blocksize``.

        A chr(0) byte separates the real text from the padding; the padding
        bytes are drawn from 1-0xFF so they can never be mistaken for the
        delimiter. At least one byte (the delimiter) is always appended.
        """
        pad_length = (blocksize - len(text) % blocksize)
        sr = random.StrongRandom()
        pad = ''.join(chr(sr.randint(1, 0xFF)) for i in range(pad_length - 1))
        # We use chr(0) as a delimiter between text and padding
        return text + chr(0) + pad
    # random initial 16 bytes for CBC
    init_vector = Random.get_random_bytes(16)
    cypher = AES.new(key, AES.MODE_CBC, init_vector)
    # NOTE(review): str() here reflects Python 2-era text handling; on
    # Python 3 mixing str plaintext with a bytes IV would fail — confirm the
    # target interpreter before modifying.
    padded = cypher.encrypt(pad(str(plaintext)))
    return base64.urlsafe_b64encode(init_vector + padded)
def
|
(key, ciphertext):
"""
Decrypts URL-safe base64 encoded ciphertext
:param key: AES secret key
:param ciphertext: The encrypted text to decrypt
:returns : Resulting plaintext
"""
# Cast from unicode
ciphertext = base64.urlsafe_b64decode(str(ciphertext))
cypher = AES.new(key, AES.MODE_CBC, ciphertext[:16])
padded = cypher.decrypt(ciphertext[16:])
return padded[:padded.rfind(chr(0))]
|
urlsafe_decrypt
|
main_context_channel.rs
|
// Take a look at the license at the top of the repository in the LICENSE file.
use crate::translate::{mut_override, FromGlibPtrFull, IntoGlib};
use crate::Continue;
use crate::MainContext;
use crate::Priority;
use crate::Source;
use crate::SourceId;
use crate::ThreadGuard;
use std::collections::VecDeque;
use std::fmt;
use std::mem;
use std::ptr;
use std::sync::mpsc;
use std::sync::{Arc, Condvar, Mutex};
// Lifecycle of the receiving `GSource`, as observed by senders.
enum ChannelSourceState {
    NotAttached,
    Attached(*mut ffi::GSource),
    Destroyed,
}
// SAFETY: the raw pointer in `Attached` is only used while the channel's
// mutex is held (see `ChannelInner` methods), serializing cross-thread access.
unsafe impl Send for ChannelSourceState {}
unsafe impl Sync for ChannelSourceState {}
// Shared channel state; always accessed with the surrounding mutex held.
struct ChannelInner<T> {
    // Items sent but not yet consumed by the receiver.
    queue: VecDeque<T>,
    // The receiver's GSource, if any.
    source: ChannelSourceState,
    // Number of live `Sender`/`SyncSender` handles.
    num_senders: usize,
}
impl<T> ChannelInner<T> {
    // Returns `true` once no receiver can ever consume items again.
    fn receiver_disconnected(&self) -> bool {
        match self.source {
            ChannelSourceState::Destroyed => true,
            // Receiver exists but is already destroyed
            ChannelSourceState::Attached(source)
                if unsafe { ffi::g_source_is_destroyed(source) } != ffi::GFALSE =>
            {
                true
            }
            // Not attached yet so the Receiver still exists
            ChannelSourceState::NotAttached => false,
            // Receiver still running
            ChannelSourceState::Attached(_) => false,
        }
    }
    // Marks the receiver's GSource as ready to dispatch (0) or idle (-1).
    // No-op while no source is attached.
    #[doc(alias = "g_source_set_ready_time")]
    fn set_ready_time(&mut self, ready_time: i64) {
        if let ChannelSourceState::Attached(source) = self.source {
            unsafe {
                ffi::g_source_set_ready_time(source, ready_time);
            }
        }
    }
}
// Capacity bound plus the condvar that senders block on when the queue is full.
struct ChannelBound {
    bound: usize,
    cond: Condvar,
}
// A channel handle: shared state plus an optional bound (None = unbounded).
struct Channel<T>(Arc<(Mutex<ChannelInner<T>>, Option<ChannelBound>)>);
impl<T> Clone for Channel<T> {
    fn clone(&self) -> Channel<T> {
        // Cloning only bumps the Arc refcount; it does NOT register a sender.
        Channel(self.0.clone())
    }
}
impl<T> Channel<T> {
fn new(bound: Option<usize>) -> Channel<T> {
Channel(Arc::new((
Mutex::new(ChannelInner {
queue: VecDeque::new(),
source: ChannelSourceState::NotAttached,
num_senders: 0,
}),
bound.map(|bound| ChannelBound {
bound,
cond: Condvar::new(),
}),
)))
}
    // Blocking send. On bounded channels this waits on the condvar for free
    // space (or, with a bound of 0, for the receiver to actually take the
    // item) and fails with the item once the receiver is gone.
    fn send(&self, t: T) -> Result<(), mpsc::SendError<T>> {
        let mut inner = (self.0).0.lock().unwrap();
        // If we have a bounded channel then we need to wait here until enough free space is
        // available or the receiver disappears
        //
        // A special case here is a bound of 0: the queue must be empty for accepting
        // new data and then we will again wait later for the data to be actually taken
        // out
        if let Some(ChannelBound { bound, ref cond }) = (self.0).1 {
            while inner.queue.len() >= bound
                && !inner.queue.is_empty()
                && !inner.receiver_disconnected()
            {
                inner = cond.wait(inner).unwrap();
            }
        }
        // Error out directly if the receiver is disconnected
        if inner.receiver_disconnected() {
            return Err(mpsc::SendError(t));
        }
        // Store the item on our queue
        inner.queue.push_back(t);
        // and then wake up the GSource
        inner.set_ready_time(0);
        // If we have a bound of 0 we need to wait until the receiver actually
        // handled the data
        if let Some(ChannelBound { bound: 0, ref cond }) = (self.0).1 {
            while !inner.queue.is_empty() && !inner.receiver_disconnected() {
                inner = cond.wait(inner).unwrap();
            }
            // If the receiver was destroyed in the meantime take out the item and report an error
            if inner.receiver_disconnected() {
                // If the item is not in the queue anymore then the receiver just handled it before
                // getting disconnected and all is good
                if let Some(t) = inner.queue.pop_front() {
                    return Err(mpsc::SendError(t));
                }
            }
        }
        Ok(())
    }
fn try_send(&self, t: T) -> Result<(), mpsc::TrySendError<T>> {
let mut inner = (self.0).0.lock().unwrap();
let ChannelBound { bound, ref cond } = (self.0)
.1
.as_ref()
.expect("called try_send() on an unbounded channel");
// Check if the queue is full and handle the special case of a 0 bound
if inner.queue.len() >= *bound && !inner.queue.is_empty() {
|
// Error out directly if the receiver is disconnected
if inner.receiver_disconnected() {
return Err(mpsc::TrySendError::Disconnected(t));
}
// Store the item on our queue
inner.queue.push_back(t);
// and then wake up the GSource
inner.set_ready_time(0);
// If we have a bound of 0 we need to wait until the receiver actually
// handled the data
if *bound == 0 {
while !inner.queue.is_empty() && !inner.receiver_disconnected() {
inner = cond.wait(inner).unwrap();
}
// If the receiver was destroyed in the meantime take out the item and report an error
if inner.receiver_disconnected() {
// If the item is not in the queue anymore then the receiver just handled it before
// getting disconnected and all is good
if let Some(t) = inner.queue.pop_front() {
return Err(mpsc::TrySendError::Disconnected(t));
}
}
}
Ok(())
}
fn try_recv(&self) -> Result<T, mpsc::TryRecvError> {
let mut inner = (self.0).0.lock().unwrap();
// Pop item if we have any
if let Some(item) = inner.queue.pop_front() {
// Wake up a sender that is currently waiting, if any
if let Some(ChannelBound { ref cond, .. }) = (self.0).1 {
cond.notify_one();
}
return Ok(item);
}
// If there are no senders left we are disconnected or otherwise empty. That's the case if
// the only remaining strong reference is the one of the receiver
if inner.num_senders == 0 {
Err(mpsc::TryRecvError::Disconnected)
} else {
Err(mpsc::TryRecvError::Empty)
}
}
}
// The custom GSource. #[repr(C)] with the ffi::GSource header first so the
// pointer can be cast both ways; our owned state lives in Options so
// finalize() can take it out explicitly.
#[repr(C)]
struct ChannelSource<T, F: FnMut(T) -> Continue + 'static> {
    source: ffi::GSource,
    source_funcs: Option<Box<ffi::GSourceFuncs>>,
    channel: Option<Channel<T>>,
    callback: Option<ThreadGuard<F>>,
}
// GSource dispatch: drains the queue, invoking the user callback per item.
// Returns G_SOURCE_REMOVE when all senders are gone or the callback stops.
unsafe extern "C" fn dispatch<T, F: FnMut(T) -> Continue + 'static>(
    source: *mut ffi::GSource,
    callback: ffi::GSourceFunc,
    _user_data: ffi::gpointer,
) -> ffi::gboolean {
    let source = &mut *(source as *mut ChannelSource<T, F>);
    // No C-level callback is ever installed on this source.
    assert!(callback.is_none());
    // Set ready-time to -1 so that we won't get called again before a new item is added
    // to the channel queue.
    ffi::g_source_set_ready_time(&mut source.source, -1);
    // Get a reference to the callback. This will panic if we're called from a different
    // thread than where the source was attached to the main context.
    let callback = source
        .callback
        .as_mut()
        .expect("ChannelSource called before Receiver was attached")
        .get_mut();
    // Now iterate over all items that we currently have in the channel until it is
    // empty again. If all senders are disconnected at some point we remove the GSource
    // from the main context it was attached to as it will never ever be called again.
    let channel = source
        .channel
        .as_ref()
        .expect("ChannelSource without Channel");
    loop {
        match channel.try_recv() {
            Err(mpsc::TryRecvError::Empty) => break,
            Err(mpsc::TryRecvError::Disconnected) => return ffi::G_SOURCE_REMOVE,
            Ok(item) => {
                // The user callback decides whether to keep the source alive.
                if callback(item) == Continue(false) {
                    return ffi::G_SOURCE_REMOVE;
                }
            }
        }
    }
    ffi::G_SOURCE_CONTINUE
}
// GSource dispose hook (GLib >= 2.64): marks the channel receiver-less and
// wakes any senders blocked on the bound condvar before finalization runs.
#[cfg(feature = "v2_64")]
unsafe extern "C" fn dispose<T, F: FnMut(T) -> Continue + 'static>(source: *mut ffi::GSource) {
    let source = &mut *(source as *mut ChannelSource<T, F>);
    if let Some(ref channel) = source.channel {
        // Set the source inside the channel to None so that all senders know that there
        // is no receiver left and wake up the condition variable if any
        let mut inner = (channel.0).0.lock().unwrap();
        inner.source = ChannelSourceState::Destroyed;
        if let Some(ChannelBound { ref cond, .. }) = (channel.0).1 {
            cond.notify_all();
        }
    }
}
// GSource finalize: releases everything stored inside the custom source.
// On older GLib (no dispose hook) it also performs the disconnect/wakeup.
unsafe extern "C" fn finalize<T, F: FnMut(T) -> Continue + 'static>(source: *mut ffi::GSource) {
    let source = &mut *(source as *mut ChannelSource<T, F>);
    // Drop all memory we own by taking it out of the Options
    #[cfg(feature = "v2_64")]
    {
        let _ = source.channel.take().expect("Receiver without channel");
    }
    #[cfg(not(feature = "v2_64"))]
    {
        let channel = source.channel.take().expect("Receiver without channel");
        // FIXME: This is the same as would otherwise be done in the dispose() function but
        // unfortunately it doesn't exist in older version of GLib. Doing it only here can
        // cause a channel sender to get a reference to the source with reference count 0
        // if it happens just before the mutex is taken below.
        //
        // This is exactly the pattern why g_source_set_dispose_function() was added.
        //
        // Set the source inside the channel to None so that all senders know that there
        // is no receiver left and wake up the condition variable if any
        let mut inner = (channel.0).0.lock().unwrap();
        inner.source = ChannelSourceState::Destroyed;
        if let Some(ChannelBound { ref cond, .. }) = (channel.0).1 {
            cond.notify_all();
        }
    }
    let _ = source.source_funcs.take();
    // Take the callback out of the source. This will panic if the value is dropped
    // from a different thread than where the callback was created
    let _ = source.callback.take();
}
/// A `Sender` that can be used to send items to the corresponding main context receiver.
///
/// This `Sender` behaves the same as `std::sync::mpsc::Sender`.
///
/// See [`MainContext::channel()`] for how to create such a `Sender`.
///
/// [`MainContext::channel()`]: struct.MainContext.html#method.channel
pub struct Sender<T>(Channel<T>);
impl<T> fmt::Debug for Sender<T> {
    // The channel contents are intentionally opaque; only the type name is
    // printed (identical output to `debug_struct("Sender").finish()`).
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "Sender")
    }
}
impl<T> Clone for Sender<T> {
    fn clone(&self) -> Sender<T> {
        // Route through `new()` so the shared sender count stays accurate.
        Self::new(&self.0)
    }
}
impl<T> Sender<T> {
    // Registers a new sender on the shared channel state before handing out
    // the handle.
    fn new(channel: &Channel<T>) -> Self {
        let mut inner = (channel.0).0.lock().unwrap();
        inner.num_senders += 1;
        Self(channel.clone())
    }
    /// Sends a value to the channel.
    ///
    /// Returns the value as an error if the receiver was dropped or its
    /// source destroyed.
    pub fn send(&self, t: T) -> Result<(), mpsc::SendError<T>> {
        self.0.send(t)
    }
}
impl<T> Drop for Sender<T> {
    fn drop(&mut self) {
        // Decrease the number of senders and wake up the channel if this
        // was the last sender that was dropped.
        let mut inner = ((self.0).0).0.lock().unwrap();
        inner.num_senders -= 1;
        if inner.num_senders == 0 {
            // Waking the source lets the receiver observe the disconnect.
            inner.set_ready_time(0);
        }
    }
}
/// A `SyncSender` that can be used to send items to the corresponding main context receiver.
///
/// This `SyncSender` behaves the same as `std::sync::mpsc::SyncSender`.
///
/// See [`MainContext::sync_channel()`] for how to create such a `SyncSender`.
///
/// [`MainContext::sync_channel()`]: struct.MainContext.html#method.sync_channel
pub struct SyncSender<T>(Channel<T>);
impl<T> fmt::Debug for SyncSender<T> {
    // Opaque debug output; identical to `debug_struct("SyncSender").finish()`.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "SyncSender")
    }
}
impl<T> Clone for SyncSender<T> {
    fn clone(&self) -> SyncSender<T> {
        // Route through `new()` so the shared sender count stays accurate.
        Self::new(&self.0)
    }
}
impl<T> SyncSender<T> {
    // Registers a new sender on the shared channel state before handing out
    // the handle.
    fn new(channel: &Channel<T>) -> Self {
        let mut inner = (channel.0).0.lock().unwrap();
        inner.num_senders += 1;
        Self(channel.clone())
    }
    /// Sends a value to the channel and blocks if the channel is full.
    pub fn send(&self, t: T) -> Result<(), mpsc::SendError<T>> {
        self.0.send(t)
    }
    /// Sends a value to the channel, returning an error instead of blocking
    /// when the channel is currently full.
    pub fn try_send(&self, t: T) -> Result<(), mpsc::TrySendError<T>> {
        self.0.try_send(t)
    }
}
impl<T> Drop for SyncSender<T> {
    fn drop(&mut self) {
        // Decrease the number of senders and wake up the channel if this
        // was the last sender that was dropped.
        let mut inner = ((self.0).0).0.lock().unwrap();
        inner.num_senders -= 1;
        if inner.num_senders == 0 {
            // Waking the source lets the receiver observe the disconnect.
            inner.set_ready_time(0);
        }
    }
}
/// A `Receiver` that can be attached to a main context to receive items from its corresponding
/// `Sender` or `SyncSender`.
///
/// See [`MainContext::channel()`] or [`MainContext::sync_channel()`] for how to create
/// such a `Receiver`.
///
/// [`MainContext::channel()`]: struct.MainContext.html#method.channel
/// [`MainContext::sync_channel()`]: struct.MainContext.html#method.sync_channel
pub struct Receiver<T>(Option<Channel<T>>, Priority);
impl<T> fmt::Debug for Receiver<T> {
    // Opaque debug output; identical to `debug_struct("Receiver").finish()`.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "Receiver")
    }
}
// It's safe to send the Receiver to other threads for attaching it as
// long as the items to be sent can also be sent between threads.
unsafe impl<T: Send> Send for Receiver<T> {}
impl<T> Drop for Receiver<T> {
    fn drop(&mut self) {
        // If the receiver was never attached to a main context we need to let all the senders know
        // (attach() takes the channel out of the Option, making this a no-op
        // for attached receivers — the GSource handles disconnect instead).
        if let Some(channel) = self.0.take() {
            let mut inner = (channel.0).0.lock().unwrap();
            inner.source = ChannelSourceState::Destroyed;
            if let Some(ChannelBound { ref cond, .. }) = (channel.0).1 {
                cond.notify_all();
            }
        }
    }
}
impl<T> Receiver<T> {
    /// Attaches the receiver to the given `context` and calls `func` whenever an item is
    /// available on the channel.
    ///
    /// Passing `None` for the context will attach it to the thread default main context.
    ///
    /// # Panics
    ///
    /// This function panics if called from a thread that is not the owner of the provided
    /// `context`, or, if `None` is provided, of the thread default main context.
    pub fn attach<F: FnMut(T) -> Continue + 'static>(
        mut self,
        context: Option<&MainContext>,
        func: F,
    ) -> SourceId {
        unsafe {
            // Taking the channel out also makes Receiver::drop a no-op.
            let channel = self.0.take().expect("Receiver without channel");
            let source_funcs = Box::new(ffi::GSourceFuncs {
                check: None,
                prepare: None,
                dispatch: Some(dispatch::<T, F>),
                finalize: Some(finalize::<T, F>),
                closure_callback: None,
                closure_marshal: None,
            });
            let source = ffi::g_source_new(
                mut_override(&*source_funcs),
                mem::size_of::<ChannelSource<T, F>>() as u32,
            ) as *mut ChannelSource<T, F>;
            assert!(!source.is_null());
            #[cfg(feature = "v2_64")]
            {
                ffi::g_source_set_dispose_function(
                    source as *mut ffi::GSource,
                    Some(dispose::<T, F>),
                );
            }
            // Set up the GSource
            {
                let source = &mut *source;
                let mut inner = (channel.0).0.lock().unwrap();
                ffi::g_source_set_priority(mut_override(&source.source), self.1.into_glib());
                // We're immediately ready if the queue is not empty or if no sender is left at this point
                ffi::g_source_set_ready_time(
                    mut_override(&source.source),
                    if !inner.queue.is_empty() || inner.num_senders == 0 {
                        0
                    } else {
                        -1
                    },
                );
                inner.source = ChannelSourceState::Attached(&mut source.source);
            }
            // Store all our data inside our part of the GSource
            {
                let source = &mut *source;
                ptr::write(&mut source.channel, Some(channel));
                ptr::write(&mut source.callback, Some(ThreadGuard::new(func)));
                ptr::write(&mut source.source_funcs, Some(source_funcs));
            }
            let source = Source::from_glib_full(mut_override(&(*source).source));
            if let Some(context) = context {
                assert!(context.is_owner());
                source.attach(Some(context))
            } else {
                let context = MainContext::ref_thread_default();
                assert!(context.is_owner());
                source.attach(Some(&context))
            }
        }
    }
}
impl MainContext {
    /// Creates a channel for a main context.
    ///
    /// The `Receiver` has to be attached to a main context at a later time, together with a
    /// closure that will be called for every item sent to a `Sender`.
    ///
    /// The `Sender` can be cloned and both the `Sender` and `Receiver` can be sent to different
    /// threads as long as the item type implements the `Send` trait.
    ///
    /// When the last `Sender` is dropped the channel is removed from the main context. If the
    /// `Receiver` is dropped and not attached to a main context all sending to the `Sender`
    /// will fail.
    ///
    /// The returned `Sender` behaves the same as `std::sync::mpsc::Sender`.
    pub fn channel<T>(priority: Priority) -> (Sender<T>, Receiver<T>) {
        // Unbounded channel: senders never block.
        let channel = Channel::new(None);
        let receiver = Receiver(Some(channel.clone()), priority);
        let sender = Sender::new(&channel);
        (sender, receiver)
    }
    /// Creates a synchronous channel for a main context with a given bound on the capacity of the
    /// channel.
    ///
    /// The `Receiver` has to be attached to a main context at a later time, together with a
    /// closure that will be called for every item sent to a `SyncSender`.
    ///
    /// The `SyncSender` can be cloned and both the `SyncSender` and `Receiver` can be sent to different
    /// threads as long as the item type implements the `Send` trait.
    ///
    /// When the last `SyncSender` is dropped the channel is removed from the main context. If the
    /// `Receiver` is dropped and not attached to a main context all sending to the `SyncSender`
    /// will fail.
    ///
    /// The returned `SyncSender` behaves the same as `std::sync::mpsc::SyncSender`.
    pub fn sync_channel<T>(priority: Priority, bound: usize) -> (SyncSender<T>, Receiver<T>) {
        // Bounded channel: a bound of 0 gives rendezvous semantics
        // (see Channel::send).
        let channel = Channel::new(Some(bound));
        let receiver = Receiver(Some(channel.clone()), priority);
        let sender = SyncSender::new(&channel);
        (sender, receiver)
    }
}
#[cfg(test)]
mod tests {
use super::*;
use crate::MainLoop;
use std::cell::RefCell;
use std::rc::Rc;
use std::sync::atomic::{AtomicBool, Ordering};
use std::thread;
use std::time;
#[test]
// Items sent through the channel are delivered in order to the attached
// closure; the loop quits once the running sum reaches 6.
fn test_channel() {
    let c = MainContext::new();
    let l = MainLoop::new(Some(&c), false);
    c.acquire();
    let (sender, receiver) = MainContext::channel(Priority::default());
    let sum = Rc::new(RefCell::new(0));
    let sum_clone = sum.clone();
    let l_clone = l.clone();
    receiver.attach(Some(&c), move |item| {
        *sum_clone.borrow_mut() += item;
        if *sum_clone.borrow() == 6 {
            l_clone.quit();
            Continue(false)
        } else {
            Continue(true)
        }
    });
    sender.send(1).unwrap();
    sender.send(2).unwrap();
    sender.send(3).unwrap();
    l.run();
    assert_eq!(*sum.borrow(), 6);
}
#[test]
// Dropping the last sender removes the source, which drops the closure and
// — via the Helper's Drop — quits the main loop.
fn test_drop_sender() {
    let c = MainContext::new();
    let l = MainLoop::new(Some(&c), false);
    c.acquire();
    let (sender, receiver) = MainContext::channel::<i32>(Priority::default());
    // Quits the main loop when the closure owning it is dropped.
    struct Helper(MainLoop);
    impl Drop for Helper {
        fn drop(&mut self) {
            self.0.quit();
        }
    }
    let helper = Helper(l.clone());
    receiver.attach(Some(&c), move |_| {
        let _ = helper;
        Continue(true)
    });
    drop(sender);
    l.run();
}
#[test]
fn test_drop_receiver() {
    // Once the receiver is gone, send() must hand the item back as an error.
    let (tx, rx) = MainContext::channel::<i32>(Priority::default());
    drop(rx);
    assert_eq!(tx.send(1), Err(mpsc::SendError(1)));
}
#[test]
// Destroying the receiver's GSource disconnects the channel for senders.
fn test_remove_receiver() {
    let c = MainContext::new();
    c.acquire();
    let (sender, receiver) = MainContext::channel::<i32>(Priority::default());
    let source_id = receiver.attach(Some(&c), move |_| Continue(true));
    let source = c.find_source_by_id(&source_id).unwrap();
    source.destroy();
    assert_eq!(sender.send(1), Err(mpsc::SendError(1)));
}
#[test]
// After destroying AND dropping the source, the attached closure must be
// freed (observed via the Helper's Drop flag) and senders must error out.
fn test_remove_receiver_and_drop_source() {
    let c = MainContext::new();
    c.acquire();
    let (sender, receiver) = MainContext::channel::<i32>(Priority::default());
    // Sets the flag when the closure owning it is dropped.
    struct Helper(Arc<AtomicBool>);
    impl Drop for Helper {
        fn drop(&mut self) {
            self.0.store(true, Ordering::Relaxed);
        }
    }
    let dropped = Arc::new(AtomicBool::new(false));
    let helper = Helper(dropped.clone());
    let source_id = receiver.attach(Some(&c), move |_| {
        let _helper = &helper;
        Continue(true)
    });
    let source = c.find_source_by_id(&source_id).unwrap();
    source.destroy();
    // This should drop the closure
    drop(source);
    assert!(dropped.load(Ordering::Relaxed));
    assert_eq!(sender.send(1), Err(mpsc::SendError(1)));
}
#[test]
// Bounded channel (bound 2): try_send fills the queue and then fails, while
// a blocked send() resumes once the receiver consumes an item.
fn test_sync_channel() {
    let c = MainContext::new();
    let l = MainLoop::new(Some(&c), false);
    c.acquire();
    let (sender, receiver) = MainContext::sync_channel(Priority::default(), 2);
    let sum = Rc::new(RefCell::new(0));
    let sum_clone = sum.clone();
    let l_clone = l.clone();
    receiver.attach(Some(&c), move |item| {
        *sum_clone.borrow_mut() += item;
        if *sum_clone.borrow() == 6 {
            l_clone.quit();
            Continue(false)
        } else {
            Continue(true)
        }
    });
    let (wait_sender, wait_receiver) = mpsc::channel();
    let thread = thread::spawn(move || {
        // The first two must succeed
        sender.try_send(1).unwrap();
        sender.try_send(2).unwrap();
        // This fills up the channel
        assert!(sender.try_send(3).is_err());
        wait_sender.send(()).unwrap();
        // This will block
        sender.send(3).unwrap();
    });
    // Wait until the channel is full, and then another
    // 50ms to make sure the sender is blocked now and
    // can wake up properly once an item was consumed
    assert!(wait_receiver.recv().is_ok());
    thread::sleep(time::Duration::from_millis(50));
    l.run();
    thread.join().unwrap();
    assert_eq!(*sum.borrow(), 6);
}
#[test]
// A sender blocked on a full bounded channel must wake up with an error once
// the receiver's closure returns Continue(false) and the source is removed.
fn test_sync_channel_drop_wakeup() {
    let c = MainContext::new();
    let l = MainLoop::new(Some(&c), false);
    c.acquire();
    let (sender, receiver) = MainContext::sync_channel(Priority::default(), 3);
    let sum = Rc::new(RefCell::new(0));
    let sum_clone = sum.clone();
    let l_clone = l.clone();
    receiver.attach(Some(&c), move |item| {
        *sum_clone.borrow_mut() += item;
        if *sum_clone.borrow() == 6 {
            l_clone.quit();
            Continue(false)
        } else {
            Continue(true)
        }
    });
    let (wait_sender, wait_receiver) = mpsc::channel();
    let thread = thread::spawn(move || {
        // The first three must succeed
        sender.try_send(1).unwrap();
        sender.try_send(2).unwrap();
        sender.try_send(3).unwrap();
        wait_sender.send(()).unwrap();
        for i in 4.. {
            // This will block at some point until the
            // receiver is removed from the main context
            if sender.send(i).is_err() {
                break;
            }
        }
    });
    // Wait until the channel is full, and then another
    // 50ms to make sure the sender is blocked now and
    // can wake up properly once an item was consumed
    assert!(wait_receiver.recv().is_ok());
    thread::sleep(time::Duration::from_millis(50));
    l.run();
    thread.join().unwrap();
    assert_eq!(*sum.borrow(), 6);
}
#[test]
// Dropping an unattached receiver must wake up a sender that is blocked on
// the full channel, turning its send() into an error.
fn test_sync_channel_drop_receiver_wakeup() {
    let c = MainContext::new();
    c.acquire();
    let (sender, receiver) = MainContext::sync_channel(Priority::default(), 2);
    let (wait_sender, wait_receiver) = mpsc::channel();
    let thread = thread::spawn(move || {
        // The first two must succeed
        sender.try_send(1).unwrap();
        sender.try_send(2).unwrap();
        wait_sender.send(()).unwrap();
        // This will block and then error out because the receiver is destroyed
        assert!(sender.send(3).is_err());
    });
    // Wait until the channel is full, and then another
    // 50ms to make sure the sender is blocked now and
    // can wake up properly once an item was consumed
    assert!(wait_receiver.recv().is_ok());
    thread::sleep(time::Duration::from_millis(50));
    drop(receiver);
    thread.join().unwrap();
}
#[test]
// A bound of 0 gives rendezvous semantics: each send() only completes after
// the receiver's closure has actually handled the item, verified here via the
// side-channel's recv_timeout probes.
fn test_sync_channel_rendezvous() {
    let c = MainContext::new();
    let l = MainLoop::new(Some(&c), false);
    c.acquire();
    let (sender, receiver) = MainContext::sync_channel(Priority::default(), 0);
    let (wait_sender, wait_receiver) = mpsc::channel();
    let thread = thread::spawn(move || {
        wait_sender.send(()).unwrap();
        sender.send(1).unwrap();
        wait_sender.send(()).unwrap();
        sender.send(2).unwrap();
        wait_sender.send(()).unwrap();
        sender.send(3).unwrap();
        wait_sender.send(()).unwrap();
    });
    // Wait until the thread is started, then wait another 50ms and
    // during that time it must not have proceeded yet to send the
    // second item because we did not yet receive the first item.
    assert!(wait_receiver.recv().is_ok());
    assert_eq!(
        wait_receiver.recv_timeout(time::Duration::from_millis(50)),
        Err(mpsc::RecvTimeoutError::Timeout)
    );
    let sum = Rc::new(RefCell::new(0));
    let sum_clone = sum.clone();
    let l_clone = l.clone();
    receiver.attach(Some(&c), move |item| {
        // We consumed one item so there should be one item on
        // the other receiver now.
        assert!(wait_receiver.recv().is_ok());
        *sum_clone.borrow_mut() += item;
        if *sum_clone.borrow() == 6 {
            // But as we didn't consume the next one yet, there must be no
            // other item available yet
            assert_eq!(
                wait_receiver.recv_timeout(time::Duration::from_millis(50)),
                Err(mpsc::RecvTimeoutError::Disconnected)
            );
            l_clone.quit();
            Continue(false)
        } else {
            // But as we didn't consume the next one yet, there must be no
            // other item available yet
            assert_eq!(
                wait_receiver.recv_timeout(time::Duration::from_millis(50)),
                Err(mpsc::RecvTimeoutError::Timeout)
            );
            Continue(true)
        }
    });
    l.run();
    thread.join().unwrap();
    assert_eq!(*sum.borrow(), 6);
}
}
|
return Err(mpsc::TrySendError::Full(t));
}
|
benchmark1.rs
|
use criterion::{black_box, criterion_group, criterion_main, Criterion};
/// Runs one full csv2bibtex conversion on a private copy of `config`.
///
/// `csv2bibtex::run` takes `&mut Config` and mutates it, so each benchmark
/// iteration works on a clone to keep iterations independent of each other.
fn run_main_loop(config: &mut csv2bibtex::args::Config) {
    let mut local_config = config.clone();
    csv2bibtex::run(&mut local_config).unwrap();
}
/// First five "valid" mappings (field name -> csv2bibtex template).
const VALID_FIELDS_1: [(&str, &str); 5] = [
    ("entrytype", "article"),
    ("author", "[[AU]]"),
    ("title", "[[TI]]"),
    ("journal", "[[SO]]"),
    ("volume", "[[VL]]"),
];
/// Second five "valid" mappings, added on top of the first five.
const VALID_FIELDS_2: [(&str, &str); 5] = [
    ("number", "[[IS]]"),
    ("pages", "[[BP]]--[[EP]]"),
    ("doi", "[[DI]]"),
    ("year", "[[PY]]"),
    ("abstract", "[[AB]]"),
];
/// First five "invalid" mappings (templates reference nonexistent columns).
const INVALID_FIELDS_1: [(&str, &str); 5] = [
    ("entrytype", "[[article]]"),
    ("author", "[[authors]]"),
    ("title", "[[titles]]"),
    ("journal", "[[journals]]"),
    ("volume", "[[volumes]]"),
];
/// Second five "invalid" mappings, added on top of the first five.
const INVALID_FIELDS_2: [(&str, &str); 5] = [
    ("number", "[[numbers]]"),
    ("pages", "[[pages]]"),
    ("doi", "[[dois]]"),
    ("year", "[[years]]"),
    ("abstract", "[[abstracts]]"),
];

/// Adds each `(key, template)` pair to the config's CSV field mapping.
fn insert_fields(config: &mut csv2bibtex::args::Config, fields: &[(&str, &str)]) {
    for (key, value) in fields {
        config
            .csv_field_mapping
            .insert(String::from(*key), String::from(*value));
    }
}

/// Benchmarks csv2bibtex over a matrix of {100, 1000} input lines and
/// {0, 5 valid, 10 valid, 5 invalid, 10 invalid} field mappings.
///
/// Note: the original hand-unrolled version of this function had been
/// corrupted (the `journal -> [[SO]]` mapping of the first "5 valid fields"
/// scenario was missing); the scenario matrix is restored here and driven
/// by a loop so both input files run the exact same sequence.
fn criterion_benchmark(c: &mut Criterion) {
    // build config structure
    let mut config = csv2bibtex::args::Config::default();
    config.file_output = std::path::PathBuf::from("./benches/tmp-benchmark1-output1.bib");
    config.csv_delimiter = String::from("\t");
    config.csv_lazy = true;
    // Same scenario sequence for the 100-line and the 1000-line input file.
    let inputs = [
        ("./benches/benchmark1-input1.csv", "100 lines"),
        ("./benches/benchmark1-input2.csv", "1000 lines"),
    ];
    for &(file, label) in inputs.iter() {
        config.file_input = std::path::PathBuf::from(file);
        // Start from an empty mapping (0 fields).
        config.csv_field_mapping.clear();
        c.bench_function(&format!("0 fields, {}", label), |b| {
            b.iter(|| run_main_loop(black_box(&mut config)))
        });
        // Add five valid fields and run again.
        insert_fields(&mut config, &VALID_FIELDS_1);
        c.bench_function(&format!("5 valid fields, {}", label), |b| {
            b.iter(|| run_main_loop(black_box(&mut config)))
        });
        // Add five more valid fields and run again.
        insert_fields(&mut config, &VALID_FIELDS_2);
        c.bench_function(&format!("10 valid fields, {}", label), |b| {
            b.iter(|| run_main_loop(black_box(&mut config)))
        });
        // Clear fields, add five invalid fields and run again.
        config.csv_field_mapping.clear();
        insert_fields(&mut config, &INVALID_FIELDS_1);
        c.bench_function(&format!("5 invalid fields, {}", label), |b| {
            b.iter(|| run_main_loop(black_box(&mut config)))
        });
        // Add five more invalid fields and run again.
        insert_fields(&mut config, &INVALID_FIELDS_2);
        c.bench_function(&format!("10 invalid fields, {}", label), |b| {
            b.iter(|| run_main_loop(black_box(&mut config)))
        });
    }
    // clean up
    std::fs::remove_file("./benches/tmp-benchmark1-output1.bib").unwrap();
}
criterion_group!(benches, criterion_benchmark);
criterion_main!(benches);
|
.csv_field_mapping
.insert(String::from("journal"), String::from("[[SO]]"));
config
.csv_field_mapping
|
unimported.go
|
package unimported
func
|
() {
//@unimported("", bytes, context, cryptoslashrand, externalpackage, time, unsafe)
}
// Create markers for unimported std lib packages. Only for use by this test.
/* bytes */ //@item(bytes, "bytes", "\"bytes\"", "package")
/* context */ //@item(context, "context", "\"context\"", "package")
/* rand */ //@item(cryptoslashrand, "rand", "\"crypto/rand\"", "package")
/* pkg */ //@item(externalpackage, "pkg", "\"example.com/extramodule/pkg\"", "package" )
/* unsafe */ //@item(unsafe, "unsafe", "\"unsafe\"", "package")
/* time */ //@item(time, "time", "\"time\"", "package")
|
_
|
attrs.rs
|
use proc_macro::{TokenStream, TokenTree};
/// Parses the token stream of a function-binder attribute into a flat list
/// of string arguments.
///
/// Identifiers and literals are collected verbatim (in their textual form),
/// punctuation (e.g. separating commas) is ignored, and any group
/// (`(...)`, `[...]`, `{...}`) is rejected as an unexpected parameter.
///
/// NOTE: the original source had a stray corruption line between the
/// signature and the body; the function is restored to valid syntax here.
pub(crate) fn parse_function_bind_attrs(attr: TokenStream) -> Result<Vec<String>, String> {
    let mut ret: Vec<String> = vec![];
    for token in attr.into_iter() {
        match token {
            TokenTree::Group(group) => {
                return Err(format!("unexpected function binder parameter: {}", group))
            }
            TokenTree::Ident(ident) => ret.push(ident.to_string()),
            TokenTree::Literal(lit) => ret.push(lit.to_string()),
            // Punctuation only separates arguments; it carries no payload.
            TokenTree::Punct(_) => {}
        }
    }
    Ok(ret)
}
|
|
gapic_bazel.py
|
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from pathlib import Path
from typing import Optional, Union
import os
import shutil
import tempfile
from synthtool import _tracked_paths, metadata, shell
from synthtool.log import logger
from synthtool.sources import git
GOOGLEAPIS_URL: str = git.make_repo_clone_url("googleapis/googleapis")
GOOGLEAPIS_PRIVATE_URL: str = git.make_repo_clone_url("googleapis/googleapis-private")
DISCOVERY_ARTIFACT_MANAGER_URL: str = git.make_repo_clone_url(
"googleapis/discovery-artifact-manager"
)
LOCAL_GOOGLEAPIS: Optional[str] = os.environ.get("SYNTHTOOL_GOOGLEAPIS")
LOCAL_DISCOVERY_ARTIFACT_MANAGER: Optional[str] = os.environ.get(
"SYNTHTOOL_DISCOVERY_ARTIFACT_MANAGER"
)
class GAPICBazel:
"""A synthtool component that can produce libraries using bazel build.
"""
    def __init__(self):
        """Verify required external tools exist, then set up lazy repo caches."""
        self._ensure_dependencies_installed()
        # Lazily-populated caches of cloned source repositories; each is
        # filled on first use by the corresponding _clone_* helper.
        self._googleapis = None
        self._googleapis_private = None
        self._discovery_artifact_manager = None
    # Thin per-language wrappers: each delegates to _generate_code with the
    # language fixed and forwards all remaining keyword arguments.
    def py_library(self, service: str, version: str, **kwargs) -> Path:
        """Generate a Python client library via ``_generate_code``."""
        return self._generate_code(service, version, "python", **kwargs)
    def go_library(self, service: str, version: str, **kwargs) -> Path:
        """Generate a Go client library via ``_generate_code``."""
        return self._generate_code(service, version, "go", **kwargs)
    def node_library(self, service: str, version: str, **kwargs) -> Path:
        """Generate a Node.js client library via ``_generate_code``."""
        return self._generate_code(service, version, "nodejs", **kwargs)
    def csharp_library(self, service: str, version: str, **kwargs) -> Path:
        """Generate a C# client library via ``_generate_code``."""
        return self._generate_code(service, version, "csharp", **kwargs)
    def php_library(self, service: str, version: str, **kwargs) -> Path:
        """Generate a PHP client library via ``_generate_code``."""
        return self._generate_code(service, version, "php", **kwargs)
    def java_library(self, service: str, version: str, **kwargs) -> Path:
        """Generate a Java client library via ``_generate_code``."""
        return self._generate_code(service, version, "java", **kwargs)
    def ruby_library(self, service: str, version: str, **kwargs) -> Path:
        """Generate a Ruby client library via ``_generate_code``."""
        return self._generate_code(service, version, "ruby", **kwargs)
def _generate_code(
self,
service: str,
version: str,
language: str,
*,
private: bool = False,
discogapic: bool = False,
proto_path: Union[str, Path] = None,
output_dir: Union[str, Path] = None,
bazel_target: str = None,
include_protos: bool = False,
proto_output_path: Union[str, Path] = None,
):
# Determine which googleapis repo to use
if discogapic:
api_definitions_repo = self._clone_discovery_artifact_manager()
api_definitions_repo_name = "discovery-artifact-manager"
elif private:
api_definitions_repo = self._clone_googleapis_private()
api_definitions_repo_name = "googleapis_private"
|
api_definitions_repo = self._clone_googleapis()
api_definitions_repo_name = "googleapis"
# Sanity check: We should have a googleapis repo; if we do not,
# something went wrong, and we should abort.
if not api_definitions_repo:
raise RuntimeError(
f"Unable to generate {service}, the sources repository repository"
"is unavailable."
)
# Calculate proto_path if necessary.
if not bazel_target or include_protos:
# If bazel_target is not specified explicitly, we will need
# proto_path to calculate it. If include_protos is True,
# we will need the proto_path to copy the protos.
if not proto_path:
if bazel_target:
# Calculate proto_path from the full bazel target, which is
# in the format "//proto_path:target_name
proto_path = bazel_target.split(":")[0][2:]
else:
# If bazel_target is not specified, assume the protos are
# simply under google/cloud, where the most of the protos
# usually are.
proto_path = f"google/cloud/{service}/{version}"
protos = Path(proto_path)
if protos.is_absolute():
protos = protos.relative_to("/")
# Determine bazel target based on per-language patterns
# Java: google-cloud-{{assembly_name}}-{{version}}-java
# Go: gapi-cloud-{{assembly_name}}-{{version}}-go
# Python: {{assembly_name}}-{{version}}-py
# PHP: google-cloud-{{assembly_name}}-{{version}}-php
# Node.js: {{assembly_name}}-{{version}}-nodejs
# Ruby: google-cloud-{{assembly_name}}-{{version}}-ruby
# C#: google-cloud-{{assembly_name}}-{{version}}-csharp
if not bazel_target:
# Determine where the protos we are generating actually live.
# We can sometimes (but not always) determine this from the service
# and version; in other cases, the user must provide it outright.
parts = list(protos.parts)
while len(parts) > 0 and parts[0] != "google":
parts.pop(0)
if len(parts) == 0:
raise RuntimeError(
f"Cannot determine bazel_target from proto_path {protos}."
"Please set bazel_target explicitly."
)
if language == "python":
suffix = f"{service}-{version}-py"
elif language == "nodejs":
suffix = f"{service}-{version}-nodejs"
elif language == "go":
suffix = f"gapi-{'-'.join(parts[1:])}-go"
else:
suffix = f"{'-'.join(parts)}-{language}"
bazel_target = f"//{os.path.sep.join(parts)}:{suffix}"
# Sanity check: Do we have protos where we think we should?
if not (api_definitions_repo / protos).exists():
raise FileNotFoundError(
f"Unable to find directory for protos: {(api_definitions_repo / protos)}."
)
if not tuple((api_definitions_repo / protos).glob("*.proto")):
raise FileNotFoundError(
f"Directory {(api_definitions_repo / protos)} exists, but no protos found."
)
if not (api_definitions_repo / protos / "BUILD.bazel"):
raise FileNotFoundError(
f"File {(api_definitions_repo / protos / 'BUILD.bazel')} does not exist."
)
# Ensure the desired output directory exists.
# If none was provided, create a temporary directory.
if not output_dir:
output_dir = tempfile.mkdtemp()
output_dir = Path(output_dir).resolve()
# Let's build some stuff now.
cwd = os.getcwd()
os.chdir(str(api_definitions_repo))
bazel_run_args = [
"bazel",
"--max_idle_secs=240",
"build",
bazel_target,
]
logger.debug(f"Generating code for: {bazel_target}.")
shell.run(bazel_run_args)
# We've got tar file!
# its location: bazel-bin/google/cloud/language/v1/language-v1-nodejs.tar.gz
# bazel_target: //google/cloud/language/v1:language-v1-nodejs
tar_file = (
f"bazel-bin{os.path.sep}{bazel_target[2:].replace(':', os.path.sep)}.tar.gz"
)
tar_run_args = [
"tar",
"-C",
str(output_dir),
"--strip-components=1",
"-xzf",
tar_file,
]
shell.run(tar_run_args)
# Get the *.protos files and put them in a protos dir in the output
if include_protos:
proto_files = protos.glob("**/*.proto")
# By default, put the protos at the root in a folder named 'protos'.
# Specific languages can be cased here to put them in a more language
# appropriate place.
if not proto_output_path:
proto_output_path = output_dir / "protos"
if language == "python":
# place protos alongsize the *_pb2.py files
proto_output_path = (
output_dir / f"google/cloud/{service}_{version}/proto"
)
else:
proto_output_path = Path(output_dir / proto_output_path)
os.makedirs(proto_output_path, exist_ok=True)
for i in proto_files:
logger.debug(f"Copy: {i} to {proto_output_path / i.name}")
shutil.copyfile(i, proto_output_path / i.name)
logger.success(f"Placed proto files into {proto_output_path}.")
os.chdir(cwd)
# Sanity check: Does the output location have code in it?
# If not, complain.
if not tuple(output_dir.iterdir()):
raise RuntimeError(
f"Code generation seemed to succeed, but {output_dir} is empty."
)
# Huzzah, it worked.
logger.success(f"Generated code into {output_dir}.")
# Record this in the synthtool metadata.
metadata.add_client_destination(
source=api_definitions_repo_name,
api_name=service,
api_version=version,
language=language,
generator="bazel",
)
_tracked_paths.add(output_dir)
return output_dir
def _clone_googleapis(self):
if self._googleapis:
return self._googleapis
if LOCAL_GOOGLEAPIS:
self._googleapis = Path(LOCAL_GOOGLEAPIS).expanduser()
logger.debug(f"Using local googleapis at {self._googleapis}")
else:
logger.debug("Cloning googleapis.")
self._googleapis = git.clone(GOOGLEAPIS_URL)
return self._googleapis
def _clone_googleapis_private(self):
if self._googleapis_private:
return self._googleapis_private
if LOCAL_GOOGLEAPIS:
self._googleapis_private = Path(LOCAL_GOOGLEAPIS).expanduser()
logger.debug(
f"Using local googleapis at {self._googleapis_private} for googleapis-private"
)
else:
logger.debug("Cloning googleapis-private.")
self._googleapis_private = git.clone(GOOGLEAPIS_PRIVATE_URL)
return self._googleapis_private
def _clone_discovery_artifact_manager(self):
if self._discovery_artifact_manager:
return self._discovery_artifact_manager
if LOCAL_DISCOVERY_ARTIFACT_MANAGER:
self._discovery_artifact_manager = Path(
LOCAL_DISCOVERY_ARTIFACT_MANAGER
).expanduser()
logger.debug(
f"Using local discovery_artifact_manager at {self._discovery_artifact_manager} for googleapis-private"
)
else:
logger.debug("Cloning discovery-artifact-manager.")
self._discovery_artifact_manager = git.clone(DISCOVERY_ARTIFACT_MANAGER_URL)
return self._discovery_artifact_manager
def _ensure_dependencies_installed(self):
logger.debug("Ensuring dependencies.")
dependencies = ["bazel", "zip", "unzip", "tar"]
failed_dependencies = []
for dependency in dependencies:
return_code = shell.run(["which", dependency], check=False).returncode
if return_code:
failed_dependencies.append(dependency)
if failed_dependencies:
raise EnvironmentError(
f"Dependencies missing: {', '.join(failed_dependencies)}"
)
|
else:
|
gateio.py
|
# -*- coding: utf-8 -*-
# PLEASE DO NOT EDIT THIS FILE, IT IS GENERATED AND WILL BE OVERWRITTEN:
# https://github.com/ccxt/ccxt/blob/master/CONTRIBUTING.md#how-to-contribute-code
from ccxt.async_support.base.exchange import Exchange
import hashlib
from ccxt.base.errors import ExchangeError
from ccxt.base.errors import AuthenticationError
from ccxt.base.errors import PermissionDenied
from ccxt.base.errors import AccountNotEnabled
from ccxt.base.errors import AccountSuspended
from ccxt.base.errors import ArgumentsRequired
from ccxt.base.errors import BadRequest
from ccxt.base.errors import BadSymbol
from ccxt.base.errors import InsufficientFunds
from ccxt.base.errors import InvalidOrder
from ccxt.base.errors import OrderNotFound
from ccxt.base.errors import NotSupported
from ccxt.base.errors import RateLimitExceeded
from ccxt.base.errors import ExchangeNotAvailable
from ccxt.base.decimal_to_precision import TICK_SIZE
from ccxt.base.precise import Precise
class gateio(Exchange):
def describe(self):
return self.deep_extend(super(gateio, self).describe(), {
'id': 'gateio',
'name': 'Gate.io',
'countries': ['KR'],
'rateLimit': 10 / 3, # 300 requests per second or 3.33ms
'version': 'v4',
'certified': True,
'pro': True,
'urls': {
'logo': 'https://user-images.githubusercontent.com/1294454/31784029-0313c702-b509-11e7-9ccc-bc0da6a0e435.jpg',
'doc': 'https://www.gate.io/docs/apiv4/en/index.html',
'www': 'https://gate.io/',
'api': {
'public': {
'wallet': 'https://api.gateio.ws/api/v4',
'futures': 'https://api.gateio.ws/api/v4',
'margin': 'https://api.gateio.ws/api/v4',
'delivery': 'https://api.gateio.ws/api/v4',
'spot': 'https://api.gateio.ws/api/v4',
'options': 'https://api.gateio.ws/api/v4',
},
'private': {
'withdrawals': 'https://api.gateio.ws/api/v4',
'wallet': 'https://api.gateio.ws/api/v4',
'futures': 'https://api.gateio.ws/api/v4',
'margin': 'https://api.gateio.ws/api/v4',
'delivery': 'https://api.gateio.ws/api/v4',
'spot': 'https://api.gateio.ws/api/v4',
'options': 'https://api.gateio.ws/api/v4',
},
},
'test': {
'public': {
'futures': 'https://fx-api-testnet.gateio.ws/api/v4',
'delivery': 'https://fx-api-testnet.gateio.ws/api/v4',
},
'private': {
'futures': 'https://fx-api-testnet.gateio.ws/api/v4',
'delivery': 'https://fx-api-testnet.gateio.ws/api/v4',
},
},
'referral': {
'url': 'https://www.gate.io/ref/2436035',
'discount': 0.2,
},
},
'has': {
'CORS': None,
'spot': True,
'margin': True,
'swap': True,
'future': True,
'option': None,
'cancelAllOrders': True,
'cancelOrder': True,
'createMarketOrder': False,
'createOrder': True,
'createPostOnlyOrder': True,
'createStopLimitOrder': True,
'createStopMarketOrder': False,
'createStopOrder': True,
'fetchBalance': True,
'fetchBorrowRate': False,
'fetchBorrowRateHistories': False,
'fetchBorrowRateHistory': False,
'fetchBorrowRates': False,
'fetchClosedOrders': True,
'fetchCurrencies': True,
'fetchDepositAddress': True,
'fetchDeposits': True,
'fetchFundingHistory': True,
'fetchFundingRate': True,
'fetchFundingRateHistory': True,
'fetchFundingRates': True,
'fetchIndexOHLCV': True,
'fetchLeverage': False,
'fetchLeverageTiers': True,
'fetchMarketLeverageTiers': 'emulated',
'fetchMarkets': True,
'fetchMarkOHLCV': True,
'fetchMyTrades': True,
'fetchNetworkDepositAddress': True,
'fetchOHLCV': True,
'fetchOpenOrders': True,
'fetchOrder': True,
'fetchOrderBook': True,
'fetchPositions': True,
'fetchPremiumIndexOHLCV': False,
'fetchTicker': True,
'fetchTickers': True,
'fetchTime': False,
'fetchTrades': True,
'fetchTradingFee': True,
'fetchTradingFees': True,
'fetchTransactionFees': True,
'fetchWithdrawals': True,
'setLeverage': True,
'setMarginMode': False,
'transfer': True,
'withdraw': True,
},
'api': {
'public': {
'wallet': {
'get': {
'wallet/currency_chains': 1.5,
},
},
'spot': {
'get': {
'currencies': 1,
'currencies/{currency}': 1,
'currency_pairs': 1,
'currency_pairs/{currency_pair}': 1,
'tickers': 1,
'order_book': 1,
'trades': 1,
'candlesticks': 1,
},
},
'margin': {
'get': {
'currency_pairs': 1,
'currency_pairs/{currency_pair}': 1,
'cross/currencies': 1,
'cross/currencies/{currency}': 1,
'funding_book': 1,
},
},
'futures': {
'get': {
'{settle}/contracts': 1.5,
'{settle}/contracts/{contract}': 1.5,
'{settle}/order_book': 1.5,
'{settle}/trades': 1.5,
'{settle}/candlesticks': 1.5,
'{settle}/tickers': 1.5,
'{settle}/funding_rate': 1.5,
'{settle}/insurance': 1.5,
'{settle}/contract_stats': 1.5,
'{settle}/liq_orders': 1.5,
},
},
'delivery': {
'get': {
'{settle}/contracts': 1.5,
'{settle}/contracts/{contract}': 1.5,
'{settle}/order_book': 1.5,
'{settle}/trades': 1.5,
'{settle}/candlesticks': 1.5,
'{settle}/tickers': 1.5,
'{settle}/insurance': 1.5,
},
},
'options': {
'get': {
'underlyings': 1.5,
'expirations': 1.5,
'contracts': 1.5,
'contracts/{contract}': 1.5,
'settlements': 1.5,
'settlements/{contract}': 1.5,
'order_book': 1.5,
'tickers': 1.5,
'underlying/tickers/{underlying}': 1.5,
'candlesticks': 1.5,
'underlying/candlesticks': 1.5,
'trades': 1.5,
},
},
},
'private': {
'withdrawals': {
'post': {
'': 3000, # 3000 = 10 seconds
},
'delete': {
'{withdrawal_id}': 300,
},
},
'wallet': {
'get': {
'deposit_address': 300,
'withdrawals': 300,
'deposits': 300,
'sub_account_transfers': 300,
'withdraw_status': 300,
'sub_account_balances': 300,
'fee': 300,
'total_balance': 300,
},
'post': {
'transfers': 300,
'sub_account_transfers': 300,
},
},
'spot': {
'get': {
'accounts': 1,
'open_orders': 1,
'orders': 1,
'orders/{order_id}': 1,
'my_trades': 1,
'price_orders': 1,
'price_orders/{order_id}': 1,
},
'post': {
'batch_orders': 1,
'orders': 1,
'cancel_batch_orders': 1,
'price_orders': 1,
},
'delete': {
'orders': 1,
'orders/{order_id}': 1,
'price_orders': 1,
'price_orders/{order_id}': 1,
},
},
'margin': {
'get': {
'accounts': 1.5,
'account_book': 1.5,
'funding_accounts': 1.5,
'loans': 1.5,
'loans/{loan_id}': 1.5,
'loans/{loan_id}/repayment': 1.5,
'loan_records': 1.5,
'loan_records/{load_record_id}': 1.5,
'auto_repay': 1.5,
'transferable': 1.5,
'cross/accounts': 1.5,
'cross/account_book': 1.5,
'cross/loans': 1.5,
'cross/loans/{loan_id}': 1.5,
'cross/loans/repayments': 1.5,
'cross/transferable': 1.5,
'loan_records/{loan_record_id}': 1.5,
'borrowable': 1.5,
'cross/repayments': 1.5,
'cross/borrowable': 1.5,
},
'post': {
'loans': 1.5,
'merged_loans': 1.5,
'loans/{loan_id}/repayment': 1.5,
'auto_repay': 1.5,
'cross/loans': 1.5,
'cross/loans/repayments': 1.5,
'cross/repayments': 1.5,
},
'patch': {
'loans/{loan_id}': 1.5,
'loan_records/{loan_record_id}': 1.5,
},
'delete': {
'loans/{loan_id}': 1.5,
},
},
'futures': {
'get': {
'{settle}/accounts': 1.5,
'{settle}/account_book': 1.5,
'{settle}/positions': 1.5,
'{settle}/positions/{contract}': 1.5,
'{settle}/orders': 1.5,
'{settle}/orders/{order_id}': 1.5,
'{settle}/my_trades': 1.5,
'{settle}/position_close': 1.5,
'{settle}/liquidates': 1.5,
'{settle}/price_orders': 1.5,
'{settle}/price_orders/{order_id}': 1.5,
'{settle}/dual_comp/positions/{contract}': 1.5,
},
'post': {
'{settle}/positions/{contract}/margin': 1.5,
'{settle}/positions/{contract}/leverage': 1.5,
'{settle}/positions/{contract}/risk_limit': 1.5,
'{settle}/dual_mode': 1.5,
'{settle}/dual_comp/positions/{contract}': 1.5,
'{settle}/dual_comp/positions/{contract}/margin': 1.5,
'{settle}/dual_comp/positions/{contract}/leverage': 1.5,
'{settle}/dual_comp/positions/{contract}/risk_limit': 1.5,
'{settle}/orders': 1.5,
'{settle}/price_orders': 1.5,
},
'delete': {
'{settle}/orders': 1.5,
'{settle}/orders/{order_id}': 1.5,
'{settle}/price_orders': 1.5,
'{settle}/price_orders/{order_id}': 1.5,
},
},
'delivery': {
'get': {
'{settle}/accounts': 1.5,
'{settle}/account_book': 1.5,
'{settle}/positions': 1.5,
'{settle}/positions/{contract}': 1.5,
'{settle}/orders': 1.5,
'{settle}/orders/{order_id}': 1.5,
'{settle}/my_trades': 1.5,
'{settle}/position_close': 1.5,
'{settle}/liquidates': 1.5,
'{settle}/price_orders': 1.5,
'{settle}/price_orders/{order_id}': 1.5,
'{settle}/settlements': 1.5,
},
'post': {
'{settle}/positions/{contract}/margin': 1.5,
'{settle}/positions/{contract}/leverage': 1.5,
'{settle}/positions/{contract}/risk_limit': 1.5,
'{settle}/orders': 1.5,
'{settle}/price_orders': 1.5,
},
'delete': {
'{settle}/orders': 1.5,
'{settle}/orders/{order_id}': 1.5,
'{settle}/price_orders': 1.5,
'{settle}/price_orders/{order_id}': 1.5,
},
},
'options': {
'get': {
'accounts': 1.5,
'account_book': 1.5,
'positions': 1.5,
'positions/{contract}': 1.5,
'position_close': 1.5,
'orders': 1.5,
'orders/{order_id}': 1.5,
'my_trades': 1.5,
},
'post': {
'orders': 1.5,
},
'delete': {
'orders': 1.5,
'orders/{order_id}': 1.5,
},
},
},
},
'timeframes': {
'10s': '10s',
'1m': '1m',
'5m': '5m',
'15m': '15m',
'30m': '30m',
'1h': '1h',
'4h': '4h',
'8h': '8h',
'1d': '1d',
'7d': '7d',
'1w': '7d',
},
# copied from gateiov2
'commonCurrencies': {
'88MPH': 'MPH',
'AXIS': 'Axis DeFi',
'BIFI': 'Bitcoin File',
'BOX': 'DefiBox',
'BTCBEAR': 'BEAR',
'BTCBULL': 'BULL',
'BYN': 'BeyondFi',
'EGG': 'Goose Finance',
'GTC': 'Game.com', # conflict with Gitcoin and Gastrocoin
'GTC_HT': 'Game.com HT',
'GTC_BSC': 'Game.com BSC',
'HIT': 'HitChain',
'MM': 'Million', # conflict with MilliMeter
'MPH': 'Morpher', # conflict with 88MPH
'RAI': 'Rai Reflex Index', # conflict with RAI Finance
'SBTC': 'Super Bitcoin',
'TNC': 'Trinity Network Credit',
'TON': 'TONToken',
'VAI': 'VAIOT',
},
'requiredCredentials': {
'apiKey': True,
'secret': True,
},
'headers': {
'X-Gate-Channel-Id': 'ccxt',
},
'options': {
'createOrder': {
'expiration': 86400, # for conditional orders
},
'networks': {
'TRC20': 'TRX',
'ERC20': 'ETH',
'BEP20': 'BSC',
},
'accountsByType': {
'funding': 'spot',
'spot': 'spot',
'margin': 'margin',
'cross_margin': 'cross_margin',
'cross': 'cross_margin',
'isolated': 'margin',
'swap': 'futures',
'future': 'delivery',
'futures': 'futures',
'delivery': 'delivery',
},
'defaultType': 'spot',
'swap': {
'fetchMarkets': {
'settlementCurrencies': ['usdt', 'btc'],
},
},
'future': {
'fetchMarkets': {
'settlementCurrencies': ['usdt', 'btc'],
},
},
},
'precisionMode': TICK_SIZE,
'fees': {
'trading': {
'tierBased': True,
'feeSide': 'get',
'percentage': True,
'maker': self.parse_number('0.002'),
'taker': self.parse_number('0.002'),
'tiers': {
# volume is in BTC
'maker': [
[self.parse_number('0'), self.parse_number('0.002')],
[self.parse_number('1.5'), self.parse_number('0.00185')],
[self.parse_number('3'), self.parse_number('0.00175')],
[self.parse_number('6'), self.parse_number('0.00165')],
[self.parse_number('12.5'), self.parse_number('0.00155')],
[self.parse_number('25'), self.parse_number('0.00145')],
[self.parse_number('75'), self.parse_number('0.00135')],
[self.parse_number('200'), self.parse_number('0.00125')],
[self.parse_number('500'), self.parse_number('0.00115')],
[self.parse_number('1250'), self.parse_number('0.00105')],
[self.parse_number('2500'), self.parse_number('0.00095')],
[self.parse_number('3000'), self.parse_number('0.00085')],
[self.parse_number('6000'), self.parse_number('0.00075')],
[self.parse_number('11000'), self.parse_number('0.00065')],
[self.parse_number('20000'), self.parse_number('0.00055')],
[self.parse_number('40000'), self.parse_number('0.00055')],
[self.parse_number('75000'), self.parse_number('0.00055')],
],
'taker': [
[self.parse_number('0'), self.parse_number('0.002')],
[self.parse_number('1.5'), self.parse_number('0.00195')],
[self.parse_number('3'), self.parse_number('0.00185')],
[self.parse_number('6'), self.parse_number('0.00175')],
[self.parse_number('12.5'), self.parse_number('0.00165')],
[self.parse_number('25'), self.parse_number('0.00155')],
[self.parse_number('75'), self.parse_number('0.00145')],
[self.parse_number('200'), self.parse_number('0.00135')],
[self.parse_number('500'), self.parse_number('0.00125')],
[self.parse_number('1250'), self.parse_number('0.00115')],
[self.parse_number('2500'), self.parse_number('0.00105')],
[self.parse_number('3000'), self.parse_number('0.00095')],
[self.parse_number('6000'), self.parse_number('0.00085')],
[self.parse_number('11000'), self.parse_number('0.00075')],
[self.parse_number('20000'), self.parse_number('0.00065')],
[self.parse_number('40000'), self.parse_number('0.00065')],
[self.parse_number('75000'), self.parse_number('0.00065')],
],
},
},
'swap': {
'tierBased': True,
'feeSide': 'base',
'percentage': True,
'maker': self.parse_number('0.0'),
'taker': self.parse_number('0.0005'),
'tiers': {
'maker': [
[self.parse_number('0'), self.parse_number('0.0000')],
[self.parse_number('1.5'), self.parse_number('-0.00005')],
[self.parse_number('3'), self.parse_number('-0.00005')],
[self.parse_number('6'), self.parse_number('-0.00005')],
[self.parse_number('12.5'), self.parse_number('-0.00005')],
[self.parse_number('25'), self.parse_number('-0.00005')],
[self.parse_number('75'), self.parse_number('-0.00005')],
[self.parse_number('200'), self.parse_number('-0.00005')],
[self.parse_number('500'), self.parse_number('-0.00005')],
[self.parse_number('1250'), self.parse_number('-0.00005')],
[self.parse_number('2500'), self.parse_number('-0.00005')],
[self.parse_number('3000'), self.parse_number('-0.00008')],
[self.parse_number('6000'), self.parse_number('-0.01000')],
[self.parse_number('11000'), self.parse_number('-0.01002')],
[self.parse_number('20000'), self.parse_number('-0.01005')],
[self.parse_number('40000'), self.parse_number('-0.02000')],
[self.parse_number('75000'), self.parse_number('-0.02005')],
],
'taker': [
[self.parse_number('0'), self.parse_number('0.00050')],
[self.parse_number('1.5'), self.parse_number('0.00048')],
[self.parse_number('3'), self.parse_number('0.00046')],
[self.parse_number('6'), self.parse_number('0.00044')],
[self.parse_number('12.5'), self.parse_number('0.00042')],
[self.parse_number('25'), self.parse_number('0.00040')],
[self.parse_number('75'), self.parse_number('0.00038')],
[self.parse_number('200'), self.parse_number('0.00036')],
[self.parse_number('500'), self.parse_number('0.00034')],
[self.parse_number('1250'), self.parse_number('0.00032')],
[self.parse_number('2500'), self.parse_number('0.00030')],
[self.parse_number('3000'), self.parse_number('0.00030')],
[self.parse_number('6000'), self.parse_number('0.00030')],
[self.parse_number('11000'), self.parse_number('0.00030')],
[self.parse_number('20000'), self.parse_number('0.00030')],
[self.parse_number('40000'), self.parse_number('0.00030')],
[self.parse_number('75000'), self.parse_number('0.00030')],
],
},
},
},
# https://www.gate.io/docs/apiv4/en/index.html#label-list
'exceptions': {
'exact': {
'INVALID_PARAM_VALUE': BadRequest,
'INVALID_PROTOCOL': BadRequest,
'INVALID_ARGUMENT': BadRequest,
'INVALID_REQUEST_BODY': BadRequest,
'MISSING_REQUIRED_PARAM': ArgumentsRequired,
'BAD_REQUEST': BadRequest,
'INVALID_CONTENT_TYPE': BadRequest,
'NOT_ACCEPTABLE': BadRequest,
'METHOD_NOT_ALLOWED': BadRequest,
'NOT_FOUND': ExchangeError,
'INVALID_CREDENTIALS': AuthenticationError,
'INVALID_KEY': AuthenticationError,
'IP_FORBIDDEN': AuthenticationError,
'READ_ONLY': PermissionDenied,
'INVALID_SIGNATURE': AuthenticationError,
'MISSING_REQUIRED_HEADER': AuthenticationError,
'REQUEST_EXPIRED': AuthenticationError,
'ACCOUNT_LOCKED': AccountSuspended,
'FORBIDDEN': PermissionDenied,
'SUB_ACCOUNT_NOT_FOUND': ExchangeError,
'SUB_ACCOUNT_LOCKED': AccountSuspended,
'MARGIN_BALANCE_EXCEPTION': ExchangeError,
'MARGIN_TRANSFER_FAILED': ExchangeError,
'TOO_MUCH_FUTURES_AVAILABLE': ExchangeError,
'FUTURES_BALANCE_NOT_ENOUGH': InsufficientFunds,
'ACCOUNT_EXCEPTION': ExchangeError,
'SUB_ACCOUNT_TRANSFER_FAILED': ExchangeError,
'ADDRESS_NOT_USED': ExchangeError,
'TOO_FAST': RateLimitExceeded,
'WITHDRAWAL_OVER_LIMIT': ExchangeError,
'API_WITHDRAW_DISABLED': ExchangeNotAvailable,
'INVALID_WITHDRAW_ID': ExchangeError,
'INVALID_WITHDRAW_CANCEL_STATUS': ExchangeError,
'INVALID_PRECISION': InvalidOrder,
'INVALID_CURRENCY': BadSymbol,
'INVALID_CURRENCY_PAIR': BadSymbol,
'POC_FILL_IMMEDIATELY': ExchangeError,
'ORDER_NOT_FOUND': OrderNotFound,
'CLIENT_ID_NOT_FOUND': OrderNotFound,
'ORDER_CLOSED': InvalidOrder,
'ORDER_CANCELLED': InvalidOrder,
'QUANTITY_NOT_ENOUGH': InvalidOrder,
'BALANCE_NOT_ENOUGH': InsufficientFunds,
'MARGIN_NOT_SUPPORTED': InvalidOrder,
'MARGIN_BALANCE_NOT_ENOUGH': InsufficientFunds,
'AMOUNT_TOO_LITTLE': InvalidOrder,
'AMOUNT_TOO_MUCH': InvalidOrder,
'REPEATED_CREATION': InvalidOrder,
'LOAN_NOT_FOUND': OrderNotFound,
'LOAN_RECORD_NOT_FOUND': OrderNotFound,
'NO_MATCHED_LOAN': ExchangeError,
'NOT_MERGEABLE': ExchangeError,
'NO_CHANGE': ExchangeError,
'REPAY_TOO_MUCH': ExchangeError,
'TOO_MANY_CURRENCY_PAIRS': InvalidOrder,
'TOO_MANY_ORDERS': InvalidOrder,
'MIXED_ACCOUNT_TYPE': InvalidOrder,
'AUTO_BORROW_TOO_MUCH': ExchangeError,
'TRADE_RESTRICTED': InsufficientFunds,
'USER_NOT_FOUND': AccountNotEnabled,
'CONTRACT_NO_COUNTER': ExchangeError,
'CONTRACT_NOT_FOUND': BadSymbol,
'RISK_LIMIT_EXCEEDED': ExchangeError,
'INSUFFICIENT_AVAILABLE': InsufficientFunds,
'LIQUIDATE_IMMEDIATELY': InvalidOrder,
'LEVERAGE_TOO_HIGH': InvalidOrder,
'LEVERAGE_TOO_LOW': InvalidOrder,
'ORDER_NOT_OWNED': ExchangeError,
'ORDER_FINISHED': ExchangeError,
'POSITION_CROSS_MARGIN': ExchangeError,
'POSITION_IN_LIQUIDATION': ExchangeError,
'POSITION_IN_CLOSE': ExchangeError,
'POSITION_EMPTY': InvalidOrder,
'REMOVE_TOO_MUCH': ExchangeError,
'RISK_LIMIT_NOT_MULTIPLE': ExchangeError,
'RISK_LIMIT_TOO_HIGH': ExchangeError,
'RISK_LIMIT_TOO_lOW': ExchangeError,
'PRICE_TOO_DEVIATED': InvalidOrder,
'SIZE_TOO_LARGE': InvalidOrder,
'SIZE_TOO_SMALL': InvalidOrder,
'PRICE_OVER_LIQUIDATION': InvalidOrder,
'PRICE_OVER_BANKRUPT': InvalidOrder,
'ORDER_POC_IMMEDIATE': InvalidOrder,
'INCREASE_POSITION': InvalidOrder,
'CONTRACT_IN_DELISTING': ExchangeError,
'INTERNAL': ExchangeNotAvailable,
'SERVER_ERROR': ExchangeNotAvailable,
'TOO_BUSY': ExchangeNotAvailable,
'CROSS_ACCOUNT_NOT_FOUND': ExchangeError,
},
},
'broad': {},
})
async def fetch_markets(self, params={}):
result = []
type, query = self.handle_market_type_and_params('fetchMarkets', None, params)
if type == 'spot' or type == 'margin':
result = await self.fetch_spot_markets(query)
if type == 'swap' or type == 'future':
result = await self.fetch_contract_markets(query) # futures and swaps
if type == 'option':
result = await self.fetch_option_markets(query)
resultLength = len(result)
if resultLength == 0:
raise ExchangeError(self.id + " does not support '" + type + "' type, set exchange.options['defaultType'] to " + "'spot', 'margin', 'swap', 'future' or 'option'") # eslint-disable-line quotes
return result
async def fetch_spot_markets(self, params):
marginResponse = await self.publicMarginGetCurrencyPairs(params)
spotMarketsResponse = await self.publicSpotGetCurrencyPairs(params)
marginMarkets = self.index_by(marginResponse, 'id')
#
# Spot
#
# [
# {
# "id": "QTUM_ETH",
# "base": "QTUM",
# "quote": "ETH",
# "fee": "0.2",
# "min_base_amount": "0.01",
# "min_quote_amount": "0.001",
# "amount_precision": 3,
# "precision": 6,
# "trade_status": "tradable",
# "sell_start": 0,
# "buy_start": 0
# }
# ]
#
# Margin
#
# [
# {
# "id": "ETH_USDT",
# "base": "ETH",
# "quote": "USDT",
# "leverage": 3,
# "min_base_amount": "0.01",
# "min_quote_amount": "100",
# "max_quote_amount": "1000000"
# }
# ]
#
result = []
for i in range(0, len(spotMarketsResponse)):
spotMarket = spotMarketsResponse[i]
id = self.safe_string(spotMarket, 'id')
marginMarket = self.safe_value(marginMarkets, id)
market = self.deep_extend(marginMarket, spotMarket)
baseId, quoteId = id.split('_')
base = self.safe_currency_code(baseId)
quote = self.safe_currency_code(quoteId)
takerPercent = self.safe_string(market, 'fee')
makerPercent = self.safe_string(market, 'maker_fee_rate', takerPercent)
amountPrecisionString = self.safe_string(market, 'amount_precision')
pricePrecisionString = self.safe_string(market, 'precision')
tradeStatus = self.safe_string(market, 'trade_status')
leverage = self.safe_number(market, 'leverage')
defaultMinAmountLimit = self.parse_number(self.parse_precision(amountPrecisionString))
margin = leverage is not None
result.append({
'id': id,
'symbol': base + '/' + quote,
'base': base,
'quote': quote,
'settle': None,
'baseId': baseId,
'quoteId': quoteId,
'settleId': None,
'type': 'spot',
'spot': True,
'margin': margin,
'swap': False,
'future': False,
'option': False,
'active': (tradeStatus == 'tradable'),
'contract': False,
'linear': None,
'inverse': None,
# Fee is in %, so divide by 100
'taker': self.parse_number(Precise.string_div(takerPercent, '100')),
'maker': self.parse_number(Precise.string_div(makerPercent, '100')),
'contractSize': None,
'expiry': None,
'expiryDatetime': None,
'strike': None,
'optionType': None,
'precision': {
'amount': self.parse_number(self.parse_precision(amountPrecisionString)),
'price': self.parse_number(self.parse_precision(pricePrecisionString)),
},
'limits': {
'leverage': {
'min': self.parse_number('1'),
'max': self.safe_number(market, 'leverage', 1),
},
'amount': {
'min': self.safe_number(spotMarket, 'min_base_amount', defaultMinAmountLimit),
'max': None,
},
'price': {
'min': None,
'max': None,
},
'cost': {
'min': self.safe_number(market, 'min_quote_amount'),
'max': self.safe_number(market, 'max_quote_amount'),
},
},
'info': market,
})
return result
async def fetch_contract_markets(self, params):
result = []
swapSettlementCurrencies = self.get_settlement_currencies('swap', 'fetchMarkets')
futureSettlementCurrencies = self.get_settlement_currencies('future', 'fetchMarkets')
for c in range(0, len(swapSettlementCurrencies)):
settleId = swapSettlementCurrencies[c]
query = params
query['settle'] = settleId
response = await self.publicFuturesGetSettleContracts(query)
for i in range(0, len(response)):
parsedMarket = self.parse_contract_market(response[i], settleId)
result.append(parsedMarket)
for c in range(0, len(futureSettlementCurrencies)):
settleId = futureSettlementCurrencies[c]
query = params
query['settle'] = settleId
response = await self.publicDeliveryGetSettleContracts(query)
for i in range(0, len(response)):
parsedMarket = self.parse_contract_market(response[i], settleId)
result.append(parsedMarket)
return result
def parse_contract_market(self, market, settleId):
#
# Perpetual swap
#
# {
# "name": "BTC_USDT",
# "type": "direct",
# "quanto_multiplier": "0.0001",
# "ref_discount_rate": "0",
# "order_price_deviate": "0.5",
# "maintenance_rate": "0.005",
# "mark_type": "index",
# "last_price": "38026",
# "mark_price": "37985.6",
# "index_price": "37954.92",
# "funding_rate_indicative": "0.000219",
# "mark_price_round": "0.01",
# "funding_offset": 0,
# "in_delisting": False,
# "risk_limit_base": "1000000",
# "interest_rate": "0.0003",
# "order_price_round": "0.1",
# "order_size_min": 1,
# "ref_rebate_rate": "0.2",
# "funding_interval": 28800,
# "risk_limit_step": "1000000",
# "leverage_min": "1",
# "leverage_max": "100",
# "risk_limit_max": "8000000",
# "maker_fee_rate": "-0.00025",
# "taker_fee_rate": "0.00075",
# "funding_rate": "0.002053",
# "order_size_max": 1000000,
# "funding_next_apply": 1610035200,
# "short_users": 977,
# "config_change_time": 1609899548,
# "trade_size": 28530850594,
# "position_size": 5223816,
# "long_users": 455,
# "funding_impact_value": "60000",
# "orders_limit": 50,
# "trade_id": 10851092,
# "orderbook_id": 2129638396
# }
#
# Delivery Futures
#
# {
# "name": "BTC_USDT_20200814",
# "underlying": "BTC_USDT",
# "cycle": "WEEKLY",
# "type": "direct",
# "quanto_multiplier": "0.0001",
# "mark_type": "index",
# "last_price": "9017",
# "mark_price": "9019",
# "index_price": "9005.3",
# "basis_rate": "0.185095",
# "basis_value": "13.7",
# "basis_impact_value": "100000",
# "settle_price": "0",
# "settle_price_interval": 60,
# "settle_price_duration": 1800,
# "settle_fee_rate": "0.0015",
# "expire_time": 1593763200,
# "order_price_round": "0.1",
# "mark_price_round": "0.1",
# "leverage_min": "1",
# "leverage_max": "100",
# "maintenance_rate": "1000000",
# "risk_limit_base": "140.726652109199",
# "risk_limit_step": "1000000",
# "risk_limit_max": "8000000",
# "maker_fee_rate": "-0.00025",
# "taker_fee_rate": "0.00075",
# "ref_discount_rate": "0",
# "ref_rebate_rate": "0.2",
# "order_price_deviate": "0.5",
# "order_size_min": 1,
# "order_size_max": 1000000,
# "orders_limit": 50,
# "orderbook_id": 63,
# "trade_id": 26,
# "trade_size": 435,
# "position_size": 130,
# "config_change_time": 1593158867,
# "in_delisting": False
# }
#
id = self.safe_string(market, 'name')
parts = id.split('_')
baseId = self.safe_string(parts, 0)
quoteId = self.safe_string(parts, 1)
date = self.safe_string(parts, 2)
base = self.safe_currency_code(baseId)
quote = self.safe_currency_code(quoteId)
settle = self.safe_currency_code(settleId)
expiry = self.safe_timestamp(market, 'expire_time')
symbol = ''
marketType = 'swap'
if date is not None:
symbol = base + '/' + quote + ':' + settle + '-' + self.yymmdd(expiry, '')
marketType = 'future'
else:
symbol = base + '/' + quote + ':' + settle
priceDeviate = self.safe_string(market, 'order_price_deviate')
markPrice = self.safe_string(market, 'mark_price')
minMultiplier = Precise.string_sub('1', priceDeviate)
maxMultiplier = Precise.string_add('1', priceDeviate)
minPrice = Precise.string_mul(minMultiplier, markPrice)
maxPrice = Precise.string_mul(maxMultiplier, markPrice)
takerPercent = self.safe_string(market, 'taker_fee_rate')
makerPercent = self.safe_string(market, 'maker_fee_rate', takerPercent)
isLinear = quote == settle
return {
'id': id,
'symbol': symbol,
'base': base,
'quote': quote,
'settle': settle,
'baseId': baseId,
'quoteId': quoteId,
'settleId': settleId,
'type': marketType,
'spot': False,
'margin': False,
'swap': marketType == 'swap',
'future': marketType == 'future',
'option': marketType == 'option',
'active': True,
'contract': True,
'linear': isLinear,
'inverse': not isLinear,
'taker': self.parse_number(Precise.string_div(takerPercent, '100')), # Fee is in %, so divide by 100
'maker': self.parse_number(Precise.string_div(makerPercent, '100')),
'contractSize': self.safe_number(market, 'quanto_multiplier'),
'expiry': expiry,
'expiryDatetime': self.iso8601(expiry),
'strike': None,
'optionType': None,
'precision': {
'amount': self.parse_number('1'),
'price': self.safe_number(market, 'order_price_round'),
},
'limits': {
'leverage': {
'min': self.safe_number(market, 'leverage_min'),
'max': self.safe_number(market, 'leverage_max'),
},
'amount': {
'min': self.safe_number(market, 'order_size_min'),
'max': self.safe_number(market, 'order_size_max'),
},
'price': {
'min': self.parse_number(minPrice),
'max': self.parse_number(maxPrice),
},
'cost': {
'min': None,
'max': None,
},
},
'info': market,
}
async def fetch_option_markets(self, params={}):
result = []
underlyings = await self.fetch_option_underlyings()
for i in range(0, len(underlyings)):
underlying = underlyings[i]
query = params
query['underlying'] = underlying
response = await self.publicOptionsGetContracts(query)
#
# [
# {
# "orders_limit": "50",
# "order_size_max": "100000",
# "mark_price_round": "0.1",
# "order_size_min": "1",
# "position_limit": "1000000",
# "orderbook_id": "575967",
# "order_price_deviate": "0.9",
# "is_call": True, # True means Call False means Put
# "last_price": "93.9",
# "bid1_size": "0",
# "bid1_price": "0",
# "taker_fee_rate": "0.0004",
# "underlying": "BTC_USDT",
# "create_time": "1646381188",
# "price_limit_fee_rate": "0.1",
# "maker_fee_rate": "0.0004",
# "trade_id": "727",
# "order_price_round": "0.1",
# "settle_fee_rate": "0.0001",
# "trade_size": "1982",
# "ref_rebate_rate": "0",
# "name": "BTC_USDT-20220311-44000-C",
# "underlying_price": "39194.26",
# "strike_price": "44000",
# "multiplier": "0.0001",
# "ask1_price": "0",
# "ref_discount_rate": "0",
# "expiration_time": "1646985600",
# "mark_price": "12.15",
# "position_size": "4",
# "ask1_size": "0",
# "tag": "WEEK"
# }
# ]
#
for i in range(0, len(response)):
market = response[i]
id = self.safe_string(market, 'name')
parts = underlying.split('_')
baseId = self.safe_string(parts, 0)
quoteId = self.safe_string(parts, 1)
base = self.safe_currency_code(baseId)
quote = self.safe_currency_code(quoteId)
symbol = base + '/' + quote
expiry = self.safe_timestamp(market, 'expiration_time')
strike = self.safe_string(market, 'strike_price')
isCall = self.safe_value(market, 'is_call')
optionLetter = 'C' if isCall else 'P'
optionType = 'call' if isCall else 'put'
symbol = symbol + ':' + quote + '-' + self.yymmdd(expiry) + ':' + strike + ':' + optionLetter
priceDeviate = self.safe_string(market, 'order_price_deviate')
markPrice = self.safe_string(market, 'mark_price')
minMultiplier = Precise.string_sub('1', priceDeviate)
maxMultiplier = Precise.string_add('1', priceDeviate)
minPrice = Precise.string_mul(minMultiplier, markPrice)
maxPrice = Precise.string_mul(maxMultiplier, markPrice)
takerPercent = self.safe_string(market, 'taker_fee_rate')
makerPercent = self.safe_string(market, 'maker_fee_rate', takerPercent)
result.append({
'id': id,
'symbol': symbol,
'base': base,
'quote': quote,
'settle': quote,
'baseId': baseId,
'quoteId': quoteId,
'settleId': quoteId,
'type': 'option',
'spot': False,
'margin': False,
'swap': False,
'future': False,
'option': True,
'active': True,
'contract': True,
'linear': True,
'inverse': False,
'taker': self.parse_number(Precise.string_div(takerPercent, '100')), # Fee is in %, so divide by 100
'maker': self.parse_number(Precise.string_div(makerPercent, '100')),
'contractSize': self.parse_number('1'),
'expiry': expiry,
'expiryDatetime': self.iso8601(expiry),
'strike': strike,
'optionType': optionType,
'precision': {
'amount': self.parse_number('1'),
'price': self.safe_number(market, 'order_price_round'),
},
'limits': {
'leverage': {
'min': None,
'max': None,
},
'amount': {
'min': self.safe_number(market, 'order_size_min'),
'max': self.safe_number(market, 'order_size_max'),
},
'price': {
'min': self.parse_number(minPrice),
'max': self.parse_number(maxPrice),
},
'cost': {
'min': None,
'max': None,
},
},
'info': market,
})
return result
async def fetch_option_underlyings(self):
underlyingsResponse = await self.publicOptionsGetUnderlyings()
#
# [
# {
# "index_time": "1646915796",
# "name": "BTC_USDT",
# "index_price": "39142.73"
# }
# ]
#
underlyings = []
for i in range(0, len(underlyingsResponse)):
underlying = underlyingsResponse[i]
name = self.safe_string(underlying, 'name')
if name is not None:
underlyings.append(name)
return underlyings
def prepare_request(self, market=None, type=None, params={}):
"""
* @ignore
Fills request params contract, settle, currency_pair, market and account where applicable
:param dict market: CCXT market, required when type is None
:param str type: 'spot', 'swap', or 'future', required when market is None
:param dict params: request parameters
:returns: the api request object, and the new params object with non-needed parameters removed
"""
# * Do not call for multi spot order methods like cancelAllOrders and fetchOpenOrders. Use multiOrderSpotPrepareRequest instead
request = {}
if market is not None:
if market['contract']:
request['contract'] = market['id']
request['settle'] = market['settleId']
else:
request['currency_pair'] = market['id']
else:
swap = type == 'swap'
future = type == 'future'
if swap or future:
defaultSettle = 'usdt' if swap else 'btc'
settle = self.safe_string_lower(params, 'settle', defaultSettle)
params = self.omit(params, 'settle')
request['settle'] = settle
return [request, params]
def spot_order_prepare_request(self, market=None, stop=False, params={}):
"""
* @ignore
Fills request params currency_pair, market and account where applicable for spot order methods like fetchOpenOrders, cancelAllOrders
:param dict market: CCXT market
:param bool stop: True if for a stop order
:param dict params: request parameters
:returns: the api request object, and the new params object with non-needed parameters removed
"""
marginMode, query = self.get_margin_mode(stop, params)
request = {}
if not stop:
if market is None:
raise ArgumentsRequired(self.id + ' spotOrderPrepareRequest() requires a market argument for non-stop orders')
request['account'] = marginMode
request['currency_pair'] = market['id'] # Should always be set for non-stop
return [request, query]
def multi_order_spot_prepare_request(self, market=None, stop=False, params={}):
"""
* @ignore
Fills request params currency_pair, market and account where applicable for spot order methods like fetchOpenOrders, cancelAllOrders
:param dict market: CCXT market
:param bool stop: True if for a stop order
:param dict params: request parameters
:returns: the api request object, and the new params object with non-needed parameters removed
"""
marginMode, query = self.get_margin_mode(stop, params)
request = {
'account': marginMode,
}
if market is not None:
if stop:
# gateio spot and margin stop orders use the term market instead of currency_pair, and normal instead of spot. Neither parameter is used when fetching/cancelling a single order. They are used for creating a single stop order, but createOrder does not call self method
request['market'] = market['id']
else:
request['currency_pair'] = market['id']
return [request, query]
def get_margin_mode(self, stop, params):
"""
* @ignore
Gets the margin type for self api call
:param bool stop: True if for a stop order
:param dict params: Request params
:returns: The marginMode and the updated request params with marginMode removed, marginMode value is the value that can be read by the "account" property specified in gateios api docs
"""
defaultMarginMode = self.safe_string_lower_2(self.options, 'defaultMarginMode', 'marginMode', 'spot') # 'margin' is isolated margin on gateio's api
marginMode = self.safe_string_lower_2(params, 'marginMode', 'account', defaultMarginMode)
params = self.omit(params, ['marginMode', 'account'])
if marginMode == 'cross':
marginMode = 'cross_margin'
elif marginMode == 'isolated':
marginMode = 'margin'
elif marginMode == '':
marginMode = 'spot'
if stop:
if marginMode == 'spot':
# gateio spot stop orders use the term normal instead of spot
marginMode = 'normal'
if marginMode == 'cross_margin':
raise BadRequest(self.id + ' getMarginMode() does not support stop orders for cross margin')
return [marginMode, params]
def get_settlement_currencies(self, type, method):
options = self.safe_value(self.options, type, {}) # ['BTC', 'USDT'] unified codes
fetchMarketsContractOptions = self.safe_value(options, method, {})
defaultSettle = ['usdt'] if (type == 'swap') else ['btc']
return self.safe_value(fetchMarketsContractOptions, 'settlementCurrencies', defaultSettle)
    async def fetch_currencies(self, params={}):
        """
        Fetches all currencies supported by the exchange, keyed by unified code.
        :param dict params: extra parameters for publicSpotGetCurrencies
        :returns: a dict of unified currency structures, or None when running
                  against the sandbox(which has no spot currencies endpoint)
        """
        # sandbox/testnet only supports future markets
        apiBackup = self.safe_value(self.urls, 'apiBackup')
        if apiBackup is not None:
            return None
        response = await self.publicSpotGetCurrencies(params)
        #
        # {
        #     "currency": "BCN",
        #     "delisted": False,
        #     "withdraw_disabled": True,
        #     "withdraw_delayed": False,
        #     "deposit_disabled": True,
        #     "trade_disabled": False
        # }
        #
        result = {}
        # TODO: remove magic constants
        amountPrecision = self.parse_number('1e-6')
        for i in range(0, len(response)):
            entry = response[i]
            currencyId = self.safe_string(entry, 'currency')
            currencyIdLower = self.safe_string_lower(entry, 'currency')
            code = self.safe_currency_code(currencyId)
            delisted = self.safe_value(entry, 'delisted')
            # the response carries disabled flags; invert them into enabled flags
            withdrawDisabled = self.safe_value(entry, 'withdraw_disabled', False)
            depositDisabled = self.safe_value(entry, 'deposit_disabled', False)
            tradeDisabled = self.safe_value(entry, 'trade_disabled', False)
            withdrawEnabled = not withdrawDisabled
            depositEnabled = not depositDisabled
            tradeEnabled = not tradeDisabled
            listed = not delisted
            # active only when listed and every operation is enabled
            active = listed and tradeEnabled and withdrawEnabled and depositEnabled
            result[code] = {
                'id': currencyId,
                'lowerCaseId': currencyIdLower,
                'name': None,
                'code': code,
                'precision': amountPrecision,
                'info': entry,
                'active': active,
                'deposit': depositEnabled,
                'withdraw': withdrawEnabled,
                'fee': None,
                'fees': [],
                'limits': self.limits,
            }
        return result
async def fetch_funding_rate(self, symbol, params={}):
await self.load_markets()
market = self.market(symbol)
if not market['swap']:
raise BadSymbol(self.id + ' fetchFundingRate() supports swap contracts only')
request, query = self.prepare_request(market, None, params)
response = await self.publicFuturesGetSettleContractsContract(self.extend(request, query))
#
# [
# {
# "name": "BTC_USDT",
# "type": "direct",
# "quanto_multiplier": "0.0001",
# "ref_discount_rate": "0",
# "order_price_deviate": "0.5",
# "maintenance_rate": "0.005",
# "mark_type": "index",
# "last_price": "38026",
# "mark_price": "37985.6",
# "index_price": "37954.92",
# "funding_rate_indicative": "0.000219",
# "mark_price_round": "0.01",
# "funding_offset": 0,
# "in_delisting": False,
# "risk_limit_base": "1000000",
# "interest_rate": "0.0003",
# "order_price_round": "0.1",
# "order_size_min": 1,
# "ref_rebate_rate": "0.2",
# "funding_interval": 28800,
# "risk_limit_step": "1000000",
# "leverage_min": "1",
# "leverage_max": "100",
# "risk_limit_max": "8000000",
# "maker_fee_rate": "-0.00025",
# "taker_fee_rate": "0.00075",
# "funding_rate": "0.002053",
# "order_size_max": 1000000,
# "funding_next_apply": 1610035200,
# "short_users": 977,
# "config_change_time": 1609899548,
# "trade_size": 28530850594,
# "position_size": 5223816,
# "long_users": 455,
# "funding_impact_value": "60000",
# "orders_limit": 50,
# "trade_id": 10851092,
# "orderbook_id": 2129638396
# }
# ]
#
return self.parse_funding_rate(response)
async def fetch_funding_rates(self, symbols=None, params={}):
await self.load_markets()
request, query = self.prepare_request(None, 'swap', params)
response = await self.publicFuturesGetSettleContracts(self.extend(request, query))
#
# [
# {
# "name": "BTC_USDT",
# "type": "direct",
# "quanto_multiplier": "0.0001",
# "ref_discount_rate": "0",
# "order_price_deviate": "0.5",
# "maintenance_rate": "0.005",
# "mark_type": "index",
# "last_price": "38026",
# "mark_price": "37985.6",
# "index_price": "37954.92",
# "funding_rate_indicative": "0.000219",
# "mark_price_round": "0.01",
# "funding_offset": 0,
# "in_delisting": False,
# "risk_limit_base": "1000000",
# "interest_rate": "0.0003",
# "order_price_round": "0.1",
# "order_size_min": 1,
# "ref_rebate_rate": "0.2",
# "funding_interval": 28800,
# "risk_limit_step": "1000000",
# "leverage_min": "1",
# "leverage_max": "100",
# "risk_limit_max": "8000000",
# "maker_fee_rate": "-0.00025",
# "taker_fee_rate": "0.00075",
# "funding_rate": "0.002053",
# "order_size_max": 1000000,
# "funding_next_apply": 1610035200,
# "short_users": 977,
# "config_change_time": 1609899548,
# "trade_size": 28530850594,
# "position_size": 5223816,
# "long_users": 455,
# "funding_impact_value": "60000",
# "orders_limit": 50,
# "trade_id": 10851092,
# "orderbook_id": 2129638396
# }
# ]
#
result = self.parse_funding_rates(response)
return self.filter_by_array(result, 'symbol', symbols)
def parse_funding_rate(self, contract, market=None):
#
# {
# "name": "BTC_USDT",
# "type": "direct",
# "quanto_multiplier": "0.0001",
# "ref_discount_rate": "0",
# "order_price_deviate": "0.5",
# "maintenance_rate": "0.005",
# "mark_type": "index",
# "last_price": "38026",
# "mark_price": "37985.6",
# "index_price": "37954.92",
# "funding_rate_indicative": "0.000219",
# "mark_price_round": "0.01",
# "funding_offset": 0,
# "in_delisting": False,
# "risk_limit_base": "1000000",
# "interest_rate": "0.0003",
# "order_price_round": "0.1",
# "order_size_min": 1,
# "ref_rebate_rate": "0.2",
# "funding_interval": 28800,
# "risk_limit_step": "1000000",
# "leverage_min": "1",
# "leverage_max": "100",
# "risk_limit_max": "8000000",
# "maker_fee_rate": "-0.00025",
# "taker_fee_rate": "0.00075",
# "funding_rate": "0.002053",
# "order_size_max": 1000000,
# "funding_next_apply": 1610035200,
# "short_users": 977,
# "config_change_time": 1609899548,
# "trade_size": 28530850594,
# "position_size": 5223816,
# "long_users": 455,
# "funding_impact_value": "60000",
# "orders_limit": 50,
# "trade_id": 10851092,
# "orderbook_id": 2129638396
# }
#
marketId = self.safe_string(contract, 'name')
symbol = self.safe_symbol(marketId, market)
markPrice = self.safe_number(contract, 'mark_price')
indexPrice = self.safe_number(contract, 'index_price')
interestRate = self.safe_number(contract, 'interest_rate')
fundingRate = self.safe_number(contract, 'funding_rate')
fundingTime = self.safe_integer(contract, 'funding_next_apply') * 1000
fundingRateIndicative = self.safe_number(contract, 'funding_rate_indicative')
return {
'info': contract,
'symbol': symbol,
'markPrice': markPrice,
'indexPrice': indexPrice,
'interestRate': interestRate,
'estimatedSettlePrice': None,
'timestamp': None,
'datetime': None,
'fundingRate': fundingRate,
'fundingTimestamp': fundingTime,
'fundingDatetime': self.iso8601(fundingTime),
'nextFundingRate': fundingRateIndicative,
'nextFundingTimestamp': None,
'nextFundingDatetime': None,
'previousFundingRate': None,
'previousFundingTimestamp': None,
'previousFundingDatetime': None,
}
async def fetch_network_deposit_address(self, code, params={}):
await self.load_markets()
currency = self.currency(code)
request = {
'currency': currency['id'],
}
response = await self.privateWalletGetDepositAddress(self.extend(request, params))
addresses = self.safe_value(response, 'multichain_addresses')
currencyId = self.safe_string(response, 'currency')
code = self.safe_currency_code(currencyId)
result = {}
for i in range(0, len(addresses)):
entry = addresses[i]
#
# {
# "chain": "ETH",
# "address": "0x359a697945E79C7e17b634675BD73B33324E9408",
# "payment_id": "",
# "payment_name": "",
# "obtain_failed": "0"
# }
#
obtainFailed = self.safe_integer(entry, 'obtain_failed')
if obtainFailed:
continue
network = self.safe_string(entry, 'chain')
address = self.safe_string(entry, 'address')
tag = self.safe_string(entry, 'payment_id')
tagLength = len(tag)
tag = tag if tagLength else None
result[network] = {
'info': entry,
'code': code,
'address': address,
'tag': tag,
}
return result
async def fetch_deposit_address(self, code, params={}):
await self.load_markets()
currency = self.currency(code)
request = {
'currency': currency['id'],
}
response = await self.privateWalletGetDepositAddress(self.extend(request, params))
#
# {
# "currency": "XRP",
# "address": "rHcFoo6a9qT5NHiVn1THQRhsEGcxtYCV4d 391331007",
# "multichain_addresses": [
# {
# "chain": "XRP",
# "address": "rHcFoo6a9qT5NHiVn1THQRhsEGcxtYCV4d",
# "payment_id": "391331007",
# "payment_name": "Tag",
# "obtain_failed": 0
# }
# ]
# }
#
currencyId = self.safe_string(response, 'currency')
code = self.safe_currency_code(currencyId)
addressField = self.safe_string(response, 'address')
tag = None
address = None
if addressField.find(' ') >= 0:
splitted = addressField.split(' ')
address = splitted[0]
tag = splitted[1]
else:
address = addressField
return {
'info': response,
'code': code,
'address': address,
'tag': tag,
'network': None,
}
async def fetch_trading_fee(self, symbol, params={}):
await self.load_markets()
market = self.market(symbol)
request = {
'currency_pair': market['id'],
}
response = await self.privateWalletGetFee(self.extend(request, params))
#
# {
# "user_id": 1486602,
# "taker_fee": "0.002",
# "maker_fee": "0.002",
# "gt_discount": True,
# "gt_taker_fee": "0.0015",
# "gt_maker_fee": "0.0015",
# "loan_fee": "0.18",
# "point_type": "0",
# "futures_taker_fee": "0.0005",
# "futures_maker_fee": "0"
# }
#
return self.parse_trading_fee(response, market)
async def fetch_trading_fees(self, params={}):
await self.load_markets()
response = await self.privateWalletGetFee(params)
#
# {
# "user_id": 1486602,
# "taker_fee": "0.002",
# "maker_fee": "0.002",
# "gt_discount": True,
# "gt_taker_fee": "0.0015",
# "gt_maker_fee": "0.0015",
# "loan_fee": "0.18",
# "point_type": "0",
# "futures_taker_fee": "0.0005",
# "futures_maker_fee": "0"
# }
#
return self.parse_trading_fees(response)
def parse_trading_fees(self, response):
result = {}
for i in range(0, len(self.symbols)):
symbol = self.symbols[i]
market = self.market(symbol)
result[symbol] = self.parse_trading_fee(response, market)
return result
def parse_trading_fee(self, info, market=None):
#
# {
# "user_id": 1486602,
# "taker_fee": "0.002",
# "maker_fee": "0.002",
# "gt_discount": True,
# "gt_taker_fee": "0.0015",
# "gt_maker_fee": "0.0015",
# "loan_fee": "0.18",
# "point_type": "0",
# "futures_taker_fee": "0.0005",
# "futures_maker_fee": "0"
# }
#
contract = self.safe_value(market, 'contract')
takerKey = 'futures_taker_fee' if contract else 'taker_fee'
makerKey = 'futures_maker_fee' if contract else 'maker_fee'
return {
'info': info,
'symbol': self.safe_string(market, 'symbol'),
'maker': self.safe_number(info, makerKey),
'taker': self.safe_number(info, takerKey),
}
async def fetch_transaction_fees(self, codes=None, params={}):
await self.load_markets()
response = await self.privateWalletGetWithdrawStatus(params)
#
# {
# "currency": "MTN",
# "name": "Medicalchain",
# "name_cn": "Medicalchain",
# "deposit": "0",
# "withdraw_percent": "0%",
# "withdraw_fix": "900",
# "withdraw_day_limit": "500000",
# "withdraw_day_limit_remain": "500000",
# "withdraw_amount_mini": "900.1",
# "withdraw_eachtime_limit": "90000000000",
# "withdraw_fix_on_chains": {
# "ETH": "900"
# }
# }
#
withdrawFees = {}
for i in range(0, len(response)):
entry = response[i]
currencyId = self.safe_string(entry, 'currency')
code = self.safe_currency_code(currencyId)
withdrawFees[code] = {}
withdrawFix = self.safe_value(entry, 'withdraw_fix_on_chains')
if withdrawFix is None:
withdrawFix = {}
withdrawFix[code] = self.safe_number(entry, 'withdraw_fix')
keys = list(withdrawFix.keys())
for i in range(0, len(keys)):
key = keys[i]
withdrawFees[code][key] = self.parse_number(withdrawFix[key])
return {
'info': response,
'withdraw': withdrawFees,
'deposit': {},
}
    async def fetch_funding_history(self, symbol=None, since=None, limit=None, params={}):
        """
        Fetches the account's funding payment history for swap/future markets.
        :param str|None symbol: unified market symbol
        :param int|None since: earliest time in ms to fetch entries for
        :param int|None limit: maximum number of entries to fetch
        :param dict params: extra parameters for the account book endpoint
        :returns: a list of unified funding history structures
        """
        await self.load_markets()
        # defaultType = 'future'
        market = None
        if symbol is not None:
            market = self.market(symbol)
        type, query = self.handle_market_type_and_params('fetchFundingHistory', market, params)
        request, requestParams = self.prepare_request(market, type, query)
        request['type'] = 'fund'  # 'dnw' 'pnl' 'fee' 'refr' 'fund' 'point_dnw' 'point_fee' 'point_refr'
        if since is not None:
            # the API expects seconds
            # NOTE(review): since / 1000 yields a float in Python - confirm the API accepts fractional seconds
            request['from'] = since / 1000
        if limit is not None:
            request['limit'] = limit
        # swap and future account books live on different endpoints
        method = self.get_supported_mapping(type, {
            'swap': 'privateFuturesGetSettleAccountBook',
            'future': 'privateDeliveryGetSettleAccountBook',
        })
        response = await getattr(self, method)(self.extend(request, requestParams))
        #
        # [
        #     {
        #         "time": 1646899200,
        #         "change": "-0.027722",
        #         "balance": "11.653120591841",
        #         "text": "XRP_USDT",
        #         "type": "fund"
        #     },
        #     ...
        # ]
        #
        return self.parse_funding_histories(response, symbol, since, limit)
def parse_funding_histories(self, response, symbol, since, limit):
result = []
for i in range(0, len(response)):
entry = response[i]
funding = self.parse_funding_history(entry)
result.append(funding)
sorted = self.sort_by(result, 'timestamp')
return self.filter_by_symbol_since_limit(sorted, symbol, since, limit)
def parse_funding_history(self, info, market=None):
#
# {
# "time": 1646899200,
# "change": "-0.027722",
# "balance": "11.653120591841",
# "text": "XRP_USDT",
# "type": "fund"
# }
#
timestamp = self.safe_timestamp(info, 'time')
marketId = self.safe_string(info, 'text')
market = self.safe_market(marketId, market)
return {
'info': info,
'symbol': self.safe_string(market, 'symbol'),
'code': self.safe_string(market, 'settle'),
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'id': None,
'amount': self.safe_number(info, 'change'),
}
async def fetch_order_book(self, symbol, limit=None, params={}):
await self.load_markets()
market = self.market(symbol)
#
# request = {
# 'currency_pair': market['id'],
# 'interval': '0', # depth, 0 means no aggregation is applied, default to 0
# 'limit': limit, # maximum number of order depth data in asks or bids
# 'with_id': True, # return order book ID
# }
#
request, query = self.prepare_request(market, None, params)
method = self.get_supported_mapping(market['type'], {
'spot': 'publicSpotGetOrderBook',
'margin': 'publicSpotGetOrderBook',
'swap': 'publicFuturesGetSettleOrderBook',
'future': 'publicDeliveryGetSettleOrderBook',
})
if limit is not None:
request['limit'] = limit # default 10, max 100
request['with_id'] = True
response = await getattr(self, method)(self.extend(request, query))
#
# SPOT
#
# {
# "id": 6358770031
# "current": 1634345973275,
# "update": 1634345973271,
# "asks": [
# ["2.2241","12449.827"],
# ["2.2242","200"],
# ["2.2244","826.931"],
# ["2.2248","3876.107"],
# ["2.225","2377.252"],
# ["2.22509","439.484"],
# ["2.2251","1489.313"],
# ["2.2253","714.582"],
# ["2.2254","1349.784"],
# ["2.2256","234.701"]],
# "bids": [
# ["2.2236","32.465"],
# ["2.2232","243.983"],
# ["2.2231","32.207"],
# ["2.223","449.827"],
# ["2.2228","7.918"],
# ["2.2227","12703.482"],
# ["2.2226","143.033"],
# ["2.2225","143.027"],
# ["2.2224","1369.352"],
# ["2.2223","756.063"]
# ]
# }
#
# Perpetual Swap
#
# {
# "id": 6358770031
# "current": 1634350208.745,
# "asks": [
# {"s": 24909, "p": "61264.8"},
# {"s": 81, "p": "61266.6"},
# {"s": 2000, "p": "61267.6"},
# {"s": 490, "p": "61270.2"},
# {"s": 12, "p": "61270.4"},
# {"s": 11782, "p": "61273.2"},
# {"s": 14666, "p": "61273.3"},
# {"s": 22541, "p": "61273.4"},
# {"s": 33, "p": "61273.6"},
# {"s": 11980, "p": "61274.5"}
# ],
# "bids": [
# {"s": 41844, "p": "61264.7"},
# {"s": 13783, "p": "61263.3"},
# {"s": 1143, "p": "61259.8"},
# {"s": 81, "p": "61258.7"},
# {"s": 2471, "p": "61257.8"},
# {"s": 2471, "p": "61257.7"},
# {"s": 2471, "p": "61256.5"},
# {"s": 3, "p": "61254.2"},
# {"s": 114, "p": "61252.4"},
# {"s": 14372, "p": "61248.6"}
# ],
# "update": 1634350208.724
# }
#
timestamp = self.safe_integer(response, 'current')
if not market['spot']:
timestamp = timestamp * 1000
priceKey = 0 if market['spot'] else 'p'
amountKey = 1 if market['spot'] else 's'
nonce = self.safe_integer(response, 'id')
result = self.parse_order_book(response, symbol, timestamp, 'bids', 'asks', priceKey, amountKey)
result['nonce'] = nonce
return result
async def fetch_ticker(self, symbol, params={}):
await self.load_markets()
market = self.market(symbol)
request, query = self.prepare_request(market, None, params)
method = self.get_supported_mapping(market['type'], {
'spot': 'publicSpotGetTickers',
'margin': 'publicSpotGetTickers',
'swap': 'publicFuturesGetSettleTickers',
'future': 'publicDeliveryGetSettleTickers',
})
response = await getattr(self, method)(self.extend(request, query))
ticker = self.safe_value(response, 0)
return self.parse_ticker(ticker, market)
def parse_ticker(self, ticker, market=None):
#
# SPOT
#
# {
# "currency_pair": "KFC_USDT",
# "last": "7.255",
# "lowest_ask": "7.298",
# "highest_bid": "7.218",
# "change_percentage": "-1.18",
# "base_volume": "1219.053687865",
# "quote_volume": "8807.40299875455",
# "high_24h": "7.262",
# "low_24h": "7.095"
# }
#
# LINEAR/DELIVERY
#
# {
# "contract": "BTC_USDT",
# "last": "6432",
# "low_24h": "6278",
# "high_24h": "6790",
# "change_percentage": "4.43",
# "total_size": "32323904",
# "volume_24h": "184040233284",
# "volume_24h_btc": "28613220",
# "volume_24h_usd": "184040233284",
# "volume_24h_base": "28613220",
# "volume_24h_quote": "184040233284",
# "volume_24h_settle": "28613220",
# "mark_price": "6534",
# "funding_rate": "0.0001",
# "funding_rate_indicative": "0.0001",
# "index_price": "6531"
# }
#
marketId = self.safe_string_2(ticker, 'currency_pair', 'contract')
symbol = self.safe_symbol(marketId, market)
last = self.safe_string(ticker, 'last')
ask = self.safe_string(ticker, 'lowest_ask')
bid = self.safe_string(ticker, 'highest_bid')
high = self.safe_string(ticker, 'high_24h')
low = self.safe_string(ticker, 'low_24h')
baseVolume = self.safe_string_2(ticker, 'base_volume', 'volume_24h_base')
quoteVolume = self.safe_string_2(ticker, 'quote_volume', 'volume_24h_quote')
percentage = self.safe_string(ticker, 'change_percentage')
return self.safe_ticker({
'symbol': symbol,
'timestamp': None,
'datetime': None,
'high': high,
'low': low,
'bid': bid,
'bidVolume': None,
'ask': ask,
'askVolume': None,
'vwap': None,
'open': None,
'close': last,
'last': last,
'previousClose': None,
'change': None,
'percentage': percentage,
'average': None,
'baseVolume': baseVolume,
'quoteVolume': quoteVolume,
'info': ticker,
}, market, False)
async def fetch_tickers(self, symbols=None, params={}):
await self.load_markets()
type, query = self.handle_market_type_and_params('fetchTickers', None, params)
request, requestParams = self.prepare_request(None, type, query)
method = self.get_supported_mapping(type, {
'spot': 'publicSpotGetTickers',
'margin': 'publicSpotGetTickers',
'swap': 'publicFuturesGetSettleTickers',
'future': 'publicDeliveryGetSettleTickers',
})
response = await getattr(self, method)(self.extend(request, requestParams))
return self.parse_tickers(response, symbols)
def fetch_balance_helper(self, entry):
account = self.account()
account['used'] = self.safe_string_2(entry, 'freeze', 'locked')
account['free'] = self.safe_string(entry, 'available')
account['total'] = self.safe_string(entry, 'total')
return account
    async def fetch_balance(self, params={}):
        """
        Fetch account balances for spot, margin, cross margin, funding, swap or future accounts.
        :param dict params: exchange specific parameters
        :param str params['type']: spot, margin, swap or future, if not provided self.options['defaultType'] is used
        :param str params['settle']: 'btc' or 'usdt' - settle currency for perpetual swap and future - default="usdt" for swap and "btc" for future
        :param str params['marginMode']: 'cross' or 'isolated' - marginMode for margin trading if not provided self.options['defaultMarginMode'] is used
        :param str params['symbol']: margin only - unified ccxt symbol
        :returns: a unified balance structure, or a dict of per-symbol balance structures for isolated margin
        """
        await self.load_markets()
        symbol = self.safe_string(params, 'symbol')
        params = self.omit(params, 'symbol')
        type, query = self.handle_market_type_and_params('fetchBalance', None, params)
        request, requestParams = self.prepare_request(None, type, query)
        marginMode, requestQuery = self.get_margin_mode(False, requestParams)
        if symbol is not None:
            market = self.market(symbol)
            request['currency_pair'] = market['id']
        # pick the endpoint by account type; spot additionally dispatches on margin mode
        method = self.get_supported_mapping(type, {
            'spot': self.get_supported_mapping(marginMode, {
                'spot': 'privateSpotGetAccounts',
                'margin': 'privateMarginGetAccounts',
                'cross_margin': 'privateMarginGetCrossAccounts',
            }),
            'funding': 'privateMarginGetFundingAccounts',
            'swap': 'privateFuturesGetSettleAccounts',
            'future': 'privateDeliveryGetSettleAccounts',
        })
        response = await getattr(self, method)(self.extend(request, requestQuery))
        contract = (type == 'swap' or type == 'future')
        if contract:
            # contract endpoints return a single account object - wrap it in a
            # list so the parsing loop below handles all shapes uniformly
            response = [response]
        #
        # Spot / margin funding
        #
        #     [
        #         {
        #             "currency": "DBC",
        #             "available": "0",
        #             "locked": "0"
        #             "lent": "0",  # margin funding only
        #             "total_lent": "0"  # margin funding only
        #         },
        #         ...
        #     ]
        #
        # Margin
        #
        #     [
        #         {
        #             "currency_pair": "DOGE_USDT",
        #             "locked": False,
        #             "risk": "9999.99",
        #             "base": {
        #                 "currency": "DOGE",
        #                 "available": "0",
        #                 "locked": "0",
        #                 "borrowed": "0",
        #                 "interest": "0"
        #             },
        #             "quote": {
        #                 "currency": "USDT",
        #                 "available": "0.73402",
        #                 "locked": "0",
        #                 "borrowed": "0",
        #                 "interest": "0"
        #             }
        #         },
        #         ...
        #     ]
        #
        # Cross margin
        #
        #     {
        #         "user_id": 10406147,
        #         "locked": False,
        #         "balances": {
        #             "USDT": {
        #                 "available": "1",
        #                 "freeze": "0",
        #                 "borrowed": "0",
        #                 "interest": "0"
        #             }
        #         },
        #         "total": "1",
        #         "borrowed": "0",
        #         "interest": "0",
        #         "risk": "9999.99"
        #     }
        #
        # Perpetual Swap
        #
        #     {
        #         order_margin: "0",
        #         point: "0",
        #         bonus: "0",
        #         history: {
        #             dnw: "2.1321",
        #             pnl: "11.5351",
        #             refr: "0",
        #             point_fee: "0",
        #             fund: "-0.32340576684",
        #             bonus_dnw: "0",
        #             point_refr: "0",
        #             bonus_offset: "0",
        #             fee: "-0.20132775",
        #             point_dnw: "0",
        #         },
        #         unrealised_pnl: "13.315100000006",
        #         total: "12.51345151332",
        #         available: "0",
        #         in_dual_mode: False,
        #         currency: "USDT",
        #         position_margin: "12.51345151332",
        #         user: "6333333",
        #     }
        #
        # Delivery Future
        #
        #     {
        #         order_margin: "0",
        #         point: "0",
        #         history: {
        #             dnw: "1",
        #             pnl: "0",
        #             refr: "0",
        #             point_fee: "0",
        #             point_dnw: "0",
        #             settle: "0",
        #             settle_fee: "0",
        #             point_refr: "0",
        #             fee: "0",
        #         },
        #         unrealised_pnl: "0",
        #         total: "1",
        #         available: "1",
        #         currency: "USDT",
        #         position_margin: "0",
        #         user: "6333333",
        #     }
        #
        result = {
            'info': response,
        }
        crossMargin = marginMode == 'cross_margin'
        margin = marginMode == 'margin'
        data = response
        if 'balances' in data: # True for cross_margin
            flatBalances = []
            balances = self.safe_value(data, 'balances', [])
            # inject currency and create an artificial balance object
            # so it can follow the existent flow
            keys = list(balances.keys())
            for i in range(0, len(keys)):
                currencyId = keys[i]
                content = balances[currencyId]
                content['currency'] = currencyId
                flatBalances.append(content)
            data = flatBalances
        for i in range(0, len(data)):
            entry = data[i]
            if margin and not crossMargin:
                # isolated margin: one entry per trading pair with separate
                # base and quote sub-balances, keyed by unified symbol
                marketId = self.safe_string(entry, 'currency_pair')
                symbol = self.safe_symbol(marketId, None, '_')
                base = self.safe_value(entry, 'base', {})
                quote = self.safe_value(entry, 'quote', {})
                baseCode = self.safe_currency_code(self.safe_string(base, 'currency', {}))
                quoteCode = self.safe_currency_code(self.safe_string(quote, 'currency', {}))
                subResult = {}
                subResult[baseCode] = self.fetch_balance_helper(base)
                subResult[quoteCode] = self.fetch_balance_helper(quote)
                result[symbol] = self.safe_balance(subResult)
            else:
                code = self.safe_currency_code(self.safe_string(entry, 'currency', {}))
                result[code] = self.fetch_balance_helper(entry)
        # isolated margin yields a dict of per-symbol balance structures,
        # everything else yields a single unified balance structure
        return result if (margin and not crossMargin) else self.safe_balance(result)
    async def fetch_ohlcv(self, symbol, timeframe='1m', since=None, limit=None, params={}):
        """
        Fetch OHLCV candles for a market.
        :param str symbol: unified ccxt market symbol
        :param str timeframe: ccxt timeframe key, translated via self.timeframes
        :param int since: earliest candle timestamp in milliseconds
        :param int limit: max number of candles(capped at 1999 for contracts, 1000 for spot)
        :param dict params: extra endpoint parameters
        :param str params['price']: 'mark' or 'index' - request mark/index price candles(contracts only)
        :returns: a list of OHLCV candles
        """
        await self.load_markets()
        market = self.market(symbol)
        price = self.safe_string(params, 'price')
        request = {}
        request, params = self.prepare_request(market, None, params)
        request['interval'] = self.timeframes[timeframe]
        method = 'publicSpotGetCandlesticks'
        if market['contract']:
            maxLimit = 1999
            limit = maxLimit if (limit is None) else min(limit, maxLimit)
            if market['future']:
                method = 'publicDeliveryGetSettleCandlesticks'
            elif market['swap']:
                method = 'publicFuturesGetSettleCandlesticks'
            isMark = (price == 'mark')
            isIndex = (price == 'index')
            if isMark or isIndex:
                # mark/index candles are requested by prefixing the contract id
                request['contract'] = price + '_' + market['id']
                params = self.omit(params, 'price')
        else:
            maxLimit = 1000
            limit = maxLimit if (limit is None) else min(limit, maxLimit)
        request['limit'] = limit
        if since is not None:
            duration = self.parse_timeframe(timeframe)
            request['from'] = int(since / 1000)
            toTimestamp = self.sum(request['from'], limit * duration - 1)
            currentTimestamp = self.seconds()
            # clamp the window end so we never request candles from the future
            request['to'] = min(toTimestamp, currentTimestamp)
        response = await getattr(self, method)(self.extend(request, params))
        return self.parse_ohlcvs(response, market, timeframe, since, limit)
async def fetch_mark_ohlcv(self, symbol, timeframe='1m', since=None, limit=None, params={}):
request = {
'price': 'mark',
}
return await self.fetch_ohlcv(symbol, timeframe, since, limit, self.extend(request, params))
async def fetch_funding_rate_history(self, symbol=None, since=None, limit=None, params={}):
if symbol is None:
raise ArgumentsRequired(self.id + ' fetchFundingRateHistory() requires a symbol argument')
await self.load_markets()
market = self.market(symbol)
if not market['swap']:
raise BadSymbol(self.id + ' fetchFundingRateHistory() supports swap contracts only')
request, query = self.prepare_request(market, None, params)
if limit is not None:
request['limit'] = limit
method = 'publicFuturesGetSettleFundingRate'
response = await getattr(self, method)(self.extend(request, query))
#
# {
# "r": "0.00063521",
# "t": "1621267200000",
# }
#
rates = []
for i in range(0, len(response)):
entry = response[i]
timestamp = self.safe_timestamp(entry, 't')
rates.append({
'info': entry,
'symbol': symbol,
'fundingRate': self.safe_number(entry, 'r'),
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
})
sorted = self.sort_by(rates, 'timestamp')
return self.filter_by_symbol_since_limit(sorted, market['symbol'], since, limit)
async def fetch_index_ohlcv(self, symbol, timeframe='1m', since=None, limit=None, params={}):
request = {
'price': 'index',
}
return await self.fetch_ohlcv(symbol, timeframe, since, limit, self.extend(request, params))
def parse_ohlcv(self, ohlcv, market=None):
#
# Spot market candles
#
# [
# "1626163200", # Unix timestamp in seconds
# "346711.933138181617", # Trading volume
# "33165.23", # Close price
# "33260", # Highest price
# "33117.6", # Lowest price
# "33184.47" # Open price
# ]
#
# Mark and Index price candles
#
# {
# "t":1632873600, # Unix timestamp in seconds
# "o": "41025", # Open price
# "h": "41882.17", # Highest price
# "c": "41776.92", # Close price
# "l": "40783.94" # Lowest price
# }
#
if isinstance(ohlcv, list):
return [
self.safe_timestamp(ohlcv, 0), # unix timestamp in seconds
self.safe_number(ohlcv, 5), # open price
self.safe_number(ohlcv, 3), # highest price
self.safe_number(ohlcv, 4), # lowest price
self.safe_number(ohlcv, 2), # close price
self.safe_number(ohlcv, 1), # trading volume
]
else:
# Mark and Index price candles
return [
self.safe_timestamp(ohlcv, 't'), # unix timestamp in seconds
self.safe_number(ohlcv, 'o'), # open price
self.safe_number(ohlcv, 'h'), # highest price
self.safe_number(ohlcv, 'l'), # lowest price
self.safe_number(ohlcv, 'c'), # close price
self.safe_number(ohlcv, 'v'), # trading volume, None for mark or index price
]
    async def fetch_trades(self, symbol, since=None, limit=None, params={}):
        """
        Fetch recent public trades for a market.
        :param str symbol: unified ccxt market symbol
        :param int since: earliest trade timestamp in milliseconds(only applied for contract markets)
        :param int limit: max number of trades(default 100, max 1000)
        :param dict params: extra endpoint parameters
        :returns: a list of unified trade structures
        """
        await self.load_markets()
        market = self.market(symbol)
        #
        # spot
        #
        #     request = {
        #         'currency_pair': market['id'],
        #         'limit': limit,  # maximum number of records to be returned in a single list
        #         'last_id': 'id',  # specify list starting point using the id of last record in previous list-query results
        #         'reverse': False,  # True to retrieve records where id is smaller than the specified last_id, False to retrieve records where id is larger than the specified last_id
        #     }
        #
        # swap, future
        #
        #     request = {
        #         'settle': market['settleId'],
        #         'contract': market['id'],
        #         'limit': limit,  # maximum number of records to be returned in a single list
        #         'last_id': 'id',  # specify list starting point using the id of last record in previous list-query results
        #         'from': since / 1000),  # starting time in seconds, if not specified, to and limit will be used to limit response items
        #         'to': self.seconds(),  # end time in seconds, default to current time
        #     }
        #
        request, query = self.prepare_request(market, None, params)
        method = self.get_supported_mapping(market['type'], {
            'spot': 'publicSpotGetTrades',
            'margin': 'publicSpotGetTrades',
            'swap': 'publicFuturesGetSettleTrades',
            'future': 'publicDeliveryGetSettleTrades',
        })
        if limit is not None:
            request['limit'] = limit # default 100, max 1000
        if since is not None and (market['contract']):
            request['from'] = int(since / 1000)
        response = await getattr(self, method)(self.extend(request, query))
        #
        # spot
        #
        #     [
        #         {
        #             id: "1852958144",
        #             create_time: "1634673259",
        #             create_time_ms: "1634673259378.105000",
        #             currency_pair: "ADA_USDT",
        #             side: "sell",
        #             amount: "307.078",
        #             price: "2.104",
        #         }
        #     ]
        #
        # perpetual swap
        #
        #     [
        #         {
        #              size: "2",
        #              id: "2522911",
        #              create_time_ms: "1634673380.182",
        #              create_time: "1634673380.182",
        #              contract: "ADA_USDT",
        #              price: "2.10486",
        #         }
        #     ]
        #
        return self.parse_trades(response, market, since, limit)
    async def fetch_my_trades(self, symbol=None, since=None, limit=None, params={}):
        """
        Fetch personal trading history
        :param str symbol: The symbol for the market to fetch trades for
        :param int since: The earliest timestamp, in ms, that fetched trades were made
        :param int limit: The max number of trades to fetch
        :param dict params: Exchange specific parameters
        :param str params['marginMode']: 'cross' or 'isolated' - marginMode for margin trading if not provided self.options['defaultMarginMode'] is used
        :param str params['type']: 'spot', 'swap', or 'future', if not provided self.options['defaultType'] is used
        :param int params['till']: The latest timestamp, in ms, that fetched trades were made
        :param int params['page']: *spot only* Page number
        :param str params['order_id']: *spot only* Filter trades with specified order ID. symbol is also required if self field is present
        :param str params['order']: *contract only* Futures order ID, return related data only if specified
        :param int params['offset']: *contract only* list offset, starting from 0
        :param str params['last_id']: *contract only* specify list starting point using the id of last record in previous list-query results
        :param int params['count_total']: *contract only* whether to return total number matched, default to 0(no return)
        :returns: a list of `order structures <https://docs.ccxt.com/en/latest/manual.html#order-structure>`
        """
        await self.load_markets()
        type = None
        marginMode = None
        request = {}
        market = self.market(symbol) if (symbol is not None) else None
        till = self.safe_number(params, 'till')
        params = self.omit(params, 'till')
        type, params = self.handle_market_type_and_params('fetchMyTrades', market, params)
        contract = (type == 'swap') or (type == 'future')
        if contract:
            request, params = self.prepare_request(market, type, params)
        else:
            if market is not None:
                request['currency_pair'] = market['id'] # Should always be set for non-stop
            # spot/margin requests carry the account(margin mode) field
            marginMode, params = self.get_margin_mode(False, params)
            request['account'] = marginMode
        if limit is not None:
            request['limit'] = limit # default 100, max 1000
        if since is not None:
            request['from'] = int(since / 1000)
        if till is not None:
            request['to'] = int(till / 1000)
        method = self.get_supported_mapping(type, {
            'spot': 'privateSpotGetMyTrades',
            'margin': 'privateSpotGetMyTrades',
            'swap': 'privateFuturesGetSettleMyTrades',
            'future': 'privateDeliveryGetSettleMyTrades',
        })
        response = await getattr(self, method)(self.extend(request, params))
        #
        # spot
        #
        #     [
        #         {
        #             "id": "2876130500",
        #             "create_time": "1645464610",
        #             "create_time_ms": "1645464610777.399200",
        #             "currency_pair": "DOGE_USDT",
        #             "side": "sell",
        #             "role": "taker",
        #             "amount": "10.97",
        #             "price": "0.137384",
        #             "order_id": "125924049993",
        #             "fee": "0.00301420496",
        #             "fee_currency": "USDT",
        #             "point_fee": "0",
        #             "gt_fee": "0"
        #         }
        #     ]
        #
        # perpetual swap
        #
        #     [
        #         {
        #             "size": -5,
        #             "order_id": "130264979823",
        #             "id": 26884791,
        #             "role": "taker",
        #             "create_time": 1645465199.5472,
        #             "contract": "DOGE_USDT",
        #             "price": "0.136888"
        #         }
        #     ]
        #
        # future
        #
        #     [
        #         {
        #             "id": 121234231,
        #             "create_time": 1514764800.123,
        #             "contract": "BTC_USDT",
        #             "order_id": "21893289839",
        #             "size": 100,
        #             "price": "100.123",
        #             "role": "taker"
        #         }
        #     ]
        #
        return self.parse_trades(response, market, since, limit)
    def parse_trade(self, trade, market=None):
        """
        Convert a raw trade(public REST, public websocket, or private REST for
        spot/swap/future) into a unified ccxt trade structure.
        """
        #
        # public
        #
        #     {
        #         "id": "1334253759",
        #         "create_time": "1626342738",
        #         "create_time_ms": "1626342738331.497000",
        #         "currency_pair": "BTC_USDT",
        #         "side": "sell",
        #         "amount": "0.0022",
        #         "price": "32452.16"
        #     }
        #
        # public ws
        #
        #     {
        #         id: 221994511,
        #         time: 1580311438.618647,
        #         price: '9309',
        #         amount: '0.0019',
        #         type: 'sell'
        #     }
        #
        # spot rest
        #
        #     {
        #         "id": "2876130500",
        #         "create_time": "1645464610",
        #         "create_time_ms": "1645464610777.399200",
        #         "currency_pair": "DOGE_USDT",
        #         "side": "sell",
        #         "role": "taker",
        #         "amount": "10.97",
        #         "price": "0.137384",
        #         "order_id": "125924049993",
        #         "fee": "0.00301420496",
        #         "fee_currency": "USDT",
        #         "point_fee": "0","gt_fee":"0"
        #     }
        #
        # perpetual swap rest
        #
        #     {
        #         "size": -5,
        #         "order_id": "130264979823",
        #         "id": 26884791,
        #         "role": "taker",
        #         "create_time": 1645465199.5472,
        #         "contract": "DOGE_USDT",
        #         "price": "0.136888"
        #     }
        #
        # future rest
        #
        #     {
        #         "id": 121234231,
        #         "create_time": 1514764800.123,
        #         "contract": "BTC_USDT",
        #         "order_id": "21893289839",
        #         "size": 100,
        #         "price": "100.123",
        #         "role": "taker"
        #     }
        #
        id = self.safe_string(trade, 'id')
        # prefer the millisecond-precision timestamp when the field is present
        timestamp = self.safe_timestamp_2(trade, 'time', 'create_time')
        timestamp = self.safe_integer(trade, 'create_time_ms', timestamp)
        marketId = self.safe_string_2(trade, 'currency_pair', 'contract')
        symbol = self.safe_symbol(marketId, market)
        amountString = self.safe_string_2(trade, 'amount', 'size')
        priceString = self.safe_string(trade, 'price')
        # contract trades encode the side in the sign of 'size'; spot trades
        # carry an explicit 'side'(or 'type' over websocket) field instead
        contractSide = 'sell' if Precise.string_lt(amountString, '0') else 'buy'
        amountString = Precise.string_abs(amountString)
        side = self.safe_string_2(trade, 'side', 'type', contractSide)
        orderId = self.safe_string(trade, 'order_id')
        # a non-zero gt_fee means the fee was paid in GT(fee discount token)
        gtFee = self.safe_string(trade, 'gt_fee')
        feeCurrency = None
        feeCostString = None
        if gtFee == '0':
            feeCurrency = self.safe_string(trade, 'fee_currency')
            feeCostString = self.safe_string(trade, 'fee')
        else:
            feeCurrency = 'GT'
            feeCostString = gtFee
        fee = {
            'cost': feeCostString,
            'currency': feeCurrency,
        }
        takerOrMaker = self.safe_string(trade, 'role')
        return self.safe_trade({
            'info': trade,
            'id': id,
            'timestamp': timestamp,
            'datetime': self.iso8601(timestamp),
            'symbol': symbol,
            'order': orderId,
            'type': None,
            'side': side,
            'takerOrMaker': takerOrMaker,
            'price': priceString,
            'amount': amountString,
            'cost': None,
            'fee': fee,
        }, market)
async def fetch_deposits(self, code=None, since=None, limit=None, params={}):
await self.load_markets()
request = {}
currency = None
if code is not None:
currency = self.currency(code)
request['currency'] = currency['id']
if limit is not None:
request['limit'] = limit
if since is not None:
start = int(since / 1000)
request['from'] = start
request['to'] = self.sum(start, 30 * 24 * 60 * 60)
response = await self.privateWalletGetDeposits(self.extend(request, params))
return self.parse_transactions(response, currency)
async def fetch_withdrawals(self, code=None, since=None, limit=None, params={}):
await self.load_markets()
request = {}
currency = None
if code is not None:
currency = self.currency(code)
request['currency'] = currency['id']
if limit is not None:
request['limit'] = limit
if since is not None:
start = int(since / 1000)
request['from'] = start
request['to'] = self.sum(start, 30 * 24 * 60 * 60)
response = await self.privateWalletGetWithdrawals(self.extend(request, params))
return self.parse_transactions(response, currency)
async def withdraw(self, code, amount, address, tag=None, params={}):
tag, params = self.handle_withdraw_tag_and_params(tag, params)
self.check_address(address)
await self.load_markets()
currency = self.currency(code)
request = {
'currency': currency['id'],
'address': address,
'amount': self.currency_to_precision(code, amount),
}
if tag is not None:
request['memo'] = tag
networks = self.safe_value(self.options, 'networks', {})
network = self.safe_string_upper(params, 'network') # self line allows the user to specify either ERC20 or ETH
network = self.safe_string_lower(networks, network, network) # handle ETH>ERC20 alias
if network is not None:
request['chain'] = network
params = self.omit(params, 'network')
response = await self.privateWithdrawalsPost(self.extend(request, params))
#
# {
# "id": "w13389675",
# "currency": "USDT",
# "amount": "50",
# "address": "TUu2rLFrmzUodiWfYki7QCNtv1akL682p1",
# "memo": null
# }
#
return self.parse_transaction(response, currency)
def parse_transaction_status(self, status):
statuses = {
'PEND': 'pending',
'REQUEST': 'pending',
'DMOVE': 'pending',
'CANCEL': 'failed',
'DONE': 'ok',
'BCODE': 'ok', # GateCode withdrawal
}
return self.safe_string(statuses, status, status)
def parse_transaction_type(self, type):
types = {
'd': 'deposit',
'w': 'withdrawal',
}
return self.safe_string(types, type, type)
def parse_transaction(self, transaction, currency=None):
#
# deposits
#
# {
# "id": "d33361395",
# "currency": "USDT_TRX",
# "address": "TErdnxenuLtXfnMafLbfappYdHtnXQ5U4z",
# "amount": "100",
# "txid": "ae9374de34e558562fe18cbb1bf9ab4d9eb8aa7669d65541c9fa2a532c1474a0",
# "timestamp": "1626345819",
# "status": "DONE",
# "memo": ""
# }
#
# withdraw
#
# {
# "id": "w13389675",
# "currency": "USDT",
# "amount": "50",
# "address": "TUu2rLFrmzUodiWfYki7QCNtv1akL682p1",
# "memo": null
# }
#
id = self.safe_string(transaction, 'id')
type = None
amount = self.safe_string(transaction, 'amount')
if id[0] == 'b':
# GateCode handling
type = 'deposit' if Precise.string_gt(amount, '0') else 'withdrawal'
amount = Precise.string_abs(amount)
elif id is not None:
type = self.parse_transaction_type(id[0])
currencyId = self.safe_string(transaction, 'currency')
code = self.safe_currency_code(currencyId)
txid = self.safe_string(transaction, 'txid')
rawStatus = self.safe_string(transaction, 'status')
status = self.parse_transaction_status(rawStatus)
address = self.safe_string(transaction, 'address')
fee = self.safe_number(transaction, 'fee')
tag = self.safe_string(transaction, 'memo')
if tag == '':
tag = None
timestamp = self.safe_timestamp(transaction, 'timestamp')
return {
'info': transaction,
'id': id,
'txid': txid,
'currency': code,
'amount': self.parse_number(amount),
'network': None,
'address': address,
'addressTo': None,
'addressFrom': None,
'tag': tag,
'tagTo': None,
'tagFrom': None,
'status': status,
'type': type,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'updated': None,
'fee': fee,
}
    async def create_order(self, symbol, type, side, amount, price=None, params={}):
        """
        Create an order on the exchange
        :param str symbol: Unified CCXT market symbol
        :param str type: "limit" or "market" *"market" is contract only*
        :param str side: "buy" or "sell"
        :param float amount: the amount of currency to trade
        :param float price: *ignored in "market" orders* the price at which the order is to be fulfilled at in units of the quote currency
        :param dict params: Extra parameters specific to the exchange API endpoint
        :param float params['stopPrice']: The price at which a trigger order is triggered at
        :param str params['timeInForce']: "GTC", "IOC", or "PO"
        :param str params['marginMode']: 'cross' or 'isolated' - marginMode for margin trading if not provided self.options['defaultMarginMode'] is used
        :param int params['iceberg']: Amount to display for the iceberg order, Null or 0 for normal orders, Set to -1 to hide the order completely
        :param str params['text']: User defined information
        :param str params['account']: *spot and margin only* "spot", "margin" or "cross_margin"
        :param bool params['auto_borrow']: *margin only* Used in margin or cross margin trading to allow automatic loan of insufficient amount if balance is not enough
        :param str params['settle']: *contract only* Unified Currency Code for settle currency
        :param bool params['reduceOnly']: *contract only* Indicates if self order is to reduce the size of a position
        :param bool params['close']: *contract only* Set as True to close the position, with size set to 0
        :param bool params['auto_size']: *contract only* Set side to close dual-mode position, close_long closes the long side, while close_short the short one, size also needs to be set to 0
        :returns: `An order structure <https://docs.ccxt.com/en/latest/manual.html#order-structure>`
        """
        await self.load_markets()
        market = self.market(symbol)
        contract = market['contract']
        stopPrice = self.safe_number(params, 'stopPrice')
        methodTail = 'Orders'
        reduceOnly = self.safe_value_2(params, 'reduce_only', 'reduceOnly')
        defaultTimeInForce = self.safe_value_2(params, 'tif', 'time_in_force', 'gtc')
        timeInForce = self.safe_value(params, 'timeInForce', defaultTimeInForce)
        postOnly = False
        type, postOnly, timeInForce, params = self.is_post_only(type, timeInForce, None, params)
        params = self.omit(params, ['stopPrice', 'reduce_only', 'reduceOnly', 'tif', 'time_in_force', 'timeInForce'])
        if postOnly:
            # 'poc' is the exchange's PendingOrCancelled(post-only) flag
            timeInForce = 'poc'
        isLimitOrder = (type == 'limit')
        isMarketOrder = (type == 'market')
        if isLimitOrder and price is None:
            raise ArgumentsRequired(self.id + ' createOrder() requires a price argument for ' + type + ' orders')
        if contract:
            # contract orders use a signed integer size: positive = buy, negative = sell
            amountToPrecision = self.amount_to_precision(symbol, amount)
            signedAmount = Precise.string_neg(amountToPrecision) if (side == 'sell') else amountToPrecision
            amount = int(signedAmount)
            if isMarketOrder:
                timeInForce = 'ioc'
                price = 0
        elif not isLimitOrder:
            # Gateio doesn't have market orders for spot
            raise InvalidOrder(self.id + ' createOrder() does not support ' + type + ' orders for ' + market['type'] + ' markets')
        request = None
        trigger = self.safe_value(params, 'trigger')
        if stopPrice is None and trigger is None:
            # plain(non-conditional) orders
            if contract:
                # contract order
                request = {
                    'contract': market['id'], # filled in prepareRequest above
                    'size': amount, # int64, positive = bid, negative = ask
                    # 'iceberg': 0, # int64, display size for iceberg order, 0 for non-iceberg, note that you will have to pay the taker fee for the hidden size
                    'price': self.price_to_precision(symbol, price), # 0 for market order with tif set as ioc
                    # 'close': False, # True to close the position, with size set to 0
                    # 'reduce_only': False, # St as True to be reduce-only order
                    # 'tif': 'gtc', # gtc, ioc, poc PendingOrCancelled == postOnly order
                    # 'text': clientOrderId, # 't-abcdef1234567890',
                    # 'auto_size': '', # close_long, close_short, note size also needs to be set to 0
                    'settle': market['settleId'], # filled in prepareRequest above
                }
                if reduceOnly is not None:
                    request['reduce_only'] = reduceOnly
                if timeInForce is not None:
                    request['tif'] = timeInForce
            else:
                marginMode = None
                marginMode, params = self.get_margin_mode(False, params)
                # spot order
                request = {
                    # 'text': clientOrderId, # 't-abcdef1234567890',
                    'currency_pair': market['id'], # filled in prepareRequest above
                    'type': type,
                    'account': marginMode, # 'spot', 'margin', 'cross_margin'
                    'side': side,
                    'amount': self.amount_to_precision(symbol, amount),
                    'price': self.price_to_precision(symbol, price),
                    # 'time_in_force': 'gtc', # gtc, ioc, poc PendingOrCancelled == postOnly order
                    # 'iceberg': 0, # amount to display for the iceberg order, null or 0 for normal orders, set to -1 to hide the order completely
                    # 'auto_borrow': False, # used in margin or cross margin trading to allow automatic loan of insufficient amount if balance is not enough
                    # 'auto_repay': False, # automatic repayment for automatic borrow loan generated by cross margin order, diabled by default
                }
                if timeInForce is not None:
                    request['time_in_force'] = timeInForce
            clientOrderId = self.safe_string_2(params, 'text', 'clientOrderId')
            if clientOrderId is not None:
                # user-defined, must follow the rules if not empty
                # prefixed with t-
                # no longer than 28 bytes without t- prefix
                # can only include 0-9, A-Z, a-z, underscores(_), hyphens(-) or dots(.)
                if len(clientOrderId) > 28:
                    raise BadRequest(self.id + ' createOrder() clientOrderId or text param must be up to 28 characters')
                params = self.omit(params, ['text', 'clientOrderId'])
                if clientOrderId[0] != 't':
                    clientOrderId = 't-' + clientOrderId
                request['text'] = clientOrderId
        else:
            # conditional(trigger/stop) orders
            if contract:
                # contract conditional order
                rule = 1 if (side == 'buy') else 2
                request = {
                    'initial': {
                        'contract': market['id'],
                        'size': amount, # positive = buy, negative = sell, set to 0 to close the position
                        'price': self.price_to_precision(symbol, price), # set to 0 to use market price
                        # 'close': False, # set to True if trying to close the position
                        # 'tif': 'gtc', # gtc, ioc, if using market price, only ioc is supported
                        # 'text': clientOrderId, # web, api, app
                        # 'reduce_only': False,
                    },
                    'trigger': {
                        # 'strategy_type': 0, # 0 = by price, 1 = by price gap, only 0 is supported currently
                        # 'price_type': 0, # 0 latest deal price, 1 mark price, 2 index price
                        'price': self.price_to_precision(symbol, stopPrice), # price or gap
                        'rule': rule, # 1 means price_type >= price, 2 means price_type <= price
                        # 'expiration': expiration, how many seconds to wait for the condition to be triggered before cancelling the order
                    },
                    'settle': market['settleId'],
                }
                expiration = self.safe_integer(params, 'expiration')
                if expiration is not None:
                    request['trigger']['expiration'] = expiration
                    params = self.omit(params, 'expiration')
                if reduceOnly is not None:
                    request['initial']['reduce_only'] = reduceOnly
                if timeInForce is not None:
                    request['initial']['tif'] = timeInForce
            else:
                # spot conditional order
                options = self.safe_value(self.options, 'createOrder', {})
                marginMode = None
                marginMode, params = self.get_margin_mode(True, params)
                defaultExpiration = self.safe_integer(options, 'expiration')
                expiration = self.safe_integer(params, 'expiration', defaultExpiration)
                rule = '>=' if (side == 'buy') else '<='
                triggerPrice = self.safe_value(trigger, 'price', stopPrice)
                request = {
                    'trigger': {
                        'price': self.price_to_precision(symbol, triggerPrice),
                        'rule': rule, # >= triggered when market price larger than or equal to price field, <= triggered when market price less than or equal to price field
                        'expiration': expiration, # required, how long(in seconds) to wait for the condition to be triggered before cancelling the order
                    },
                    'put': {
                        'type': type,
                        'side': side,
                        'price': self.price_to_precision(symbol, price),
                        'amount': self.amount_to_precision(symbol, amount),
                        'account': marginMode,
                        'time_in_force': timeInForce, # gtc, ioc for taker only
                    },
                    'market': market['id'],
                }
            # conditional orders are routed through the price-triggered endpoints
            methodTail = 'PriceOrders'
        method = self.get_supported_mapping(market['type'], {
            'spot': 'privateSpotPost' + methodTail,
            'margin': 'privateSpotPost' + methodTail,
            'swap': 'privateFuturesPostSettle' + methodTail,
            'future': 'privateDeliveryPostSettle' + methodTail,
        })
        response = await getattr(self, method)(self.deep_extend(request, params))
        #
        # spot
        #
        #     {
        #         "id": "95282841887",
        #         "text": "apiv4",
        #         "create_time": "1637383156",
        #         "update_time": "1637383156",
        #         "create_time_ms": 1637383156017,
        #         "update_time_ms": 1637383156017,
        #         "status": "open",
        #         "currency_pair": "ETH_USDT",
        #         "type": "limit",
        #         "account": "spot",
        #         "side": "buy",
        #         "amount": "0.01",
        #         "price": "3500",
        #         "time_in_force": "gtc",
        #         "iceberg": "0",
        #         "left": "0.01",
        #         "fill_price": "0",
        #         "filled_total": "0",
        #         "fee": "0",
        #         "fee_currency": "ETH",
        #         "point_fee": "0",
        #         "gt_fee": "0",
        #         "gt_discount": False,
        #         "rebated_fee": "0",
        #         "rebated_fee_currency": "USDT"
        #     }
        #
        # spot conditional
        #
        #     {"id": 5891843}
        #
        # future and perpetual swaps
        #
        #     {
        #         "id": 95938572327,
        #         "contract": "ETH_USDT",
        #         "mkfr": "0",
        #         "tkfr": "0.0005",
        #         "tif": "gtc",
        #         "is_reduce_only": False,
        #         "create_time": 1637384600.08,
        #         "price": "3000",
        #         "size": 1,
        #         "refr": "0",
        #         "left": 1,
        #         "text": "api",
        #         "fill_price": "0",
        #         "user": 2436035,
        #         "status": "open",
        #         "is_liq": False,
        #         "refu": 0,
        #         "is_close": False,
        #         "iceberg": 0
        #     }
        #
        # futures and perpetual swaps conditionals
        #
        #     {"id": 7615567}
        #
        return self.parse_order(response, market)
def parse_order_status(self, status):
statuses = {
'_new': 'open',
'filled': 'closed',
'cancelled': 'canceled',
'liquidated': 'closed',
}
return self.safe_string(statuses, status, status)
    def parse_order(self, order, market=None):
        """
        * @ignore
        Normalizes a raw spot/margin/swap/future order - including trigger
        (conditional) orders - into a unified CCXT order structure.
        :param dict order: the raw order payload(see the samples below)
        :param dict market: CCXT market used to resolve the symbol when the
            payload itself does not identify one
        :returns dict: a unified `order structure <https://docs.ccxt.com/en/latest/manual.html#order-structure>`
        """
        #
        # SPOT
        # createOrder/cancelOrder/fetchOrder
        #
        #     {
        #         "id": "62364648575",
        #         "text": "apiv4",
        #         "create_time": "1626354834",
        #         "update_time": "1626354834",
        #         "create_time_ms": "1626354833544",
        #         "update_time_ms": "1626354833544",
        #         "status": "open",
        #         "currency_pair": "BTC_USDT",
        #         "type": "limit",
        #         "account": "spot",
        #         "side": "buy",
        #         "amount": "0.0001",
        #         "price": "30000",
        #         "time_in_force": "gtc",
        #         "iceberg": "0",
        #         "left": "0.0001",
        #         "fill_price": "0",
        #         "filled_total": "0",
        #         "fee": "0",
        #         "fee_currency": "BTC",
        #         "point_fee": "0",
        #         "gt_fee": "0",
        #         "gt_discount": True,
        #         "rebated_fee": "0",
        #         "rebated_fee_currency": "USDT"
        #     }
        #
        # SPOT TRIGGER ORDERS
        # createOrder
        #
        #     {
        #         "id": 12604556
        #     }
        #
        # fetchOrder/cancelOrder
        #
        #     {
        #         "market": "ADA_USDT",
        #         "user": 6392049,
        #         "trigger": {
        #             "price": "1.08",  # stopPrice
        #             "rule": "\u003e=",
        #             "expiration": 86400
        #         },
        #         "put": {
        #             "type": "limit",
        #             "side": "buy",
        #             "price": "1.08",  # order price
        #             "amount": "1.00000000000000000000",
        #             "account": "normal",
        #             "time_in_force": "gtc"
        #         },
        #         "id": 71639298,
        #         "ctime": 1643945985,
        #         "status": "open"
        #     }
        #
        # FUTURE AND SWAP
        # createOrder/cancelOrder/fetchOrder
        #
        #     {
        #         "id": 123028481731,
        #         "contract": "ADA_USDT",
        #         "mkfr": "-0.00005",
        #         "tkfr": "0.00048",
        #         "tif": "ioc",
        #         "is_reduce_only": False,
        #         "create_time": 1643950262.68,
        #         "finish_time": 1643950262.68,
        #         "price": "0",
        #         "size": 1,
        #         "refr": "0",
        #         "left": 0,
        #         "text": "api",
        #         "fill_price": "1.05273",
        #         "user": 6329238,
        #         "finish_as": "filled",
        #         "status": "finished",
        #         "is_liq": False,
        #         "refu": 0,
        #         "is_close": False,
        #         "iceberg": 0
        #     }
        #
        # TRIGGER ORDERS(FUTURE AND SWAP)
        # createOrder
        #
        #     {
        #         "id": 12604556
        #     }
        #
        # fetchOrder/cancelOrder
        #
        #     {
        #         "user": 6320300,
        #         "trigger": {
        #             "strategy_type": 0,
        #             "price_type": 0,
        #             "price": "1.03",  # stopPrice
        #             "rule": 2,
        #             "expiration": 0
        #         },
        #         "initial": {
        #             "contract": "ADA_USDT",
        #             "size": -1,
        #             "price": "1.02",
        #             "tif": "gtc",
        #             "text": "",
        #             "iceberg": 0,
        #             "is_close": False,
        #             "is_reduce_only": False,
        #             "auto_size": ""
        #         },
        #         "id": 126393906,
        #         "trade_id": 0,
        #         "status": "open",
        #         "reason": "",
        #         "create_time": 1643953482,
        #         "finish_time": 1643953482,
        #         "is_stop_order": False,
        #         "stop_trigger": {
        #             "rule": 0,
        #             "trigger_price": "",
        #             "order_price": ""
        #         },
        #         "me_order_id": 0,
        #         "order_type": ""
        #     }
        #
        # trigger orders nest the actual order under 'put'(spot) or 'initial'(contracts)
        put = self.safe_value_2(order, 'put', 'initial')
        trigger = self.safe_value(order, 'trigger')
        contract = self.safe_string(put, 'contract')
        type = self.safe_string(put, 'type')
        timeInForce = self.safe_string_upper_2(put, 'time_in_force', 'tif')
        amount = self.safe_string_2(put, 'amount', 'size')
        side = self.safe_string(put, 'side')
        price = self.safe_string(put, 'price')
        # top-level fields(present on regular orders) take precedence over the nested ones
        contract = self.safe_string(order, 'contract', contract)
        type = self.safe_string(order, 'type', type)
        timeInForce = self.safe_string_upper_2(order, 'time_in_force', 'tif', timeInForce)
        if timeInForce == 'POC':
            # 'poc' is the exchange's post-only flag, map it to the unified 'PO'
            timeInForce = 'PO'
        postOnly = (timeInForce == 'PO')
        amount = self.safe_string_2(order, 'amount', 'size', amount)
        side = self.safe_string(order, 'side', side)
        price = self.safe_string(order, 'price', price)
        remaining = self.safe_string(order, 'left')
        filled = Precise.string_sub(amount, remaining)
        cost = self.safe_string(order, 'filled_total')
        rawStatus = None
        average = None
        if put:
            # a trigger order has not reached the book yet, so nothing is filled
            remaining = amount
            filled = '0'
            cost = '0'
        if contract:
            # for contracts a zero price combined with IOC denotes a market order
            isMarketOrder = Precise.string_equals(price, '0') and (timeInForce == 'IOC')
            type = 'market' if isMarketOrder else 'limit'
            # contract size is signed: positive = long(buy), negative = short(sell)
            side = 'buy' if Precise.string_gt(amount, '0') else 'sell'
            rawStatus = self.safe_string(order, 'finish_as', 'open')
            average = self.safe_number(order, 'fill_price')
        else:
            rawStatus = self.safe_string(order, 'status')
        # prefer millisecond fields, fall back to second-resolution timestamps
        timestamp = self.safe_integer(order, 'create_time_ms')
        if timestamp is None:
            timestamp = self.safe_timestamp_2(order, 'create_time', 'ctime')
        lastTradeTimestamp = self.safe_integer(order, 'update_time_ms')
        if lastTradeTimestamp is None:
            lastTradeTimestamp = self.safe_timestamp_2(order, 'update_time', 'finish_time')
        exchangeSymbol = self.safe_string_2(order, 'currency_pair', 'market', contract)
        # Everything below self(above return) is related to fees
        fees = []
        gtFee = self.safe_string(order, 'gt_fee')
        if gtFee:
            fees.append({
                'currency': 'GT',
                'cost': gtFee,
            })
        fee = self.safe_string(order, 'fee')
        if fee:
            fees.append({
                'currency': self.safe_currency_code(self.safe_string(order, 'fee_currency')),
                'cost': fee,
            })
        rebate = self.safe_string(order, 'rebated_fee')
        if rebate:
            # rebates are reported positive by the exchange, negate them for CCXT
            fees.append({
                'currency': self.safe_currency_code(self.safe_string(order, 'rebated_fee_currency')),
                'cost': Precise.string_neg(rebate),
            })
        numFeeCurrencies = len(fees)
        multipleFeeCurrencies = numFeeCurrencies > 1
        status = self.parse_order_status(rawStatus)
        return self.safe_order({
            'id': self.safe_string(order, 'id'),
            'clientOrderId': self.safe_string(order, 'text'),
            'timestamp': timestamp,
            'datetime': self.iso8601(timestamp),
            'lastTradeTimestamp': lastTradeTimestamp,
            'status': status,
            'symbol': self.safe_symbol(exchangeSymbol),
            'type': type,
            'timeInForce': timeInForce,
            'postOnly': postOnly,
            'side': side,
            'price': self.parse_number(price),
            'stopPrice': self.safe_number(trigger, 'price'),
            'average': average,
            'amount': self.parse_number(Precise.string_abs(amount)),
            'cost': Precise.string_abs(cost),
            'filled': self.parse_number(Precise.string_abs(filled)),
            'remaining': self.parse_number(Precise.string_abs(remaining)),
            'fee': None if multipleFeeCurrencies else self.safe_value(fees, 0),
            'fees': fees if multipleFeeCurrencies else [],
            'trades': None,
            'info': order,
        }, market)
async def create_reduce_only_order(self, symbol, type, side, amount, price=None, params={}):
request = {
'reduceOnly': True,
}
return await self.create_order(symbol, type, side, amount, price, self.extend(request, params))
async def fetch_order(self, id, symbol=None, params={}):
"""
Retrieves information on an order
:param str id: Order id
:param str symbol: Unified market symbol, *required for spot and margin*
:param dict params: Parameters specified by the exchange api
:param bool params['stop']: True if the order being fetched is a trigger order
:param str params['marginMode']: 'cross' or 'isolated' - marginMode for margin trading if not provided self.options['defaultMarginMode'] is used
:param str params['type']: 'spot', 'swap', or 'future', if not provided self.options['defaultMarginMode'] is used
:param str params['settle']: 'btc' or 'usdt' - settle currency for perpetual swap and future - market settle currency is used if symbol is not None, default="usdt" for swap and "btc" for future
:returns: An `order structure <https://docs.ccxt.com/en/latest/manual.html#order-structure>`
"""
await self.load_markets()
stop = self.safe_value_2(params, 'is_stop_order', 'stop', False)
params = self.omit(params, ['is_stop_order', 'stop'])
clientOrderId = self.safe_string_2(params, 'text', 'clientOrderId')
orderId = id
if clientOrderId is not None:
params = self.omit(params, ['text', 'clientOrderId'])
if clientOrderId[0] != 't':
clientOrderId = 't-' + clientOrderId
orderId = clientOrderId
market = None if (symbol is None) else self.market(symbol)
type, query = self.handle_market_type_and_params('fetchOrder', market, params)
contract = (type == 'swap') or (type == 'future')
request, requestParams = self.prepare_request(market, type, query) if contract else self.spot_order_prepare_request(market, stop, query)
request['order_id'] = orderId
methodMiddle = 'PriceOrders' if stop else 'Orders'
method = self.get_supported_mapping(type, {
'spot': 'privateSpotGet' + methodMiddle + 'OrderId',
'margin': 'privateSpotGet' + methodMiddle + 'OrderId',
'swap': 'privateFuturesGetSettle' + methodMiddle + 'OrderId',
'future': 'privateDeliveryGetSettle' + methodMiddle + 'OrderId',
})
response = await getattr(self, method)(self.extend(request, requestParams))
return self.parse_order(response, market)
async def fetch_open_orders(self, symbol=None, since=None, limit=None, params={}):
"""
fetches all open orders
:param str symbol: Unified market symbol
:param int since: earliest time in ms for orders in the response
:param int limit: max number of order structures to return
:param dict params: exchange specific params
:param bool params['stop']: True for fetching stop orders
:param str params['type']: spot, margin, swap or future, if not provided self.options['defaultType'] is used
:param str params['marginMode']: 'cross' or 'isolated' - marginMode for type='margin', if not provided self.options['defaultMarginMode'] is used
:returns: An array of order structures
"""
return await self.fetch_orders_by_status('open', symbol, since, limit, params)
async def fetch_closed_orders(self, symbol=None, since=None, limit=None, params={}):
"""
fetches all closed orders
:param str symbol: Unified market symbol of the market to fetch orders for
:param int since: earliest time in ms for orders in the response
:param int limit: max number of order structures to return
:param dict params: exchange specific params
:param bool params['stop']: True for fetching stop orders
:param str params['type']: spot, swap or future, if not provided self.options['defaultType'] is used
:param str params['marginMode']: 'cross' or 'isolated' - marginMode for margin trading if not provided self.options['defaultMarginMode'] is used
:returns: An array of `order structures <https://docs.ccxt.com/en/latest/manual.html#order-structure>`
"""
return await self.fetch_orders_by_status('finished', symbol, since, limit, params)
    async def fetch_orders_by_status(self, status, symbol=None, since=None, limit=None, params={}):
        """
        * @ignore
        shared implementation behind fetchOpenOrders/fetchClosedOrders
        :param str status: 'open', 'closed' or 'finished' - 'closed' is translated to the exchange's 'finished'
        :param str symbol: unified market symbol
        :param int since: earliest time in ms for orders in the response(applied for spot/margin only)
        :param int limit: max number of order structures to return
        :param dict params: exchange specific params
        :param bool params['stop']: True for fetching stop(trigger) orders
        :returns: An array of `order structures <https://docs.ccxt.com/en/latest/manual.html#order-structure>`
        """
        await self.load_markets()
        market = None if (symbol is None) else self.market(symbol)
        stop = self.safe_value(params, 'stop')
        params = self.omit(params, 'stop')
        type, query = self.handle_market_type_and_params('fetchOrdersByStatus', market, params)
        spot = (type == 'spot') or (type == 'margin')
        request, requestParams = self.multi_order_spot_prepare_request(market, stop, query) if spot else self.prepare_request(market, type, query)
        if status == 'closed':
            # the exchange calls completed orders 'finished'
            status = 'finished'
        request['status'] = status
        if limit is not None:
            request['limit'] = limit
        if since is not None and spot:
            # spot endpoints expect 'from' in seconds
            request['from'] = int(since / 1000)
        methodTail = 'PriceOrders' if stop else 'Orders'
        # spot open orders(non-stop) use a dedicated endpoint grouped by currency pair
        openSpotOrders = spot and (status == 'open') and not stop
        if openSpotOrders:
            methodTail = 'OpenOrders'
        method = self.get_supported_mapping(type, {
            'spot': 'privateSpotGet' + methodTail,
            'margin': 'privateSpotGet' + methodTail,
            'swap': 'privateFuturesGetSettle' + methodTail,
            'future': 'privateDeliveryGetSettle' + methodTail,
        })
        response = await getattr(self, method)(self.extend(request, requestParams))
        #
        # SPOT Open Orders
        #
        #     [
        #         {
        #             "currency_pair": "ADA_USDT",
        #             "total": 2,
        #             "orders": [
        #                 {
        #                     "id": "155498539874",
        #                     "text": "apiv4",
        #                     "create_time": "1652406843",
        #                     "update_time": "1652406843",
        #                     "create_time_ms": 1652406843295,
        #                     "update_time_ms": 1652406843295,
        #                     "status": "open",
        #                     "currency_pair": "ADA_USDT",
        #                     "type": "limit",
        #                     "account": "spot",
        #                     "side": "buy",
        #                     "amount": "3",
        #                     "price": "0.35",
        #                     "time_in_force": "gtc",
        #                     "iceberg": "0",
        #                     "left": "3",
        #                     "fill_price": "0",
        #                     "filled_total": "0",
        #                     "fee": "0",
        #                     "fee_currency": "ADA",
        #                     "point_fee": "0",
        #                     "gt_fee": "0",
        #                     "gt_discount": False,
        #                     "rebated_fee": "0",
        #                     "rebated_fee_currency": "USDT"
        #                 },
        #                 ...
        #             ]
        #         },
        #         ...
        #     ]
        #
        # SPOT
        #
        #     [
        #         {
        #             "id": "8834234273",
        #             "text": "3",
        #             "create_time": "1635406193",
        #             "update_time": "1635406193",
        #             "create_time_ms": 1635406193361,
        #             "update_time_ms": 1635406193361,
        #             "status": "closed",
        #             "currency_pair": "BTC_USDT",
        #             "type": "limit",
        #             "account": "spot",  # margin for margin orders
        #             "side": "sell",
        #             "amount": "0.0002",
        #             "price": "58904.01",
        #             "time_in_force": "gtc",
        #             "iceberg": "0",
        #             "left": "0.0000",
        #             "fill_price": "11.790516",
        #             "filled_total": "11.790516",
        #             "fee": "0.023581032",
        #             "fee_currency": "USDT",
        #             "point_fee": "0",
        #             "gt_fee": "0",
        #             "gt_discount": False,
        #             "rebated_fee_currency": "BTC"
        #         }
        #     ]
        #
        # Spot Stop
        #
        #     [
        #         {
        #             "market": "ADA_USDT",
        #             "user": 10406147,
        #             "trigger": {
        #                 "price": "0.65",
        #                 "rule": "\u003c=",
        #                 "expiration": 86400
        #             },
        #             "put": {
        #                 "type": "limit",
        #                 "side": "sell",
        #                 "price": "0.65",
        #                 "amount": "2.00000000000000000000",
        #                 "account": "normal",  # margin for margin orders
        #                 "time_in_force": "gtc"
        #             },
        #             "id": 8449909,
        #             "ctime": 1652188982,
        #             "status": "open"
        #         }
        #     ]
        #
        # Perpetual Swap
        #
        #     [
        #         {
        #             "status": "finished",
        #             "size": -1,
        #             "left": 0,
        #             "id": 82750739203,
        #             "is_liq": False,
        #             "is_close": False,
        #             "contract": "BTC_USDT",
        #             "text": "web",
        #             "fill_price": "60721.3",
        #             "finish_as": "filled",
        #             "iceberg": 0,
        #             "tif": "ioc",
        #             "is_reduce_only": True,
        #             "create_time": 1635403475.412,
        #             "finish_time": 1635403475.4127,
        #             "price": "0"
        #         }
        #     ]
        #
        result = response
        if openSpotOrders:
            # flatten the per-currency-pair groups returned by the OpenOrders endpoint
            result = []
            for i in range(0, len(response)):
                orders = self.safe_value(response[i], 'orders')
                result = self.array_concat(result, orders)
        orders = self.parse_orders(result, market, since, limit)
        return self.filter_by_symbol_since_limit(orders, symbol, since, limit)
    async def cancel_order(self, id, symbol=None, params={}):
        """
        Cancels an open order
        :param str id: Order id
        :param str symbol: Unified market symbol
        :param dict params: Parameters specified by the exchange api
        :param bool params['stop']: True if the order to be cancelled is a trigger order
        :returns: An `order structure <https://docs.ccxt.com/en/latest/manual.html#order-structure>`
        """
        await self.load_markets()
        market = None if (symbol is None) else self.market(symbol)
        stop = self.safe_value_2(params, 'is_stop_order', 'stop', False)
        params = self.omit(params, ['is_stop_order', 'stop'])
        type, query = self.handle_market_type_and_params('cancelOrder', market, params)
        request, requestParams = self.spot_order_prepare_request(market, stop, query) if (type == 'spot' or type == 'margin') else self.prepare_request(market, type, query)
        request['order_id'] = id
        # 'Price' selects the conditional(trigger) order endpoints
        pathMiddle = 'Price' if stop else ''
        method = self.get_supported_mapping(type, {
            'spot': 'privateSpotDelete' + pathMiddle + 'OrdersOrderId',
            'margin': 'privateSpotDelete' + pathMiddle + 'OrdersOrderId',
            'swap': 'privateFuturesDeleteSettle' + pathMiddle + 'OrdersOrderId',
            'future': 'privateDeliveryDeleteSettle' + pathMiddle + 'OrdersOrderId',
        })
        response = await getattr(self, method)(self.extend(request, requestParams))
        #
        # spot
        #
        #     {
        #         "id": "95282841887",
        #         "text": "apiv4",
        #         "create_time": "1637383156",
        #         "update_time": "1637383235",
        #         "create_time_ms": 1637383156017,
        #         "update_time_ms": 1637383235085,
        #         "status": "cancelled",
        #         "currency_pair": "ETH_USDT",
        #         "type": "limit",
        #         "account": "spot",
        #         "side": "buy",
        #         "amount": "0.01",
        #         "price": "3500",
        #         "time_in_force": "gtc",
        #         "iceberg": "0",
        #         "left": "0.01",
        #         "fill_price": "0",
        #         "filled_total": "0",
        #         "fee": "0",
        #         "fee_currency": "ETH",
        #         "point_fee": "0",
        #         "gt_fee": "0",
        #         "gt_discount": False,
        #         "rebated_fee": "0",
        #         "rebated_fee_currency": "USDT"
        #     }
        #
        # spot conditional
        #
        #     {
        #         "market": "ETH_USDT",
        #         "user": 2436035,
        #         "trigger": {
        #             "price": "3500",
        #             "rule": "\u003c=",
        #             "expiration": 86400
        #         },
        #         "put": {
        #             "type": "limit",
        #             "side": "buy",
        #             "price": "3500",
        #             "amount": "0.01000000000000000000",
        #             "account": "normal",
        #             "time_in_force": "gtc"
        #         },
        #         "id": 5891843,
        #         "ctime": 1637382379,
        #         "ftime": 1637382673,
        #         "status": "canceled"
        #     }
        #
        # perpetual swaps
        #
        #     {
        #         id: "82241928192",
        #         contract: "BTC_USDT",
        #         mkfr: "0",
        #         tkfr: "0.0005",
        #         tif: "gtc",
        #         is_reduce_only: False,
        #         create_time: "1635196145.06",
        #         finish_time: "1635196233.396",
        #         price: "61000",
        #         size: "4",
        #         refr: "0",
        #         left: "4",
        #         text: "web",
        #         fill_price: "0",
        #         user: "6693577",
        #         finish_as: "cancelled",
        #         status: "finished",
        #         is_liq: False,
        #         refu: "0",
        #         is_close: False,
        #         iceberg: "0",
        #     }
        #
        return self.parse_order(response, market)
    async def cancel_all_orders(self, symbol=None, params={}):
        """
        cancel all open orders
        :param str symbol: unified market symbol
        :param dict params: exchange specific params
        :param bool params['stop']: True for cancelling trigger(conditional) orders
        :returns: An array of `order structures <https://docs.ccxt.com/en/latest/manual.html#order-structure>`
        """
        await self.load_markets()
        market = None if (symbol is None) else self.market(symbol)
        stop = self.safe_value(params, 'stop')
        params = self.omit(params, 'stop')
        type, query = self.handle_market_type_and_params('cancelAllOrders', market, params)
        request, requestParams = self.multi_order_spot_prepare_request(market, stop, query) if (type == 'spot') else self.prepare_request(market, type, query)
        methodTail = 'PriceOrders' if stop else 'Orders'
        method = self.get_supported_mapping(type, {
            'spot': 'privateSpotDelete' + methodTail,
            'margin': 'privateSpotDelete' + methodTail,
            'swap': 'privateFuturesDeleteSettle' + methodTail,
            'future': 'privateDeliveryDeleteSettle' + methodTail,
        })
        response = await getattr(self, method)(self.extend(request, requestParams))
        #
        #     [
        #         {
        #             "id": 139797004085,
        #             "contract": "ADA_USDT",
        #             "mkfr": "0",
        #             "tkfr": "0.0005",
        #             "tif": "gtc",
        #             "is_reduce_only": False,
        #             "create_time": 1647911169.343,
        #             "finish_time": 1647911226.849,
        #             "price": "0.8",
        #             "size": 1,
        #             "refr": "0.3",
        #             "left": 1,
        #             "text": "api",
        #             "fill_price": "0",
        #             "user": 6693577,
        #             "finish_as": "cancelled",
        #             "status": "finished",
        #             "is_liq": False,
        #             "refu": 2436035,
        #             "is_close": False,
        #             "iceberg": 0
        #         }
        #         ...
        #     ]
        #
        return self.parse_orders(response, market)
    async def transfer(self, code, amount, fromAccount, toAccount, params={}):
        """
        makes internal transfers of funds between accounts on the same exchange
        :param str code: unified currency code for currency being transferred
        :param float amount: the amount of currency to transfer
        :param str fromAccount: the account to transfer currency from
        :param str toAccount: the account to transfer currency to
        :param dict params: Exchange specific parameters
        :param dict params['symbol']: Unified market symbol *required for type == margin*
        :returns: A `transfer structure <https://docs.ccxt.com/en/latest/manual.html#transfer-structure>`
        """
        await self.load_markets()
        currency = self.currency(code)
        fromId = self.parse_account(fromAccount)
        toId = self.parse_account(toAccount)
        truncated = self.currency_to_precision(code, amount)
        request = {
            'currency': currency['id'],
            'amount': truncated,
        }
        # an account id that is not a known account type is an isolated margin
        # market id: send 'margin' plus the currency_pair instead
        if not (fromId in self.options['accountsByType']):
            request['from'] = 'margin'
            request['currency_pair'] = fromId
        else:
            request['from'] = fromId
        if not (toId in self.options['accountsByType']):
            request['to'] = 'margin'
            request['currency_pair'] = toId
        else:
            request['to'] = toId
        if fromId == 'margin' or toId == 'margin':
            # plain margin transfers additionally require the market symbol
            symbol = self.safe_string_2(params, 'symbol', 'currency_pair')
            if symbol is None:
                raise ArgumentsRequired(self.id + ' transfer requires params["symbol"] for isolated margin transfers')
            market = self.market(symbol)
            request['currency_pair'] = market['id']
            params = self.omit(params, 'symbol')
        if (toId == 'futures') or (toId == 'delivery') or (fromId == 'futures') or (fromId == 'delivery'):
            # contract wallets are segregated per settle currency
            request['settle'] = currency['lowerCaseId']
        response = await self.privateWalletPostTransfers(self.extend(request, params))
        #
        # according to the docs(however actual response seems to be an empty string '')
        #
        #     {
        #         "currency": "BTC",
        #         "from": "spot",
        #         "to": "margin",
        #         "amount": "1",
        #         "currency_pair": "BTC_USDT"
        #     }
        #
        transfer = self.parse_transfer(response, currency)
        return self.extend(transfer, {
            'fromAccount': fromAccount,
            'toAccount': toAccount,
            'amount': self.parse_number(truncated),
        })
def parse_account(self, account):
accountsByType = self.options['accountsByType']
if account in accountsByType:
return accountsByType[account]
elif account in self.markets:
market = self.market(account)
return market['id']
else:
keys = list(accountsByType.keys())
raise ExchangeError(self.id + ' accounts must be one of ' + ', '.join(keys) + ' or an isolated margin symbol')
def parse_transfer(self, transfer, currency=None):
timestamp = self.milliseconds()
return {
'id': None,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'currency': self.safe_currency_code(None, currency),
'amount': None,
'fromAccount': None,
'toAccount': None,
'status': None,
'info': transfer,
}
async def
|
(self, leverage, symbol=None, params={}):
if symbol is None:
raise ArgumentsRequired(self.id + ' setLeverage() requires a symbol argument')
# WARNING: THIS WILL INCREASE LIQUIDATION PRICE FOR OPEN ISOLATED LONG POSITIONS
# AND DECREASE LIQUIDATION PRICE FOR OPEN ISOLATED SHORT POSITIONS
if (leverage < 0) or (leverage > 100):
raise BadRequest(self.id + ' setLeverage() leverage should be between 1 and 100')
await self.load_markets()
market = self.market(symbol)
method = self.get_supported_mapping(market['type'], {
'swap': 'privateFuturesPostSettlePositionsContractLeverage',
'future': 'privateDeliveryPostSettlePositionsContractLeverage',
})
request, query = self.prepare_request(market, None, params)
defaultMarginMode = self.safe_string_2(self.options, 'marginMode', 'defaultMarginMode')
crossLeverageLimit = self.safe_string(query, 'cross_leverage_limit')
marginMode = self.safe_string(query, 'marginMode', defaultMarginMode)
if crossLeverageLimit is not None:
marginMode = 'cross'
leverage = crossLeverageLimit
if marginMode == 'cross' or marginMode == 'cross_margin':
request['query'] = {
'cross_leverage_limit': str(leverage),
'leverage': '0',
}
else:
request['query'] = {
'leverage': str(leverage),
}
response = await getattr(self, method)(self.extend(request, query))
#
# {
# "value": "0",
# "leverage": "5",
# "mode": "single",
# "realised_point": "0",
# "contract": "BTC_USDT",
# "entry_price": "0",
# "mark_price": "62035.86",
# "history_point": "0",
# "realised_pnl": "0",
# "close_order": null,
# "size": 0,
# "cross_leverage_limit": "0",
# "pending_orders": 0,
# "adl_ranking": 6,
# "maintenance_rate": "0.005",
# "unrealised_pnl": "0",
# "user": 2436035,
# "leverage_max": "100",
# "history_pnl": "0",
# "risk_limit": "1000000",
# "margin": "0",
# "last_close_pnl": "0",
# "liq_price": "0"
# }
#
return response
    def parse_position(self, position, market=None):
        """
        * @ignore
        parse a single raw contract position into a unified position structure
        :param dict position: raw position from the exchange(see the sample below)
        :param dict market: CCXT market
        :returns dict: a unified `position structure <https://docs.ccxt.com/en/latest/manual.html#position-structure>`
        """
        #
        #     {
        #         value: "12.475572",
        #         leverage: "0",
        #         mode: "single",
        #         realised_point: "0",
        #         contract: "BTC_USDT",
        #         entry_price: "62422.6",
        #         mark_price: "62377.86",
        #         history_point: "0",
        #         realised_pnl: "-0.00624226",
        #         close_order: null,
        #         size: "2",
        #         cross_leverage_limit: "25",
        #         pending_orders: "0",
        #         adl_ranking: "5",
        #         maintenance_rate: "0.005",
        #         unrealised_pnl: "-0.008948",
        #         user: "663337",
        #         leverage_max: "100",
        #         history_pnl: "14.98868396636",
        #         risk_limit: "1000000",
        #         margin: "0.740721495056",
        #         last_close_pnl: "-0.041996015",
        #         liq_price: "59058.58"
        #     }
        #
        contract = self.safe_string(position, 'contract')
        market = self.safe_market(contract, market)
        # size is signed: positive = long, negative = short
        size = self.safe_string(position, 'size')
        side = None
        if Precise.string_gt(size, '0'):
            side = 'long'
        elif Precise.string_lt(size, '0'):
            side = 'short'
        maintenanceRate = self.safe_string(position, 'maintenance_rate')
        notional = self.safe_string(position, 'value')
        leverage = self.safe_string(position, 'leverage')
        # leverage '0' denotes cross margin on self exchange
        marginMode = None
        if leverage == '0':
            marginMode = 'cross'
        else:
            marginMode = 'isolated'
        unrealisedPnl = self.safe_string(position, 'unrealised_pnl')
        # Initial Position Margin = ( Position Value / Leverage ) + Close Position Fee
        # *The default leverage under the full position is the highest leverage in the market.
        # *Trading fee is charged as Taker Fee Rate(0.075%).
        takerFee = '0.00075'
        feePaid = Precise.string_mul(takerFee, notional)
        initialMarginString = Precise.string_add(Precise.string_div(notional, leverage), feePaid)
        percentage = Precise.string_mul(Precise.string_div(unrealisedPnl, initialMarginString), '100')
        return {
            'info': position,
            'symbol': self.safe_string(market, 'symbol'),
            'timestamp': None,
            'datetime': None,
            'initialMargin': self.parse_number(initialMarginString),
            'initialMarginPercentage': self.parse_number(Precise.string_div(initialMarginString, notional)),
            'maintenanceMargin': self.parse_number(Precise.string_mul(maintenanceRate, notional)),
            'maintenanceMarginPercentage': self.parse_number(maintenanceRate),
            'entryPrice': self.safe_number(position, 'entry_price'),
            'notional': self.parse_number(notional),
            'leverage': self.safe_number(position, 'leverage'),
            'unrealizedPnl': self.parse_number(unrealisedPnl),
            'contracts': self.parse_number(Precise.string_abs(size)),
            'contractSize': self.safe_value(market, 'contractSize'),
            # 'realisedPnl': position['realised_pnl'],
            'marginRatio': None,
            'liquidationPrice': self.safe_number(position, 'liq_price'),
            'markPrice': self.safe_number(position, 'mark_price'),
            'collateral': self.safe_number(position, 'margin'),
            'marginMode': marginMode,
            'marginType': marginMode,  # deprecated
            'side': side,
            'percentage': self.parse_number(percentage),
        }
def parse_positions(self, positions):
result = []
for i in range(0, len(positions)):
result.append(self.parse_position(positions[i]))
return result
    async def fetch_positions(self, symbols=None, params={}):
        """
        Fetch trades positions
        * @param {[str]} symbols Not used by Gateio, but parsed internally by CCXT
        :param dict params: exchange specific parameters
        :param str params['settle']: 'btc' or 'usdt' - settle currency for perpetual swap and future - default="usdt" for swap and "btc" for future
        :param str params['type']: swap or future, if not provided self.options['defaultType'] is used
        :returns: An array of `position structures <https://docs.ccxt.com/en/latest/manual.html#position-structure>`
        """
        await self.load_markets()
        type, query = self.handle_market_type_and_params('fetchPositions', None, params)
        request, requestParams = self.prepare_request(None, type, query)
        method = self.get_supported_mapping(type, {
            'swap': 'privateFuturesGetSettlePositions',
            'future': 'privateDeliveryGetSettlePositions',
        })
        response = await getattr(self, method)(self.extend(request, requestParams))
        #
        #     [
        #         {
        #             value: "12.475572",
        #             leverage: "0",
        #             mode: "single",
        #             realised_point: "0",
        #             contract: "BTC_USDT",
        #             entry_price: "62422.6",
        #             mark_price: "62377.86",
        #             history_point: "0",
        #             realised_pnl: "-0.00624226",
        #             close_order: null,
        #             size: "2",
        #             cross_leverage_limit: "25",
        #             pending_orders: "0",
        #             adl_ranking: "5",
        #             maintenance_rate: "0.005",
        #             unrealised_pnl: "-0.008948",
        #             user: "6693577",
        #             leverage_max: "100",
        #             history_pnl: "14.98868396636",
        #             risk_limit: "1000000",
        #             margin: "0.740721495056",
        #             last_close_pnl: "-0.041996015",
        #             liq_price: "59058.58"
        #         }
        #     ]
        #
        result = self.parse_positions(response)
        # filter client-side: the endpoint returns all positions for the settle currency
        return self.filter_by_array(result, 'symbol', symbols, False)
    async def fetch_leverage_tiers(self, symbols=None, params={}):
        """
        retrieve information on the maximum leverage for different trade sizes
        :param [str]|None symbols: a list of unified market symbols
        :param dict params: exchange specific parameters
        :param str params['type']: swap or future, if not provided self.options['defaultType'] is used
        :returns: leverage tiers keyed by market symbol(see `leverage tiers structure <https://docs.ccxt.com/en/latest/manual.html#leverage-tiers-structure>`)
        """
        await self.load_markets()
        type, query = self.handle_market_type_and_params('fetchLeverageTiers', None, params)
        request, requestParams = self.prepare_request(None, type, query)
        # only contract markets publish risk-limit(leverage tier) data
        if type != 'future' and type != 'swap':
            raise BadRequest(self.id + ' fetchLeverageTiers only supports swap and future')
        method = self.get_supported_mapping(type, {
            'swap': 'publicFuturesGetSettleContracts',
            'future': 'publicDeliveryGetSettleContracts',
        })
        response = await getattr(self, method)(self.extend(request, requestParams))
        #
        # Perpetual swap
        #
        #     [
        #         {
        #             "name": "BTC_USDT",
        #             "type": "direct",
        #             "quanto_multiplier": "0.0001",
        #             "ref_discount_rate": "0",
        #             "order_price_deviate": "0.5",
        #             "maintenance_rate": "0.005",
        #             "mark_type": "index",
        #             "last_price": "38026",
        #             "mark_price": "37985.6",
        #             "index_price": "37954.92",
        #             "funding_rate_indicative": "0.000219",
        #             "mark_price_round": "0.01",
        #             "funding_offset": 0,
        #             "in_delisting": False,
        #             "risk_limit_base": "1000000",
        #             "interest_rate": "0.0003",
        #             "order_price_round": "0.1",
        #             "order_size_min": 1,
        #             "ref_rebate_rate": "0.2",
        #             "funding_interval": 28800,
        #             "risk_limit_step": "1000000",
        #             "leverage_min": "1",
        #             "leverage_max": "100",
        #             "risk_limit_max": "8000000",
        #             "maker_fee_rate": "-0.00025",
        #             "taker_fee_rate": "0.00075",
        #             "funding_rate": "0.002053",
        #             "order_size_max": 1000000,
        #             "funding_next_apply": 1610035200,
        #             "short_users": 977,
        #             "config_change_time": 1609899548,
        #             "trade_size": 28530850594,
        #             "position_size": 5223816,
        #             "long_users": 455,
        #             "funding_impact_value": "60000",
        #             "orders_limit": 50,
        #             "trade_id": 10851092,
        #             "orderbook_id": 2129638396
        #         }
        #     ]
        #
        # Delivery Futures
        #
        #     [
        #         {
        #             "name": "BTC_USDT_20200814",
        #             "underlying": "BTC_USDT",
        #             "cycle": "WEEKLY",
        #             "type": "direct",
        #             "quanto_multiplier": "0.0001",
        #             "mark_type": "index",
        #             "last_price": "9017",
        #             "mark_price": "9019",
        #             "index_price": "9005.3",
        #             "basis_rate": "0.185095",
        #             "basis_value": "13.7",
        #             "basis_impact_value": "100000",
        #             "settle_price": "0",
        #             "settle_price_interval": 60,
        #             "settle_price_duration": 1800,
        #             "settle_fee_rate": "0.0015",
        #             "expire_time": 1593763200,
        #             "order_price_round": "0.1",
        #             "mark_price_round": "0.1",
        #             "leverage_min": "1",
        #             "leverage_max": "100",
        #             "maintenance_rate": "1000000",
        #             "risk_limit_base": "140.726652109199",
        #             "risk_limit_step": "1000000",
        #             "risk_limit_max": "8000000",
        #             "maker_fee_rate": "-0.00025",
        #             "taker_fee_rate": "0.00075",
        #             "ref_discount_rate": "0",
        #             "ref_rebate_rate": "0.2",
        #             "order_price_deviate": "0.5",
        #             "order_size_min": 1,
        #             "order_size_max": 1000000,
        #             "orders_limit": 50,
        #             "orderbook_id": 63,
        #             "trade_id": 26,
        #             "trade_size": 435,
        #             "position_size": 130,
        #             "config_change_time": 1593158867,
        #             "in_delisting": False
        #         }
        #     ]
        #
        # markets are identified by the 'name' field in the response
        return self.parse_leverage_tiers(response, symbols, 'name')
    def parse_market_leverage_tiers(self, info, market=None):
        """
        * @ignore
        https://www.gate.io/help/futures/perpetual/22162/instrctions-of-risk-limit
        Derives leverage tiers from the market's risk-limit parameters: each
        risk_limit_step of notional raises the margin requirements by one unit.
        :param dict info: Exchange market response for 1 market
        :param dict market: CCXT market
        :returns list: leverage tiers, one per risk-limit step
        """
        #
        # Perpetual swap
        #
        #     {
        #         "name": "BTC_USDT",
        #         "type": "direct",
        #         "quanto_multiplier": "0.0001",
        #         "ref_discount_rate": "0",
        #         "order_price_deviate": "0.5",
        #         "maintenance_rate": "0.005",
        #         "mark_type": "index",
        #         "last_price": "38026",
        #         "mark_price": "37985.6",
        #         "index_price": "37954.92",
        #         "funding_rate_indicative": "0.000219",
        #         "mark_price_round": "0.01",
        #         "funding_offset": 0,
        #         "in_delisting": False,
        #         "risk_limit_base": "1000000",
        #         "interest_rate": "0.0003",
        #         "order_price_round": "0.1",
        #         "order_size_min": 1,
        #         "ref_rebate_rate": "0.2",
        #         "funding_interval": 28800,
        #         "risk_limit_step": "1000000",
        #         "leverage_min": "1",
        #         "leverage_max": "100",
        #         "risk_limit_max": "8000000",
        #         "maker_fee_rate": "-0.00025",
        #         "taker_fee_rate": "0.00075",
        #         "funding_rate": "0.002053",
        #         "order_size_max": 1000000,
        #         "funding_next_apply": 1610035200,
        #         "short_users": 977,
        #         "config_change_time": 1609899548,
        #         "trade_size": 28530850594,
        #         "position_size": 5223816,
        #         "long_users": 455,
        #         "funding_impact_value": "60000",
        #         "orders_limit": 50,
        #         "trade_id": 10851092,
        #         "orderbook_id": 2129638396
        #     }
        #
        # Delivery Futures
        #
        #     {
        #         "name": "BTC_USDT_20200814",
        #         "underlying": "BTC_USDT",
        #         "cycle": "WEEKLY",
        #         "type": "direct",
        #         "quanto_multiplier": "0.0001",
        #         "mark_type": "index",
        #         "last_price": "9017",
        #         "mark_price": "9019",
        #         "index_price": "9005.3",
        #         "basis_rate": "0.185095",
        #         "basis_value": "13.7",
        #         "basis_impact_value": "100000",
        #         "settle_price": "0",
        #         "settle_price_interval": 60,
        #         "settle_price_duration": 1800,
        #         "settle_fee_rate": "0.0015",
        #         "expire_time": 1593763200,
        #         "order_price_round": "0.1",
        #         "mark_price_round": "0.1",
        #         "leverage_min": "1",
        #         "leverage_max": "100",
        #         "maintenance_rate": "1000000",
        #         "risk_limit_base": "140.726652109199",
        #         "risk_limit_step": "1000000",
        #         "risk_limit_max": "8000000",
        #         "maker_fee_rate": "-0.00025",
        #         "taker_fee_rate": "0.00075",
        #         "ref_discount_rate": "0",
        #         "ref_rebate_rate": "0.2",
        #         "order_price_deviate": "0.5",
        #         "order_size_min": 1,
        #         "order_size_max": 1000000,
        #         "orders_limit": 50,
        #         "orderbook_id": 63,
        #         "trade_id": 26,
        #         "trade_size": 435,
        #         "position_size": 130,
        #         "config_change_time": 1593158867,
        #         "in_delisting": False
        #     }
        #
        maintenanceMarginUnit = self.safe_string(info, 'maintenance_rate')  # '0.005',
        leverageMax = self.safe_string(info, 'leverage_max')  # '100',
        riskLimitStep = self.safe_string(info, 'risk_limit_step')  # '1000000',
        riskLimitMax = self.safe_string(info, 'risk_limit_max')  # '16000000',
        initialMarginUnit = Precise.string_div('1', leverageMax)
        maintenanceMarginRate = maintenanceMarginUnit
        initialMarginRatio = initialMarginUnit
        floor = '0'
        tiers = []
        # tier n covers notionals in [n*step, (n+1)*step); the maintenance and
        # initial margin rates grow linearly by one unit per tier
        while(Precise.string_lt(floor, riskLimitMax)):
            cap = Precise.string_add(floor, riskLimitStep)
            tiers.append({
                'tier': self.parse_number(Precise.string_div(cap, riskLimitStep)),
                'currency': self.safe_string(market, 'settle'),
                'minNotional': self.parse_number(floor),
                'maxNotional': self.parse_number(cap),
                'maintenanceMarginRate': self.parse_number(maintenanceMarginRate),
                'maxLeverage': self.parse_number(Precise.string_div('1', initialMarginRatio)),
                'info': info,
            })
            maintenanceMarginRate = Precise.string_add(maintenanceMarginRate, maintenanceMarginUnit)
            initialMarginRatio = Precise.string_add(initialMarginRatio, initialMarginUnit)
            floor = cap
        return tiers
    def sign(self, path, api=[], method='GET', params={}, headers=None, body=None):
        """Build the final HTTP request (url, method, body, headers) for this API call.

        :param str path: endpoint path template, may contain {placeholders}
        :param list api: two-element list [authentication, type] where
                         authentication is 'public' or 'private' and type is the
                         market type ('spot', 'margin', 'future', 'delivery').
                         NOTE(review): mutable default argument -- never mutated
                         here, but confirm upstream callers always pass a list.
        :param str method: HTTP verb
        :param dict params: request parameters; path placeholders are consumed,
                            the remainder becomes the query string / JSON body
        :returns dict: {'url', 'method', 'body', 'headers'}
        :raises NotSupported: when no base URL exists for the requested type
                              (e.g. testnet URL configured as None)
        """
        authentication = api[0]  # public, private
        type = api[1]  # spot, margin, future, delivery  (shadows the builtin `type`)
        # params that fill path placeholders must not be duplicated in the query
        query = self.omit(params, self.extract_params(path))
        path = self.implode_params(path, params)
        endPart = '' if (path == '') else ('/' + path)
        entirePath = '/' + type + endPart
        url = self.urls['api'][authentication][type]
        if url is None:
            raise NotSupported(self.id + ' does not have a testnet for the ' + type + ' market type.')
        url += entirePath
        if authentication == 'public':
            # public endpoints: just append the query string, no signing
            if query:
                url += '?' + self.urlencode(query)
        else:
            # private endpoints: sign with HMAC-SHA512 over
            # METHOD \n signaturePath \n queryString \n sha512(body) \n timestamp
            queryString = ''
            if (method == 'GET') or (method == 'DELETE'):
                # GET/DELETE carry everything in the query string; body stays as passed in
                if query:
                    queryString = self.urlencode(query)
                    url += '?' + queryString
            else:
                # POST/PUT: an optional 'query' sub-dict goes into the URL,
                # everything else is JSON-encoded into the request body
                urlQueryParams = self.safe_value(query, 'query', {})
                if urlQueryParams:
                    queryString = self.urlencode(urlQueryParams)
                    url += '?' + queryString
                query = self.omit(query, 'query')
                body = self.json(query)
            # the body hash is computed over the empty string when there is no body
            bodyPayload = '' if (body is None) else body
            bodySignature = self.hash(self.encode(bodyPayload), 'sha512')
            timestamp = self.seconds()
            timestampString = str(timestamp)
            # the signed path includes the API version prefix, unlike the request URL
            signaturePath = '/api/' + self.version + entirePath
            payloadArray = [method.upper(), signaturePath, queryString, bodySignature, timestampString]
            # eslint-disable-next-line quotes
            payload = "\n".join(payloadArray)
            signature = self.hmac(self.encode(payload), self.encode(self.secret), hashlib.sha512)
            headers = {
                'KEY': self.apiKey,
                'Timestamp': timestampString,
                'SIGN': signature,
                'Content-Type': 'application/json',
            }
        return {'url': url, 'method': method, 'body': body, 'headers': headers}
def handle_errors(self, code, reason, url, method, headers, body, response, requestHeaders, requestBody):
if response is None:
return
#
# {"label": "ORDER_NOT_FOUND", "message": "Order not found"}
# {"label": "INVALID_PARAM_VALUE", "message": "invalid argument: status"}
# {"label": "INVALID_PARAM_VALUE", "message": "invalid argument: Trigger.rule"}
# {"label": "INVALID_PARAM_VALUE", "message": "invalid argument: trigger.expiration invalid range"}
# {"label": "INVALID_ARGUMENT", "detail": "invalid size"}
#
label = self.safe_string(response, 'label')
if label is not None:
feedback = self.id + ' ' + body
self.throw_exactly_matched_exception(self.exceptions['exact'], label, feedback)
raise ExchangeError(feedback)
|
set_leverage
|
nonce_keyed_account.rs
|
use crate::{
account_utils::State as AccountUtilsState, keyed_account::KeyedAccount,
nonce_account::create_account,
};
use solana_program::{
instruction::InstructionError,
nonce::{self, state::Versions, State},
pubkey::Pubkey,
system_instruction::NonceError,
sysvar::{recent_blockhashes::RecentBlockhashes, rent::Rent},
};
use std::collections::HashSet;
/// Operations a keyed account must support to act as a durable nonce account.
///
/// All methods return `Ok(())` on success and an [`InstructionError`] on
/// failure; nonce-specific failures are surfaced via `NonceError` converted
/// into `InstructionError`.
pub trait NonceKeyedAccount {
    /// Replace the stored nonce (blockhash) with the most recent blockhash.
    /// Requires the nonce authority to be among `signers`.
    fn advance_nonce_account(
        &self,
        recent_blockhashes: &RecentBlockhashes,
        signers: &HashSet<Pubkey>,
    ) -> Result<(), InstructionError>;
    /// Move `lamports` from this nonce account to `to`, enforcing rent
    /// exemption for partial withdrawals and deinitializing on full drain.
    fn withdraw_nonce_account(
        &self,
        lamports: u64,
        to: &KeyedAccount,
        recent_blockhashes: &RecentBlockhashes,
        rent: &Rent,
        signers: &HashSet<Pubkey>,
    ) -> Result<(), InstructionError>;
    /// Transition an uninitialized account into an initialized nonce account
    /// owned by `nonce_authority`.
    fn initialize_nonce_account(
        &self,
        nonce_authority: &Pubkey,
        recent_blockhashes: &RecentBlockhashes,
        rent: &Rent,
    ) -> Result<(), InstructionError>;
    /// Change the stored authority to `nonce_authority`; the current authority
    /// must be among `signers`.
    fn authorize_nonce_account(
        &self,
        nonce_authority: &Pubkey,
        signers: &HashSet<Pubkey>,
    ) -> Result<(), InstructionError>;
}
impl<'a> NonceKeyedAccount for KeyedAccount<'a> {
    /// Advance the nonce: store the newest recent blockhash (and its fee
    /// calculator) into the account state.
    ///
    /// Fails with `NoRecentBlockhashes` when the sysvar is empty,
    /// `MissingRequiredSignature` when the authority did not sign,
    /// `NotExpired` when the stored hash already equals the newest one,
    /// and `BadAccountState` when the account is not initialized.
    fn advance_nonce_account(
        &self,
        recent_blockhashes: &RecentBlockhashes,
        signers: &HashSet<Pubkey>,
    ) -> Result<(), InstructionError> {
        if recent_blockhashes.is_empty() {
            return Err(NonceError::NoRecentBlockhashes.into());
        }
        let state = AccountUtilsState::<Versions>::state(self)?.convert_to_current();
        match state {
            State::Initialized(data) => {
                if !signers.contains(&data.authority) {
                    return Err(InstructionError::MissingRequiredSignature);
                }
                // index 0 is the most recent blockhash in the sysvar
                let recent_blockhash = recent_blockhashes[0].blockhash;
                if data.blockhash == recent_blockhash {
                    return Err(NonceError::NotExpired.into());
                }
                // keep the authority, replace blockhash + fee calculator
                let new_data = nonce::state::Data {
                    blockhash: recent_blockhash,
                    fee_calculator: recent_blockhashes[0].fee_calculator.clone(),
                    ..data
                };
                self.set_state(&Versions::new_current(State::Initialized(new_data)))
            }
            _ => Err(NonceError::BadAccountState.into()),
        }
    }
    /// Withdraw `lamports` to `to`.
    ///
    /// Uninitialized accounts may be drained by their own key. Initialized
    /// accounts require the stored authority to sign; a full drain also
    /// requires the stored nonce to be expired and deinitializes the account,
    /// while a partial withdrawal must leave at least the rent-exempt minimum.
    fn withdraw_nonce_account(
        &self,
        lamports: u64,
        to: &KeyedAccount,
        recent_blockhashes: &RecentBlockhashes,
        rent: &Rent,
        signers: &HashSet<Pubkey>,
    ) -> Result<(), InstructionError> {
        // determine which pubkey must have signed for this withdrawal
        let signer = match AccountUtilsState::<Versions>::state(self)?.convert_to_current() {
            State::Uninitialized => {
                if lamports > self.lamports()? {
                    return Err(InstructionError::InsufficientFunds);
                }
                *self.unsigned_key()
            }
            State::Initialized(ref data) => {
                if lamports == self.lamports()? {
                    // full drain: only allowed once the stored nonce is stale
                    if data.blockhash == recent_blockhashes[0].blockhash {
                        return Err(NonceError::NotExpired.into());
                    }
                    // NOTE: state is cleared before the signer check below;
                    // on MissingRequiredSignature the set_state change is
                    // expected to be rolled back with the failed instruction
                    self.set_state(&Versions::new_current(State::Uninitialized))?;
                } else {
                    // partial withdrawal must keep the account rent-exempt
                    let min_balance = rent.minimum_balance(self.data_len()?);
                    if lamports + min_balance > self.lamports()? {
                        return Err(InstructionError::InsufficientFunds);
                    }
                }
                data.authority
            }
        };
        if !signers.contains(&signer) {
            return Err(InstructionError::MissingRequiredSignature);
        }
        // NOTE(review): unchecked +/- on lamports; balances were validated
        // above, but confirm overflow on `to` is impossible at the caller
        self.try_account_ref_mut()?.lamports -= lamports;
        to.try_account_ref_mut()?.lamports += lamports;
        Ok(())
    }
    /// Initialize an uninitialized account as a nonce account, storing the
    /// authority plus the newest blockhash and fee calculator.
    ///
    /// Requires a non-empty blockhash sysvar and a rent-exempt balance.
    fn initialize_nonce_account(
        &self,
        nonce_authority: &Pubkey,
        recent_blockhashes: &RecentBlockhashes,
        rent: &Rent,
    ) -> Result<(), InstructionError> {
        if recent_blockhashes.is_empty() {
            return Err(NonceError::NoRecentBlockhashes.into());
        }
        match AccountUtilsState::<Versions>::state(self)?.convert_to_current() {
            State::Uninitialized => {
                let min_balance = rent.minimum_balance(self.data_len()?);
                if self.lamports()? < min_balance {
                    return Err(InstructionError::InsufficientFunds);
                }
                let data = nonce::state::Data {
                    authority: *nonce_authority,
                    blockhash: recent_blockhashes[0].blockhash,
                    fee_calculator: recent_blockhashes[0].fee_calculator.clone(),
                };
                self.set_state(&Versions::new_current(State::Initialized(data)))
            }
            // already-initialized accounts cannot be re-initialized
            _ => Err(NonceError::BadAccountState.into()),
        }
    }
    /// Replace the stored authority; the current authority must have signed.
    fn authorize_nonce_account(
        &self,
        nonce_authority: &Pubkey,
        signers: &HashSet<Pubkey>,
    ) -> Result<(), InstructionError> {
        match AccountUtilsState::<Versions>::state(self)?.convert_to_current() {
            State::Initialized(data) => {
                if !signers.contains(&data.authority) {
                    return Err(InstructionError::MissingRequiredSignature);
                }
                // keep blockhash/fee_calculator, swap only the authority
                let new_data = nonce::state::Data {
                    authority: *nonce_authority,
                    ..data
                };
                self.set_state(&Versions::new_current(State::Initialized(new_data)))
            }
            _ => Err(NonceError::BadAccountState.into()),
        }
    }
}
/// Test helper: builds a throwaway account with the given balance, wraps it in
/// a [`KeyedAccount`] (optionally marked as a signer), and hands it to `f`.
pub fn with_test_keyed_account<F>(lamports: u64, signer: bool, f: F)
where
    F: Fn(&KeyedAccount),
{
    let key = Pubkey::new_unique();
    let backing_account = create_account(lamports);
    let keyed = KeyedAccount::new(&key, signer, &backing_account);
    f(&keyed)
}
#[cfg(test)]
mod test {
use super::*;
use crate::{
account_utils::State as AccountUtilsState,
keyed_account::KeyedAccount,
nonce::{self, State},
nonce_account::verify_nonce_account,
system_instruction::NonceError,
sysvar::recent_blockhashes::create_test_recent_blockhashes,
};
use solana_program::hash::Hash;
#[test]
fn default_is_uninitialized() {
assert_eq!(State::default(), State::Uninitialized)
}
#[test]
fn keyed_account_expected_behavior() {
let rent = Rent {
lamports_per_byte_year: 42,
..Rent::default()
};
let min_lamports = rent.minimum_balance(State::size());
with_test_keyed_account(min_lamports + 42, true, |keyed_account| {
let data = nonce::state::Data {
authority: *keyed_account.unsigned_key(),
..nonce::state::Data::default()
};
let mut signers = HashSet::new();
signers.insert(*keyed_account.signer_key().unwrap());
let state = AccountUtilsState::<Versions>::state(keyed_account)
.unwrap()
.convert_to_current();
// New is in Uninitialzed state
assert_eq!(state, State::Uninitialized);
let recent_blockhashes = create_test_recent_blockhashes(95);
let authorized = keyed_account.unsigned_key();
keyed_account
.initialize_nonce_account(&authorized, &recent_blockhashes, &rent)
.unwrap();
let state = AccountUtilsState::<Versions>::state(keyed_account)
.unwrap()
.convert_to_current();
let data = nonce::state::Data {
blockhash: recent_blockhashes[0].blockhash,
fee_calculator: recent_blockhashes[0].fee_calculator.clone(),
..data
};
// First nonce instruction drives state from Uninitialized to Initialized
assert_eq!(state, State::Initialized(data.clone()));
let recent_blockhashes = create_test_recent_blockhashes(63);
keyed_account
.advance_nonce_account(&recent_blockhashes, &signers)
.unwrap();
let state = AccountUtilsState::<Versions>::state(keyed_account)
.unwrap()
.convert_to_current();
let data = nonce::state::Data {
blockhash: recent_blockhashes[0].blockhash,
fee_calculator: recent_blockhashes[0].fee_calculator.clone(),
..data
};
// Second nonce instruction consumes and replaces stored nonce
assert_eq!(state, State::Initialized(data.clone()));
let recent_blockhashes = create_test_recent_blockhashes(31);
keyed_account
.advance_nonce_account(&recent_blockhashes, &signers)
.unwrap();
let state = AccountUtilsState::<Versions>::state(keyed_account)
.unwrap()
.convert_to_current();
let data = nonce::state::Data {
blockhash: recent_blockhashes[0].blockhash,
fee_calculator: recent_blockhashes[0].fee_calculator.clone(),
..data
};
// Third nonce instruction for fun and profit
assert_eq!(state, State::Initialized(data));
with_test_keyed_account(42, false, |to_keyed| {
let recent_blockhashes = create_test_recent_blockhashes(0);
let withdraw_lamports = keyed_account.account.borrow().lamports;
let expect_nonce_lamports =
keyed_account.account.borrow().lamports - withdraw_lamports;
let expect_to_lamports = to_keyed.account.borrow().lamports + withdraw_lamports;
keyed_account
.withdraw_nonce_account(
withdraw_lamports,
&to_keyed,
&recent_blockhashes,
&rent,
&signers,
)
.unwrap();
// Empties Account balance
assert_eq!(
keyed_account.account.borrow().lamports,
expect_nonce_lamports
);
// Account balance goes to `to`
assert_eq!(to_keyed.account.borrow().lamports, expect_to_lamports);
let state = AccountUtilsState::<Versions>::state(keyed_account)
.unwrap()
.convert_to_current();
// Empty balance deinitializes data
assert_eq!(state, State::Uninitialized);
})
})
}
#[test]
fn nonce_inx_initialized_account_not_signer_fail() {
let rent = Rent {
lamports_per_byte_year: 42,
..Rent::default()
};
let min_lamports = rent.minimum_balance(State::size());
with_test_keyed_account(min_lamports + 42, true, |nonce_account| {
let recent_blockhashes = create_test_recent_blockhashes(31);
let authority = *nonce_account.unsigned_key();
nonce_account
.initialize_nonce_account(&authority, &recent_blockhashes, &rent)
.unwrap();
let pubkey = nonce_account.account.borrow().owner;
let nonce_account = KeyedAccount::new(&pubkey, false, nonce_account.account);
let state = AccountUtilsState::<Versions>::state(&nonce_account)
.unwrap()
.convert_to_current();
let data = nonce::state::Data {
authority,
blockhash: recent_blockhashes[0].blockhash,
fee_calculator: recent_blockhashes[0].fee_calculator.clone(),
};
assert_eq!(state, State::Initialized(data));
let signers = HashSet::new();
let recent_blockhashes = create_test_recent_blockhashes(0);
let result = nonce_account.advance_nonce_account(&recent_blockhashes, &signers);
assert_eq!(result, Err(InstructionError::MissingRequiredSignature),);
})
}
#[test]
fn nonce_inx_with_empty_recent_blockhashes_fail() {
let rent = Rent {
lamports_per_byte_year: 42,
..Rent::default()
};
let min_lamports = rent.minimum_balance(State::size());
with_test_keyed_account(min_lamports + 42, true, |keyed_account| {
let mut signers = HashSet::new();
signers.insert(*keyed_account.signer_key().unwrap());
let recent_blockhashes = create_test_recent_blockhashes(0);
let authorized = *keyed_account.unsigned_key();
keyed_account
.initialize_nonce_account(&authorized, &recent_blockhashes, &rent)
.unwrap();
let recent_blockhashes = vec![].into_iter().collect();
let result = keyed_account.advance_nonce_account(&recent_blockhashes, &signers);
assert_eq!(result, Err(NonceError::NoRecentBlockhashes.into()));
})
}
#[test]
fn nonce_inx_too_early_fail() {
let rent = Rent {
lamports_per_byte_year: 42,
..Rent::default()
};
let min_lamports = rent.minimum_balance(State::size());
with_test_keyed_account(min_lamports + 42, true, |keyed_account| {
let mut signers = HashSet::new();
signers.insert(*keyed_account.signer_key().unwrap());
let recent_blockhashes = create_test_recent_blockhashes(63);
let authorized = *keyed_account.unsigned_key();
keyed_account
.initialize_nonce_account(&authorized, &recent_blockhashes, &rent)
.unwrap();
let result = keyed_account.advance_nonce_account(&recent_blockhashes, &signers);
assert_eq!(result, Err(NonceError::NotExpired.into()));
})
}
#[test]
fn nonce_inx_uninitialized_account_fail() {
let rent = Rent {
lamports_per_byte_year: 42,
..Rent::default()
};
let min_lamports = rent.minimum_balance(State::size());
with_test_keyed_account(min_lamports + 42, true, |keyed_account| {
let mut signers = HashSet::new();
signers.insert(*keyed_account.signer_key().unwrap());
let recent_blockhashes = create_test_recent_blockhashes(63);
let result = keyed_account.advance_nonce_account(&recent_blockhashes, &signers);
assert_eq!(result, Err(NonceError::BadAccountState.into()));
})
}
#[test]
fn nonce_inx_independent_nonce_authority_ok() {
let rent = Rent {
lamports_per_byte_year: 42,
..Rent::default()
};
let min_lamports = rent.minimum_balance(State::size());
with_test_keyed_account(min_lamports + 42, true, |nonce_account| {
with_test_keyed_account(42, true, |nonce_authority| {
let mut signers = HashSet::new();
signers.insert(*nonce_account.signer_key().unwrap());
let recent_blockhashes = create_test_recent_blockhashes(63);
let authorized = *nonce_authority.unsigned_key();
nonce_account
.initialize_nonce_account(&authorized, &recent_blockhashes, &rent)
.unwrap();
let mut signers = HashSet::new();
signers.insert(*nonce_authority.signer_key().unwrap());
let recent_blockhashes = create_test_recent_blockhashes(31);
let result = nonce_account.advance_nonce_account(&recent_blockhashes, &signers);
assert_eq!(result, Ok(()));
});
});
}
#[test]
fn nonce_inx_no_nonce_authority_sig_fail() {
let rent = Rent {
lamports_per_byte_year: 42,
..Rent::default()
};
let min_lamports = rent.minimum_balance(State::size());
with_test_keyed_account(min_lamports + 42, true, |nonce_account| {
with_test_keyed_account(42, false, |nonce_authority| {
let mut signers = HashSet::new();
signers.insert(*nonce_account.signer_key().unwrap());
let recent_blockhashes = create_test_recent_blockhashes(63);
let authorized = *nonce_authority.unsigned_key();
nonce_account
.initialize_nonce_account(&authorized, &recent_blockhashes, &rent)
.unwrap();
let result = nonce_account.advance_nonce_account(&recent_blockhashes, &signers);
assert_eq!(result, Err(InstructionError::MissingRequiredSignature),);
});
});
}
#[test]
fn withdraw_inx_unintialized_acc_ok() {
let rent = Rent {
lamports_per_byte_year: 42,
..Rent::default()
};
let min_lamports = rent.minimum_balance(State::size());
with_test_keyed_account(min_lamports + 42, true, |nonce_keyed| {
let state = AccountUtilsState::<Versions>::state(nonce_keyed)
.unwrap()
.convert_to_current();
assert_eq!(state, State::Uninitialized);
with_test_keyed_account(42, false, |to_keyed| {
let mut signers = HashSet::new();
signers.insert(*nonce_keyed.signer_key().unwrap());
let recent_blockhashes = create_test_recent_blockhashes(0);
let withdraw_lamports = nonce_keyed.account.borrow().lamports;
let expect_nonce_lamports =
nonce_keyed.account.borrow().lamports - withdraw_lamports;
let expect_to_lamports = to_keyed.account.borrow().lamports + withdraw_lamports;
nonce_keyed
.withdraw_nonce_account(
withdraw_lamports,
&to_keyed,
&recent_blockhashes,
&rent,
&signers,
)
.unwrap();
let state = AccountUtilsState::<Versions>::state(nonce_keyed)
.unwrap()
.convert_to_current();
// Withdraw instruction...
// Deinitializes Account state
assert_eq!(state, State::Uninitialized);
// Empties Account balance
assert_eq!(nonce_keyed.account.borrow().lamports, expect_nonce_lamports);
// Account balance goes to `to`
assert_eq!(to_keyed.account.borrow().lamports, expect_to_lamports);
})
})
}
#[test]
fn withdraw_inx_unintialized_acc_unsigned_fail()
|
#[test]
fn withdraw_inx_unintialized_acc_insuff_funds_fail() {
let rent = Rent {
lamports_per_byte_year: 42,
..Rent::default()
};
let min_lamports = rent.minimum_balance(State::size());
with_test_keyed_account(min_lamports + 42, true, |nonce_keyed| {
let state = AccountUtilsState::<Versions>::state(nonce_keyed)
.unwrap()
.convert_to_current();
assert_eq!(state, State::Uninitialized);
with_test_keyed_account(42, false, |to_keyed| {
let mut signers = HashSet::new();
signers.insert(*nonce_keyed.signer_key().unwrap());
let recent_blockhashes = create_test_recent_blockhashes(0);
let lamports = nonce_keyed.account.borrow().lamports + 1;
let result = nonce_keyed.withdraw_nonce_account(
lamports,
&to_keyed,
&recent_blockhashes,
&rent,
&signers,
);
assert_eq!(result, Err(InstructionError::InsufficientFunds));
})
})
}
#[test]
fn withdraw_inx_uninitialized_acc_two_withdraws_ok() {
let rent = Rent {
lamports_per_byte_year: 42,
..Rent::default()
};
let min_lamports = rent.minimum_balance(State::size());
with_test_keyed_account(min_lamports + 42, true, |nonce_keyed| {
with_test_keyed_account(42, false, |to_keyed| {
let mut signers = HashSet::new();
signers.insert(*nonce_keyed.signer_key().unwrap());
let recent_blockhashes = create_test_recent_blockhashes(0);
let withdraw_lamports = nonce_keyed.account.borrow().lamports / 2;
let nonce_expect_lamports =
nonce_keyed.account.borrow().lamports - withdraw_lamports;
let to_expect_lamports = to_keyed.account.borrow().lamports + withdraw_lamports;
nonce_keyed
.withdraw_nonce_account(
withdraw_lamports,
&to_keyed,
&recent_blockhashes,
&rent,
&signers,
)
.unwrap();
let state = AccountUtilsState::<Versions>::state(nonce_keyed)
.unwrap()
.convert_to_current();
assert_eq!(state, State::Uninitialized);
assert_eq!(nonce_keyed.account.borrow().lamports, nonce_expect_lamports);
assert_eq!(to_keyed.account.borrow().lamports, to_expect_lamports);
let withdraw_lamports = nonce_keyed.account.borrow().lamports;
let nonce_expect_lamports =
nonce_keyed.account.borrow().lamports - withdraw_lamports;
let to_expect_lamports = to_keyed.account.borrow().lamports + withdraw_lamports;
nonce_keyed
.withdraw_nonce_account(
withdraw_lamports,
&to_keyed,
&recent_blockhashes,
&rent,
&signers,
)
.unwrap();
let state = AccountUtilsState::<Versions>::state(nonce_keyed)
.unwrap()
.convert_to_current();
assert_eq!(state, State::Uninitialized);
assert_eq!(nonce_keyed.account.borrow().lamports, nonce_expect_lamports);
assert_eq!(to_keyed.account.borrow().lamports, to_expect_lamports);
})
})
}
#[test]
fn withdraw_inx_initialized_acc_two_withdraws_ok() {
let rent = Rent {
lamports_per_byte_year: 42,
..Rent::default()
};
let min_lamports = rent.minimum_balance(State::size());
with_test_keyed_account(min_lamports + 42, true, |nonce_keyed| {
let mut signers = HashSet::new();
signers.insert(*nonce_keyed.signer_key().unwrap());
let recent_blockhashes = create_test_recent_blockhashes(31);
let authority = *nonce_keyed.unsigned_key();
nonce_keyed
.initialize_nonce_account(&authority, &recent_blockhashes, &rent)
.unwrap();
let state = AccountUtilsState::<Versions>::state(nonce_keyed)
.unwrap()
.convert_to_current();
let data = nonce::state::Data {
authority,
blockhash: recent_blockhashes[0].blockhash,
fee_calculator: recent_blockhashes[0].fee_calculator.clone(),
};
assert_eq!(state, State::Initialized(data.clone()));
with_test_keyed_account(42, false, |to_keyed| {
let withdraw_lamports = nonce_keyed.account.borrow().lamports - min_lamports;
let nonce_expect_lamports =
nonce_keyed.account.borrow().lamports - withdraw_lamports;
let to_expect_lamports = to_keyed.account.borrow().lamports + withdraw_lamports;
nonce_keyed
.withdraw_nonce_account(
withdraw_lamports,
&to_keyed,
&recent_blockhashes,
&rent,
&signers,
)
.unwrap();
let state = AccountUtilsState::<Versions>::state(nonce_keyed)
.unwrap()
.convert_to_current();
let data = nonce::state::Data {
blockhash: recent_blockhashes[0].blockhash,
fee_calculator: recent_blockhashes[0].fee_calculator.clone(),
..data.clone()
};
assert_eq!(state, State::Initialized(data));
assert_eq!(nonce_keyed.account.borrow().lamports, nonce_expect_lamports);
assert_eq!(to_keyed.account.borrow().lamports, to_expect_lamports);
let recent_blockhashes = create_test_recent_blockhashes(0);
let withdraw_lamports = nonce_keyed.account.borrow().lamports;
let nonce_expect_lamports =
nonce_keyed.account.borrow().lamports - withdraw_lamports;
let to_expect_lamports = to_keyed.account.borrow().lamports + withdraw_lamports;
nonce_keyed
.withdraw_nonce_account(
withdraw_lamports,
&to_keyed,
&recent_blockhashes,
&rent,
&signers,
)
.unwrap();
let state = AccountUtilsState::<Versions>::state(nonce_keyed)
.unwrap()
.convert_to_current();
assert_eq!(state, State::Uninitialized);
assert_eq!(nonce_keyed.account.borrow().lamports, nonce_expect_lamports);
assert_eq!(to_keyed.account.borrow().lamports, to_expect_lamports);
})
})
}
#[test]
fn withdraw_inx_initialized_acc_nonce_too_early_fail() {
let rent = Rent {
lamports_per_byte_year: 42,
..Rent::default()
};
let min_lamports = rent.minimum_balance(State::size());
with_test_keyed_account(min_lamports + 42, true, |nonce_keyed| {
let recent_blockhashes = create_test_recent_blockhashes(0);
let authorized = *nonce_keyed.unsigned_key();
nonce_keyed
.initialize_nonce_account(&authorized, &recent_blockhashes, &rent)
.unwrap();
with_test_keyed_account(42, false, |to_keyed| {
let mut signers = HashSet::new();
signers.insert(*nonce_keyed.signer_key().unwrap());
let withdraw_lamports = nonce_keyed.account.borrow().lamports;
let result = nonce_keyed.withdraw_nonce_account(
withdraw_lamports,
&to_keyed,
&recent_blockhashes,
&rent,
&signers,
);
assert_eq!(result, Err(NonceError::NotExpired.into()));
})
})
}
#[test]
fn withdraw_inx_initialized_acc_insuff_funds_fail() {
let rent = Rent {
lamports_per_byte_year: 42,
..Rent::default()
};
let min_lamports = rent.minimum_balance(State::size());
with_test_keyed_account(min_lamports + 42, true, |nonce_keyed| {
let recent_blockhashes = create_test_recent_blockhashes(95);
let authorized = *nonce_keyed.unsigned_key();
nonce_keyed
.initialize_nonce_account(&authorized, &recent_blockhashes, &rent)
.unwrap();
with_test_keyed_account(42, false, |to_keyed| {
let recent_blockhashes = create_test_recent_blockhashes(63);
let mut signers = HashSet::new();
signers.insert(*nonce_keyed.signer_key().unwrap());
let withdraw_lamports = nonce_keyed.account.borrow().lamports + 1;
let result = nonce_keyed.withdraw_nonce_account(
withdraw_lamports,
&to_keyed,
&recent_blockhashes,
&rent,
&signers,
);
assert_eq!(result, Err(InstructionError::InsufficientFunds));
})
})
}
#[test]
fn withdraw_inx_initialized_acc_insuff_rent_fail() {
let rent = Rent {
lamports_per_byte_year: 42,
..Rent::default()
};
let min_lamports = rent.minimum_balance(State::size());
with_test_keyed_account(min_lamports + 42, true, |nonce_keyed| {
let recent_blockhashes = create_test_recent_blockhashes(95);
let authorized = *nonce_keyed.unsigned_key();
nonce_keyed
.initialize_nonce_account(&authorized, &recent_blockhashes, &rent)
.unwrap();
with_test_keyed_account(42, false, |to_keyed| {
let recent_blockhashes = create_test_recent_blockhashes(63);
let mut signers = HashSet::new();
signers.insert(*nonce_keyed.signer_key().unwrap());
let withdraw_lamports = nonce_keyed.account.borrow().lamports - min_lamports + 1;
let result = nonce_keyed.withdraw_nonce_account(
withdraw_lamports,
&to_keyed,
&recent_blockhashes,
&rent,
&signers,
);
assert_eq!(result, Err(InstructionError::InsufficientFunds));
})
})
}
#[test]
fn initialize_inx_ok() {
let rent = Rent {
lamports_per_byte_year: 42,
..Rent::default()
};
let min_lamports = rent.minimum_balance(State::size());
with_test_keyed_account(min_lamports + 42, true, |keyed_account| {
let state = AccountUtilsState::<Versions>::state(keyed_account)
.unwrap()
.convert_to_current();
assert_eq!(state, State::Uninitialized);
let mut signers = HashSet::new();
signers.insert(*keyed_account.signer_key().unwrap());
let recent_blockhashes = create_test_recent_blockhashes(0);
let authority = *keyed_account.unsigned_key();
let result =
keyed_account.initialize_nonce_account(&authority, &recent_blockhashes, &rent);
let data = nonce::state::Data {
authority,
blockhash: recent_blockhashes[0].blockhash,
fee_calculator: recent_blockhashes[0].fee_calculator.clone(),
};
assert_eq!(result, Ok(()));
let state = AccountUtilsState::<Versions>::state(keyed_account)
.unwrap()
.convert_to_current();
assert_eq!(state, State::Initialized(data));
})
}
#[test]
fn initialize_inx_empty_recent_blockhashes_fail() {
let rent = Rent {
lamports_per_byte_year: 42,
..Rent::default()
};
let min_lamports = rent.minimum_balance(State::size());
with_test_keyed_account(min_lamports + 42, true, |keyed_account| {
let mut signers = HashSet::new();
signers.insert(*keyed_account.signer_key().unwrap());
let recent_blockhashes = vec![].into_iter().collect();
let authorized = *keyed_account.unsigned_key();
let result =
keyed_account.initialize_nonce_account(&authorized, &recent_blockhashes, &rent);
assert_eq!(result, Err(NonceError::NoRecentBlockhashes.into()));
})
}
#[test]
fn initialize_inx_initialized_account_fail() {
let rent = Rent {
lamports_per_byte_year: 42,
..Rent::default()
};
let min_lamports = rent.minimum_balance(State::size());
with_test_keyed_account(min_lamports + 42, true, |keyed_account| {
let recent_blockhashes = create_test_recent_blockhashes(31);
let authorized = *keyed_account.unsigned_key();
keyed_account
.initialize_nonce_account(&authorized, &recent_blockhashes, &rent)
.unwrap();
let recent_blockhashes = create_test_recent_blockhashes(0);
let result =
keyed_account.initialize_nonce_account(&authorized, &recent_blockhashes, &rent);
assert_eq!(result, Err(NonceError::BadAccountState.into()));
})
}
#[test]
fn initialize_inx_uninitialized_acc_insuff_funds_fail() {
let rent = Rent {
lamports_per_byte_year: 42,
..Rent::default()
};
let min_lamports = rent.minimum_balance(State::size());
with_test_keyed_account(min_lamports - 42, true, |keyed_account| {
let recent_blockhashes = create_test_recent_blockhashes(63);
let authorized = *keyed_account.unsigned_key();
let result =
keyed_account.initialize_nonce_account(&authorized, &recent_blockhashes, &rent);
assert_eq!(result, Err(InstructionError::InsufficientFunds));
})
}
#[test]
fn authorize_inx_ok() {
let rent = Rent {
lamports_per_byte_year: 42,
..Rent::default()
};
let min_lamports = rent.minimum_balance(State::size());
with_test_keyed_account(min_lamports + 42, true, |nonce_account| {
let mut signers = HashSet::new();
signers.insert(*nonce_account.signer_key().unwrap());
let recent_blockhashes = create_test_recent_blockhashes(31);
let authorized = *nonce_account.unsigned_key();
nonce_account
.initialize_nonce_account(&authorized, &recent_blockhashes, &rent)
.unwrap();
let authority = Pubkey::default();
let data = nonce::state::Data {
authority,
blockhash: recent_blockhashes[0].blockhash,
fee_calculator: recent_blockhashes[0].fee_calculator.clone(),
};
let result = nonce_account.authorize_nonce_account(&Pubkey::default(), &signers);
assert_eq!(result, Ok(()));
let state = AccountUtilsState::<Versions>::state(nonce_account)
.unwrap()
.convert_to_current();
assert_eq!(state, State::Initialized(data));
})
}
#[test]
fn authorize_inx_uninitialized_state_fail() {
let rent = Rent {
lamports_per_byte_year: 42,
..Rent::default()
};
let min_lamports = rent.minimum_balance(State::size());
with_test_keyed_account(min_lamports + 42, true, |nonce_account| {
let mut signers = HashSet::new();
signers.insert(*nonce_account.signer_key().unwrap());
let result = nonce_account.authorize_nonce_account(&Pubkey::default(), &signers);
assert_eq!(result, Err(NonceError::BadAccountState.into()));
})
}
#[test]
fn authorize_inx_bad_authority_fail() {
let rent = Rent {
lamports_per_byte_year: 42,
..Rent::default()
};
let min_lamports = rent.minimum_balance(State::size());
with_test_keyed_account(min_lamports + 42, true, |nonce_account| {
let mut signers = HashSet::new();
signers.insert(*nonce_account.signer_key().unwrap());
let recent_blockhashes = create_test_recent_blockhashes(31);
let authorized = &Pubkey::default().clone();
nonce_account
.initialize_nonce_account(&authorized, &recent_blockhashes, &rent)
.unwrap();
let result = nonce_account.authorize_nonce_account(&Pubkey::default(), &signers);
assert_eq!(result, Err(InstructionError::MissingRequiredSignature));
})
}
#[test]
fn verify_nonce_ok() {
with_test_keyed_account(42, true, |nonce_account| {
let mut signers = HashSet::new();
signers.insert(nonce_account.signer_key().unwrap());
let state: State = nonce_account.state().unwrap();
// New is in Uninitialzed state
assert_eq!(state, State::Uninitialized);
let recent_blockhashes = create_test_recent_blockhashes(0);
let authorized = nonce_account.unsigned_key();
nonce_account
.initialize_nonce_account(&authorized, &recent_blockhashes, &Rent::free())
.unwrap();
assert!(verify_nonce_account(
&nonce_account.account.borrow(),
&recent_blockhashes[0].blockhash,
));
});
}
#[test]
fn verify_nonce_bad_acc_state_fail() {
with_test_keyed_account(42, true, |nonce_account| {
assert!(!verify_nonce_account(
&nonce_account.account.borrow(),
&Hash::default()
));
});
}
#[test]
fn verify_nonce_bad_query_hash_fail() {
with_test_keyed_account(42, true, |nonce_account| {
let mut signers = HashSet::new();
signers.insert(nonce_account.signer_key().unwrap());
let state: State = nonce_account.state().unwrap();
// New is in Uninitialzed state
assert_eq!(state, State::Uninitialized);
let recent_blockhashes = create_test_recent_blockhashes(0);
let authorized = nonce_account.unsigned_key();
nonce_account
.initialize_nonce_account(&authorized, &recent_blockhashes, &Rent::free())
.unwrap();
assert!(!verify_nonce_account(
&nonce_account.account.borrow(),
&recent_blockhashes[1].blockhash,
));
});
}
}
|
{
let rent = Rent {
lamports_per_byte_year: 42,
..Rent::default()
};
let min_lamports = rent.minimum_balance(State::size());
with_test_keyed_account(min_lamports + 42, false, |nonce_keyed| {
let state = AccountUtilsState::<Versions>::state(nonce_keyed)
.unwrap()
.convert_to_current();
assert_eq!(state, State::Uninitialized);
with_test_keyed_account(42, false, |to_keyed| {
let signers = HashSet::new();
let recent_blockhashes = create_test_recent_blockhashes(0);
let lamports = nonce_keyed.account.borrow().lamports;
let result = nonce_keyed.withdraw_nonce_account(
lamports,
&to_keyed,
&recent_blockhashes,
&rent,
&signers,
);
assert_eq!(result, Err(InstructionError::MissingRequiredSignature),);
})
})
}
|
demo_i_renyi.py
|
#!/usr/bin/env python3
""" Demo for Renyi mutual information estimators.
Analytical vs estimated value is illustrated for normal random variables.
"""
from numpy.random import rand, multivariate_normal
from numpy import arange, zeros, dot, ones
import matplotlib.pyplot as plt
from ite.cost.x_factory import co_factory
from ite.cost.x_analytical_values import analytical_value_i_renyi
def main():
    """Compare analytical vs. estimated Renyi mutual information.

    Samples are drawn from a multivariate normal distribution; the Renyi
    mutual information is estimated for an increasing number of samples
    and plotted against the closed-form analytical value.
    """
    # parameters:
    alpha = 0.7  # parameter of Renyi mutual information, \ne 1
    dim = 2  # >=2; dimension of the distribution
    num_of_samples_v = arange(100, 10*1000+1, 500)
    cost_name = 'MIRenyi_DR'
    # cost_name = 'MIRenyi_HR'
    # initialization:
    distr = 'normal'  # distribution; fixed
    ds = ones(dim, dtype='int')  # dimensions of the 'subspaces'
    num_of_samples_max = num_of_samples_v[-1]
    length = len(num_of_samples_v)
    co = co_factory(cost_name, mult=True, alpha=alpha)  # cost object
    # vector of estimated mutual information values:
    i_hat_v = zeros(length)
    # distr, dim -> samples (y), distribution parameters (par), analytical
    # value (i):
    if distr == 'normal':
        # mean (m), covariance matrix (c):
        m = rand(dim)
        a = rand(dim, dim)
        c = dot(a, a.T)  # a @ a.T is symmetric positive semi-definite
        # generate samples (y~N(m,c)):
        y = multivariate_normal(m, c, num_of_samples_max)
        par = {"cov": c}
    else:
        raise Exception('Distribution=?')
    i = analytical_value_i_renyi(distr, alpha, par)
    # estimation: reuse one large sample set, taking growing prefixes
    for (tk, num_of_samples) in enumerate(num_of_samples_v):
        i_hat_v[tk] = co.estimation(y[0:num_of_samples], ds)  # broadcast
        print("tk={0}/{1}".format(tk+1, length))
    # plot:
    plt.plot(num_of_samples_v, i_hat_v, num_of_samples_v, ones(length)*i)
    plt.xlabel('Number of samples')
    plt.ylabel('Renyi mutual information')
    plt.legend(('estimation', 'analytical value'), loc='best')
    plt.title("Estimator: " + cost_name)
    plt.show()
if __name__ == "__main__":
    main()
|
main
|
nyahentai.ts
|
import cheerio from 'cheerio';
import {SiteData} from './SiteData';
import {fetch} from '../spider/fetch';
import RemoteData from '../spider/RemoteData';
import RemoteImage from '../spider/RemoteImage';
async function fetchRemoteData(url: string): Promise<RemoteData> {
const images: RemoteImage[] = [];
try {
const {body} = await fetch(url);
// console.log(body);
const $ = cheerio.load(body);
|
let title = $('#info h1').text();
title = title.replace('[中国翻訳]', '');
title = title.replace('[DL版]', '').trim();
// console.log(title);
const list = $('#thumbnail-container img')
.map((index, element): string => {
let url = $(element).attr('data-src');
if (!url) throw new Error('[data-src] not exist');
url = url.replace('t.nyahentai.net', 'i.nyahentai.net');
url = url.replace('mt.404cdn.com', 'mi.404cdn.com');
url = url.replace('t.jpg', '.jpg');
url = url.replace('t.png', '.png');
return url;
})
.get();
// console.log(list);
list.forEach((url): void => {
images.push(new RemoteImage(url));
});
return new RemoteData(title, images);
} catch (e) {
console.log(e);
throw e;
}
}
// Site descriptor consumed by the spider registry: scraper entry point,
// display name, and the URL pattern this site handles.
const siteData: SiteData = {
  fetchRemoteData,
  siteName: '喵绅士',
  // Matches gallery URLs like https://zh.nyahentai.com/g/12345
  urlCheckRegex: /https:\/\/zh\.nyahentai\.com\/g\/\d+/i,
};
export default siteData;
| |
rnn.py
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Recurrent Neural Network estimators."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import six
from tensorflow_estimator.contrib.estimator.python.estimator import extenders
from tensorflow.contrib.feature_column.python.feature_column import sequence_feature_column as seq_fc
from tensorflow_estimator.python.estimator import estimator
from tensorflow_estimator.python.estimator.canned import head as head_lib
from tensorflow_estimator.python.estimator.canned import optimizers
from tensorflow.python.feature_column import feature_column as feature_column_lib
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.layers import core as core_layers
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import check_ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import partitioned_variables
from tensorflow.python.ops import rnn
from tensorflow.python.ops import rnn_cell
from tensorflow.python.ops import variable_scope
from tensorflow.python.ops.losses import losses
from tensorflow.python.summary import summary
from tensorflow.python.training import optimizer as optimizer_lib
from tensorflow.python.training import training_util
# The defaults are historical artifacts of the initial implementation, but seem
# reasonable choices.
_DEFAULT_LEARNING_RATE = 0.05
_DEFAULT_CLIP_NORM = 5.0
# Maps user-facing cell-type strings to their `RNNCell` implementations.
_CELL_TYPES = {'basic_rnn': rnn_cell.BasicRNNCell,
               'lstm': rnn_cell.BasicLSTMCell,
               'gru': rnn_cell.GRUCell}
# Indicates no value was provided by the user to a kwarg.
USE_DEFAULT = object()
def _single_rnn_cell(num_units, cell_type):
  """Constructs one RNN cell with `num_units` hidden units.

  `cell_type` may be a string key of `_CELL_TYPES` or an `RNNCell` subclass.
  Raises ValueError for anything else.
  """
  # Resolve string aliases; an unknown string falls through unchanged and is
  # rejected by the subclass check below.
  resolved = _CELL_TYPES.get(cell_type, cell_type)
  if not (resolved and issubclass(resolved, rnn_cell.RNNCell)):
    raise ValueError('Supported cell types are {}; got {}'.format(
        list(_CELL_TYPES.keys()), resolved))
  return resolved(num_units=num_units)
def _make_rnn_cell_fn(num_units, cell_type='basic_rnn'):
  """Convenience function to create `rnn_cell_fn` for canned RNN Estimators.
  Args:
    num_units: Iterable of integer number of hidden units per RNN layer.
    cell_type: A subclass of `tf.nn.rnn_cell.RNNCell` or a string specifying
      the cell type. Supported strings are: `'basic_rnn'`, `'lstm'`, and
      `'gru'`.
  Returns:
    A function that takes a single argument, an instance of
    `tf.estimator.ModeKeys`, and returns an instance derived from
    `tf.nn.rnn_cell.RNNCell`.
  Raises:
    ValueError: If cell_type is not supported.
  """
  def rnn_cell_fn(mode):
    # Unused. Part of the rnn_cell_fn interface since user specified functions
    # may need different behavior across modes (e.g. dropout).
    del mode
    cells = [_single_rnn_cell(n, cell_type) for n in num_units]
    if len(cells) == 1:
      # A single layer needs no MultiRNNCell wrapper.
      return cells[0]
    return rnn_cell.MultiRNNCell(cells)
  return rnn_cell_fn
def _concatenate_context_input(sequence_input, context_input):
  """Replicates `context_input` across all timesteps of `sequence_input`.
  Expands dimension 1 of `context_input` then tiles it `sequence_length` times.
  This value is appended to `sequence_input` on dimension 2 and the result is
  returned.
  Args:
    sequence_input: A `Tensor` of dtype `float32` and shape `[batch_size,
      padded_length, d0]`.
    context_input: A `Tensor` of dtype `float32` and shape `[batch_size, d1]`.
  Returns:
    A `Tensor` of dtype `float32` and shape `[batch_size, padded_length,
      d0 + d1]`.
  Raises:
    ValueError: If `sequence_input` does not have rank 3 or `context_input` does
      not have rank 2.
  """
  # Validate ranks and dtypes up front so failures surface with clear
  # messages rather than deep inside tile/concat.
  seq_rank_check = check_ops.assert_rank(
      sequence_input,
      3,
      message='sequence_input must have rank 3',
      data=[array_ops.shape(sequence_input)])
  seq_type_check = check_ops.assert_type(
      sequence_input,
      dtypes.float32,
      message='sequence_input must have dtype float32; got {}.'.format(
          sequence_input.dtype))
  ctx_rank_check = check_ops.assert_rank(
      context_input,
      2,
      message='context_input must have rank 2',
      data=[array_ops.shape(context_input)])
  ctx_type_check = check_ops.assert_type(
      context_input,
      dtypes.float32,
      message='context_input must have dtype float32; got {}.'.format(
          context_input.dtype))
  with ops.control_dependencies(
      [seq_rank_check, seq_type_check, ctx_rank_check, ctx_type_check]):
    padded_length = array_ops.shape(sequence_input)[1]
    # [batch_size, d1] -> [batch_size, 1, d1] -> [batch_size, padded_length, d1]
    tiled_context_input = array_ops.tile(
        array_ops.expand_dims(context_input, 1),
        array_ops.concat([[1], [padded_length], [1]], 0))
  return array_ops.concat([sequence_input, tiled_context_input], 2)
def _select_last_activations(activations, sequence_lengths):
  """Selects the nth set of activations for each n in `sequence_length`.
  Returns a `Tensor` of shape `[batch_size, k]`. If `sequence_length` is not
  `None`, then `output[i, :] = activations[i, sequence_length[i] - 1, :]`. If
  `sequence_length` is `None`, then `output[i, :] = activations[i, -1, :]`.
  Args:
    activations: A `Tensor` with shape `[batch_size, padded_length, k]`.
    sequence_lengths: A `Tensor` with shape `[batch_size]` or `None`.
  Returns:
    A `Tensor` of shape `[batch_size, k]`.
  """
  with ops.name_scope(
      'select_last_activations', values=[activations, sequence_lengths]):
    activations_shape = array_ops.shape(activations)
    batch_size = activations_shape[0]
    padded_length = activations_shape[1]
    output_units = activations_shape[2]
    if sequence_lengths is None:
      # Without explicit lengths, take the final timestep of every example.
      sequence_lengths = padded_length
    # Flatten to [batch_size * padded_length, k] and gather one row per
    # example at offset i * padded_length + (sequence_lengths[i] - 1).
    start_indices = math_ops.to_int64(
        math_ops.range(batch_size) * padded_length)
    last_indices = start_indices + sequence_lengths - 1
    reshaped_activations = array_ops.reshape(
        activations, [batch_size * padded_length, output_units])
    last_activations = array_ops.gather(reshaped_activations, last_indices)
    # reshape/gather lose static shape info; restore what is statically known.
    last_activations.set_shape([activations.shape[0], activations.shape[2]])
    return last_activations
def _rnn_logit_fn_builder(output_units, rnn_cell_fn, sequence_feature_columns,
                          context_feature_columns, input_layer_partitioner,
                          return_sequences=False):
  """Function builder for a rnn logit_fn.
  Args:
    output_units: An int indicating the dimension of the logit layer.
    rnn_cell_fn: A function with one argument, a `tf.estimator.ModeKeys`, and
      returns an object of type `tf.nn.rnn_cell.RNNCell`.
    sequence_feature_columns: An iterable containing the `FeatureColumn`s
      that represent sequential input.
    context_feature_columns: An iterable containing the `FeatureColumn`s
      that represent contextual input.
    input_layer_partitioner: Partitioner for input layer.
    return_sequences: A boolean indicating whether to return the last output
      in the output sequence, or the full sequence.
  Returns:
    A logit_fn (see below).
  Raises:
    ValueError: If output_units is not an int.
  """
  if not isinstance(output_units, int):
    raise ValueError('output_units must be an int. Given type: {}'.format(
        type(output_units)))
  def rnn_logit_fn(features, mode):
    """Recurrent Neural Network logit_fn.
    Args:
      features: This is the first item returned from the `input_fn`
        passed to `train`, `evaluate`, and `predict`. This should be a
        single `Tensor` or `dict` of same.
      mode: Optional. Specifies if this training, evaluation or prediction. See
        `ModeKeys`.
    Returns:
      A `Tensor` representing the logits.
    """
    with variable_scope.variable_scope(
        'sequence_input_layer',
        values=tuple(six.itervalues(features)),
        partitioner=input_layer_partitioner):
      sequence_input, sequence_length = seq_fc.sequence_input_layer(
          features=features, feature_columns=sequence_feature_columns)
      summary.histogram('sequence_length', sequence_length)
      if context_feature_columns:
        # Per-example (static) features are tiled across every timestep and
        # appended to the sequential input.
        context_input = feature_column_lib.input_layer(
            features=features,
            feature_columns=context_feature_columns)
        sequence_input = _concatenate_context_input(sequence_input,
                                                    context_input)
    cell = rnn_cell_fn(mode)
    # Ignore output state.
    rnn_outputs, _ = rnn.dynamic_rnn(
        cell=cell,
        inputs=sequence_input,
        sequence_length=sequence_length,
        dtype=dtypes.float32,
        time_major=False)
    if not return_sequences:
      # Keep only each example's final valid timestep.
      rnn_outputs = _select_last_activations(rnn_outputs, sequence_length)
    with variable_scope.variable_scope('logits', values=(rnn_outputs,)):
      logits = core_layers.dense(
          rnn_outputs,
          units=output_units,
          activation=None,
          kernel_initializer=init_ops.glorot_uniform_initializer())
    return logits
  return rnn_logit_fn
def _rnn_model_fn(features,
                  labels,
                  mode,
                  head,
                  rnn_cell_fn,
                  sequence_feature_columns,
                  context_feature_columns,
                  return_sequences=False,
                  optimizer='Adagrad',
                  input_layer_partitioner=None,
                  config=None):
  """Recurrent Neural Net model_fn.
  Args:
    features: dict of `Tensor` and `SparseTensor` objects returned from
      `input_fn`.
    labels: `Tensor` of shape [batch_size, 1] or [batch_size] with labels.
    mode: Defines whether this is training, evaluation or prediction.
      See `ModeKeys`.
    head: A `head_lib._Head` instance.
    rnn_cell_fn: A function with one argument, a `tf.estimator.ModeKeys`, and
      returns an object of type `tf.nn.rnn_cell.RNNCell`.
    sequence_feature_columns: Iterable containing `FeatureColumn`s that
      represent sequential model inputs.
    context_feature_columns: Iterable containing `FeatureColumn`s that
      represent model inputs not associated with a specific timestep.
    return_sequences: A boolean indicating whether to return the last output
      in the output sequence, or the full sequence.
    optimizer: String, `tf.Optimizer` object, or callable that creates the
      optimizer to use for training. If not specified, will use the Adagrad
      optimizer with a default learning rate of 0.05 and gradient clip norm of
      5.0.
    input_layer_partitioner: Partitioner for input layer. Defaults
      to `min_max_variable_partitioner` with `min_slice_size` 64 << 20.
    config: `RunConfig` object to configure the runtime settings.
  Returns:
    An `EstimatorSpec` instance.
  Raises:
    ValueError: If mode or optimizer is invalid, or features has the wrong type.
  """
  if not isinstance(features, dict):
    raise ValueError('features should be a dictionary of `Tensor`s. '
                     'Given type: {}'.format(type(features)))
  # If user does not provide an optimizer instance, use the optimizer specified
  # by the string with default learning rate and gradient clipping.
  if not isinstance(optimizer, optimizer_lib.Optimizer):
    optimizer = optimizers.get_optimizer_instance(
        optimizer, learning_rate=_DEFAULT_LEARNING_RATE)
    optimizer = extenders.clip_gradients_by_norm(optimizer, _DEFAULT_CLIP_NORM)
  # Partition variables across parameter servers when running distributed.
  num_ps_replicas = config.num_ps_replicas if config else 0
  partitioner = partitioned_variables.min_max_variable_partitioner(
      max_partitions=num_ps_replicas)
  with variable_scope.variable_scope(
      'rnn',
      values=tuple(six.itervalues(features)),
      partitioner=partitioner):
    input_layer_partitioner = input_layer_partitioner or (
        partitioned_variables.min_max_variable_partitioner(
            max_partitions=num_ps_replicas,
            min_slice_size=64 << 20))
    logit_fn = _rnn_logit_fn_builder(
        output_units=head.logits_dimension,
        rnn_cell_fn=rnn_cell_fn,
        sequence_feature_columns=sequence_feature_columns,
        context_feature_columns=context_feature_columns,
        input_layer_partitioner=input_layer_partitioner,
        return_sequences=return_sequences)
    logits = logit_fn(features=features, mode=mode)
    def _train_op_fn(loss):
      """Returns the op to optimize the loss."""
      return optimizer.minimize(
          loss,
          global_step=training_util.get_global_step())
    # The head converts logits/labels into the loss, train_op, metrics and
    # predictions appropriate for the given mode.
    return head.create_estimator_spec(
        features=features,
        mode=mode,
        labels=labels,
        train_op_fn=_train_op_fn,
        logits=logits)
def _assert_rnn_cell_fn(rnn_cell_fn, num_units, cell_type):
  """Validates the cell configuration and returns a usable `rnn_cell_fn`.

  A user-supplied `rnn_cell_fn` is mutually exclusive with `num_units` /
  `cell_type`; when no `rnn_cell_fn` is given, one is built from them.
  """
  if rnn_cell_fn:
    if num_units or cell_type != USE_DEFAULT:
      raise ValueError(
          'num_units and cell_type must not be specified when using rnn_cell_fn'
      )
    return rnn_cell_fn
  resolved_cell_type = 'basic_rnn' if cell_type == USE_DEFAULT else cell_type
  return _make_rnn_cell_fn(num_units, resolved_cell_type)
class RNNClassifier(estimator.Estimator):
  """A classifier for TensorFlow RNN models.
  Trains a recurrent neural network model to classify instances into one of
  multiple classes.
  Example:
  ```python
  token_sequence = sequence_categorical_column_with_hash_bucket(...)
  token_emb = embedding_column(categorical_column=token_sequence, ...)
  estimator = RNNClassifier(
      sequence_feature_columns=[token_emb],
      num_units=[32, 16], cell_type='lstm')
  # Input builders
  def input_fn_train: # returns x, y
    pass
  estimator.train(input_fn=input_fn_train, steps=100)
  def input_fn_eval: # returns x, y
    pass
  metrics = estimator.evaluate(input_fn=input_fn_eval, steps=10)
  def input_fn_predict: # returns x, None
    pass
  predictions = estimator.predict(input_fn=input_fn_predict)
  ```
  Input of `train` and `evaluate` should have following features,
  otherwise there will be a `KeyError`:
  * if `weight_column` is not `None`, a feature with
    `key=weight_column` whose value is a `Tensor`.
  * for each `column` in `sequence_feature_columns`:
    - a feature with `key=column.name` whose `value` is a `SparseTensor`.
  * for each `column` in `context_feature_columns`:
    - if `column` is a `_CategoricalColumn`, a feature with `key=column.name`
      whose `value` is a `SparseTensor`.
    - if `column` is a `_WeightedCategoricalColumn`, two features: the first
      with `key` the id column name, the second with `key` the weight column
      name. Both features' `value` must be a `SparseTensor`.
    - if `column` is a `_DenseColumn`, a feature with `key=column.name`
      whose `value` is a `Tensor`.
  Loss is calculated by using softmax cross entropy.
  @compatibility(eager)
  Estimators are not compatible with eager execution.
  @end_compatibility
  """
  def __init__(self,
               sequence_feature_columns,
               context_feature_columns=None,
               num_units=None,
               cell_type=USE_DEFAULT,
               rnn_cell_fn=None,
               model_dir=None,
               n_classes=2,
               weight_column=None,
               label_vocabulary=None,
               optimizer='Adagrad',
               loss_reduction=losses.Reduction.SUM_OVER_BATCH_SIZE,
               input_layer_partitioner=None,
               config=None):
    """Initializes a `RNNClassifier` instance.
    Args:
      sequence_feature_columns: An iterable containing the `FeatureColumn`s
        that represent sequential input. All items in the set should either be
        sequence columns (e.g. `sequence_numeric_column`) or constructed from
        one (e.g. `embedding_column` with `sequence_categorical_column_*` as
        input).
      context_feature_columns: An iterable containing the `FeatureColumn`s
        for contextual input. The data represented by these columns will be
        replicated and given to the RNN at each timestep. These columns must be
        instances of classes derived from `_DenseColumn` such as
        `numeric_column`, not the sequential variants.
      num_units: Iterable of integer number of hidden units per RNN layer. If
        set, `cell_type` must also be specified and `rnn_cell_fn` must be
        `None`.
      cell_type: A subclass of `tf.nn.rnn_cell.RNNCell` or a string specifying
        the cell type. Supported strings are: `'basic_rnn'`, `'lstm'`, and
        `'gru'`. If set, `num_units` must also be specified and `rnn_cell_fn`
        must be `None`.
      rnn_cell_fn: A function with one argument, a `tf.estimator.ModeKeys`, and
        returns an object of type `tf.nn.rnn_cell.RNNCell` that will be used to
        construct the RNN. If set, `num_units` and `cell_type` cannot be set.
        This is for advanced users who need additional customization beyond
        `num_units` and `cell_type`. Note that `tf.nn.rnn_cell.MultiRNNCell` is
        needed for stacked RNNs.
      model_dir: Directory to save model parameters, graph and etc. This can
        also be used to load checkpoints from the directory into a estimator to
        continue training a previously saved model.
      n_classes: Number of label classes. Defaults to 2, namely binary
        classification. Must be > 1.
      weight_column: A string or a `_NumericColumn` created by
        `tf.feature_column.numeric_column` defining feature column representing
        weights. It is used to down weight or boost examples during training. It
        will be multiplied by the loss of the example. If it is a string, it is
        used as a key to fetch weight tensor from the `features`. If it is a
        `_NumericColumn`, raw tensor is fetched by key `weight_column.key`,
        then weight_column.normalizer_fn is applied on it to get weight tensor.
      label_vocabulary: A list of strings represents possible label values. If
        given, labels must be string type and have any value in
        `label_vocabulary`. If it is not given, that means labels are
        already encoded as integer or float within [0, 1] for `n_classes=2` and
        encoded as integer values in {0, 1,..., n_classes-1} for `n_classes`>2 .
        Also there will be errors if vocabulary is not provided and labels are
        string.
      optimizer: An instance of `tf.Optimizer` or string specifying optimizer
        type. Defaults to Adagrad optimizer.
      loss_reduction: One of `tf.losses.Reduction` except `NONE`. Describes how
        to reduce training loss over batch. Defaults to `SUM_OVER_BATCH_SIZE`.
      input_layer_partitioner: Optional. Partitioner for input layer. Defaults
        to `min_max_variable_partitioner` with `min_slice_size` 64 << 20.
      config: `RunConfig` object to configure the runtime settings.
    Raises:
      ValueError: If `num_units`, `cell_type`, and `rnn_cell_fn` are not
        compatible.
    """
    rnn_cell_fn = _assert_rnn_cell_fn(rnn_cell_fn, num_units, cell_type)
    # Binary classification uses a sigmoid cross-entropy head; multi-class
    # uses a softmax cross-entropy head.
    if n_classes == 2:
      head = head_lib._binary_logistic_head_with_sigmoid_cross_entropy_loss(  # pylint: disable=protected-access
          weight_column=weight_column,
          label_vocabulary=label_vocabulary,
          loss_reduction=loss_reduction)
    else:
      head = head_lib._multi_class_head_with_softmax_cross_entropy_loss(  # pylint: disable=protected-access
          n_classes,
          weight_column=weight_column,
          label_vocabulary=label_vocabulary,
          loss_reduction=loss_reduction)
    # Bind all configuration into the model_fn handed to the base Estimator.
    def _model_fn(features, labels, mode, config):
      return _rnn_model_fn(
          features=features,
          labels=labels,
          mode=mode,
          head=head,
          rnn_cell_fn=rnn_cell_fn,
          sequence_feature_columns=tuple(sequence_feature_columns or []),
          context_feature_columns=tuple(context_feature_columns or []),
          return_sequences=False,
          optimizer=optimizer,
          input_layer_partitioner=input_layer_partitioner,
          config=config)
    super(RNNClassifier, self).__init__(
        model_fn=_model_fn, model_dir=model_dir, config=config)
class RNNEstimator(estimator.Estimator):
  """An Estimator for TensorFlow RNN models with user-specified head.
  Example:
  ```python
  token_sequence = sequence_categorical_column_with_hash_bucket(...)
  token_emb = embedding_column(categorical_column=token_sequence, ...)
  estimator = RNNEstimator(
      head=tf.contrib.estimator.regression_head(),
      sequence_feature_columns=[token_emb],
      num_units=[32, 16], cell_type='lstm')
  # Or with custom RNN cell:
  def rnn_cell_fn(mode):
    cells = [ tf.contrib.rnn.LSTMCell(size) for size in [32, 16] ]
    if mode == tf.estimator.ModeKeys.TRAIN:
      cells = [ tf.contrib.rnn.DropoutWrapper(cell, input_keep_prob=0.5)
                for cell in cells ]
    return tf.contrib.rnn.MultiRNNCell(cells)
  estimator = RNNEstimator(
      head=tf.contrib.estimator.regression_head(),
      sequence_feature_columns=[token_emb],
      rnn_cell_fn=rnn_cell_fn)
  # Input builders
  def input_fn_train: # returns x, y
    pass
  estimator.train(input_fn=input_fn_train, steps=100)
  def input_fn_eval: # returns x, y
    pass
  metrics = estimator.evaluate(input_fn=input_fn_eval, steps=10)
  def input_fn_predict: # returns x, None
    pass
  predictions = estimator.predict(input_fn=input_fn_predict)
  ```
  Input of `train` and `evaluate` should have following features,
  otherwise there will be a `KeyError`:
  * if the head's `weight_column` is not `None`, a feature with
    `key=weight_column` whose value is a `Tensor`.
  * for each `column` in `sequence_feature_columns`:
    - a feature with `key=column.name` whose `value` is a `SparseTensor`.
  * for each `column` in `context_feature_columns`:
    - if `column` is a `_CategoricalColumn`, a feature with `key=column.name`
      whose `value` is a `SparseTensor`.
    - if `column` is a `_WeightedCategoricalColumn`, two features: the first
      with `key` the id column name, the second with `key` the weight column
      name. Both features' `value` must be a `SparseTensor`.
    - if `column` is a `_DenseColumn`, a feature with `key=column.name`
      whose `value` is a `Tensor`.
  Loss and predicted output are determined by the specified head.
  @compatibility(eager)
  Estimators are not compatible with eager execution.
  @end_compatibility
  """
  def __init__(self,
               head,
               sequence_feature_columns,
               context_feature_columns=None,
               num_units=None,
               cell_type=USE_DEFAULT,
               rnn_cell_fn=None,
               return_sequences=False,
               model_dir=None,
               optimizer='Adagrad',
               input_layer_partitioner=None,
               config=None):
    """Initializes a `RNNEstimator` instance.
    Args:
      head: A `_Head` instance constructed with a method such as
        `tf.contrib.estimator.multi_label_head`. This specifies the model's
        output and loss function to be optimized.
      sequence_feature_columns: An iterable containing the `FeatureColumn`s
        that represent sequential input. All items in the set should either be
        sequence columns (e.g. `sequence_numeric_column`) or constructed from
        one (e.g. `embedding_column` with `sequence_categorical_column_*` as
        input).
      context_feature_columns: An iterable containing the `FeatureColumn`s
        for contextual input. The data represented by these columns will be
        replicated and given to the RNN at each timestep. These columns must be
        instances of classes derived from `_DenseColumn` such as
        `numeric_column`, not the sequential variants.
      num_units: Iterable of integer number of hidden units per RNN layer. If
        set, `cell_type` must also be specified and `rnn_cell_fn` must be
        `None`.
      cell_type: A subclass of `tf.nn.rnn_cell.RNNCell` or a string specifying
        the cell type. Supported strings are: `'basic_rnn'`, `'lstm'`, and
        `'gru'`. If set, `num_units` must also be specified and `rnn_cell_fn`
        must be `None`.
      rnn_cell_fn: A function with one argument, a `tf.estimator.ModeKeys`, and
        returns an object of type `tf.nn.rnn_cell.RNNCell` that will be used to
        construct the RNN. If set, `num_units` and `cell_type` cannot be set.
        This is for advanced users who need additional customization beyond
        `num_units` and `cell_type`. Note that `tf.nn.rnn_cell.MultiRNNCell` is
        needed for stacked RNNs.
      return_sequences: A boolean indicating whether to return the last output
        in the output sequence, or the full sequence.
      model_dir: Directory to save model parameters, graph and etc. This can
        also be used to load checkpoints from the directory into a estimator to
        continue training a previously saved model.
      optimizer: An instance of `tf.Optimizer` or string specifying optimizer
        type. Defaults to Adagrad optimizer.
      input_layer_partitioner: Optional. Partitioner for input layer. Defaults
        to `min_max_variable_partitioner` with `min_slice_size` 64 << 20.
      config: `RunConfig` object to configure the runtime settings.
    Raises:
      ValueError: If `num_units`, `cell_type`, and `rnn_cell_fn` are not
        compatible.
    """
    rnn_cell_fn = _assert_rnn_cell_fn(rnn_cell_fn, num_units, cell_type)
    # Bind all configuration into the model_fn; the caller-supplied head
    # determines loss and predictions.
    def _model_fn(features, labels, mode, config):
      return _rnn_model_fn(
          features=features,
          labels=labels,
          mode=mode,
          head=head,
          rnn_cell_fn=rnn_cell_fn,
          sequence_feature_columns=tuple(sequence_feature_columns or []),
          context_feature_columns=tuple(context_feature_columns or []),
          return_sequences=return_sequences,
          optimizer=optimizer,
          input_layer_partitioner=input_layer_partitioner,
          config=config)
    super(RNNEstimator, self).__init__(
        model_fn=_model_fn, model_dir=model_dir, config=config)
|
_make_rnn_cell_fn
|
render.rs
|
//! Rustdoc's HTML rendering module.
//!
//! This modules contains the bulk of the logic necessary for rendering a
//! rustdoc `clean::Crate` instance to a set of static HTML pages. This
//! rendering process is largely driven by the `format!` syntax extension to
//! perform all I/O into files and streams.
//!
//! The rendering process is largely driven by the `Context` and `Cache`
//! structures. The cache is pre-populated by crawling the crate in question,
//! and then it is shared among the various rendering threads. The cache is meant
//! to be a fairly large structure not implementing `Clone` (because it's shared
//! among threads). The context, however, should be a lightweight structure. This
//! is cloned per-thread and contains information about what is currently being
//! rendered.
//!
//! In order to speed up rendering (mostly because of markdown rendering), the
//! rendering process has been parallelized. This parallelization is only
//! exposed through the `crate` method on the context, and then also from the
//! fact that the shared cache is stored in TLS (and must be accessed as such).
//!
//! In addition to rendering the crate itself, this module is also responsible
//! for creating the corresponding search index and source file renderings.
//! These threads are not parallelized (they haven't been a bottleneck yet), and
//! both occur before the crate is rendered.
pub use self::ExternalLocation::*;
use std::borrow::Cow;
use std::cell::RefCell;
use std::cmp::Ordering;
use std::collections::{BTreeMap, VecDeque};
use std::default::Default;
use std::error;
use std::fmt::{self, Display, Formatter, Write as FmtWrite};
use std::ffi::OsStr;
use std::fs::{self, File, OpenOptions};
use std::io::prelude::*;
use std::io::{self, BufWriter, BufReader};
use std::mem;
use std::path::{PathBuf, Path, Component};
use std::str;
use std::sync::Arc;
use std::rc::Rc;
use errors;
use serialize::json::{ToJson, Json, as_json};
use syntax::ast;
use syntax::ext::base::MacroKind;
use syntax::source_map::FileName;
use syntax::feature_gate::UnstableFeatures;
use rustc::hir::def_id::{CrateNum, CRATE_DEF_INDEX, DefId};
use rustc::middle::privacy::AccessLevels;
use rustc::middle::stability;
use rustc::hir;
use rustc::util::nodemap::{FxHashMap, FxHashSet};
use rustc_data_structures::flock;
use crate::clean::{self, AttributesExt, Deprecation, GetDefId, SelfTy, Mutability};
use crate::config::RenderOptions;
use crate::doctree;
use crate::fold::DocFolder;
use crate::html::escape::Escape;
use crate::html::format::{AsyncSpace, ConstnessSpace};
use crate::html::format::{GenericBounds, WhereClause, href, AbiSpace, DefaultSpace};
use crate::html::format::{VisSpace, Function, UnsafetySpace, MutableSpace};
use crate::html::format::fmt_impl_for_trait_page;
use crate::html::item_type::ItemType;
use crate::html::markdown::{self, Markdown, MarkdownHtml, MarkdownSummaryLine, ErrorCodes, IdMap};
use crate::html::{highlight, layout, static_files};
use minifier;
/// A pair of name and its optional document.
pub type NameDoc = (String, Option<String>);
/// Displays the wrapped string with a trailing `/` appended when it is
/// non-empty and does not already end with one.
pub struct SlashChecker<'a>(pub &'a str);
impl<'a> Display for SlashChecker<'a> {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        let needs_slash = !self.0.is_empty() && !self.0.ends_with('/');
        if needs_slash {
            write!(f, "{}/", self.0)
        } else {
            f.write_str(self.0)
        }
    }
}
/// Major driving force in all rustdoc rendering. This contains information
/// about where in the tree-like hierarchy rendering is occurring and controls
/// how the current page is being rendered.
///
/// It is intended that this context is a lightweight object which can be fairly
/// easily cloned because it is cloned per work-job (about once per item in the
/// rustdoc tree).
#[derive(Clone)]
struct Context {
    /// Current hierarchy of components leading down to what's currently being
    /// rendered
    pub current: Vec<String>,
    /// The current destination folder of where HTML artifacts should be placed.
    /// This changes as the context descends into the module hierarchy.
    pub dst: PathBuf,
    /// A flag, which when `true`, will render pages which redirect to the
    /// real location of an item. This is used to allow external links to
    /// publicly reused items to redirect to the right location.
    pub render_redirect_pages: bool,
    /// Error-code configuration (from the markdown module) used while
    /// rendering documentation.
    pub codes: ErrorCodes,
    /// The map used to ensure all generated 'id=' attributes are unique.
    id_map: Rc<RefCell<IdMap>>,
    /// State shared between all clones of this context via `Arc`.
    pub shared: Arc<SharedContext>,
}
/// Rendering state shared between all `Context` clones (held behind an
/// `Arc`); mutation during rendering happens only through interior
/// mutability (`created_dirs`).
struct SharedContext {
    /// The path to the crate root source minus the file name.
    /// Used for simplifying paths to the highlighted source code files.
    pub src_root: PathBuf,
    /// This describes the layout of each page, and is not modified after
    /// creation of the context (contains info like the favicon and added html).
    pub layout: layout::Layout,
    /// This flag indicates whether `[src]` links should be generated or not. If
    /// the source files are present in the html rendering, then this will be
    /// `true`.
    pub include_sources: bool,
    /// The local file sources we've emitted and their respective url-paths.
    pub local_sources: FxHashMap<PathBuf, String>,
    /// All the passes that were run on this crate.
    pub passes: FxHashSet<String>,
    /// The base-URL of the issue tracker for when an item has been tagged with
    /// an issue number.
    pub issue_tracker_base_url: Option<String>,
    /// The given user css file which allow to customize the generated
    /// documentation theme.
    pub css_file_extension: Option<PathBuf>,
    /// The directories that have already been created in this doc run. Used to reduce the number
    /// of spurious `create_dir_all` calls.
    pub created_dirs: RefCell<FxHashSet<PathBuf>>,
    /// This flag indicates whether listings of modules (in the side bar and documentation itself)
    /// should be ordered alphabetically or in order of appearance (in the source code).
    pub sort_modules_alphabetically: bool,
    /// Additional themes to be added to the generated docs.
    pub themes: Vec<PathBuf>,
    /// Suffix to be added on resource files (if suffix is "-v2" then "light.css" becomes
    /// "light-v2.css").
    pub resource_suffix: String,
    /// Optional path string to be used to load static files on output pages. If not set, uses
    /// combinations of `../` to reach the documentation root.
    pub static_root_path: Option<String>,
    /// If false, the `select` element to have search filtering by crates on rendered docs
    /// won't be generated.
    pub generate_search_filter: bool,
    /// Option disabled by default to generate files used by RLS and some other tools.
    pub generate_redirect_pages: bool,
}
impl SharedContext {
    /// Creates `dst` (and any missing parents) unless a previous call in this
    /// run already did, remembering every created path in `created_dirs` so
    /// repeated requests skip the `create_dir_all` syscall.
    fn ensure_dir(&self, dst: &Path) -> io::Result<()> {
        let mut dirs = self.created_dirs.borrow_mut();
        if dirs.contains(dst) {
            return Ok(());
        }
        fs::create_dir_all(dst)?;
        dirs.insert(dst.to_path_buf());
        Ok(())
    }
}
impl SharedContext {
    /// Returns `true` if the `collapse-docs` pass was run on this crate.
    pub fn was_collapsed(&self) -> bool {
        self.passes.contains("collapse-docs")
    }
    /// Picks the doc text appropriate for this run: the collapsed form when
    /// the `collapse-docs` pass ran, otherwise the raw `doc_value`.
    pub fn maybe_collapsed_doc_value<'a>(&self, item: &'a clean::Item) -> Option<Cow<'a, str>> {
        match self.was_collapsed() {
            true => item.collapsed_doc_value().map(Cow::from),
            false => item.doc_value().map(Cow::from),
        }
    }
}
/// Indicates where an external crate can be found.
///
/// One of these is recorded per external crate in `Cache::extern_locations`
/// (see `run`).
pub enum ExternalLocation {
    /// Remote URL root of the external crate
    Remote(String),
    /// This external crate can be found in the local doc/ folder
    Local,
    /// The external crate could not be found.
    Unknown,
}
/// Metadata about implementations for a type or trait.
#[derive(Clone, Debug)]
pub struct Impl {
    /// The wrapped item; its `inner` must be a `clean::ImplItem`
    /// (`Impl::inner_impl` panics otherwise).
    pub impl_item: clean::Item,
}
impl Impl {
    /// Borrows the underlying `clean::Impl`, panicking if the wrapped item
    /// isn't actually an impl item.
    fn inner_impl(&self) -> &clean::Impl {
        if let clean::ImplItem(ref inner) = self.impl_item.inner {
            inner
        } else {
            panic!("non-impl item found in impl")
        }
    }
    /// The `DefId` of the implemented trait, if this is a trait impl.
    fn trait_did(&self) -> Option<DefId> {
        self.inner_impl().trait_.def_id()
    }
}
/// An I/O error paired with the path of the file being processed when it
/// occurred, so error messages can name the offending file.
#[derive(Debug)]
pub struct Error {
    /// Path of the file the failing operation was targeting.
    pub file: PathBuf,
    /// The underlying I/O error.
    pub error: io::Error,
}
impl error::Error for Error {
    // NOTE(review): `Error::description` is deprecated in later Rust versions
    // in favor of `Display`; kept as-is since it simply forwards to the
    // underlying I/O error.
    fn description(&self) -> &str {
        self.error.description()
    }
}
impl Display for Error {
    /// Formats as `"<file>": <io error>` so the failing path is always shown.
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        let file = self.file.display();
        write!(f, "\"{}\": {}", file, self.error)
    }
}
impl Error {
pub fn new(e: io::Error, file: &Path) -> Error {
Error {
file: file.to_path_buf(),
error: e,
}
}
}
/// Unwraps an `Option`, turning `None` into this module's `Error` — an
/// `io::Error` of kind `Other` ("not found") tagged with `$file` — and
/// returning it from the enclosing function.
macro_rules! try_none {
    ($e:expr, $file:expr) => ({
        // Local import so call sites don't need `io` in scope themselves.
        use std::io;
        match $e {
            Some(e) => e,
            None => return Err(Error::new(io::Error::new(io::ErrorKind::Other, "not found"),
                                          $file))
        }
    })
}
/// Unwraps a `Result`, converting an `Err(io::Error)` into this module's
/// `Error` tagged with `$file` and returning it from the enclosing function.
macro_rules! try_err {
    ($e:expr, $file:expr) => ({
        match $e {
            Ok(e) => e,
            Err(e) => return Err(Error::new(e, $file)),
        }
    })
}
/// This cache is used to store information about the `clean::Crate` being
/// rendered in order to provide more useful documentation. This contains
/// information like all implementors of a trait, all traits a type implements,
/// documentation for all known traits, etc.
///
/// This structure purposefully does not implement `Clone` because it's intended
/// to be a fairly large and expensive structure to clone. Instead this adheres
/// to `Send` so it may be stored in a `Arc` instance and shared among the various
/// rendering threads.
#[derive(Default)]
pub struct Cache {
    /// Mapping of typaram ids to the name of the type parameter. This is used
    /// when pretty-printing a type (so pretty-printing doesn't have to
    /// painfully maintain a context like this)
    pub param_names: FxHashMap<DefId, String>,
    /// Maps a type ID to all known implementations for that type. This is only
    /// recognized for intra-crate `ResolvedPath` types, and is used to print
    /// out extra documentation on the page of an enum/struct.
    ///
    /// The values of the map are a list of implementations and documentation
    /// found on that implementation.
    pub impls: FxHashMap<DefId, Vec<Impl>>,
    /// Maintains a mapping of local crate `NodeId`s to the fully qualified name
    /// and "short type description" of that node. This is used when generating
    /// URLs when a type is being linked to. External paths are not located in
    /// this map because the `External` type itself has all the information
    /// necessary.
    pub paths: FxHashMap<DefId, (Vec<String>, ItemType)>,
    /// Similar to `paths`, but only holds external paths. This is only used for
    /// generating explicit hyperlinks to other crates.
    pub external_paths: FxHashMap<DefId, (Vec<String>, ItemType)>,
    /// Maps local `DefId`s of exported types to fully qualified paths.
    /// Unlike 'paths', this mapping ignores any renames that occur
    /// due to 'use' statements.
    ///
    /// This map is used when writing out the special 'implementors'
    /// javascript file. By using the exact path that the type
    /// is declared with, we ensure that each path will be identical
    /// to the path used if the corresponding type is inlined. By
    /// doing this, we can detect duplicate impls on a trait page, and only display
    /// the impl for the inlined type.
    pub exact_paths: FxHashMap<DefId, Vec<String>>,
    /// This map contains information about all known traits of this crate.
    /// Implementations of a crate should inherit the documentation of the
    /// parent trait if no extra documentation is specified, and default methods
    /// should show up in documentation about trait implementations.
    pub traits: FxHashMap<DefId, clean::Trait>,
    /// When rendering traits, it's often useful to be able to list all
    /// implementors of the trait, and this mapping is exactly, that: a mapping
    /// of trait ids to the list of known implementors of the trait
    pub implementors: FxHashMap<DefId, Vec<Impl>>,
    /// Cache of where external crate documentation can be found.
    pub extern_locations: FxHashMap<CrateNum, (String, PathBuf, ExternalLocation)>,
    /// Cache of where documentation for primitives can be found.
    pub primitive_locations: FxHashMap<clean::PrimitiveType, DefId>,
    // Note that external items for which `doc(hidden)` applies to are shown as
    // non-reachable while local items aren't. This is because we're reusing
    // the access levels from the privacy check pass.
    pub access_levels: AccessLevels<DefId>,
    /// The version of the crate being documented, if given from the `--crate-version` flag.
    pub crate_version: Option<String>,
    // Private fields only used when initially crawling a crate to build a cache
    // Path components of the module currently being crawled (seeded with the
    // crate name in `run`).
    stack: Vec<String>,
    // `DefId`s of the enclosing items during the crawl.
    // NOTE(review): maintained in `fold_item`, which is outside this chunk.
    parent_stack: Vec<DefId>,
    parent_is_trait_impl: bool,
    // Search-index entries accumulated during the crawl; drained/serialized
    // by `build_index`.
    search_index: Vec<IndexItem>,
    stripped_mod: bool,
    deref_trait_did: Option<DefId>,
    deref_mut_trait_did: Option<DefId>,
    owned_box_did: Option<DefId>,
    // Crates masked via `#[doc(masked)]`; moved out of the crate in `run`.
    masked_crates: FxHashSet<CrateNum>,
    // In rare case where a structure is defined in one module but implemented
    // in another, if the implementing module is parsed before defining module,
    // then the fully qualified name of the structure isn't presented in `paths`
    // yet when its implementation methods are being indexed. Caches such methods
    // and their parent id here and indexes them at the end of crate parsing.
    orphan_impl_items: Vec<(DefId, clean::Item)>,
    // Similarly to `orphan_impl_items`, sometimes trait impls are picked up
    // even though the trait itself is not exported. This can happen if a trait
    // was defined in function/expression scope, since the impl will be picked
    // up by `collect-trait-impls` but the trait won't be scraped out in the HIR
    // crawl. In order to prevent crashes when looking for spotlight traits or
    // when gathering trait documentation on a type, hold impls here while
    // folding and add them to the cache later on if we find the trait.
    orphan_trait_impls: Vec<(DefId, FxHashSet<DefId>, Impl)>,
    /// Aliases added through `#[doc(alias = "...")]`. Since a few items can have the same alias,
    /// we need the alias element to have an array of items.
    aliases: FxHashMap<String, Vec<IndexItem>>,
}
/// Temporary storage for data obtained during `RustdocVisitor::clean()`.
/// Later on moved into `CACHE_KEY`.
///
/// All fields except `inlined` are transplanted into the freshly-built
/// `Cache` at the start of `run` (see its `RenderInfo { .. }` destructuring).
#[derive(Default)]
pub struct RenderInfo {
    pub inlined: FxHashSet<DefId>,
    pub external_paths: crate::core::ExternalPaths,
    pub external_param_names: FxHashMap<DefId, String>,
    pub exact_paths: FxHashMap<DefId, Vec<String>>,
    pub access_levels: AccessLevels<DefId>,
    pub deref_trait_did: Option<DefId>,
    pub deref_mut_trait_did: Option<DefId>,
    pub owned_box_did: Option<DefId>,
}
/// Helper struct to render all source code to HTML pages
struct SourceCollector<'a> {
    /// Shared render state; mutable so the collector can record what it
    /// emits (presumably into `local_sources` — confirm in its fold impl,
    /// which is outside this chunk).
    scx: &'a mut SharedContext,
    /// Root destination to place all HTML output into
    dst: PathBuf,
}
/// Wrapper struct to render the source code of a file. This will do things like
/// adding line numbers to the left-hand side.
// NOTE(review): the original definition was garbled (identifier missing and a
// stray `|` line); reconstructed as the newtype over the file's source text.
struct Source<'a>(&'a str);
// Helper structs for rendering items/sidebars and carrying along contextual
// information
#[derive(Copy, Clone)]
struct Item<'a> {
    /// Render context for the page this item appears on.
    cx: &'a Context,
    /// The item being rendered.
    item: &'a clean::Item,
}
/// Contextual bundle for sidebar rendering: the active render `Context` plus
/// the item whose sidebar is being produced.
struct Sidebar<'a> {
    cx: &'a Context,
    item: &'a clean::Item,
}
/// Struct representing one entry in the JS search index. These are all emitted
/// by hand to a large JS file at the end of cache-creation.
#[derive(Debug)]
struct IndexItem {
    /// Kind of the item; serialized as its numeric discriminant.
    ty: ItemType,
    /// The item's own name.
    name: String,
    /// `::`-joined parent path; `build_index` clears it when identical to the
    /// previous entry's path to shrink the emitted index.
    path: String,
    /// Short plain-text summary of the item's docs.
    desc: String,
    /// `DefId` of the parent type, for associated items.
    parent: Option<DefId>,
    /// Compact sequential id for `parent`, assigned in `build_index`;
    /// `is_some()` must match `parent` (asserted in `to_json`).
    parent_idx: Option<usize>,
    /// For functions/methods: parameter/return types for type-based search.
    search_type: Option<IndexItemFunctionType>,
}
impl ToJson for IndexItem {
    /// Serializes as a fixed six-element JSON array:
    /// `[ty, name, path, desc, parent_idx, search_type]`.
    fn to_json(&self) -> Json {
        // `parent` and `parent_idx` must agree: both set or both absent.
        assert_eq!(self.parent.is_some(), self.parent_idx.is_some());
        Json::Array(vec![
            (self.ty as usize).to_json(),
            self.name.to_json(),
            self.path.to_json(),
            self.desc.to_json(),
            self.parent_idx.to_json(),
            self.search_type.to_json(),
        ])
    }
}
/// A type used for the search index.
#[derive(Debug)]
struct Type {
    /// The type's name; `None` when it couldn't be determined (serializes to
    /// JSON `null` — see `to_json`).
    name: Option<String>,
    /// Names of the generic arguments, if any.
    generics: Option<Vec<String>>,
}
impl ToJson for Type {
    /// Serializes as `[name]` or `[name, generics]`; a nameless type becomes
    /// JSON `null`.
    fn to_json(&self) -> Json {
        let name = match self.name {
            Some(ref name) => name,
            None => return Json::Null,
        };
        let mut parts = vec![name.to_json()];
        if let Some(ref generics) = self.generics {
            parts.push(generics.to_json());
        }
        Json::Array(parts)
    }
}
/// Full type of functions/methods in the search index.
#[derive(Debug)]
struct IndexItemFunctionType {
    /// One `Type` per parameter.
    inputs: Vec<Type>,
    /// Return type(s); `None` when no return type was recorded.
    output: Option<Vec<Type>>,
}
impl ToJson for IndexItemFunctionType {
    /// Serializes as `[inputs]` or `[inputs, output]`; when any involved type
    /// is nameless the whole signature becomes JSON `null`.
    fn to_json(&self) -> Json {
        // If we couldn't figure out a type, just write `null`.
        let any_unknown = match self.output {
            Some(ref output) => {
                self.inputs.iter().chain(output.iter()).any(|i| i.name.is_none())
            }
            None => self.inputs.iter().any(|i| i.name.is_none()),
        };
        if any_unknown {
            return Json::Null;
        }
        let mut data = Vec::with_capacity(2);
        data.push(self.inputs.to_json());
        if let Some(ref output) = self.output {
            // A single return type is emitted bare; multiple as an array.
            if output.len() > 1 {
                data.push(output.to_json());
            } else {
                data.push(output[0].to_json());
            }
        }
        Json::Array(data)
    }
}
// Per-thread handle to the frozen `Cache`; populated in `run` once the crawl
// finishes so rendering code on this thread can reach it.
thread_local!(static CACHE_KEY: RefCell<Arc<Cache>> = Default::default());
// Path components of the item currently being rendered; cleared in `run`.
// NOTE(review): the sites that push/read this are outside this chunk.
thread_local!(pub static CURRENT_LOCATION_KEY: RefCell<Vec<String>> = RefCell::new(Vec::new()));
/// Returns the HTML element ids that rustdoc's page chrome always emits, so
/// ids generated for doc content (via `IdMap`) can avoid colliding with them.
pub fn initial_ids() -> Vec<String> {
    // Note: `.iter()` (not `.into_iter()`) — iterating an array by value is
    // edition-dependent (Rust 2021 yields `&str` by value, breaking the
    // `*id` deref below), while `.iter()` behaves identically everywhere.
    [
        "main",
        "search",
        "help",
        "TOC",
        "render-detail",
        "associated-types",
        "associated-const",
        "required-methods",
        "provided-methods",
        "implementors",
        "synthetic-implementors",
        "implementors-list",
        "synthetic-implementors-list",
        "methods",
        "deref-methods",
        "implementations",
    ].iter().map(|id| String::from(*id)).collect()
}
/// Generates the documentation for `crate` into the directory `dst`
///
/// Top-level entry point of the HTML backend: builds the shared/per-thread
/// render contexts, crawls the crate into a `Cache`, writes the cross-crate
/// shared artifacts, and finally renders every page via `cx.krate`.
pub fn run(mut krate: clean::Crate,
           options: RenderOptions,
           passes: FxHashSet<String>,
           renderinfo: RenderInfo,
           diag: &errors::Handler) -> Result<(), Error> {
    // need to save a copy of the options for rendering the index page
    let md_opts = options.clone();
    let RenderOptions {
        output,
        external_html,
        id_map,
        playground_url,
        sort_modules_alphabetically,
        themes,
        extension_css,
        extern_html_root_urls,
        resource_suffix,
        static_root_path,
        generate_search_filter,
        generate_redirect_pages,
        ..
    } = options;
    // Directory containing the crate root's source file; used to relativize
    // paths to the highlighted source pages.
    let src_root = match krate.src {
        FileName::Real(ref p) => match p.parent() {
            Some(p) => p.to_path_buf(),
            None => PathBuf::new(),
        },
        _ => PathBuf::new(),
    };
    let mut scx = SharedContext {
        src_root,
        passes,
        include_sources: true,
        local_sources: Default::default(),
        issue_tracker_base_url: None,
        layout: layout::Layout {
            logo: String::new(),
            favicon: String::new(),
            external_html,
            krate: krate.name.clone(),
        },
        css_file_extension: extension_css,
        created_dirs: Default::default(),
        sort_modules_alphabetically,
        themes,
        resource_suffix,
        static_root_path,
        generate_search_filter,
        generate_redirect_pages,
    };
    // If user passed in `--playground-url` arg, we fill in crate name here
    if let Some(url) = playground_url {
        markdown::PLAYGROUND.with(|slot| {
            *slot.borrow_mut() = Some((Some(krate.name.clone()), url));
        });
    }
    // Crawl the crate attributes looking for attributes which control how we're
    // going to emit HTML
    if let Some(attrs) = krate.module.as_ref().map(|m| &m.attrs) {
        for attr in attrs.lists("doc") {
            match (attr.name_or_empty().get(), attr.value_str()) {
                ("html_favicon_url", Some(s)) => {
                    scx.layout.favicon = s.to_string();
                }
                ("html_logo_url", Some(s)) => {
                    scx.layout.logo = s.to_string();
                }
                ("html_playground_url", Some(s)) => {
                    // Crate attribute overrides any `--playground-url` set above.
                    markdown::PLAYGROUND.with(|slot| {
                        let name = krate.name.clone();
                        *slot.borrow_mut() = Some((Some(name), s.to_string()));
                    });
                }
                ("issue_tracker_base_url", Some(s)) => {
                    scx.issue_tracker_base_url = Some(s.to_string());
                }
                ("html_no_source", None) if attr.is_word() => {
                    scx.include_sources = false;
                }
                _ => {}
            }
        }
    }
    let dst = output;
    try_err!(fs::create_dir_all(&dst), &dst);
    // Emit highlighted source pages first; this also records which local
    // sources exist so `[src]` links can point at them later.
    krate = render_sources(&dst, &mut scx, krate)?;
    let cx = Context {
        current: Vec::new(),
        dst,
        render_redirect_pages: false,
        codes: ErrorCodes::from(UnstableFeatures::from_environment().is_nightly_build()),
        id_map: Rc::new(RefCell::new(id_map)),
        shared: Arc::new(scx),
    };
    // Crawl the crate to build various caches used for the output
    let RenderInfo {
        inlined: _,
        external_paths,
        external_param_names,
        exact_paths,
        access_levels,
        deref_trait_did,
        deref_mut_trait_did,
        owned_box_did,
    } = renderinfo;
    let external_paths = external_paths.into_iter()
        .map(|(k, (v, t))| (k, (v, ItemType::from(t))))
        .collect();
    let mut cache = Cache {
        impls: Default::default(),
        external_paths,
        exact_paths,
        paths: Default::default(),
        implementors: Default::default(),
        stack: Vec::new(),
        parent_stack: Vec::new(),
        search_index: Vec::new(),
        parent_is_trait_impl: false,
        extern_locations: Default::default(),
        primitive_locations: Default::default(),
        stripped_mod: false,
        access_levels,
        crate_version: krate.version.take(),
        orphan_impl_items: Vec::new(),
        orphan_trait_impls: Vec::new(),
        traits: krate.external_traits.lock().replace(Default::default()),
        deref_trait_did,
        deref_mut_trait_did,
        owned_box_did,
        masked_crates: mem::replace(&mut krate.masked_crates, Default::default()),
        param_names: external_param_names,
        aliases: Default::default(),
    };
    // Cache where all our extern crates are located
    for &(n, ref e) in &krate.externs {
        let src_root = match e.src {
            FileName::Real(ref p) => match p.parent() {
                Some(p) => p.to_path_buf(),
                None => PathBuf::new(),
            },
            _ => PathBuf::new(),
        };
        // `--extern-html-root-url` overrides, if any, keyed by crate name.
        let extern_url = extern_html_root_urls.get(&e.name).map(|u| &**u);
        cache.extern_locations.insert(n, (e.name.clone(), src_root,
                                          extern_location(e, extern_url, &cx.dst)));
        let did = DefId { krate: n, index: CRATE_DEF_INDEX };
        cache.external_paths.insert(did, (vec![e.name.to_string()], ItemType::Module));
    }
    // Cache where all known primitives have their documentation located.
    //
    // Favor linking to as local extern as possible, so iterate all crates in
    // reverse topological order.
    for &(_, ref e) in krate.externs.iter().rev() {
        for &(def_id, prim, _) in &e.primitives {
            cache.primitive_locations.insert(prim, def_id);
        }
    }
    // The local crate's own primitive docs win over any extern's.
    for &(def_id, prim, _) in &krate.primitives {
        cache.primitive_locations.insert(prim, def_id);
    }
    cache.stack.push(krate.name.clone());
    // The fold populates `paths`, `impls`, `search_index`, orphan lists, etc.
    krate = cache.fold_crate(krate);
    // Reattach trait impls orphaned during the fold, now that all traits are
    // known.
    for (trait_did, dids, impl_) in cache.orphan_trait_impls.drain(..) {
        if cache.traits.contains_key(&trait_did) {
            for did in dids {
                cache.impls.entry(did).or_insert(vec![]).push(impl_.clone());
            }
        }
    }
    // Build our search index
    let index = build_index(&krate, &mut cache);
    // Freeze the cache now that the index has been built. Put an Arc into TLS
    // for future parallelization opportunities
    let cache = Arc::new(cache);
    CACHE_KEY.with(|v| *v.borrow_mut() = cache.clone());
    CURRENT_LOCATION_KEY.with(|s| s.borrow_mut().clear());
    write_shared(&cx, &krate, &*cache, index, &md_opts, diag)?;
    // And finally render the whole crate's documentation
    cx.krate(krate)
}
/// Builds the search index from the collected metadata
///
/// Produces the JS statement `searchIndex[<crate>] = {"doc": …, "i": […],
/// "p": […]};` — crate doc summary, item entries, and the compacted parent
/// path table that `parent_idx` values refer into.
fn build_index(krate: &clean::Crate, cache: &mut Cache) -> String {
    let mut nodeid_to_pathid = FxHashMap::default();
    let mut crate_items = Vec::with_capacity(cache.search_index.len());
    let mut crate_paths = Vec::<Json>::new();
    let Cache { ref mut search_index,
                ref orphan_impl_items,
                ref mut paths, .. } = *cache;
    // Attach all orphan items to the type's definition if the type
    // has since been learned.
    for &(did, ref item) in orphan_impl_items {
        if let Some(&(ref fqp, _)) = paths.get(&did) {
            search_index.push(IndexItem {
                ty: item.type_(),
                name: item.name.clone().unwrap(),
                // Parent path, i.e. the fully-qualified path minus the last
                // segment (the type's own name).
                path: fqp[..fqp.len() - 1].join("::"),
                desc: plain_summary_line_short(item.doc_value()),
                parent: Some(did),
                parent_idx: None,
                search_type: get_index_search_type(&item),
            });
        }
    }
    // Reduce `NodeId` in paths into smaller sequential numbers,
    // and prune the paths that do not appear in the index.
    let mut lastpath = String::new();
    let mut lastpathid = 0usize;
    for item in search_index {
        item.parent_idx = item.parent.map(|nodeid| {
            if nodeid_to_pathid.contains_key(&nodeid) {
                *nodeid_to_pathid.get(&nodeid).unwrap()
            } else {
                // First time we see this parent: assign the next compact id
                // and emit its `(type, name)` pair into the path table.
                let pathid = lastpathid;
                nodeid_to_pathid.insert(nodeid, pathid);
                lastpathid += 1;
                let &(ref fqp, short) = paths.get(&nodeid).unwrap();
                crate_paths.push(((short as usize), fqp.last().unwrap().clone()).to_json());
                pathid
            }
        });
        // Omit the parent path if it is same to that of the prior item.
        if lastpath == item.path {
            item.path.clear();
        } else {
            lastpath = item.path.clone();
        }
        crate_items.push(item.to_json());
    }
    let crate_doc = krate.module.as_ref().map(|module| {
        plain_summary_line_short(module.doc_value())
    }).unwrap_or(String::new());
    let mut crate_data = BTreeMap::new();
    crate_data.insert("doc".to_owned(), Json::String(crate_doc));
    crate_data.insert("i".to_owned(), Json::Array(crate_items));
    crate_data.insert("p".to_owned(), Json::Array(crate_paths));
    // Collect the index into a string
    format!("searchIndex[{}] = {};",
            as_json(&krate.name),
            Json::Object(crate_data))
}
/// Writes the artifacts shared by every crate documented into `cx.dst`:
/// static CSS/JS/fonts, the theme switcher, the merged aliases/source-files/
/// search indexes, the optional crate-list index page, and per-trait
/// implementor JS files. The output directory may be shared with concurrent
/// rustdoc invocations, so this takes a file lock and merges with whatever
/// other crates already wrote.
fn write_shared(
    cx: &Context,
    krate: &clean::Crate,
    cache: &Cache,
    search_index: String,
    options: &RenderOptions,
    diag: &errors::Handler,
) -> Result<(), Error> {
    // Write out the shared files. Note that these are shared among all rustdoc
    // docs placed in the output directory, so this needs to be a synchronized
    // operation with respect to all other rustdocs running around.
    let _lock = flock::Lock::panicking_new(&cx.dst.join(".lock"), true, true, true);
    // Add all the static files. These may already exist, but we just
    // overwrite them anyway to make sure that they're fresh and up-to-date.
    write_minify(cx.dst.join(&format!("rustdoc{}.css", cx.shared.resource_suffix)),
                 static_files::RUSTDOC_CSS,
                 options.enable_minification)?;
    write_minify(cx.dst.join(&format!("settings{}.css", cx.shared.resource_suffix)),
                 static_files::SETTINGS_CSS,
                 options.enable_minification)?;
    write_minify(cx.dst.join(&format!("noscript{}.css", cx.shared.resource_suffix)),
                 static_files::NOSCRIPT_CSS,
                 options.enable_minification)?;
    // To avoid "light.css" to be overwritten, we'll first run over the received themes and only
    // then we'll run over the "official" styles.
    let mut themes: FxHashSet<String> = FxHashSet::default();
    for entry in &cx.shared.themes {
        let content = try_err!(fs::read(&entry), &entry);
        let theme = try_none!(try_none!(entry.file_stem(), &entry).to_str(), &entry);
        let extension = try_none!(try_none!(entry.extension(), &entry).to_str(), &entry);
        write(cx.dst.join(format!("{}{}.{}", theme, cx.shared.resource_suffix, extension)),
              content.as_slice())?;
        themes.insert(theme.to_owned());
    }
    // Only ship the default logo/favicon when the crate didn't set its own.
    if (*cx.shared).layout.logo.is_empty() {
        write(cx.dst.join(&format!("rust-logo{}.png", cx.shared.resource_suffix)),
              static_files::RUST_LOGO)?;
    }
    if (*cx.shared).layout.favicon.is_empty() {
        write(cx.dst.join(&format!("favicon{}.ico", cx.shared.resource_suffix)),
              static_files::RUST_FAVICON)?;
    }
    write(cx.dst.join(&format!("brush{}.svg", cx.shared.resource_suffix)),
          static_files::BRUSH_SVG)?;
    write(cx.dst.join(&format!("wheel{}.svg", cx.shared.resource_suffix)),
          static_files::WHEEL_SVG)?;
    write(cx.dst.join(&format!("down-arrow{}.svg", cx.shared.resource_suffix)),
          static_files::DOWN_ARROW_SVG)?;
    write_minify(cx.dst.join(&format!("light{}.css", cx.shared.resource_suffix)),
                 static_files::themes::LIGHT,
                 options.enable_minification)?;
    themes.insert("light".to_owned());
    write_minify(cx.dst.join(&format!("dark{}.css", cx.shared.resource_suffix)),
                 static_files::themes::DARK,
                 options.enable_minification)?;
    themes.insert("dark".to_owned());
    // Sorted so the generated theme.js is deterministic regardless of hash
    // iteration order.
    let mut themes: Vec<&String> = themes.iter().collect();
    themes.sort();
    // To avoid theme switch latencies as much as possible, we put everything theme related
    // at the beginning of the html files into another js file.
    write(cx.dst.join(&format!("theme{}.js", cx.shared.resource_suffix)),
          format!(
r#"var themes = document.getElementById("theme-choices");
var themePicker = document.getElementById("theme-picker");
function switchThemeButtonState() {{
if (themes.style.display === "block") {{
themes.style.display = "none";
themePicker.style.borderBottomRightRadius = "3px";
themePicker.style.borderBottomLeftRadius = "3px";
}} else {{
themes.style.display = "block";
themePicker.style.borderBottomRightRadius = "0";
themePicker.style.borderBottomLeftRadius = "0";
}}
}};
function handleThemeButtonsBlur(e) {{
var active = document.activeElement;
var related = e.relatedTarget;
if (active.id !== "themePicker" &&
(!active.parentNode || active.parentNode.id !== "theme-choices") &&
(!related ||
(related.id !== "themePicker" &&
(!related.parentNode || related.parentNode.id !== "theme-choices")))) {{
switchThemeButtonState();
}}
}}
themePicker.onclick = switchThemeButtonState;
themePicker.onblur = handleThemeButtonsBlur;
[{}].forEach(function(item) {{
var but = document.createElement('button');
but.innerHTML = item;
but.onclick = function(el) {{
switchTheme(currentTheme, mainTheme, item);
}};
but.onblur = handleThemeButtonsBlur;
themes.appendChild(but);
}});"#,
                 themes.iter()
                       .map(|s| format!("\"{}\"", s))
                       .collect::<Vec<String>>()
                       .join(",")).as_bytes(),
    )?;
    write_minify(cx.dst.join(&format!("main{}.js", cx.shared.resource_suffix)),
                 static_files::MAIN_JS,
                 options.enable_minification)?;
    write_minify(cx.dst.join(&format!("settings{}.js", cx.shared.resource_suffix)),
                 static_files::SETTINGS_JS,
                 options.enable_minification)?;
    if cx.shared.include_sources {
        write_minify(cx.dst.join(&format!("source-script{}.js", cx.shared.resource_suffix)),
                     static_files::sidebar::SOURCE_SCRIPT,
                     options.enable_minification)?;
    }
    {
        write_minify(cx.dst.join(&format!("storage{}.js", cx.shared.resource_suffix)),
                     &format!("var resourcesSuffix = \"{}\";{}",
                              cx.shared.resource_suffix,
                              static_files::STORAGE_JS),
                     options.enable_minification)?;
    }
    // A user-provided theme (`--extend-css`) is copied verbatim unless
    // minification is on.
    if let Some(ref css) = cx.shared.css_file_extension {
        let out = cx.dst.join(&format!("theme{}.css", cx.shared.resource_suffix));
        if !options.enable_minification {
            try_err!(fs::copy(css, out), css);
        } else {
            let buffer = try_err!(fs::read_to_string(css), css);
            write_minify(out, &buffer, options.enable_minification)?;
        }
    }
    write_minify(cx.dst.join(&format!("normalize{}.css", cx.shared.resource_suffix)),
                 static_files::NORMALIZE_CSS,
                 options.enable_minification)?;
    // Bundled web fonts and their licenses.
    write(cx.dst.join("FiraSans-Regular.woff"),
          static_files::fira_sans::REGULAR)?;
    write(cx.dst.join("FiraSans-Medium.woff"),
          static_files::fira_sans::MEDIUM)?;
    write(cx.dst.join("FiraSans-LICENSE.txt"),
          static_files::fira_sans::LICENSE)?;
    write(cx.dst.join("SourceSerifPro-Regular.ttf.woff"),
          static_files::source_serif_pro::REGULAR)?;
    write(cx.dst.join("SourceSerifPro-Bold.ttf.woff"),
          static_files::source_serif_pro::BOLD)?;
    write(cx.dst.join("SourceSerifPro-It.ttf.woff"),
          static_files::source_serif_pro::ITALIC)?;
    write(cx.dst.join("SourceSerifPro-LICENSE.txt"),
          static_files::source_serif_pro::LICENSE)?;
    write(cx.dst.join("SourceCodePro-Regular.woff"),
          static_files::source_code_pro::REGULAR)?;
    write(cx.dst.join("SourceCodePro-Semibold.woff"),
          static_files::source_code_pro::SEMIBOLD)?;
    write(cx.dst.join("SourceCodePro-LICENSE.txt"),
          static_files::source_code_pro::LICENSE)?;
    write(cx.dst.join("LICENSE-MIT.txt"),
          static_files::LICENSE_MIT)?;
    write(cx.dst.join("LICENSE-APACHE.txt"),
          static_files::LICENSE_APACHE)?;
    write(cx.dst.join("COPYRIGHT.txt"),
          static_files::COPYRIGHT)?;
    // Reads an existing cross-crate JS index file and returns
    // `(lines belonging to other crates, those crates' names, minifier
    // variable lines)`. Lines belonging to `krate` are dropped so they can be
    // regenerated fresh; for the search index, `var R…` lines are scanned in
    // case the crate name itself was hoisted into a minifier variable.
    fn collect(
        path: &Path,
        krate: &str,
        key: &str,
        for_search_index: bool,
    ) -> io::Result<(Vec<String>, Vec<String>, Vec<String>)> {
        use minifier::js;
        let mut ret = Vec::new();
        let mut krates = Vec::new();
        let mut variables = Vec::new();
        let mut krate = krate.to_owned();
        if path.exists() {
            for line in BufReader::new(File::open(path)?).lines() {
                let line = line?;
                if for_search_index && line.starts_with("var R") {
                    variables.push(line.clone());
                    // We need to check if the crate name has been put into a variable as well.
                    let tokens = js::simple_minify(&line).apply(js::clean_tokens);
                    let mut pos = 0;
                    while pos < tokens.len() {
                        if let Some((var_pos, Some(value_pos))) =
                                js::get_variable_name_and_value_positions(&tokens, pos) {
                            if let Some(s) = tokens.0[value_pos].get_string() {
                                // Strip the surrounding quotes before comparing.
                                if &s[1..s.len() - 1] == krate {
                                    if let Some(var) = tokens[var_pos].get_other() {
                                        krate = var.to_owned();
                                        break
                                    }
                                }
                            }
                        }
                        pos += 1;
                    }
                    continue;
                }
                if !line.starts_with(key) {
                    continue;
                }
                // Skip this crate's own old entry — it gets regenerated.
                if line.starts_with(&format!(r#"{}["{}"]"#, key, krate)) {
                    continue;
                }
                ret.push(line.to_string());
                // Crate name sits between `key["` and the closing quote.
                krates.push(line[key.len() + 2..].split('"')
                                .next()
                                .map(|s| s.to_owned())
                                .unwrap_or_else(|| String::new()));
            }
        }
        Ok((ret, krates, variables))
    }
    // Renders a single search-index item as a JS object literal for aliases.js.
    fn show_item(item: &IndexItem, krate: &str) -> String {
        format!("{{'crate':'{}','ty':{},'name':'{}','desc':'{}','p':'{}'{}}}",
                krate, item.ty as usize, item.name, item.desc.replace("'", "\\'"), item.path,
                if let Some(p) = item.parent_idx {
                    format!(",'parent':{}", p)
                } else {
                    String::new()
                })
    }
    // Merge this crate's `#[doc(alias)]` entries into the shared aliases.js.
    let dst = cx.dst.join("aliases.js");
    {
        let (mut all_aliases, _, _) = try_err!(collect(&dst, &krate.name, "ALIASES", false), &dst);
        let mut w = try_err!(File::create(&dst), &dst);
        let mut output = String::with_capacity(100);
        for (alias, items) in &cache.aliases {
            if items.is_empty() {
                continue
            }
            output.push_str(&format!("\"{}\":[{}],",
                                     alias,
                                     items.iter()
                                          .map(|v| show_item(v, &krate.name))
                                          .collect::<Vec<_>>()
                                          .join(",")));
        }
        all_aliases.push(format!("ALIASES[\"{}\"] = {{{}}};", krate.name, output));
        // Sorted per crate so the file is deterministic under parallel runs.
        all_aliases.sort();
        try_err!(writeln!(&mut w, "var ALIASES = {{}};"), &dst);
        for aliases in &all_aliases {
            try_err!(writeln!(&mut w, "{}", aliases), &dst);
        }
    }
    use std::ffi::OsString;
    // Directory tree of the emitted source files, serialized to JSON for the
    // source-file sidebar.
    #[derive(Debug)]
    struct Hierarchy {
        // This directory's own name (empty for the root).
        elem: OsString,
        // Subdirectories, keyed by name.
        children: FxHashMap<OsString, Hierarchy>,
        // Files directly inside this directory.
        elems: FxHashSet<OsString>,
    }
    impl Hierarchy {
        fn new(elem: OsString) -> Hierarchy {
            Hierarchy {
                elem,
                children: FxHashMap::default(),
                elems: FxHashSet::default(),
            }
        }
        // Serializes as `{"name":…,"dirs":[…],"files":[…]}`, sorting both
        // lists for deterministic output.
        fn to_json_string(&self) -> String {
            let mut subs: Vec<&Hierarchy> = self.children.values().collect();
            subs.sort_unstable_by(|a, b| a.elem.cmp(&b.elem));
            let mut files = self.elems.iter()
                                      .map(|s| format!("\"{}\"",
                                                       s.to_str()
                                                        .expect("invalid osstring conversion")))
                                      .collect::<Vec<_>>();
            files.sort_unstable_by(|a, b| a.cmp(b));
            // FIXME(imperio): we could avoid to generate "dirs" and "files" if they're empty.
            format!("{{\"name\":\"{name}\",\"dirs\":[{subs}],\"files\":[{files}]}}",
                    name=self.elem.to_str().expect("invalid osstring conversion"),
                    subs=subs.iter().map(|s| s.to_json_string()).collect::<Vec<_>>().join(","),
                    files=files.join(","))
        }
    }
    if cx.shared.include_sources {
        // Build the hierarchy from every emitted local source path,
        // relativized against the crate's source root.
        let mut hierarchy = Hierarchy::new(OsString::new());
        for source in cx.shared.local_sources.iter()
                                             .filter_map(|p| p.0.strip_prefix(&cx.shared.src_root)
                                                              .ok()) {
            let mut h = &mut hierarchy;
            let mut elems = source.components()
                                  .filter_map(|s| {
                                      match s {
                                          Component::Normal(s) => Some(s.to_owned()),
                                          _ => None,
                                      }
                                  })
                                  .peekable();
            loop {
                let cur_elem = elems.next().expect("empty file path");
                if elems.peek().is_none() {
                    // Last component: a file name.
                    h.elems.insert(cur_elem);
                    break;
                } else {
                    // Intermediate component: descend, creating as needed.
                    let e = cur_elem.clone();
                    h.children.entry(cur_elem.clone()).or_insert_with(|| Hierarchy::new(e));
                    h = h.children.get_mut(&cur_elem).expect("not found child");
                }
            }
        }
        let dst = cx.dst.join("source-files.js");
        let (mut all_sources, _krates, _) = try_err!(collect(&dst, &krate.name, "sourcesIndex",
                                                             false),
                                                     &dst);
        all_sources.push(format!("sourcesIndex[\"{}\"] = {};",
                                 &krate.name,
                                 hierarchy.to_json_string()));
        all_sources.sort();
        let mut w = try_err!(File::create(&dst), &dst);
        try_err!(writeln!(&mut w,
                          "var N = null;var sourcesIndex = {{}};\n{}\ncreateSourceSidebar();",
                          all_sources.join("\n")),
                 &dst);
    }
    // Update the search index
    let dst = cx.dst.join("search-index.js");
    let (mut all_indexes, mut krates, variables) = try_err!(collect(&dst,
                                                                    &krate.name,
                                                                    "searchIndex",
                                                                    true), &dst);
    all_indexes.push(search_index);
    // Sort the indexes by crate so the file will be generated identically even
    // with rustdoc running in parallel.
    all_indexes.sort();
    let mut w = try_err!(File::create(&dst), &dst);
    try_err!(writeln!(&mut w, "var N=null,E=\"\",T=\"t\",U=\"u\",searchIndex={{}};"), &dst);
    try_err!(write_minify_replacer(&mut w,
                                   &format!("{}\n{}", variables.join(""), all_indexes.join("\n")),
                                   options.enable_minification),
             &dst);
    try_err!(write!(&mut w, "initSearch(searchIndex);addSearchOptions(searchIndex);"), &dst);
    if options.enable_index_page {
        if let Some(index_page) = options.index_page.clone() {
            // A user-supplied markdown page becomes the top-level index.
            let mut md_opts = options.clone();
            md_opts.output = cx.dst.clone();
            md_opts.external_html = (*cx.shared).layout.external_html.clone();
            crate::markdown::render(index_page, md_opts, diag);
        } else {
            // Otherwise generate a plain "list of all crates" index.html.
            let dst = cx.dst.join("index.html");
            let mut w = BufWriter::new(try_err!(File::create(&dst), &dst));
            let page = layout::Page {
                title: "Index of crates",
                css_class: "mod",
                root_path: "./",
                static_root_path: cx.shared.static_root_path.deref(),
                description: "List of crates",
                keywords: BASIC_KEYWORDS,
                resource_suffix: &cx.shared.resource_suffix,
                extra_scripts: &[],
                static_extra_scripts: &[],
            };
            krates.push(krate.name.clone());
            krates.sort();
            krates.dedup();
            let content = format!(
"<h1 class='fqn'>\
<span class='in-band'>List of all crates</span>\
</h1><ul class='mod'>{}</ul>",
                                  krates
                                      .iter()
                                      .map(|s| {
                                          format!("<li><a href=\"{}index.html\">{}</li>",
                                                  SlashChecker(s), s)
                                      })
                                      .collect::<String>());
            try_err!(layout::render(&mut w, &cx.shared.layout,
                                    &page, &(""), &content,
                                    cx.shared.css_file_extension.is_some(),
                                    &cx.shared.themes,
                                    cx.shared.generate_search_filter), &dst);
            try_err!(w.flush(), &dst);
        }
    }
    // Update the list of all implementors for traits
    let dst = cx.dst.join("implementors");
    for (&did, imps) in &cache.implementors {
        // Private modules can leak through to this phase of rustdoc, which
        // could contain implementations for otherwise private types. In some
        // rare cases we could find an implementation for an item which wasn't
        // indexed, so we just skip this step in that case.
        //
        // FIXME: this is a vague explanation for why this can't be a `get`, in
        // theory it should be...
        let &(ref remote_path, remote_item_type) = match cache.paths.get(&did) {
            Some(p) => p,
            None => match cache.external_paths.get(&did) {
                Some(p) => p,
                None => continue,
            }
        };
        let mut have_impls = false;
        let mut implementors = format!(r#"implementors["{}"] = ["#, krate.name);
        for imp in imps {
            // If the trait and implementation are in the same crate, then
            // there's no need to emit information about it (there's inlining
            // going on). If they're in different crates then the crate defining
            // the trait will be interested in our implementation.
            if imp.impl_item.def_id.krate == did.krate { continue }
            // If the implementation is from another crate then that crate
            // should add it.
            if !imp.impl_item.def_id.is_local() { continue }
            have_impls = true;
            write!(implementors, "{{text:{},synthetic:{},types:{}}},",
                   as_json(&imp.inner_impl().to_string()),
                   imp.inner_impl().synthetic,
                   as_json(&collect_paths_for_type(imp.inner_impl().for_.clone()))).unwrap();
        }
        implementors.push_str("];");
        // Only create a js file if we have impls to add to it. If the trait is
        // documented locally though we always create the file to avoid dead
        // links.
        if !have_impls && !cache.paths.contains_key(&did) {
            continue;
        }
        // Mirror the trait's module path under `implementors/`.
        let mut mydst = dst.clone();
        for part in &remote_path[..remote_path.len() - 1] {
            mydst.push(part);
        }
        try_err!(fs::create_dir_all(&mydst), &mydst);
        mydst.push(&format!("{}.{}.js",
                            remote_item_type.css_class(),
                            remote_path[remote_path.len() - 1]));
        let (mut all_implementors, _, _) = try_err!(collect(&mydst, &krate.name, "implementors",
                                                            false),
                                                    &mydst);
        all_implementors.push(implementors);
        // Sort the implementors by crate so the file will be generated
        // identically even with rustdoc running in parallel.
        all_implementors.sort();
        let mut f = try_err!(File::create(&mydst), &mydst);
        try_err!(writeln!(&mut f, "(function() {{var implementors = {{}};"), &mydst);
        for implementor in &all_implementors {
            try_err!(writeln!(&mut f, "{}", *implementor), &mydst);
        }
        try_err!(writeln!(&mut f, "{}", r"
if (window.register_implementors) {
window.register_implementors(implementors);
} else {
window.pending_implementors = implementors;
}
"), &mydst);
        try_err!(writeln!(&mut f, r"}})()"), &mydst);
    }
    Ok(())
}
/// Renders the `src/<krate>/` tree of HTML-ified source files by folding the
/// whole crate through a `SourceCollector`.
fn render_sources(dst: &Path, scx: &mut SharedContext,
                  krate: clean::Crate) -> Result<clean::Crate, Error> {
    info!("emitting source files");
    // All rendered sources live under `<dst>/src/<crate-name>/`.
    let out_dir = dst.join("src").join(&krate.name);
    try_err!(fs::create_dir_all(&out_dir), &out_dir);
    let mut collector = SourceCollector {
        dst: out_dir,
        scx,
    };
    Ok(collector.fold_crate(krate))
}
/// Writes the entire contents of a string to a destination, not attempting to
/// catch any errors.
fn write(dst: PathBuf, contents: &[u8]) -> Result<(), Error> {
    // `try_err!` maps the io::Error into this module's `Error`, tagged with the path.
    try_err!(fs::write(&dst, contents), &dst);
    Ok(())
}
/// Writes `contents` to `dst`, minifying it first when minification is
/// enabled. CSS and JS are minified with their respective minifiers, chosen by
/// the file extension of `dst`.
fn write_minify(dst: PathBuf, contents: &str, enable_minification: bool) -> Result<(), Error> {
    if !enable_minification {
        return write(dst, contents.as_bytes());
    }
    if dst.extension() == Some(&OsStr::new("css")) {
        // CSS minification is fallible; surface a failure as an Error for `dst`.
        let minified = try_none!(minifier::css::minify(contents).ok(), &dst);
        write(dst, minified.as_bytes())
    } else {
        write(dst, minifier::js::minify(contents).as_bytes())
    }
}
/// Writes the search-index JS to `dst`, optionally shrinking it with the
/// `minifier` crate plus rustdoc-specific token replacements: `null` -> `N`,
/// `""` -> `E`, `"t"` -> `T`, `"u"` -> `U`, and repeated string literals are
/// aggregated into an array named `R`. The search JS defines those one-letter
/// aliases so the replacements round-trip. Backlines (newlines) are kept so
/// the index stays one-entry-per-line.
fn write_minify_replacer<W: Write>(
    dst: &mut W,
    contents: &str,
    enable_minification: bool,
) -> io::Result<()> {
    use minifier::js::{Keyword, ReservedChar, Token};

    if enable_minification {
        writeln!(dst, "{}",
                 minifier::js::simple_minify(contents)
                    .apply(|f| {
                        // We keep backlines.
                        minifier::js::clean_tokens_except(f, |c| {
                            c.get_char() != Some(ReservedChar::Backline)
                        })
                    })
                    .apply(|f| {
                        minifier::js::replace_token_with(f, |t| {
                            match *t {
                                // `null` appears constantly in the index; alias it.
                                Token::Keyword(Keyword::Null) => Some(Token::Other("N")),
                                Token::String(s) => {
                                    let s = &s[1..s.len() -1]; // The quotes are included
                                    if s.is_empty() {
                                        Some(Token::Other("E"))
                                    } else if s == "t" {
                                        Some(Token::Other("T"))
                                    } else if s == "u" {
                                        Some(Token::Other("U"))
                                    } else {
                                        // Leave every other string literal alone here;
                                        // the aggregation pass below deduplicates them.
                                        None
                                    }
                                }
                                _ => None,
                            }
                        })
                    })
                    .apply(|f| {
                        // We add a backline after the newly created variables.
                        minifier::js::aggregate_strings_into_array_with_separation(
                            f,
                            "R",
                            Token::Char(ReservedChar::Backline),
                        )
                    })
                    .to_string())
    } else {
        writeln!(dst, "{}", contents)
    }
}
/// Takes a path to a source file and cleans the path to it. This canonicalizes
/// things like ".." to components which preserve the "top down" hierarchy of a
/// static HTML tree. Each component in the cleaned path will be passed as an
/// argument to `f`. The very last component of the path (ie the file name) will
/// be passed to `f` if `keep_filename` is true, and ignored otherwise.
fn clean_srcpath<F>(src_root: &Path, p: &Path, keep_filename: bool, mut f: F) where
    F: FnMut(&OsStr),
{
    // Work relative to the source root whenever the path lives under it.
    let rel = p.strip_prefix(src_root).unwrap_or(p);
    let parts: Vec<_> = rel.components().collect();
    // Unless asked to keep it, drop the final component (the file name).
    let count = if keep_filename { parts.len() } else { parts.len().saturating_sub(1) };
    for part in parts.into_iter().take(count) {
        match part {
            // ".." is rendered as a literal "up" directory in the HTML tree.
            Component::ParentDir => f("up".as_ref()),
            Component::Normal(name) => f(name),
            // Root / prefix / "." components carry no directory name to emit.
            _ => {}
        }
    }
}
/// Attempts to find where an external crate is located, given that we're
/// rendering in to the specified source destination.
///
/// Resolution order: docs already generated locally beat an explicit
/// `--extern-html-root-url`, which beats the crate's own
/// `#[doc(html_root_url)]` attribute; anything else is `Unknown`.
fn extern_location(e: &clean::ExternalCrate, extern_url: Option<&str>, dst: &Path)
    -> ExternalLocation
{
    // See if there's documentation generated into the local directory
    if dst.join(&e.name).is_dir() {
        return Local;
    }

    // Remote URLs must end in '/' so item paths can be appended directly.
    let with_trailing_slash = |url: &str| {
        let mut url = url.to_string();
        if !url.ends_with("/") {
            url.push('/');
        }
        url
    };

    if let Some(url) = extern_url {
        return Remote(with_trailing_slash(url));
    }

    // Failing that, see if there's an attribute specifying where to find this
    // external crate
    e.attrs.lists("doc")
        .filter(|a| a.check_name("html_root_url"))
        .filter_map(|a| a.value_str())
        .map(|url| Remote(with_trailing_slash(&url.to_string())))
        .next()
        .unwrap_or(Unknown) // Well, at least we tried.
}
impl<'a> DocFolder for SourceCollector<'a> {
    /// Emits the HTML-rendered source file for every local item with a real
    /// span, then recurses. The first emission failure disables source
    /// rendering for the remainder of the crate.
    fn fold_item(&mut self, item: clean::Item) -> Option<clean::Item> {
        // Only render when sources are requested, the span is a real file
        // (not a macro expansion), and the item is local to this crate.
        let should_emit = self.scx.include_sources
            && item.source.filename.is_real()
            && item.def_id.is_local();
        if should_emit {
            // If it turns out that we couldn't read this file, then we probably
            // can't read any of the files (generating html output from json or
            // something like that), so just don't include sources for the
            // entire crate. The other option is maintaining this mapping on a
            // per-file basis, but that's probably not worth it...
            if let Err(e) = self.emit_source(&item.source.filename) {
                println!("warning: source code was requested to be rendered, \
                          but processing `{}` had an error: {}",
                         item.source.filename, e);
                println!("         skipping rendering of source code");
                self.scx.include_sources = false;
            }
        }
        self.fold_item_recur(item)
    }
}
impl<'a> SourceCollector<'a> {
    /// Renders the given filename into its corresponding HTML source file.
    ///
    /// The output path mirrors the cleaned source path under `self.dst`; a
    /// mapping from the original path to the emitted href is recorded in
    /// `scx.local_sources` so `[src]` links can be generated later.
    fn emit_source(&mut self, filename: &FileName) -> io::Result<()> {
        let p = match *filename {
            FileName::Real(ref file) => file,
            // Macro/virtual spans have no on-disk file; nothing to emit.
            _ => return Ok(()),
        };
        if self.scx.local_sources.contains_key(&**p) {
            // We've already emitted this source
            return Ok(());
        }

        let contents = fs::read_to_string(&p)?;

        // Remove the utf-8 BOM if any (U+FEFF is 3 bytes in UTF-8)
        let contents = if contents.starts_with("\u{feff}") {
            &contents[3..]
        } else {
            &contents[..]
        };

        // Create the intermediate directories
        // root_path starts at "../../" because output lives under `src/<krate>/`,
        // and one more "../" is added for each directory component below.
        let mut cur = self.dst.clone();
        let mut root_path = String::from("../../");
        let mut href = String::new();
        clean_srcpath(&self.scx.src_root, &p, false, |component| {
            cur.push(component);
            fs::create_dir_all(&cur).unwrap();
            root_path.push_str("../");
            href.push_str(&component.to_string_lossy());
            href.push('/');
        });
        // The emitted file is the original name with ".html" appended, e.g.
        // "lib.rs" -> "lib.rs.html".
        let mut fname = p.file_name()
                         .expect("source has no filename")
                         .to_os_string();
        fname.push(".html");
        cur.push(&fname);
        href.push_str(&fname.to_string_lossy());

        let mut w = BufWriter::new(File::create(&cur)?);
        let title = format!("{} -- source", cur.file_name().unwrap()
                                               .to_string_lossy());
        let desc = format!("Source to the Rust file `{}`.", filename);
        let page = layout::Page {
            title: &title,
            css_class: "source",
            root_path: &root_path,
            static_root_path: self.scx.static_root_path.deref(),
            description: &desc,
            keywords: BASIC_KEYWORDS,
            resource_suffix: &self.scx.resource_suffix,
            extra_scripts: &["source-files"],
            static_extra_scripts: &[&format!("source-script{}", self.scx.resource_suffix)],
        };
        layout::render(&mut w, &self.scx.layout,
                       &page, &(""), &Source(contents),
                       self.scx.css_file_extension.is_some(),
                       &self.scx.themes,
                       self.scx.generate_search_filter)?;
        w.flush()?;
        // Record the href only after a successful write so failed emissions
        // can be retried / reported by the caller.
        self.scx.local_sources.insert(p.clone(), href);
        Ok(())
    }
}
impl DocFolder for Cache {
    /// The main cache-population pass: walks every cleaned item once,
    /// accumulating the search index, fully-qualified paths, trait
    /// implementors, and impl lists. State pushed onto the various stacks on
    /// the way in is popped on the way out, so the fold leaves the cache
    /// stacks exactly as it found them.
    fn fold_item(&mut self, item: clean::Item) -> Option<clean::Item> {
        if item.def_id.is_local() {
            debug!("folding {} \"{:?}\", id {:?}", item.type_(), item.name, item.def_id);
        }

        // If this is a stripped module,
        // we don't want it or its children in the search index.
        let orig_stripped_mod = match item.inner {
            clean::StrippedItem(box clean::ModuleItem(..)) => {
                mem::replace(&mut self.stripped_mod, true)
            }
            _ => self.stripped_mod,
        };

        // If the impl is from a masked crate or references something from a
        // masked crate then remove it completely.
        if let clean::ImplItem(ref i) = item.inner {
            if self.masked_crates.contains(&item.def_id.krate) ||
               i.trait_.def_id().map_or(false, |d| self.masked_crates.contains(&d.krate)) ||
               i.for_.def_id().map_or(false, |d| self.masked_crates.contains(&d.krate)) {
                return None;
            }
        }

        // Register any generics to their corresponding string. This is used
        // when pretty-printing types.
        if let Some(generics) = item.inner.generics() {
            self.generics(generics);
        }

        // Propagate a trait method's documentation to all implementors of the
        // trait.
        if let clean::TraitItem(ref t) = item.inner {
            self.traits.entry(item.def_id).or_insert_with(|| t.clone());
        }

        // Collect all the implementors of traits.
        if let clean::ImplItem(ref i) = item.inner {
            if let Some(did) = i.trait_.def_id() {
                // Blanket impls are rendered separately, not per-implementor.
                if i.blanket_impl.is_none() {
                    self.implementors.entry(did).or_default().push(Impl {
                        impl_item: item.clone(),
                    });
                }
            }
        }

        // Index this method for searching later on.
        if let Some(ref s) = item.name {
            // `parent` is (parent DefId, module path) — which of the two halves
            // is known decides whether the item is indexed now or orphaned.
            let (parent, is_inherent_impl_item) = match item.inner {
                clean::StrippedItem(..) => ((None, None), false),
                clean::AssociatedConstItem(..) |
                clean::TypedefItem(_, true) if self.parent_is_trait_impl => {
                    // skip associated items in trait impls
                    ((None, None), false)
                }
                clean::AssociatedTypeItem(..) |
                clean::TyMethodItem(..) |
                clean::StructFieldItem(..) |
                clean::VariantItem(..) => {
                    ((Some(*self.parent_stack.last().unwrap()),
                      Some(&self.stack[..self.stack.len() - 1])),
                     false)
                }
                clean::MethodItem(..) | clean::AssociatedConstItem(..) => {
                    if self.parent_stack.is_empty() {
                        ((None, None), false)
                    } else {
                        let last = self.parent_stack.last().unwrap();
                        let did = *last;
                        let path = match self.paths.get(&did) {
                            // The current stack not necessarily has correlation
                            // for where the type was defined. On the other
                            // hand, `paths` always has the right
                            // information if present.
                            Some(&(ref fqp, ItemType::Trait)) |
                            Some(&(ref fqp, ItemType::Struct)) |
                            Some(&(ref fqp, ItemType::Union)) |
                            Some(&(ref fqp, ItemType::Enum)) =>
                                Some(&fqp[..fqp.len() - 1]),
                            Some(..) => Some(&*self.stack),
                            None => None
                        };
                        ((Some(*last), path), true)
                    }
                }
                _ => ((None, Some(&*self.stack)), false)
            };

            match parent {
                (parent, Some(path)) if is_inherent_impl_item || (!self.stripped_mod) => {
                    debug_assert!(!item.is_stripped());

                    // A crate has a module at its root, containing all items,
                    // which should not be indexed. The crate-item itself is
                    // inserted later on when serializing the search-index.
                    if item.def_id.index != CRATE_DEF_INDEX {
                        self.search_index.push(IndexItem {
                            ty: item.type_(),
                            name: s.to_string(),
                            path: path.join("::"),
                            desc: plain_summary_line_short(item.doc_value()),
                            parent,
                            parent_idx: None,
                            search_type: get_index_search_type(&item),
                        });
                    }
                }
                (Some(parent), None) if is_inherent_impl_item => {
                    // We have a parent, but we don't know where they're
                    // defined yet. Wait for later to index this item.
                    self.orphan_impl_items.push((parent, item.clone()));
                }
                _ => {}
            }
        }

        // Keep track of the fully qualified path for this item.
        let pushed = match item.name {
            Some(ref n) if !n.is_empty() => {
                self.stack.push(n.to_string());
                true
            }
            _ => false,
        };

        match item.inner {
            clean::StructItem(..) | clean::EnumItem(..) |
            clean::TypedefItem(..) | clean::TraitItem(..) |
            clean::FunctionItem(..) | clean::ModuleItem(..) |
            clean::ForeignFunctionItem(..) | clean::ForeignStaticItem(..) |
            clean::ConstantItem(..) | clean::StaticItem(..) |
            clean::UnionItem(..) | clean::ForeignTypeItem |
            clean::MacroItem(..) | clean::ProcMacroItem(..)
            if !self.stripped_mod => {
                // Re-exported items mean that the same id can show up twice
                // in the rustdoc ast that we're looking at. We know,
                // however, that a re-exported item doesn't show up in the
                // `public_items` map, so we can skip inserting into the
                // paths map if there was already an entry present and we're
                // not a public item.
                if !self.paths.contains_key(&item.def_id) ||
                   self.access_levels.is_public(item.def_id)
                {
                    self.paths.insert(item.def_id,
                                      (self.stack.clone(), item.type_()));
                }
                self.add_aliases(&item);
            }
            // Link variants to their parent enum because pages aren't emitted
            // for each variant.
            clean::VariantItem(..) if !self.stripped_mod => {
                let mut stack = self.stack.clone();
                stack.pop();
                self.paths.insert(item.def_id, (stack, ItemType::Enum));
            }

            clean::PrimitiveItem(..) if item.visibility.is_some() => {
                self.add_aliases(&item);
                self.paths.insert(item.def_id, (self.stack.clone(),
                                                item.type_()));
            }

            _ => {}
        }

        // Maintain the parent stack
        let orig_parent_is_trait_impl = self.parent_is_trait_impl;
        let parent_pushed = match item.inner {
            clean::TraitItem(..) | clean::EnumItem(..) | clean::ForeignTypeItem |
            clean::StructItem(..) | clean::UnionItem(..) => {
                self.parent_stack.push(item.def_id);
                self.parent_is_trait_impl = false;
                true
            }
            clean::ImplItem(ref i) => {
                self.parent_is_trait_impl = i.trait_.is_some();
                match i.for_ {
                    clean::ResolvedPath{ did, .. } => {
                        self.parent_stack.push(did);
                        true
                    }
                    ref t => {
                        // `impl`s for primitives resolve through the recorded
                        // primitive locations rather than a path.
                        let prim_did = t.primitive_type().and_then(|t| {
                            self.primitive_locations.get(&t).cloned()
                        });
                        match prim_did {
                            Some(did) => {
                                self.parent_stack.push(did);
                                true
                            }
                            None => false,
                        }
                    }
                }
            }
            _ => false
        };

        // Once we've recursively found all the generics, hoard off all the
        // implementations elsewhere.
        let ret = self.fold_item_recur(item).and_then(|item| {
            if let clean::Item { inner: clean::ImplItem(_), .. } = item {
                // Figure out the id of this impl. This may map to a
                // primitive rather than always to a struct/enum.
                // Note: matching twice to restrict the lifetime of the `i` borrow.
                let mut dids = FxHashSet::default();
                if let clean::Item { inner: clean::ImplItem(ref i), .. } = item {
                    match i.for_ {
                        clean::ResolvedPath { did, .. } |
                        clean::BorrowedRef {
                            type_: box clean::ResolvedPath { did, .. }, ..
                        } => {
                            dids.insert(did);
                        }
                        ref t => {
                            let did = t.primitive_type().and_then(|t| {
                                self.primitive_locations.get(&t).cloned()
                            });

                            if let Some(did) = did {
                                dids.insert(did);
                            }
                        }
                    }

                    // A generic-bounded impl also shows up on each type its
                    // trait bounds mention.
                    if let Some(generics) = i.trait_.as_ref().and_then(|t| t.generics()) {
                        for bound in generics {
                            if let Some(did) = bound.def_id() {
                                dids.insert(did);
                            }
                        }
                    }
                } else {
                    unreachable!()
                };
                let impl_item = Impl {
                    impl_item: item,
                };
                if impl_item.trait_did().map_or(true, |d| self.traits.contains_key(&d)) {
                    for did in dids {
                        self.impls.entry(did).or_insert(vec![]).push(impl_item.clone());
                    }
                } else {
                    // Trait not seen yet: park the impl until its trait is folded.
                    let trait_did = impl_item.trait_did().unwrap();
                    self.orphan_trait_impls.push((trait_did, dids, impl_item));
                }
                // Impls are hoarded in the cache, not kept in the tree.
                None
            } else {
                Some(item)
            }
        });

        // Restore all per-item state so siblings see an unchanged cache.
        if pushed { self.stack.pop().unwrap(); }
        if parent_pushed { self.parent_stack.pop().unwrap(); }
        self.stripped_mod = orig_stripped_mod;
        self.parent_is_trait_impl = orig_parent_is_trait_impl;
        ret
    }
}
impl<'a> Cache {
    /// Records the name of every type/const generic parameter so its `DefId`
    /// can be pretty-printed later. Lifetimes carry no name mapping.
    fn generics(&mut self, generics: &clean::Generics) {
        for param in &generics.params {
            match param.kind {
                clean::GenericParamDefKind::Lifetime => {}
                clean::GenericParamDefKind::Type { did, .. } |
                clean::GenericParamDefKind::Const { did, .. } => {
                    self.param_names.insert(did, param.name.clone());
                }
            }
        }
    }

    /// Registers every `#[doc(alias = "...")]` on `item` as an extra search
    /// index entry pointing at the item's page. The crate root itself is
    /// never aliased.
    fn add_aliases(&mut self, item: &clean::Item) {
        if item.def_id.index == CRATE_DEF_INDEX {
            return
        }
        if let Some(ref item_name) = item.name {
            // Parent-module path for the index entry; fall back to "std" when
            // the item has no recorded path. Built lazily so the common case
            // doesn't allocate (the eager `unwrap_or` form always did).
            let path = self.paths.get(&item.def_id)
                           .map(|p| p.0[..p.0.len() - 1].join("::"))
                           .unwrap_or_else(|| "std".to_owned());
            // Collect into a set first so duplicate alias attributes only
            // produce one index entry each.
            for alias in item.attrs.lists("doc")
                                   .filter(|a| a.check_name("alias"))
                                   .filter_map(|a| a.value_str()
                                                    .map(|s| s.to_string().replace("\"", "")))
                                   .filter(|v| !v.is_empty())
                                   .collect::<FxHashSet<_>>()
                                   .into_iter() {
                self.aliases.entry(alias)
                    // Lazy `with_capacity` avoids building a Vec that is
                    // thrown away when the entry already exists.
                    .or_insert_with(|| Vec::with_capacity(1))
                    .push(IndexItem {
                        ty: item.type_(),
                        name: item_name.to_string(),
                        path: path.clone(),
                        desc: plain_summary_line_short(item.doc_value()),
                        parent: None,
                        parent_idx: None,
                        search_type: get_index_search_type(&item),
                    });
            }
        }
    }
}
/// One entry on the "all items" page: a relative link target plus the
/// displayed item path.
#[derive(Debug, Eq, PartialEq, Hash)]
struct ItemEntry {
    /// Relative URL of the item's page; never starts with '/'.
    url: String,
    /// Fully qualified item name (minus the crate root), e.g. `module::Type`.
    name: String,
}

impl ItemEntry {
    /// Builds an entry, stripping any leading '/' from `url` so the link
    /// stays relative to the documentation root.
    fn new(mut url: String, name: String) -> ItemEntry {
        // Strip all leading slashes in one pass: the previous per-character
        // `url.remove(0)` loop shifted the whole buffer each time (O(k*n)).
        let leading = url.bytes().take_while(|&b| b == b'/').count();
        if leading > 0 {
            url.drain(..leading);
        }
        ItemEntry {
            url,
            name,
        }
    }
}
impl fmt::Display for ItemEntry {
    /// Renders the entry as an HTML anchor. The name is HTML-escaped; the URL
    /// is emitted as-is (it is built from item paths, not user input).
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "<a href='{}'>{}</a>", self.url, Escape(&self.name))
    }
}
impl PartialOrd for ItemEntry {
    fn partial_cmp(&self, other: &ItemEntry) -> Option<::std::cmp::Ordering> {
        Some(self.cmp(other))
    }
}

impl Ord for ItemEntry {
    /// Orders primarily by display name, breaking ties by URL.
    ///
    /// The URL tie-break keeps `Ord` consistent with the derived
    /// `Eq`/`PartialEq` (which compare both fields): two distinct entries
    /// sharing a name previously compared `Equal`, so their sorted order
    /// depended on hash-set iteration order and the rendered page was
    /// nondeterministic.
    fn cmp(&self, other: &ItemEntry) -> ::std::cmp::Ordering {
        self.name.cmp(&other.name).then_with(|| self.url.cmp(&other.url))
    }
}
/// Accumulates every documented item in the crate, bucketed by kind, to
/// render the "List of all items" (`all.html`) page. Sets deduplicate
/// re-exported items; each entry is sorted at render time.
#[derive(Debug)]
struct AllTypes {
    structs: FxHashSet<ItemEntry>,
    enums: FxHashSet<ItemEntry>,
    unions: FxHashSet<ItemEntry>,
    primitives: FxHashSet<ItemEntry>,
    traits: FxHashSet<ItemEntry>,
    macros: FxHashSet<ItemEntry>,
    functions: FxHashSet<ItemEntry>,
    typedefs: FxHashSet<ItemEntry>,
    existentials: FxHashSet<ItemEntry>,
    statics: FxHashSet<ItemEntry>,
    constants: FxHashSet<ItemEntry>,
    // NOTE(review): `keywords` is populated nowhere in this chunk and is not
    // printed by the Display impl — possibly vestigial; confirm before removal.
    keywords: FxHashSet<ItemEntry>,
    attributes: FxHashSet<ItemEntry>,
    derives: FxHashSet<ItemEntry>,
    trait_aliases: FxHashSet<ItemEntry>,
}
impl AllTypes {
    /// Creates empty buckets, pre-sized to plausible per-kind counts
    /// (26 primitives; a round 100 for everything else).
    fn new() -> AllTypes {
        let new_set = |cap| FxHashSet::with_capacity_and_hasher(cap, Default::default());
        AllTypes {
            structs: new_set(100),
            enums: new_set(100),
            unions: new_set(100),
            primitives: new_set(26),
            traits: new_set(100),
            macros: new_set(100),
            functions: new_set(100),
            typedefs: new_set(100),
            existentials: new_set(100),
            statics: new_set(100),
            constants: new_set(100),
            keywords: new_set(100),
            attributes: new_set(100),
            derives: new_set(100),
            trait_aliases: new_set(100),
        }
    }

    /// Records one item given its full path (`crate::module::Name`) and kind.
    /// The crate-name segment is dropped (`skip(1)`) since `all.html` lives
    /// inside the crate's own directory; kinds without a bucket are ignored.
    fn append(&mut self, item_name: String, item_type: &ItemType) {
        let mut url: Vec<_> = item_name.split("::").skip(1).collect();
        if let Some(name) = url.pop() {
            // URL points at the item page: `module/<kind>.<Name>.html`.
            let new_url = format!("{}/{}.{}.html", url.join("/"), item_type, name);
            url.push(name);
            let name = url.join("::");
            match *item_type {
                ItemType::Struct => self.structs.insert(ItemEntry::new(new_url, name)),
                ItemType::Enum => self.enums.insert(ItemEntry::new(new_url, name)),
                ItemType::Union => self.unions.insert(ItemEntry::new(new_url, name)),
                ItemType::Primitive => self.primitives.insert(ItemEntry::new(new_url, name)),
                ItemType::Trait => self.traits.insert(ItemEntry::new(new_url, name)),
                ItemType::Macro => self.macros.insert(ItemEntry::new(new_url, name)),
                ItemType::Function => self.functions.insert(ItemEntry::new(new_url, name)),
                ItemType::Typedef => self.typedefs.insert(ItemEntry::new(new_url, name)),
                ItemType::Existential => self.existentials.insert(ItemEntry::new(new_url, name)),
                ItemType::Static => self.statics.insert(ItemEntry::new(new_url, name)),
                ItemType::Constant => self.constants.insert(ItemEntry::new(new_url, name)),
                ItemType::ProcAttribute => self.attributes.insert(ItemEntry::new(new_url, name)),
                ItemType::ProcDerive => self.derives.insert(ItemEntry::new(new_url, name)),
                ItemType::TraitAlias => self.trait_aliases.insert(ItemEntry::new(new_url, name)),
                // Other kinds (modules, fields, ...) have no "all items" section.
                _ => true,
            };
        }
    }
}
/// Writes one titled, sorted section of the "all items" page. Sections with
/// no entries are omitted entirely.
fn print_entries(f: &mut fmt::Formatter<'_>, e: &FxHashSet<ItemEntry>, title: &str,
                 class: &str) -> fmt::Result {
    if e.is_empty() {
        return Ok(());
    }
    // Hash-set iteration order is arbitrary; sort for stable output.
    let mut entries: Vec<&ItemEntry> = e.iter().collect();
    entries.sort();
    let list: String = entries.iter().map(|s| format!("<li>{}</li>", s)).collect();
    write!(f, "<h3 id='{}'>{}</h3><ul class='{} docblock'>{}</ul>",
           title,
           Escape(title),
           class,
           list)
}
impl fmt::Display for AllTypes {
    /// Renders the full `all.html` body: a header with the collapse-all
    /// toggle, then one sorted section per item kind (empty sections are
    /// skipped by `print_entries`). `keywords` is intentionally not printed.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f,
"<h1 class='fqn'>\
     <span class='out-of-band'>\
         <span id='render-detail'>\
             <a id=\"toggle-all-docs\" href=\"javascript:void(0)\" title=\"collapse all docs\">\
                 [<span class='inner'>&#x2212;</span>]\
             </a>\
         </span>
     </span>
     <span class='in-band'>List of all items</span>\
</h1>")?;
        print_entries(f, &self.structs, "Structs", "structs")?;
        print_entries(f, &self.enums, "Enums", "enums")?;
        print_entries(f, &self.unions, "Unions", "unions")?;
        print_entries(f, &self.primitives, "Primitives", "primitives")?;
        print_entries(f, &self.traits, "Traits", "traits")?;
        print_entries(f, &self.macros, "Macros", "macros")?;
        print_entries(f, &self.attributes, "Attribute Macros", "attributes")?;
        print_entries(f, &self.derives, "Derive Macros", "derives")?;
        print_entries(f, &self.functions, "Functions", "functions")?;
        print_entries(f, &self.typedefs, "Typedefs", "typedefs")?;
        print_entries(f, &self.trait_aliases, "Trait Aliases", "trait-aliases")?;
        print_entries(f, &self.existentials, "Existentials", "existentials")?;
        print_entries(f, &self.statics, "Statics", "statics")?;
        print_entries(f, &self.constants, "Constants", "constants")
    }
}
/// Data backing the generated `settings.html` page: the list of user-facing
/// toggles plus the paths needed to reference the settings script.
#[derive(Debug)]
struct Settings<'a> {
    // (id, explanation, default value)
    settings: Vec<(&'static str, &'static str, bool)>,
    root_path: &'a str,
    suffix: &'a str,
}

impl<'a> Settings<'a> {
    /// Builds the fixed set of rustdoc settings with their default states.
    pub fn new(root_path: &'a str, suffix: &'a str) -> Settings<'a> {
        let settings = vec![
            ("item-declarations", "Auto-hide item declarations.", true),
            ("item-attributes", "Auto-hide item attributes.", true),
            ("trait-implementations", "Auto-hide trait implementations documentation",
             true),
            ("method-docs", "Auto-hide item methods' documentation", false),
            ("go-to-only-result", "Directly go to item in search if there is only one result",
             false),
            ("line-numbers", "Show line numbers on code examples", false),
        ];
        Settings {
            settings,
            root_path,
            suffix,
        }
    }
}
impl<'a> fmt::Display for Settings<'a> {
    /// Renders the settings page body: one checkbox row per setting, followed
    /// by the script tag that wires the toggles up.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Build the toggle rows first, then splice them into the page shell.
        let rows: String = self.settings.iter()
            .map(|(id, text, enabled)| {
                format!("<div class='setting-line'>\
                           <label class='toggle'>\
                           <input type='checkbox' id='{}' {}>\
                           <span class='slider'></span>\
                           </label>\
                           <div>{}</div>\
                         </div>", id, if *enabled { " checked" } else { "" }, text)
            })
            .collect();
        write!(f,
"<h1 class='fqn'>\
     <span class='in-band'>Rustdoc settings</span>\
</h1>\
<div class='settings'>{}</div>\
<script src='{}settings{}.js'></script>",
               rows,
               self.root_path,
               self.suffix)
    }
}
impl Context {
fn derive_id(&self, id: String) -> String {
let mut map = self.id_map.borrow_mut();
map.derive(id)
}
/// String representation of how to get back to the root path of the 'doc/'
/// folder in terms of a relative URL.
fn root_path(&self) -> String {
"../".repeat(self.current.len())
}
    /// Recurse in the directory structure and change the "root path" to make
    /// sure it always points to the top (relatively).
    ///
    /// Pushes `s` onto both the filesystem destination (`dst`) and the
    /// logical module path (`current`), runs `f`, then restores both so the
    /// caller observes unchanged state.
    fn recurse<T, F>(&mut self, s: String, f: F) -> T where
        F: FnOnce(&mut Context) -> T,
    {
        if s.is_empty() {
            panic!("Unexpected empty destination: {:?}", self.current);
        }
        let prev = self.dst.clone();
        self.dst.push(&s);
        self.current.push(s);

        info!("Recursing into {}", self.dst.display());

        let ret = f(self);

        info!("Recursed; leaving {}", self.dst.display());

        // Go back to where we were at
        self.dst = prev;
        self.current.pop().unwrap();

        ret
    }
    /// Main method for rendering a crate.
    ///
    /// This currently isn't parallelized, but it'd be pretty easy to add
    /// parallelization to this function.
    ///
    /// Drives the per-item work queue, then emits the crate-wide
    /// `all.html` (list of every item) and `settings.html` pages.
    fn krate(self, mut krate: clean::Crate) -> Result<(), Error> {
        let mut item = match krate.module.take() {
            Some(i) => i,
            // Nothing to render without a root module.
            None => return Ok(()),
        };
        let final_file = self.dst.join(&krate.name)
                                 .join("all.html");
        let settings_file = self.dst.join("settings.html");
        let crate_name = krate.name.clone();
        // The root module is rendered under the crate's own name.
        item.name = Some(krate.name);

        let mut all = AllTypes::new();

        {
            // Render the crate documentation
            // Work-list driven: `item` pushes each sub-item back onto `work`
            // with a clone of its rendering context.
            let mut work = vec![(self.clone(), item)];

            while let Some((mut cx, item)) = work.pop() {
                cx.item(item, &mut all, |cx, item| {
                    work.push((cx.clone(), item))
                })?
            }
        }

        let mut w = BufWriter::new(try_err!(File::create(&final_file), &final_file));
        let mut root_path = self.dst.to_str().expect("invalid path").to_owned();
        if !root_path.ends_with('/') {
            root_path.push('/');
        }
        let mut page = layout::Page {
            title: "List of all items in this crate",
            css_class: "mod",
            root_path: "../",
            static_root_path: self.shared.static_root_path.deref(),
            description: "List of all items in this crate",
            keywords: BASIC_KEYWORDS,
            resource_suffix: &self.shared.resource_suffix,
            extra_scripts: &[],
            static_extra_scripts: &[],
        };
        let sidebar = if let Some(ref version) = cache().crate_version {
            format!("<p class='location'>Crate {}</p>\
                     <div class='block version'>\
                         <p>Version {}</p>\
                     </div>\
                     <a id='all-types' href='index.html'><p>Back to index</p></a>",
                    crate_name, version)
        } else {
            String::new()
        };
        try_err!(layout::render(&mut w, &self.shared.layout,
                                &page, &sidebar, &all,
                                self.shared.css_file_extension.is_some(),
                                &self.shared.themes,
                                self.shared.generate_search_filter),
                 &final_file);

        // Generating settings page.
        // The `page` struct is reused with its title/description/root_path
        // swapped for the settings variant.
        let settings = Settings::new("./", &self.shared.resource_suffix);
        page.title = "Rustdoc settings";
        page.description = "Settings of Rustdoc";
        page.root_path = "./";

        let mut w = BufWriter::new(try_err!(File::create(&settings_file), &settings_file));
        let mut themes = self.shared.themes.clone();
        let sidebar = "<p class='location'>Settings</p><div class='sidebar-elems'></div>";
        // The settings page pulls in an extra stylesheet on top of the themes.
        themes.push(PathBuf::from("settings.css"));
        let layout = self.shared.layout.clone();
        try_err!(layout::render(&mut w, &layout,
                                &page, &sidebar, &settings,
                                self.shared.css_file_extension.is_some(),
                                &themes,
                                self.shared.generate_search_filter),
                 &settings_file);

        Ok(())
    }
    /// Renders a single item's page into `writer`.
    ///
    /// When `self.render_redirect_pages` is set (inside stripped modules),
    /// a redirect stub to the item's real location is written instead of a
    /// full page. `pushname` appends the item's own name to the page title
    /// (modules use `false` since their name is already in `current`).
    fn render_item(&self,
                   writer: &mut dyn io::Write,
                   it: &clean::Item,
                   pushname: bool)
                   -> io::Result<()> {
        // A little unfortunate that this is done like this, but it sure
        // does make formatting *a lot* nicer.
        CURRENT_LOCATION_KEY.with(|slot| {
            *slot.borrow_mut() = self.current.clone();
        });

        let mut title = if it.is_primitive() || it.is_keyword() {
            // No need to include the namespace for primitive types and keywords
            String::new()
        } else {
            self.current.join("::")
        };
        if pushname {
            if !title.is_empty() {
                title.push_str("::");
            }
            title.push_str(it.name.as_ref().unwrap());
        }
        title.push_str(" - Rust");
        let tyname = it.type_().css_class();
        let desc = if it.is_crate() {
            format!("API documentation for the Rust `{}` crate.",
                    self.shared.layout.krate)
        } else {
            format!("API documentation for the Rust `{}` {} in crate `{}`.",
                    it.name.as_ref().unwrap(), tyname, self.shared.layout.krate)
        };
        let keywords = make_item_keywords(it);
        let page = layout::Page {
            css_class: tyname,
            root_path: &self.root_path(),
            static_root_path: self.shared.static_root_path.deref(),
            title: &title,
            description: &desc,
            keywords: &keywords,
            resource_suffix: &self.shared.resource_suffix,
            extra_scripts: &[],
            static_extra_scripts: &[],
        };

        {
            // Per-page id namespace: reset and reseed with the fixed ids the
            // surrounding layout always uses.
            self.id_map.borrow_mut().reset();
            self.id_map.borrow_mut().populate(initial_ids());
        }

        if !self.render_redirect_pages {
            layout::render(writer, &self.shared.layout, &page,
                           &Sidebar{ cx: self, item: it },
                           &Item{ cx: self, item: it },
                           self.shared.css_file_extension.is_some(),
                           &self.shared.themes,
                           self.shared.generate_search_filter)?;
        } else {
            let mut url = self.root_path();
            // Only items with a known path get a redirect; otherwise the
            // writer is left empty and the caller skips the file entirely.
            if let Some(&(ref names, ty)) = cache().paths.get(&it.def_id) {
                for name in &names[..names.len() - 1] {
                    url.push_str(name);
                    url.push_str("/");
                }
                url.push_str(&item_path(ty, names.last().unwrap()));
                layout::redirect(writer, &url)?;
            }
        }
        Ok(())
    }
    /// Non-parallelized version of rendering an item. This will take the input
    /// item, render its contents, and then invoke the specified closure with
    /// all sub-items which need to be rendered.
    ///
    /// The rendering driver uses this closure to queue up more work.
    fn item<F>(&mut self, item: clean::Item, all: &mut AllTypes, mut f: F) -> Result<(), Error>
        where F: FnMut(&mut Context, clean::Item),
    {
        // Stripped modules survive the rustdoc passes (i.e., `strip-private`)
        // if they contain impls for public types. These modules can also
        // contain items such as publicly re-exported structures.
        //
        // External crates will provide links to these structures, so
        // these modules are recursed into, but not rendered normally
        // (a flag on the context).
        if !self.render_redirect_pages {
            self.render_redirect_pages = item.is_stripped();
        }

        if item.is_mod() {
            // modules are special because they add a namespace. We also need to
            // recurse into the items of the module as well.
            let name = item.name.as_ref().unwrap().to_string();
            // `Option` dance so the FnOnce-captured item can be moved out
            // inside the `recurse` closure.
            let mut item = Some(item);
            self.recurse(name, |this| {
                let item = item.take().unwrap();

                let mut buf = Vec::new();
                this.render_item(&mut buf, &item, false).unwrap();
                // buf will be empty if the module is stripped and there is no redirect for it
                if !buf.is_empty() {
                    try_err!(this.shared.ensure_dir(&this.dst), &this.dst);
                    let joint_dst = this.dst.join("index.html");
                    try_err!(fs::write(&joint_dst, buf), &joint_dst);
                }

                let m = match item.inner {
                    clean::StrippedItem(box clean::ModuleItem(m)) |
                    clean::ModuleItem(m) => m,
                    _ => unreachable!()
                };

                // Render sidebar-items.js used throughout this module.
                if !this.render_redirect_pages {
                    let items = this.build_sidebar_items(&m);
                    let js_dst = this.dst.join("sidebar-items.js");
                    let mut js_out = BufWriter::new(try_err!(File::create(&js_dst), &js_dst));
                    try_err!(write!(&mut js_out, "initSidebarItems({});",
                                    as_json(&items)), &js_dst);
                }

                // Queue every child for rendering via the caller's closure.
                for item in m.items {
                    f(this, item);
                }
                Ok(())
            })?;
        } else if item.name.is_some() {
            let mut buf = Vec::new();
            self.render_item(&mut buf, &item, true).unwrap();
            // buf will be empty if the item is stripped and there is no redirect for it
            if !buf.is_empty() {
                let name = item.name.as_ref().unwrap();
                let item_type = item.type_();
                let file_name = &item_path(item_type, name);
                try_err!(self.shared.ensure_dir(&self.dst), &self.dst);
                let joint_dst = self.dst.join(file_name);
                try_err!(fs::write(&joint_dst, buf), &joint_dst);

                if !self.render_redirect_pages {
                    all.append(full_path(self, &item), &item_type);
                }
                if self.shared.generate_redirect_pages {
                    // Redirect from a sane URL using the namespace to Rustdoc's
                    // URL for the page.
                    let redir_name = format!("{}.{}.html", name, item_type.name_space());
                    let redir_dst = self.dst.join(redir_name);
                    // create_new: never clobber a real page with a redirect stub.
                    if let Ok(redirect_out) = OpenOptions::new().create_new(true)
                                                                .write(true)
                                                                .open(&redir_dst) {
                        let mut redirect_out = BufWriter::new(redirect_out);
                        try_err!(layout::redirect(&mut redirect_out, file_name), &redir_dst);
                    }
                }

                // If the item is a macro, redirect from the old macro URL (with !)
                // to the new one (without).
                if item_type == ItemType::Macro {
                    let redir_name = format!("{}.{}!.html", item_type, name);
                    let redir_dst = self.dst.join(redir_name);
                    let redirect_out = try_err!(File::create(&redir_dst), &redir_dst);
                    let mut redirect_out = BufWriter::new(redirect_out);
                    try_err!(layout::redirect(&mut redirect_out, file_name), &redir_dst);
                }
            }
        }
        Ok(())
    }
fn build_sidebar_items(&self, m: &clean::Module) -> BTreeMap<String, Vec<NameDoc>> {
// BTreeMap instead of HashMap to get a sorted output
let mut map: BTreeMap<_, Vec<_>> = BTreeMap::new();
for item in &m.items {
if item.is_stripped() { continue }
let short = item.type_().css_class();
let myname = match item.name {
None => continue,
Some(ref s) => s.to_string(),
};
let short = short.to_string();
map.entry(short).or_default()
.push((myname, Some(plain_summary_line(item.doc_value()))));
}
if self.shared.sort_modules_alphabetically {
for (_, items) in &mut map {
items.sort();
}
}
map
}
}
impl<'a> Item<'a> {
    /// Generates a url appropriate for an `href` attribute back to the source of
    /// this item.
    ///
    /// The url generated, when clicked, will redirect the browser back to the
    /// original source code.
    ///
    /// If `None` is returned, then a source link couldn't be generated. This
    /// may happen, for example, with externally inlined items where the source
    /// of their crate documentation isn't known.
    fn src_href(&self) -> Option<String> {
        let mut root = self.cx.root_path();

        let cache = cache();
        // Outer `path` is only built in the external-crate branch below; the
        // local branch shadows it with the pre-rendered href from
        // `local_sources`.
        let mut path = String::new();

        // We can safely ignore macros from other libraries
        let file = match self.item.source.filename {
            FileName::Real(ref path) => path,
            _ => return None,
        };

        let (krate, path) = if self.item.def_id.is_local() {
            if let Some(path) = self.cx.shared.local_sources.get(file) {
                (&self.cx.shared.layout.krate, path)
            } else {
                return None;
            }
        } else {
            let (krate, src_root) = match *cache.extern_locations.get(&self.item.def_id.krate)? {
                (ref name, ref src, Local) => (name, src),
                (ref name, ref src, Remote(ref s)) => {
                    // For remote crates the link root is their html_root_url,
                    // not our local doc root.
                    root = s.to_string();
                    (name, src)
                }
                (_, _, Unknown) => return None,
            };

            clean_srcpath(&src_root, file, false, |component| {
                path.push_str(&component.to_string_lossy());
                path.push('/');
            });
            let mut fname = file.file_name().expect("source has no filename")
                                .to_os_string();
            fname.push(".html");
            path.push_str(&fname.to_string_lossy());
            (krate, &path)
        };

        // Line anchor: single line or "lo-hi" range, matching the anchors the
        // source renderer emits.
        let lines = if self.item.source.loline == self.item.source.hiline {
            self.item.source.loline.to_string()
        } else {
            format!("{}-{}", self.item.source.loline, self.item.source.hiline)
        };
        Some(format!("{root}src/{krate}/{path}#{lines}",
                     root = Escape(&root),
                     krate = krate,
                     path = path,
                     lines = lines))
    }
}
/// Runs `f`, wrapping whatever it writes inside the collapsible
/// type-declaration `<div>` used by the front-end toggles.
fn wrap_into_docblock<F>(w: &mut fmt::Formatter<'_>, f: F) -> fmt::Result
    where F: Fn(&mut fmt::Formatter<'_>) -> fmt::Result
{
    w.write_str("<div class=\"docblock type-decl hidden-by-usual-hider\">")?;
    f(w)?;
    w.write_str("</div>")
}
impl<'a> fmt::Display for Item<'a> {
    /// Renders the entire page body for one item: the breadcrumb header with
    /// stability/`[src]` decorations, then the kind-specific page content.
    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
        debug_assert!(!self.item.is_stripped());
        // Write the breadcrumb trail header for the top
        write!(fmt, "<h1 class='fqn'><span class='out-of-band'>")?;
        if let Some(version) = self.item.stable_since() {
            write!(fmt, "<span class='since' title='Stable since Rust version {0}'>{0}</span>",
                   version)?;
        }
        write!(fmt,
               "<span id='render-detail'>\
                   <a id=\"toggle-all-docs\" href=\"javascript:void(0)\" \
                      title=\"collapse all docs\">\
                       [<span class='inner'>−</span>]\
                   </a>\
               </span>")?;

        // Write `src` tag
        //
        // When this item is part of a `pub use` in a downstream crate, the
        // [src] link in the downstream documentation will actually come back to
        // this page, and this link will be auto-clicked. The `id` attribute is
        // used to find the link to auto-click.
        if self.cx.shared.include_sources && !self.item.is_primitive() {
            if let Some(l) = self.src_href() {
                write!(fmt, "<a class='srclink' href='{}' title='{}'>[src]</a>",
                       l, "goto source code")?;
            }
        }

        write!(fmt, "</span>")?; // out-of-band
        write!(fmt, "<span class='in-band'>")?;
        // Heading prefix naming the item's kind (e.g. "Struct ", "Trait ").
        match self.item.inner {
            clean::ModuleItem(ref m) => if m.is_crate {
                write!(fmt, "Crate ")?;
            } else {
                write!(fmt, "Module ")?;
            },
            clean::FunctionItem(..) | clean::ForeignFunctionItem(..) => write!(fmt, "Function ")?,
            clean::TraitItem(..) => write!(fmt, "Trait ")?,
            clean::StructItem(..) => write!(fmt, "Struct ")?,
            clean::UnionItem(..) => write!(fmt, "Union ")?,
            clean::EnumItem(..) => write!(fmt, "Enum ")?,
            clean::TypedefItem(..) => write!(fmt, "Type Definition ")?,
            clean::MacroItem(..) => write!(fmt, "Macro ")?,
            clean::ProcMacroItem(ref mac) => match mac.kind {
                MacroKind::Bang => write!(fmt, "Macro ")?,
                MacroKind::Attr => write!(fmt, "Attribute Macro ")?,
                MacroKind::Derive => write!(fmt, "Derive Macro ")?,
                MacroKind::ProcMacroStub => unreachable!(),
            }
            clean::PrimitiveItem(..) => write!(fmt, "Primitive Type ")?,
            clean::StaticItem(..) | clean::ForeignStaticItem(..) => write!(fmt, "Static ")?,
            clean::ConstantItem(..) => write!(fmt, "Constant ")?,
            clean::ForeignTypeItem => write!(fmt, "Foreign Type ")?,
            clean::KeywordItem(..) => write!(fmt, "Keyword ")?,
            clean::ExistentialItem(..) => write!(fmt, "Existential Type ")?,
            clean::TraitAliasItem(..) => write!(fmt, "Trait Alias ")?,
            _ => {
                // We don't generate pages for any other type.
                unreachable!();
            }
        }
        // Breadcrumb links for each parent module. Primitives and keywords
        // are not addressed by module path, so they get no breadcrumbs.
        if !self.item.is_primitive() && !self.item.is_keyword() {
            let cur = &self.cx.current;
            // A module page's own name is the last path component; skip it.
            let amt = if self.item.is_mod() { cur.len() - 1 } else { cur.len() };
            for (i, component) in cur.iter().enumerate().take(amt) {
                write!(fmt, "<a href='{}index.html'>{}</a>::<wbr>",
                       "../".repeat(cur.len() - i - 1),
                       component)?;
            }
        }
        write!(fmt, "<a class=\"{}\" href=''>{}</a>",
               self.item.type_(), self.item.name.as_ref().unwrap())?;

        write!(fmt, "</span></h1>")?; // in-band

        // Dispatch to the per-kind rendering function for the page body.
        match self.item.inner {
            clean::ModuleItem(ref m) =>
                item_module(fmt, self.cx, self.item, &m.items),
            clean::FunctionItem(ref f) | clean::ForeignFunctionItem(ref f) =>
                item_function(fmt, self.cx, self.item, f),
            clean::TraitItem(ref t) => item_trait(fmt, self.cx, self.item, t),
            clean::StructItem(ref s) => item_struct(fmt, self.cx, self.item, s),
            clean::UnionItem(ref s) => item_union(fmt, self.cx, self.item, s),
            clean::EnumItem(ref e) => item_enum(fmt, self.cx, self.item, e),
            clean::TypedefItem(ref t, _) => item_typedef(fmt, self.cx, self.item, t),
            clean::MacroItem(ref m) => item_macro(fmt, self.cx, self.item, m),
            clean::ProcMacroItem(ref m) => item_proc_macro(fmt, self.cx, self.item, m),
            clean::PrimitiveItem(ref p) => item_primitive(fmt, self.cx, self.item, p),
            clean::StaticItem(ref i) | clean::ForeignStaticItem(ref i) =>
                item_static(fmt, self.cx, self.item, i),
            clean::ConstantItem(ref c) => item_constant(fmt, self.cx, self.item, c),
            clean::ForeignTypeItem => item_foreign_type(fmt, self.cx, self.item),
            clean::KeywordItem(ref k) => item_keyword(fmt, self.cx, self.item, k),
            clean::ExistentialItem(ref e, _) => item_existential(fmt, self.cx, self.item, e),
            clean::TraitAliasItem(ref ta) => item_trait_alias(fmt, self.cx, self.item, ta),
            _ => {
                // We don't generate pages for any other type.
                unreachable!();
            }
        }
    }
}
/// Computes the file name of the generated page for an item of kind `ty`.
fn item_path(ty: ItemType, name: &str) -> String {
    if ty == ItemType::Module {
        // Modules are directories with an index page.
        format!("{}index.html", SlashChecker(name))
    } else {
        format!("{}.{}.html", ty.css_class(), name)
    }
}
/// Builds the fully-qualified `mod::path::Name` of an item in the current module.
fn full_path(cx: &Context, item: &clean::Item) -> String {
    format!("{}::{}", cx.current.join("::"), item.name.as_ref().unwrap())
}
/// Returns the first "paragraph" of `s`: leading blank lines are dropped,
/// then lines are kept up to (but not including) the next blank line.
fn shorter<'a>(s: Option<&'a str>) -> String {
    let text = match s {
        Some(text) => text,
        None => return String::new(),
    };
    let paragraph: Vec<&str> = text.lines()
        .skip_while(|line| line.trim().is_empty())
        .take_while(|line| !line.trim().is_empty())
        .collect();
    paragraph.join("\n")
}
#[inline]
/// First paragraph of `s`, flattened to one line and stripped of markdown.
fn plain_summary_line(s: Option<&str>) -> String {
    let collapsed = shorter(s).replace('\n', " ");
    markdown::plain_summary_line_full(&collapsed, false)
}
#[inline]
/// Like `plain_summary_line`, but asks the markdown renderer for the
/// shortened variant of the summary.
fn plain_summary_line_short(s: Option<&str>) -> String {
    let collapsed = shorter(s).replace('\n', " ");
    markdown::plain_summary_line_full(&collapsed, true)
}
/// Writes an item's stability banner(s) followed by its full documentation.
fn document(w: &mut fmt::Formatter<'_>, cx: &Context, item: &clean::Item) -> fmt::Result {
    if let Some(name) = item.name.as_ref() {
        info!("Documenting {}", name);
    }
    document_stability(w, cx, item, false)?;
    document_full(w, item, cx, "", false)
}
/// Renders `md_text` as a markdown doc block, optionally hidden by default.
fn render_markdown(w: &mut fmt::Formatter<'_>,
                   cx: &Context,
                   md_text: &str,
                   links: Vec<(String, String)>,
                   prefix: &str,
                   is_hidden: bool)
                   -> fmt::Result {
    let hidden_class = if is_hidden { " hidden" } else { "" };
    let mut ids = cx.id_map.borrow_mut();
    let rendered = Markdown(md_text, &links, RefCell::new(&mut ids), cx.codes);
    write!(w, "<div class='docblock{}'>{}{}</div>", hidden_class, prefix, rendered)
}
/// Writes a one-line summary of `item`'s docs, with a "Read more" link when
/// the docs have more than one line. Falls back to `prefix` alone when there
/// are no docs at all.
fn document_short(
    w: &mut fmt::Formatter<'_>,
    cx: &Context,
    item: &clean::Item,
    link: AssocItemLink<'_>,
    prefix: &str, is_hidden: bool
) -> fmt::Result {
    match item.doc_value() {
        Some(s) => {
            let summary = plain_summary_line(Some(s));
            let markdown = if s.contains('\n') {
                format!("{} [Read more]({})", &summary, naive_assoc_href(item, link))
            } else {
                summary
            };
            render_markdown(w, cx, &markdown, item.links(), prefix, is_hidden)
        }
        None if !prefix.is_empty() => {
            write!(w, "<div class='docblock{}'>{}</div>",
                   if is_hidden { " hidden" } else { "" },
                   prefix)
        }
        None => Ok(()),
    }
}
/// Writes the complete (possibly collapsed) documentation for `item`,
/// or just `prefix` when the item has no docs.
fn document_full(w: &mut fmt::Formatter<'_>, item: &clean::Item,
                 cx: &Context, prefix: &str, is_hidden: bool) -> fmt::Result {
    let hidden_class = if is_hidden { " hidden" } else { "" };
    if let Some(s) = cx.shared.maybe_collapsed_doc_value(item) {
        debug!("Doc block: =====\n{}\n=====", s);
        render_markdown(w, cx, &*s, item.links(), prefix, is_hidden)
    } else if !prefix.is_empty() {
        write!(w, "<div class='docblock{}'>{}</div>", hidden_class, prefix)
    } else {
        Ok(())
    }
}
/// Writes the stability/deprecation banners for `item`, if it has any.
fn document_stability(w: &mut fmt::Formatter<'_>, cx: &Context, item: &clean::Item,
                      is_hidden: bool) -> fmt::Result {
    let stabilities = short_stability(item, cx);
    if stabilities.is_empty() {
        return Ok(());
    }
    write!(w, "<div class='stability{}'>", if is_hidden { " hidden" } else { "" })?;
    for note in stabilities {
        write!(w, "{}", note)?;
    }
    write!(w, "</div>")
}
/// Heading suffix for `#[non_exhaustive]` items (empty otherwise).
fn document_non_exhaustive_header(item: &clean::Item) -> &str {
    match item.is_non_exhaustive() {
        true => " (Non-exhaustive)",
        false => "",
    }
}
/// Emits the explanatory note shown on `#[non_exhaustive]` items; a no-op
/// for exhaustive items.
fn document_non_exhaustive(w: &mut fmt::Formatter<'_>, item: &clean::Item) -> fmt::Result {
    if !item.is_non_exhaustive() {
        return Ok(());
    }
    let kind = if item.is_struct() {
        "struct"
    } else if item.is_enum() {
        "enum"
    } else if item.is_variant() {
        "variant"
    } else {
        "type"
    };
    write!(w, "<div class='docblock non-exhaustive non-exhaustive-{}'>", kind)?;
    match kind {
        "struct" => {
            write!(w, "Non-exhaustive structs could have additional fields added in future. \
                       Therefore, non-exhaustive structs cannot be constructed in external crates \
                       using the traditional <code>Struct {{ .. }}</code> syntax; cannot be \
                       matched against without a wildcard <code>..</code>; and \
                       struct update syntax will not work.")?;
        }
        "enum" => {
            write!(w, "Non-exhaustive enums could have additional variants added in future. \
                       Therefore, when matching against variants of non-exhaustive enums, an \
                       extra wildcard arm must be added to account for any future variants.")?;
        }
        "variant" => {
            write!(w, "Non-exhaustive enum variants could have additional fields added in future. \
                       Therefore, non-exhaustive enum variants cannot be constructed in external \
                       crates and cannot be matched against.")?;
        }
        _ => {
            write!(w, "This type will require a wildcard arm in any match statements or \
                       constructors.")?;
        }
    }
    write!(w, "</div>")
}
/// Splits `name` into `(prefix, trailing number, leading-zero count)` so that
/// names sort "naturally" (e.g. `foo2` before `foo10`), with zero-padded
/// numbers ordered after unpadded ones.
fn name_key(name: &str) -> (&str, u64, usize) {
    let bytes = name.as_bytes();
    // One past the last ASCII digit (or len() if the name has no digits).
    let end = bytes.iter().rposition(|b| b.is_ascii_digit()).map_or(name.len(), |i| i + 1);
    // Start of the trailing digit run.
    let split = bytes[..end].iter().rposition(|b| !b.is_ascii_digit()).map_or(0, |i| i + 1);
    // First non-'0' digit within the run; used to count leading zeroes.
    let after_zeroes = bytes[split..end].iter().position(|&b| b != b'0')
        .map_or(name.len(), |extra| split + extra);
    let num_zeroes = after_zeroes - split;

    match name[split..end].parse::<u64>() {
        Ok(n) => (&name[..split], n, num_zeroes),
        Err(_) => (name, 0, num_zeroes),
    }
}
/// Renders a module index page: the module's own docs followed by a table of
/// its items, grouped into sections by item kind.
fn item_module(w: &mut fmt::Formatter<'_>, cx: &Context,
               item: &clean::Item, items: &[clean::Item]) -> fmt::Result {
    document(w, cx, item)?;

    let mut indices = (0..items.len()).filter(|i| !items[*i].is_stripped()).collect::<Vec<usize>>();

    // the order of item types in the listing
    fn reorder(ty: ItemType) -> u8 {
        match ty {
            ItemType::ExternCrate => 0,
            ItemType::Import => 1,
            ItemType::Primitive => 2,
            ItemType::Module => 3,
            ItemType::Macro => 4,
            ItemType::Struct => 5,
            ItemType::Enum => 6,
            ItemType::Constant => 7,
            ItemType::Static => 8,
            ItemType::Trait => 9,
            ItemType::Function => 10,
            ItemType::Typedef => 12,
            ItemType::Union => 13,
            _ => 14 + ty as u8,
        }
    }

    // Sort by section, then stability (stable before unstable), then by a
    // natural (digit-aware) comparison of the item names.
    fn cmp(i1: &clean::Item, i2: &clean::Item, idx1: usize, idx2: usize) -> Ordering {
        let ty1 = i1.type_();
        let ty2 = i2.type_();
        if ty1 != ty2 {
            return (reorder(ty1), idx1).cmp(&(reorder(ty2), idx2))
        }
        let s1 = i1.stability.as_ref().map(|s| s.level);
        let s2 = i2.stability.as_ref().map(|s| s.level);
        match (s1, s2) {
            (Some(stability::Unstable), Some(stability::Stable)) => return Ordering::Greater,
            (Some(stability::Stable), Some(stability::Unstable)) => return Ordering::Less,
            _ => {}
        }
        let lhs = i1.name.as_ref().map_or("", |s| &**s);
        let rhs = i2.name.as_ref().map_or("", |s| &**s);
        name_key(lhs).cmp(&name_key(rhs))
    }

    if cx.shared.sort_modules_alphabetically {
        indices.sort_by(|&i1, &i2| cmp(&items[i1], &items[i2], i1, i2));
    }
    // This call is to remove re-export duplicates in cases such as:
    //
    // ```
    // pub mod foo {
    //     pub mod bar {
    //         pub trait Double { fn foo(); }
    //     }
    // }
    //
    // pub use foo::bar::*;
    // pub use foo::*;
    // ```
    //
    // `Double` will appear twice in the generated docs.
    //
    // FIXME: This code is quite ugly and could be improved. Small issue: DefId
    // can be identical even if the elements are different (mostly in imports).
    // So in case this is an import, we keep everything by adding a "unique id"
    // (which is the position in the vector).
    indices.dedup_by_key(|i| (items[*i].def_id,
                              if items[*i].name.as_ref().is_some() {
                                  Some(full_path(cx, &items[*i]))
                              } else {
                                  None
                              },
                              items[*i].type_(),
                              if items[*i].is_import() {
                                  *i
                              } else {
                                  0
                              }));

    debug!("{:?}", indices);
    let mut curty = None;
    for &idx in &indices {
        let myitem = &items[idx];
        if myitem.is_stripped() {
            continue;
        }

        // Open a new `<h2>`/`<table>` section whenever the item kind changes.
        let myty = Some(myitem.type_());
        if curty == Some(ItemType::ExternCrate) && myty == Some(ItemType::Import) {
            // Put `extern crate` and `use` re-exports in the same section.
            curty = myty;
        } else if myty != curty {
            if curty.is_some() {
                write!(w, "</table>")?;
            }
            curty = myty;
            let (short, name) = item_ty_to_strs(&myty.unwrap());
            write!(w, "<h2 id='{id}' class='section-header'>\
                       <a href=\"#{id}\">{name}</a></h2>\n<table>",
                   id = cx.derive_id(short.to_owned()), name = name)?;
        }

        match myitem.inner {
            clean::ExternCrateItem(ref name, ref src) => {
                use crate::html::format::HRef;

                match *src {
                    Some(ref src) => {
                        write!(w, "<tr><td><code>{}extern crate {} as {};",
                               VisSpace(&myitem.visibility),
                               HRef::new(myitem.def_id, src),
                               name)?
                    }
                    None => {
                        write!(w, "<tr><td><code>{}extern crate {};",
                               VisSpace(&myitem.visibility),
                               HRef::new(myitem.def_id, name))?
                    }
                }
                write!(w, "</code></td></tr>")?;
            }

            clean::ImportItem(ref import) => {
                write!(w, "<tr><td><code>{}{}</code></td></tr>",
                       VisSpace(&myitem.visibility), *import)?;
            }

            _ => {
                if myitem.name.is_none() { continue }

                // Mark unsafe functions with a warning glyph in the listing.
                let unsafety_flag = match myitem.inner {
                    clean::FunctionItem(ref func) | clean::ForeignFunctionItem(ref func)
                        if func.header.unsafety == hir::Unsafety::Unsafe => {
                        "<a title='unsafe function' href='#'><sup>⚠</sup></a>"
                    }
                    _ => "",
                };

                let stab = myitem.stability_class();
                let add = if stab.is_some() {
                    " "
                } else {
                    ""
                };

                let doc_value = myitem.doc_value().unwrap_or("");
                write!(w, "\
                       <tr class='{stab}{add}module-item'>\
                           <td><a class=\"{class}\" href=\"{href}\" \
                                  title='{title}'>{name}</a>{unsafety_flag}</td>\
                           <td class='docblock-short'>{stab_tags}{docs}</td>\
                       </tr>",
                       name = *myitem.name.as_ref().unwrap(),
                       stab_tags = stability_tags(myitem),
                       docs = MarkdownSummaryLine(doc_value, &myitem.links()),
                       class = myitem.type_(),
                       add = add,
                       stab = stab.unwrap_or_else(|| String::new()),
                       unsafety_flag = unsafety_flag,
                       href = item_path(myitem.type_(), myitem.name.as_ref().unwrap()),
                       title = [full_path(cx, myitem), myitem.type_().to_string()]
                               .iter()
                               .filter_map(|s| if !s.is_empty() {
                                   Some(s.as_str())
                               } else {
                                   None
                               })
                               .collect::<Vec<_>>()
                               .join(" "),
                      )?;
            }
        }
    }

    if curty.is_some() {
        write!(w, "</table>")?;
    }
    Ok(())
}
/// Render the stability and deprecation tags that are displayed in the item's summary at the
/// module level.
fn stability_tags(item: &clean::Item) -> String {
let mut tags = String::new();
fn tag_html(class: &str, contents: &str) -> String {
format!(r#"<span class="stab {}">{}</span>"#, class, contents)
}
// The trailing space after each tag is to space it properly against the rest of the docs.
if item.deprecation().is_some() {
let mut message = "Deprecated";
if let Some(ref stab) = item.stability {
if let Some(ref depr) = stab.deprecation {
if let Some(ref since) = depr.since {
if !stability::deprecation_in_effect(&since) {
message = "Deprecation planned";
}
}
}
}
tags += &tag_html("deprecated", message);
}
if let Some(stab) = item
.stability
.as_ref()
.filter(|s| s.level == stability::Unstable)
{
if stab.feature.as_ref().map(|s| &**s) == Some("rustc_private") {
tags += &tag_html("internal", "Internal");
} else {
tags += &tag_html("unstable", "Experimental");
}
}
if let Some(ref cfg) = item.attrs.cfg {
tags += &tag_html("portability", &cfg.render_short_html());
}
tags
}
/// Render the stability and/or deprecation warning that is displayed at the top of the item's
/// documentation.
fn short_stability(item: &clean::Item, cx: &Context) -> Vec<String> {
    let mut stability = vec![];
    let error_codes = ErrorCodes::from(UnstableFeatures::from_environment().is_nightly_build());

    if let Some(Deprecation { note, since }) = &item.deprecation() {
        // We display deprecation messages for #[deprecated] and #[rustc_deprecated]
        // but only display the future-deprecation messages for #[rustc_deprecated].
        let mut message = if let Some(since) = since {
            format!("Deprecated since {}", Escape(since))
        } else {
            String::from("Deprecated")
        };
        // Deprecation scheduled for a future release: switch the wording.
        if let Some(ref stab) = item.stability {
            if let Some(ref depr) = stab.deprecation {
                if let Some(ref since) = depr.since {
                    if !stability::deprecation_in_effect(&since) {
                        message = format!("Deprecating in {}", Escape(&since));
                    }
                }
            }
        }

        // Append the author-provided deprecation note, rendered as markdown.
        if let Some(note) = note {
            let mut ids = cx.id_map.borrow_mut();
            let html = MarkdownHtml(&note, RefCell::new(&mut ids), error_codes);
            message.push_str(&format!(": {}", html));
        }
        stability.push(format!("<div class='stab deprecated'>{}</div>", message));
    }

    // Unstable-feature banner; rustc-internal APIs get distinct styling.
    if let Some(stab) = item
        .stability
        .as_ref()
        .filter(|stab| stab.level == stability::Unstable)
    {
        let is_rustc_private = stab.feature.as_ref().map(|s| &**s) == Some("rustc_private");

        let mut message = if is_rustc_private {
            "<span class='emoji'>⚙️</span> This is an internal compiler API."
        } else {
            "<span class='emoji'>🔬</span> This is a nightly-only experimental API."
        }
        .to_owned();

        // Name the feature gate and link to its tracking issue when known.
        if let Some(feature) = stab.feature.as_ref() {
            let mut feature = format!("<code>{}</code>", Escape(&feature));
            if let (Some(url), Some(issue)) = (&cx.shared.issue_tracker_base_url, stab.issue) {
                feature.push_str(&format!(
                    " <a href=\"{url}{issue}\">#{issue}</a>",
                    url = url,
                    issue = issue
                ));
            }

            message.push_str(&format!(" ({})", feature));
        }

        if let Some(unstable_reason) = &stab.unstable_reason {
            // Provide a more informative message than the compiler help.
            let unstable_reason = if is_rustc_private {
                "This crate is being loaded from the sysroot, a permanently unstable location \
                for private compiler dependencies. It is not intended for general use. Prefer \
                using a public version of this crate from \
                [crates.io](https://crates.io) via [`Cargo.toml`]\
                (https://doc.rust-lang.org/cargo/reference/specifying-dependencies.html)."
            } else {
                unstable_reason
            };

            let mut ids = cx.id_map.borrow_mut();
            message = format!(
                "<details><summary>{}</summary>{}</details>",
                message,
                MarkdownHtml(&unstable_reason, RefCell::new(&mut ids), error_codes)
            );
        }

        let class = if is_rustc_private {
            "internal"
        } else {
            "unstable"
        };
        stability.push(format!("<div class='stab {}'>{}</div>", class, message));
    }

    // Portability restrictions (#[cfg(...)]), long form.
    if let Some(ref cfg) = item.attrs.cfg {
        stability.push(format!(
            "<div class='stab portability'>{}</div>",
            cfg.render_long_html()
        ));
    }

    stability
}
/// Renders the page body for a `const` item.
fn item_constant(w: &mut fmt::Formatter<'_>, cx: &Context, it: &clean::Item,
                 c: &clean::Constant) -> fmt::Result {
    write!(w, "<pre class='rust const'>")?;
    render_attributes(w, it)?;
    write!(w, "{}const {}: {}</pre>",
           VisSpace(&it.visibility),
           it.name.as_ref().unwrap(),
           c.type_)?;
    document(w, cx, it)
}
/// Renders the page body for a `static` item.
fn item_static(w: &mut fmt::Formatter<'_>, cx: &Context, it: &clean::Item,
               s: &clean::Static) -> fmt::Result {
    write!(w, "<pre class='rust static'>")?;
    render_attributes(w, it)?;
    write!(w, "{}static {}{}: {}</pre>",
           VisSpace(&it.visibility),
           MutableSpace(s.mutability),
           it.name.as_ref().unwrap(),
           s.type_)?;
    document(w, cx, it)
}
/// Renders the page body for a free function.
fn item_function(w: &mut fmt::Formatter<'_>, cx: &Context, it: &clean::Item,
                 f: &clean::Function) -> fmt::Result {
    let name = it.name.as_ref().unwrap();
    // Length of the rendered header text, used by `Function` to decide
    // whether the argument list needs to be wrapped.
    let header_len = format!(
        "{}{}{}{}{:#}fn {}{:#}",
        VisSpace(&it.visibility),
        ConstnessSpace(f.header.constness),
        UnsafetySpace(f.header.unsafety),
        AsyncSpace(f.header.asyncness),
        AbiSpace(f.header.abi),
        name,
        f.generics
    ).len();
    write!(w, "{}<pre class='rust fn'>", render_spotlight_traits(it)?)?;
    render_attributes(w, it)?;
    write!(w,
           "{vis}{constness}{unsafety}{asyncness}{abi}fn \
            {name}{generics}{decl}{where_clause}</pre>",
           vis = VisSpace(&it.visibility),
           constness = ConstnessSpace(f.header.constness),
           unsafety = UnsafetySpace(f.header.unsafety),
           asyncness = AsyncSpace(f.header.asyncness),
           abi = AbiSpace(f.header.abi),
           name = name,
           generics = f.generics,
           decl = Function {
               decl: &f.decl,
               header_len,
               indent: 0,
               asyncness: f.header.asyncness,
           },
           where_clause = WhereClause { gens: &f.generics, indent: 0, end_newline: true })?;
    document(w, cx, it)
}
/// Renders one implementor entry of a trait's "Implementors" list.
fn render_implementor(cx: &Context, implementor: &Impl, w: &mut fmt::Formatter<'_>,
                      implementor_dups: &FxHashMap<&str, (DefId, bool)>) -> fmt::Result {
    // If another implementor shares the same abridged name, show the full
    // path instead — for example `std::iter::ExactSizeIterator`.
    let use_absolute = match implementor.inner_impl().for_ {
        clean::ResolvedPath { ref path, is_generic: false, .. } |
        clean::BorrowedRef {
            type_: box clean::ResolvedPath { ref path, is_generic: false, .. },
            ..
        } => implementor_dups[path.last_name()].1,
        _ => false,
    };
    render_impl(w, cx, implementor, AssocItemLink::Anchor(None), RenderMode::Normal,
                implementor.impl_item.stable_since(), false, Some(use_absolute))
}
/// Renders each trait impl in `traits`, linking items back to their trait.
fn render_impls(cx: &Context, w: &mut fmt::Formatter<'_>,
                traits: &[&&Impl],
                containing_item: &clean::Item) -> fmt::Result {
    for imp in traits {
        let trait_did = imp.trait_did().unwrap();
        let assoc_link =
            AssocItemLink::GotoSource(trait_did, &imp.inner_impl().provided_trait_methods);
        render_impl(w, cx, imp, assoc_link, RenderMode::Normal,
                    containing_item.stable_since(), true, None)?;
    }
    Ok(())
}
/// Joins generic bounds with ` + `; prefixed with `: ` unless rendering the
/// right-hand side of a trait alias.
fn bounds(t_bounds: &[clean::GenericBound], trait_alias: bool) -> String {
    if t_bounds.is_empty() {
        return String::new();
    }
    let joined = t_bounds.iter()
        .map(|bound| bound.to_string())
        .collect::<Vec<_>>()
        .join(" + ");
    if trait_alias {
        joined
    } else {
        format!(": {}", joined)
    }
}
/// Orders impls by a natural comparison of their rendered (HTML) forms.
fn compare_impl<'a, 'b>(lhs: &'a &&Impl, rhs: &'b &&Impl) -> Ordering {
    // lhs and rhs are formatted as HTML, which may be unnecessary
    let lhs_repr = lhs.inner_impl().to_string();
    let rhs_repr = rhs.inner_impl().to_string();
    name_key(&lhs_repr).cmp(&name_key(&rhs_repr))
}
/// Renders the page body for a trait: its declaration, the documentation for
/// each associated item, and the lists of implementors (local, foreign, and
/// auto/synthetic).
fn item_trait(
    w: &mut fmt::Formatter<'_>,
    cx: &Context,
    it: &clean::Item,
    t: &clean::Trait,
) -> fmt::Result {
    let bounds = bounds(&t.bounds, false);
    // Partition the trait's items by kind; each group gets its own section.
    let types = t.items.iter().filter(|m| m.is_associated_type()).collect::<Vec<_>>();
    let consts = t.items.iter().filter(|m| m.is_associated_const()).collect::<Vec<_>>();
    let required = t.items.iter().filter(|m| m.is_ty_method()).collect::<Vec<_>>();
    let provided = t.items.iter().filter(|m| m.is_method()).collect::<Vec<_>>();

    // Output the trait definition
    wrap_into_docblock(w, |w| {
        write!(w, "<pre class='rust trait'>")?;
        render_attributes(w, it)?;
        write!(w, "{}{}{}trait {}{}{}",
               VisSpace(&it.visibility),
               UnsafetySpace(t.unsafety),
               if t.is_auto { "auto " } else { "" },
               it.name.as_ref().unwrap(),
               t.generics,
               bounds)?;

        if !t.generics.where_predicates.is_empty() {
            write!(w, "{}", WhereClause { gens: &t.generics, indent: 0, end_newline: true })?;
        } else {
            write!(w, " ")?;
        }

        if t.items.is_empty() {
            write!(w, "{{ }}")?;
        } else {
            // FIXME: we should be using a derived_id for the Anchors here
            write!(w, "{{\n")?;
            for t in &types {
                write!(w, " ")?;
                render_assoc_item(w, t, AssocItemLink::Anchor(None), ItemType::Trait)?;
                write!(w, ";\n")?;
            }
            if !types.is_empty() && !consts.is_empty() {
                w.write_str("\n")?;
            }
            for t in &consts {
                write!(w, " ")?;
                render_assoc_item(w, t, AssocItemLink::Anchor(None), ItemType::Trait)?;
                write!(w, ";\n")?;
            }
            if !consts.is_empty() && !required.is_empty() {
                w.write_str("\n")?;
            }
            for (pos, m) in required.iter().enumerate() {
                write!(w, " ")?;
                render_assoc_item(w, m, AssocItemLink::Anchor(None), ItemType::Trait)?;
                write!(w, ";\n")?;
                if pos < required.len() - 1 {
                    write!(w, "<div class='item-spacer'></div>")?;
                }
            }
            if !required.is_empty() && !provided.is_empty() {
                w.write_str("\n")?;
            }
            for (pos, m) in provided.iter().enumerate() {
                write!(w, " ")?;
                render_assoc_item(w, m, AssocItemLink::Anchor(None), ItemType::Trait)?;
                // Provided methods show a `{ ... }` body stub; methods with a
                // where-clause get it on the following line.
                match m.inner {
                    clean::MethodItem(ref inner) if !inner.generics.where_predicates.is_empty() => {
                        write!(w, ",\n {{ ... }}\n")?;
                    },
                    _ => {
                        write!(w, " {{ ... }}\n")?;
                    },
                }
                if pos < provided.len() - 1 {
                    write!(w, "<div class='item-spacer'></div>")?;
                }
            }
            write!(w, "}}")?;
        }
        write!(w, "</pre>")
    })?;

    // Trait documentation
    document(w, cx, it)?;

    // Section heading helper shared by all the lists below.
    fn write_small_section_header(
        w: &mut fmt::Formatter<'_>,
        id: &str,
        title: &str,
        extra_content: &str,
    ) -> fmt::Result {
        write!(w, "
<h2 id='{0}' class='small-section-header'>\
            {1}<a href='#{0}' class='anchor'></a>\
            </h2>{2}", id, title, extra_content)
    }

    // Placeholder text shown until the async implementors JS fills the list.
    fn write_loading_content(w: &mut fmt::Formatter<'_>, extra_content: &str) -> fmt::Result {
        write!(w, "{}<span class='loading-content'>Loading content...</span>", extra_content)
    }

    // Renders one associated item's heading, signature, and documentation.
    fn trait_item(w: &mut fmt::Formatter<'_>, cx: &Context, m: &clean::Item, t: &clean::Item)
                  -> fmt::Result {
        let name = m.name.as_ref().unwrap();
        let item_type = m.type_();
        let id = cx.derive_id(format!("{}.{}", item_type, name));
        let ns_id = cx.derive_id(format!("{}.{}", name, item_type.name_space()));
        write!(w, "<h3 id='{id}' class='method'>{extra}<code id='{ns_id}'>",
               extra = render_spotlight_traits(m)?,
               id = id,
               ns_id = ns_id)?;
        render_assoc_item(w, m, AssocItemLink::Anchor(Some(&id)), ItemType::Impl)?;
        write!(w, "</code>")?;
        render_stability_since(w, m, t)?;
        write!(w, "</h3>")?;
        document(w, cx, m)?;
        Ok(())
    }

    if !types.is_empty() {
        write_small_section_header(w, "associated-types", "Associated Types",
                                   "<div class='methods'>")?;
        for t in &types {
            trait_item(w, cx, *t, it)?;
        }
        write_loading_content(w, "</div>")?;
    }

    if !consts.is_empty() {
        write_small_section_header(w, "associated-const", "Associated Constants",
                                   "<div class='methods'>")?;
        for t in &consts {
            trait_item(w, cx, *t, it)?;
        }
        write_loading_content(w, "</div>")?;
    }

    // Output the documentation for each function individually
    if !required.is_empty() {
        write_small_section_header(w, "required-methods", "Required methods",
                                   "<div class='methods'>")?;
        for m in &required {
            trait_item(w, cx, *m, it)?;
        }
        write_loading_content(w, "</div>")?;
    }
    if !provided.is_empty() {
        write_small_section_header(w, "provided-methods", "Provided methods",
                                   "<div class='methods'>")?;
        for m in &provided {
            trait_item(w, cx, *m, it)?;
        }
        write_loading_content(w, "</div>")?;
    }

    // If there are methods directly on this trait object, render them here.
    render_assoc_items(w, cx, it, it.def_id, AssocItemRender::All)?;

    let cache = cache();
    let mut synthetic_types = Vec::new();

    if let Some(implementors) = cache.implementors.get(&it.def_id) {
        // The DefId is for the first Type found with that name. The bool is
        // if any Types with the same name but different DefId have been found.
        let mut implementor_dups: FxHashMap<&str, (DefId, bool)> = FxHashMap::default();
        for implementor in implementors {
            match implementor.inner_impl().for_ {
                clean::ResolvedPath { ref path, did, is_generic: false, .. } |
                clean::BorrowedRef {
                    type_: box clean::ResolvedPath { ref path, did, is_generic: false, .. },
                    ..
                } => {
                    let &mut (prev_did, ref mut has_duplicates) =
                        implementor_dups.entry(path.last_name()).or_insert((did, false));
                    if prev_did != did {
                        *has_duplicates = true;
                    }
                }
                _ => {}
            }
        }

        // Local implementors have their self type documented in this crate;
        // the rest are listed as "Implementations on Foreign Types".
        let (local, foreign) = implementors.iter()
            .partition::<Vec<_>, _>(|i| i.inner_impl().for_.def_id()
                                         .map_or(true, |d| cache.paths.contains_key(&d)));

        let (mut synthetic, mut concrete): (Vec<&&Impl>, Vec<&&Impl>) = local.iter()
            .partition(|i| i.inner_impl().synthetic);

        synthetic.sort_by(compare_impl);
        concrete.sort_by(compare_impl);

        if !foreign.is_empty() {
            write_small_section_header(w, "foreign-impls", "Implementations on Foreign Types", "")?;

            for implementor in foreign {
                let assoc_link = AssocItemLink::GotoSource(
                    implementor.impl_item.def_id,
                    &implementor.inner_impl().provided_trait_methods
                );
                render_impl(w, cx, &implementor, assoc_link,
                            RenderMode::Normal, implementor.impl_item.stable_since(), false,
                            None)?;
            }
            write_loading_content(w, "")?;
        }

        write_small_section_header(w, "implementors", "Implementors",
                                   "<div class='item-list' id='implementors-list'>")?;
        for implementor in concrete {
            render_implementor(cx, implementor, w, &implementor_dups)?;
        }
        write_loading_content(w, "</div>")?;

        if t.auto {
            write_small_section_header(w, "synthetic-implementors", "Auto implementors",
                                       "<div class='item-list' id='synthetic-implementors-list'>")?;
            for implementor in synthetic {
                synthetic_types.extend(
                    collect_paths_for_type(implementor.inner_impl().for_.clone())
                );
                render_implementor(cx, implementor, w, &implementor_dups)?;
            }
            write_loading_content(w, "</div>")?;
        }
    } else {
        // even without any implementations to write in, we still want the heading and list, so the
        // implementors javascript file pulled in below has somewhere to write the impls into
        write_small_section_header(w, "implementors", "Implementors",
                                   "<div class='item-list' id='implementors-list'>")?;
        write_loading_content(w, "</div>")?;

        if t.auto {
            write_small_section_header(w, "synthetic-implementors", "Auto implementors",
                                       "<div class='item-list' id='synthetic-implementors-list'>")?;
            write_loading_content(w, "</div>")?;
        }
    }

    // The separately-generated implementors JS fills the lists above; tell it
    // which synthetic impl types were already rendered inline.
    write!(w, r#"<script type="text/javascript">window.inlined_types=new Set({});</script>"#,
           as_json(&synthetic_types))?;

    write!(w, r#"<script type="text/javascript" async
src="{root_path}/implementors/{path}/{ty}.{name}.js">
</script>"#,
           root_path = vec![".."; cx.current.len()].join("/"),
           path = if it.def_id.is_local() {
               cx.current.join("/")
           } else {
               let (ref path, _) = cache.external_paths[&it.def_id];
               path[..path.len() - 1].join("/")
           },
           ty = it.type_().css_class(),
           name = *it.name.as_ref().unwrap())?;
    Ok(())
}
/// Builds the href for an associated item: either a local anchor, or a link
/// into the defining trait's page for `GotoSource` links.
fn naive_assoc_href(it: &clean::Item, link: AssocItemLink<'_>) -> String {
    use crate::html::item_type::ItemType::*;

    let name = it.name.as_ref().unwrap();
    // Typedefs in impls anchor under the associated-type namespace.
    let ty = match it.type_() {
        Typedef | AssociatedType => AssociatedType,
        other => other,
    };

    let anchor = format!("#{}.{}", ty, name);
    match link {
        AssocItemLink::Anchor(Some(ref id)) => format!("#{}", id),
        AssocItemLink::Anchor(None) => anchor,
        AssocItemLink::GotoSource(did, _) => match href(did) {
            Some(parts) => format!("{}{}", parts.0, anchor),
            None => anchor,
        },
    }
}
/// Renders an associated constant's signature with a link to its definition.
fn assoc_const(w: &mut fmt::Formatter<'_>,
               it: &clean::Item,
               ty: &clean::Type,
               _default: Option<&String>,
               link: AssocItemLink<'_>) -> fmt::Result {
    write!(w, "{}const <a href='{}' class=\"constant\"><b>{}</b></a>: {}",
           VisSpace(&it.visibility),
           naive_assoc_href(it, link),
           it.name.as_ref().unwrap(),
           ty)
}
/// Renders an associated type's signature, with its bounds and (when present)
/// its default, linking back to the definition.
fn assoc_type<W: fmt::Write>(w: &mut W, it: &clean::Item,
                             bounds: &[clean::GenericBound],
                             default: Option<&clean::Type>,
                             link: AssocItemLink<'_>) -> fmt::Result {
    write!(w, "type <a href='{}' class=\"type\">{}</a>",
           naive_assoc_href(it, link),
           it.name.as_ref().unwrap())?;
    match (bounds.is_empty(), default) {
        (false, _) => write!(w, ": {}", GenericBounds(bounds))?,
        _ => {}
    }
    if let Some(default) = default {
        write!(w, " = {}", default)?;
    }
    Ok(())
}
/// Writes the "Stable since" version badge for `ver`, unless the version is
/// empty or identical to the enclosing item's version (`containing_ver`), in
/// which case nothing is written.
fn render_stability_since_raw<'a, T: fmt::Write>(
    w: &mut T,
    ver: Option<&'a str>,
    containing_ver: Option<&'a str>,
) -> fmt::Result {
    if let Some(v) = ver {
        // Skip the badge when the container already shows the same version;
        // `!v.is_empty()` is the idiomatic form of the old `v.len() > 0`.
        if containing_ver != ver && !v.is_empty() {
            write!(w, "<div class='since' title='Stable since Rust version {0}'>{0}</div>", v)?
        }
    }
    Ok(())
}
/// Convenience wrapper: renders `item`'s "stable since" badge relative to the
/// version of the item that contains it.
fn render_stability_since(w: &mut fmt::Formatter<'_>,
                          item: &clean::Item,
                          containing_item: &clean::Item) -> fmt::Result {
    let (ver, containing_ver) = (item.stable_since(), containing_item.stable_since());
    render_stability_since_raw(w, ver, containing_ver)
}
/// Renders the signature line of an associated item (method, associated
/// const, or associated type), linking the name back to its definition.
fn render_assoc_item(w: &mut fmt::Formatter<'_>,
                     item: &clean::Item,
                     link: AssocItemLink<'_>,
                     parent: ItemType) -> fmt::Result {
    // Renders one method signature, wrapping long declarations as needed.
    fn method(w: &mut fmt::Formatter<'_>,
              meth: &clean::Item,
              header: hir::FnHeader,
              g: &clean::Generics,
              d: &clean::FnDecl,
              link: AssocItemLink<'_>,
              parent: ItemType)
              -> fmt::Result {
        let name = meth.name.as_ref().unwrap();
        let anchor = format!("#{}.{}", meth.type_(), name);
        let href = match link {
            AssocItemLink::Anchor(Some(ref id)) => format!("#{}", id),
            AssocItemLink::Anchor(None) => anchor,
            AssocItemLink::GotoSource(did, provided_methods) => {
                // We're creating a link from an impl-item to the corresponding
                // trait-item and need to map the anchored type accordingly.
                let ty = if provided_methods.contains(name) {
                    ItemType::Method
                } else {
                    ItemType::TyMethod
                };

                href(did).map(|p| format!("{}#{}.{}", p.0, ty, name)).unwrap_or(anchor)
            }
        };
        // Rendered header length, used by `Function` to decide wrapping.
        let mut header_len = format!(
            "{}{}{}{}{}{:#}fn {}{:#}",
            VisSpace(&meth.visibility),
            ConstnessSpace(header.constness),
            UnsafetySpace(header.unsafety),
            AsyncSpace(header.asyncness),
            DefaultSpace(meth.is_default()),
            AbiSpace(header.abi),
            name,
            *g
        ).len();
        // Trait declarations indent their items by 4 spaces and don't end in
        // a newline; impl blocks do the opposite.
        let (indent, end_newline) = if parent == ItemType::Trait {
            header_len += 4;
            (4, false)
        } else {
            (0, true)
        };
        render_attributes(w, meth)?;
        write!(w, "{}{}{}{}{}{}fn <a href='{href}' class='fnname'>{name}</a>\
                   {generics}{decl}{where_clause}",
               VisSpace(&meth.visibility),
               ConstnessSpace(header.constness),
               UnsafetySpace(header.unsafety),
               AsyncSpace(header.asyncness),
               DefaultSpace(meth.is_default()),
               AbiSpace(header.abi),
               href = href,
               name = name,
               generics = *g,
               decl = Function {
                   decl: d,
                   header_len,
                   indent,
                   asyncness: header.asyncness,
               },
               where_clause = WhereClause {
                   gens: g,
                   indent,
                   end_newline,
               })
    }
    match item.inner {
        clean::StrippedItem(..) => Ok(()),
        clean::TyMethodItem(ref m) => {
            method(w, item, m.header, &m.generics, &m.decl, link, parent)
        }
        clean::MethodItem(ref m) => {
            method(w, item, m.header, &m.generics, &m.decl, link, parent)
        }
        clean::AssociatedConstItem(ref ty, ref default) => {
            assoc_const(w, item, ty, default.as_ref(), link)
        }
        clean::AssociatedTypeItem(ref bounds, ref default) => {
            assoc_type(w, item, bounds, default.as_ref(), link)
        }
        _ => panic!("render_assoc_item called on non-associated-item")
    }
}
/// Renders the page body for a `struct` item: the declaration, its docs, a
/// "Fields" section documenting each visible named field, and all
/// associated items.
fn item_struct(w: &mut fmt::Formatter<'_>, cx: &Context, it: &clean::Item,
               s: &clean::Struct) -> fmt::Result {
    wrap_into_docblock(w, |w| {
        write!(w, "<pre class='rust struct'>")?;
        render_attributes(w, it)?;
        render_struct(w,
                      it,
                      Some(&s.generics),
                      s.struct_type,
                      &s.fields,
                      "",
                      true)?;
        write!(w, "</pre>")
    })?;
    document(w, cx, it)?;
    // Keep only real struct fields; stripped/other items are filtered out.
    let mut fields = s.fields.iter().filter_map(|f| {
        match f.inner {
            clean::StructFieldItem(ref ty) => Some((f, ty)),
            _ => None,
        }
    }).peekable();
    // The per-field section only applies to plain (non-tuple, non-unit)
    // structs with at least one visible field.
    if let doctree::Plain = s.struct_type {
        if fields.peek().is_some() {
            write!(w, "<h2 id='fields' class='fields small-section-header'>
                       Fields{}<a href='#fields' class='anchor'></a></h2>",
                   document_non_exhaustive_header(it))?;
            document_non_exhaustive(w, it)?;
            for (field, ty) in fields {
                // `derive_id` keeps the anchors unique across the page.
                let id = cx.derive_id(format!("{}.{}",
                                               ItemType::StructField,
                                               field.name.as_ref().unwrap()));
                let ns_id = cx.derive_id(format!("{}.{}",
                                                  field.name.as_ref().unwrap(),
                                                  ItemType::StructField.name_space()));
                write!(w, "<span id=\"{id}\" class=\"{item_type} small-section-header\">\
                           <a href=\"#{id}\" class=\"anchor field\"></a>\
                           <code id=\"{ns_id}\">{name}: {ty}</code>\
                           </span>",
                       item_type = ItemType::StructField,
                       id = id,
                       ns_id = ns_id,
                       name = field.name.as_ref().unwrap(),
                       ty = ty)?;
                document(w, cx, field)?;
            }
        }
    }
    render_assoc_items(w, cx, it, it.def_id, AssocItemRender::All)
}
/// Renders the page body for a `union` item: the declaration, its docs, a
/// "Fields" section, and all associated items.
fn item_union(w: &mut fmt::Formatter<'_>, cx: &Context, it: &clean::Item,
              s: &clean::Union) -> fmt::Result {
    wrap_into_docblock(w, |w| {
        write!(w, "<pre class='rust union'>")?;
        render_attributes(w, it)?;
        render_union(w,
                     it,
                     Some(&s.generics),
                     &s.fields,
                     "",
                     true)?;
        write!(w, "</pre>")
    })?;
    document(w, cx, it)?;
    // Keep only real fields; stripped/other items are filtered out.
    let mut fields = s.fields.iter().filter_map(|f| {
        match f.inner {
            clean::StructFieldItem(ref ty) => Some((f, ty)),
            _ => None,
        }
    }).peekable();
    if fields.peek().is_some() {
        write!(w, "<h2 id='fields' class='fields small-section-header'>
                   Fields<a href='#fields' class='anchor'></a></h2>")?;
        for (field, ty) in fields {
            let name = field.name.as_ref().expect("union field name");
            // NOTE(review): unlike `item_struct`, these ids do not go through
            // `cx.derive_id`, so duplicate anchors are theoretically possible
            // — confirm whether that is intentional.
            let id = format!("{}.{}", ItemType::StructField, name);
            write!(w, "<span id=\"{id}\" class=\"{shortty} small-section-header\">\
                       <a href=\"#{id}\" class=\"anchor field\"></a>\
                       <code>{name}: {ty}</code>\
                       </span>",
                   id = id,
                   name = name,
                   shortty = ItemType::StructField,
                   ty = ty)?;
            if let Some(stability_class) = field.stability_class() {
                write!(w, "<span class='stab {stab}'></span>",
                       stab = stability_class)?;
            }
            document(w, cx, field)?;
        }
    }
    render_assoc_items(w, cx, it, it.def_id, AssocItemRender::All)
}
/// Renders the page body for an `enum` item: the declaration (with each
/// variant spelled out), the docs, a "Variants" section documenting every
/// variant (including struct-variant fields), and all associated items.
fn item_enum(w: &mut fmt::Formatter<'_>, cx: &Context, it: &clean::Item,
             e: &clean::Enum) -> fmt::Result {
    wrap_into_docblock(w, |w| {
        write!(w, "<pre class='rust enum'>")?;
        render_attributes(w, it)?;
        write!(w, "{}enum {}{}{}",
               VisSpace(&it.visibility),
               it.name.as_ref().unwrap(),
               e.generics,
               WhereClause { gens: &e.generics, indent: 0, end_newline: true })?;
        if e.variants.is_empty() && !e.variants_stripped {
            write!(w, " {{}}")?;
        } else {
            write!(w, " {{\n")?;
            // One line per variant; rendering depends on the variant kind
            // (unit, tuple, or struct-like).
            for v in &e.variants {
                write!(w, "    ")?;
                let name = v.name.as_ref().unwrap();
                match v.inner {
                    clean::VariantItem(ref var) => {
                        match var.kind {
                            clean::VariantKind::CLike => write!(w, "{}", name)?,
                            clean::VariantKind::Tuple(ref tys) => {
                                write!(w, "{}(", name)?;
                                for (i, ty) in tys.iter().enumerate() {
                                    if i > 0 {
                                        write!(w, ",&nbsp;")?
                                    }
                                    write!(w, "{}", *ty)?;
                                }
                                write!(w, ")")?;
                            }
                            clean::VariantKind::Struct(ref s) => {
                                render_struct(w,
                                              v,
                                              None,
                                              s.struct_type,
                                              &s.fields,
                                              "    ",
                                              false)?;
                            }
                        }
                    }
                    _ => unreachable!()
                }
                write!(w, ",\n")?;
            }
            if e.variants_stripped {
                write!(w, "    // some variants omitted\n")?;
            }
            write!(w, "}}")?;
        }
        write!(w, "</pre>")
    })?;
    document(w, cx, it)?;
    if !e.variants.is_empty() {
        write!(w, "<h2 id='variants' class='variants small-section-header'>
                   Variants{}<a href='#variants' class='anchor'></a></h2>\n",
               document_non_exhaustive_header(it))?;
        document_non_exhaustive(w, it)?;
        for variant in &e.variants {
            // `derive_id` keeps per-variant anchors unique on the page.
            let id = cx.derive_id(format!("{}.{}",
                                       ItemType::Variant,
                                       variant.name.as_ref().unwrap()));
            let ns_id = cx.derive_id(format!("{}.{}",
                                          variant.name.as_ref().unwrap(),
                                          ItemType::Variant.name_space()));
            write!(w, "<span id=\"{id}\" class=\"variant small-section-header\">\
                       <a href=\"#{id}\" class=\"anchor field\"></a>\
                       <code id='{ns_id}'>{name}",
                   id = id,
                   ns_id = ns_id,
                   name = variant.name.as_ref().unwrap())?;
            // Tuple variants repeat their payload types in the section header.
            if let clean::VariantItem(ref var) = variant.inner {
                if let clean::VariantKind::Tuple(ref tys) = var.kind {
                    write!(w, "(")?;
                    for (i, ty) in tys.iter().enumerate() {
                        if i > 0 {
                            write!(w, ",&nbsp;")?;
                        }
                        write!(w, "{}", *ty)?;
                    }
                    write!(w, ")")?;
                }
            }
            write!(w, "</code></span>")?;
            document(w, cx, variant)?;
            document_non_exhaustive(w, variant)?;

            use crate::clean::{Variant, VariantKind};
            // Struct-like variants additionally get a collapsible
            // "Fields of <variant>" sub-section.
            if let clean::VariantItem(Variant {
                kind: VariantKind::Struct(ref s)
            }) = variant.inner {
                let variant_id = cx.derive_id(format!("{}.{}.fields",
                                                   ItemType::Variant,
                                                   variant.name.as_ref().unwrap()));
                write!(w, "<span class='autohide sub-variant' id='{id}'>",
                       id = variant_id)?;
                write!(w, "<h3>Fields of <b>{name}</b></h3><div>",
                       name = variant.name.as_ref().unwrap())?;
                for field in &s.fields {
                    use crate::clean::StructFieldItem;
                    if let StructFieldItem(ref ty) = field.inner {
                        let id = cx.derive_id(format!("variant.{}.field.{}",
                                                   variant.name.as_ref().unwrap(),
                                                   field.name.as_ref().unwrap()));
                        let ns_id = cx.derive_id(format!("{}.{}.{}.{}",
                                                      variant.name.as_ref().unwrap(),
                                                      ItemType::Variant.name_space(),
                                                      field.name.as_ref().unwrap(),
                                                      ItemType::StructField.name_space()));
                        write!(w, "<span id=\"{id}\" class=\"variant small-section-header\">\
                                   <a href=\"#{id}\" class=\"anchor field\"></a>\
                                   <code id='{ns_id}'>{f}:&nbsp;{t}\
                                   </code></span>",
                               id = id,
                               ns_id = ns_id,
                               f = field.name.as_ref().unwrap(),
                               t = *ty)?;
                        document(w, cx, field)?;
                    }
                }
                write!(w, "</div></span>")?;
            }
            render_stability_since(w, variant, it)?;
        }
    }
    render_assoc_items(w, cx, it, it.def_id, AssocItemRender::All)?;
    Ok(())
}
/// Pretty-prints a single attribute meta item back to source-like form.
///
/// Handles the three shapes: a bare word (`no_mangle`), a key/value pair
/// (`export_name = "…"`), and a list (`repr(C)`), recursing into nested
/// meta items. Returns `None` when nothing renderable remains (e.g. a list
/// whose entries were all literals).
fn render_attribute(attr: &ast::MetaItem) -> Option<String> {
    let path = attr.path.to_string();

    if attr.is_word() {
        Some(path)
    } else if let Some(v) = attr.value_str() {
        Some(format!("{} = {:?}", path, v.as_str()))
    } else if let Some(values) = attr.meta_item_list() {
        let display: Vec<_> = values.iter().filter_map(|attr| {
            // Nested literals have no meta item and are skipped.
            attr.meta_item().and_then(render_attribute)
        }).collect();

        // `!display.is_empty()` replaces the non-idiomatic `len() > 0`.
        if !display.is_empty() {
            Some(format!("{}({})", path, display.join(", ")))
        } else {
            None
        }
    } else {
        None
    }
}
/// The only attributes shown in rendered documentation; anything not listed
/// here is treated as an implementation detail and hidden by
/// `render_attributes`.
const ATTRIBUTE_WHITELIST: &'static [&'static str] = &[
    "export_name",
    "lang",
    "link_section",
    "must_use",
    "no_mangle",
    "repr",
    "unsafe_destructor_blind_to_params",
    "non_exhaustive"
];
/// Renders the whitelisted attributes of `it` as one `docblock attributes`
/// div; attributes outside `ATTRIBUTE_WHITELIST` are deliberately omitted.
/// Emits nothing at all when no attribute survives the filter.
fn render_attributes(w: &mut fmt::Formatter<'_>, it: &clean::Item) -> fmt::Result {
    let mut attrs = String::new();

    for attr in &it.attrs.other_attrs {
        if !ATTRIBUTE_WHITELIST.contains(&attr.name_or_empty().get()) {
            continue;
        }
        if let Some(s) = render_attribute(&attr.meta().unwrap()) {
            attrs.push_str(&format!("#[{}]\n", s));
        }
    }
    // `!attrs.is_empty()` replaces the non-idiomatic `attrs.len() > 0`.
    if !attrs.is_empty() {
        write!(w, "<div class=\"docblock attributes\">{}</div>", &attrs)?;
    }
    Ok(())
}
/// Renders a struct declaration. Used both for struct pages and for
/// struct-like enum variants (which pass `g = None` and `structhead =
/// false` so the `struct ` keyword is omitted). `tab` is the indentation
/// prefix prepended to field lines.
fn render_struct(w: &mut fmt::Formatter<'_>, it: &clean::Item,
                 g: Option<&clean::Generics>,
                 ty: doctree::StructType,
                 fields: &[clean::Item],
                 tab: &str,
                 structhead: bool) -> fmt::Result {
    write!(w, "{}{}{}",
           VisSpace(&it.visibility),
           if structhead {"struct "} else {""},
           it.name.as_ref().unwrap())?;
    if let Some(g) = g {
        write!(w, "{}", g)?
    }
    match ty {
        doctree::Plain => {
            if let Some(g) = g {
                write!(w, "{}", WhereClause { gens: g, indent: 0, end_newline: true })?
            }
            let mut has_visible_fields = false;
            write!(w, " {{")?;
            for field in fields {
                if let clean::StructFieldItem(ref ty) = field.inner {
                    write!(w, "\n{}    {}{}: {},",
                           tab,
                           VisSpace(&field.visibility),
                           field.name.as_ref().unwrap(),
                           *ty)?;
                    has_visible_fields = true;
                }
            }

            if has_visible_fields {
                if it.has_stripped_fields().unwrap() {
                    write!(w, "\n{}    // some fields omitted", tab)?;
                }
                write!(w, "\n{}", tab)?;
            } else if it.has_stripped_fields().unwrap() {
                // If there are no visible fields we can just display
                // `{ /* fields omitted */ }` to save space.
                write!(w, " /* fields omitted */ ")?;
            }
            write!(w, "}}")?;
        }
        doctree::Tuple => {
            write!(w, "(")?;
            for (i, field) in fields.iter().enumerate() {
                if i > 0 {
                    write!(w, ", ")?;
                }
                match field.inner {
                    // Stripped (private/hidden) tuple fields still occupy a
                    // position, shown as `_`.
                    clean::StrippedItem(box clean::StructFieldItem(..)) => {
                        write!(w, "_")?
                    }
                    clean::StructFieldItem(ref ty) => {
                        write!(w, "{}{}", VisSpace(&field.visibility), *ty)?
                    }
                    _ => unreachable!()
                }
            }
            write!(w, ")")?;
            if let Some(g) = g {
                write!(w, "{}", WhereClause { gens: g, indent: 0, end_newline: false })?
            }
            write!(w, ";")?;
        }
        doctree::Unit => {
            // Needed for PhantomData.
            if let Some(g) = g {
                write!(w, "{}", WhereClause { gens: g, indent: 0, end_newline: false })?
            }
            write!(w, ";")?;
        }
    }
    Ok(())
}
/// Renders a union declaration with its visible fields. `tab` is the
/// indentation prefix for field lines and `structhead` controls whether the
/// `union ` keyword is printed.
fn render_union(w: &mut fmt::Formatter<'_>, it: &clean::Item,
                g: Option<&clean::Generics>,
                fields: &[clean::Item],
                tab: &str,
                structhead: bool) -> fmt::Result {
    write!(w, "{}{}{}",
           VisSpace(&it.visibility),
           if structhead {"union "} else {""},
           it.name.as_ref().unwrap())?;
    if let Some(g) = g {
        write!(w, "{}", g)?;
        write!(w, "{}", WhereClause { gens: g, indent: 0, end_newline: true })?;
    }

    write!(w, " {{\n{}", tab)?;
    // Only visible struct fields are listed; stripped fields are summarized
    // by the "some fields omitted" comment below.
    for field in fields {
        if let clean::StructFieldItem(ref ty) = field.inner {
            write!(w, "    {}{}: {},\n{}",
                   VisSpace(&field.visibility),
                   field.name.as_ref().unwrap(),
                   *ty,
                   tab)?;
        }
    }

    if it.has_stripped_fields().unwrap() {
        write!(w, "    // some fields omitted\n{}", tab)?;
    }
    write!(w, "}}")?;
    Ok(())
}
/// How an associated item's name should be hyperlinked.
#[derive(Copy, Clone)]
enum AssocItemLink<'a> {
    /// Link to an anchor on the current page; `None` means the default
    /// `#<itemtype>.<name>` anchor is used.
    Anchor(Option<&'a str>),
    /// Link from an impl item back to the defining trait item. The set
    /// contains the trait's provided (defaulted) method names, used to pick
    /// the right anchor kind.
    GotoSource(DefId, &'a FxHashSet<String>),
}
impl<'a> AssocItemLink<'a> {
    /// Returns a copy of this link with the anchor target set to `id` when
    /// `self` is an `Anchor`; `GotoSource` links are returned unchanged.
    ///
    /// Takes `&str` instead of the previous `&String` (an anti-pattern);
    /// existing call sites passing `&String` still work via deref coercion.
    fn anchor(&self, id: &'a str) -> Self {
        match *self {
            AssocItemLink::Anchor(_) => AssocItemLink::Anchor(Some(id)),
            ref other => *other,
        }
    }
}
/// Which associated items should be rendered for a type's page.
enum AssocItemRender<'a> {
    /// Render every impl attached to the type.
    All,
    /// Render only the methods made reachable through a `Deref<Target =
    /// type_>` impl; `deref_mut_` records whether `DerefMut` also exists.
    DerefFor { trait_: &'a clean::Type, type_: &'a clean::Type, deref_mut_: bool }
}
/// Rendering mode for the items of a single impl block.
#[derive(Copy, Clone, PartialEq)]
enum RenderMode {
    /// Render all items normally.
    Normal,
    /// Rendering methods surfaced via `Deref`; `mut_` is whether `DerefMut`
    /// is implemented (controls whether `&mut self` methods are shown).
    ForDeref { mut_: bool },
}
/// Renders every impl attached to the item `it`: inherent methods first,
/// then `Deref`-forwarded methods, then trait implementations split into
/// concrete, auto ("synthetic"), and blanket impl sections.
fn render_assoc_items(w: &mut fmt::Formatter<'_>,
                      cx: &Context,
                      containing_item: &clean::Item,
                      it: DefId,
                      what: AssocItemRender<'_>) -> fmt::Result {
    let c = cache();
    let v = match c.impls.get(&it) {
        Some(v) => v,
        None => return Ok(()),
    };
    // Inherent impls (no trait) come before trait impls.
    let (non_trait, traits): (Vec<_>, _) = v.iter().partition(|i| {
        i.inner_impl().trait_.is_none()
    });
    if !non_trait.is_empty() {
        let render_mode = match what {
            AssocItemRender::All => {
                write!(w, "\
                    <h2 id='methods' class='small-section-header'>\
                      Methods<a href='#methods' class='anchor'></a>\
                    </h2>\
                ")?;
                RenderMode::Normal
            }
            AssocItemRender::DerefFor { trait_, type_, deref_mut_ } => {
                write!(w, "\
                    <h2 id='deref-methods' class='small-section-header'>\
                      Methods from {}&lt;Target = {}&gt;\
                      <a href='#deref-methods' class='anchor'></a>\
                    </h2>\
                ", trait_, type_)?;
                RenderMode::ForDeref { mut_: deref_mut_ }
            }
        };
        for i in &non_trait {
            render_impl(w, cx, i, AssocItemLink::Anchor(None), render_mode,
                        containing_item.stable_since(), true, None)?;
        }
    }
    // A deref listing only shows the inherent methods above; stop before the
    // trait sections to avoid infinitely recursing through Deref chains.
    if let AssocItemRender::DerefFor { .. } = what {
        return Ok(());
    }
    if !traits.is_empty() {
        let deref_impl = traits.iter().find(|t| {
            t.inner_impl().trait_.def_id() == c.deref_trait_did
        });
        if let Some(impl_) = deref_impl {
            let has_deref_mut = traits.iter().find(|t| {
                t.inner_impl().trait_.def_id() == c.deref_mut_trait_did
            }).is_some();
            render_deref_methods(w, cx, impl_, containing_item, has_deref_mut)?;
        }
        // Split trait impls: auto-trait ("synthetic"), blanket, and the rest.
        let (synthetic, concrete): (Vec<&&Impl>, Vec<&&Impl>) = traits
            .iter()
            .partition(|t| t.inner_impl().synthetic);
        let (blanket_impl, concrete) = concrete
            .into_iter()
            .partition(|t| t.inner_impl().blanket_impl.is_some());

        // Render the concrete impls to a string first so the section header
        // can be skipped entirely when there is nothing to show.
        struct RendererStruct<'a, 'b, 'c>(&'a Context, Vec<&'b &'b Impl>, &'c clean::Item);
        impl<'a, 'b, 'c> fmt::Display for RendererStruct<'a, 'b, 'c> {
            fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
                render_impls(self.0, fmt, &self.1, self.2)
            }
        }

        let impls = RendererStruct(cx, concrete, containing_item).to_string();
        if !impls.is_empty() {
            write!(w, "\
                <h2 id='implementations' class='small-section-header'>\
                  Trait Implementations<a href='#implementations' class='anchor'></a>\
                </h2>\
                <div id='implementations-list'>{}</div>", impls)?;
        }

        if !synthetic.is_empty() {
            write!(w, "\
                <h2 id='synthetic-implementations' class='small-section-header'>\
                  Auto Trait Implementations\
                  <a href='#synthetic-implementations' class='anchor'></a>\
                </h2>\
                <div id='synthetic-implementations-list'>\
            ")?;
            render_impls(cx, w, &synthetic, containing_item)?;
            write!(w, "</div>")?;
        }

        if !blanket_impl.is_empty() {
            write!(w, "\
                <h2 id='blanket-implementations' class='small-section-header'>\
                  Blanket Implementations\
                  <a href='#blanket-implementations' class='anchor'></a>\
                </h2>\
                <div id='blanket-implementations-list'>\
            ")?;
            render_impls(cx, w, &blanket_impl, containing_item)?;
            write!(w, "</div>")?;
        }
    }
    Ok(())
}
/// Renders the methods reachable through this `Deref` impl by re-rendering
/// the target type's inherent impls in `DerefFor` mode. The target may be a
/// named type or a primitive; anything else renders nothing.
fn render_deref_methods(w: &mut fmt::Formatter<'_>, cx: &Context, impl_: &Impl,
                        container_item: &clean::Item, deref_mut: bool) -> fmt::Result {
    let deref_type = impl_.inner_impl().trait_.as_ref().unwrap();
    // The `Target = …` associated type binding names the deref target.
    let target = impl_.inner_impl().items.iter().filter_map(|item| {
        match item.inner {
            clean::TypedefItem(ref t, true) => Some(&t.type_),
            _ => None,
        }
    }).next().expect("Expected associated type binding");
    let what = AssocItemRender::DerefFor {
        trait_: deref_type,
        type_: target,
        deref_mut_: deref_mut,
    };
    if let Some(did) = target.def_id() {
        return render_assoc_items(w, cx, container_item, did, what);
    }
    // Primitive targets are resolved through the cache's primitive table.
    if let Some(prim) = target.primitive_type() {
        if let Some(&did) = cache().primitive_locations.get(&prim) {
            render_assoc_items(w, cx, container_item, did, what)?;
        }
    }
    Ok(())
}
/// Decides whether a method should appear in a `Deref`-forwarded listing:
/// only methods callable through a (possibly immutable) reference qualify,
/// so by-value, by-`Box`, and (without `DerefMut`) `&mut self` receivers
/// are excluded.
fn should_render_item(item: &clean::Item, deref_mut_: bool) -> bool {
    let receiver = match item.inner {
        clean::MethodItem(ref method) => method.decl.self_type(),
        clean::TyMethodItem(ref method) => method.decl.self_type(),
        _ => None,
    };
    let self_ty = match receiver {
        Some(ty) => ty,
        // Associated functions without a receiver are never forwarded.
        None => return false,
    };
    let (by_mut_ref, by_box, by_value) = match self_ty {
        SelfTy::SelfBorrowed(_, mutability) |
        SelfTy::SelfExplicit(clean::BorrowedRef { mutability, .. }) =>
            (mutability == Mutability::Mutable, false, false),
        SelfTy::SelfExplicit(clean::ResolvedPath { did, .. }) =>
            (false, Some(did) == cache().owned_box_did, false),
        SelfTy::SelfValue => (false, false, true),
        _ => (false, false, false),
    };
    (deref_mut_ || !by_mut_ref) && !by_box && !by_value
}
/// Returns the "important traits" tooltip markup for function-like items
/// (functions, methods, foreign functions); any other item kind yields an
/// empty string.
fn render_spotlight_traits(item: &clean::Item) -> Result<String, fmt::Error> {
    match item.inner {
        clean::FunctionItem(clean::Function { ref decl, .. }) |
        clean::TyMethodItem(clean::TyMethod { ref decl, .. }) |
        clean::MethodItem(clean::Method { ref decl, .. }) |
        clean::ForeignFunctionItem(clean::Function { ref decl, .. }) =>
            spotlight_decl(decl),
        _ => Ok(String::new()),
    }
}
/// Builds the "Important traits for …" tooltip markup for a function whose
/// return type has spotlighted trait impls; returns an empty string when
/// there is nothing to show.
fn spotlight_decl(decl: &clean::FnDecl) -> Result<String, fmt::Error> {
    let mut out = String::new();
    let mut trait_ = String::new();

    if let Some(did) = decl.output.def_id() {
        let c = cache();
        if let Some(impls) = c.impls.get(&did) {
            for i in impls {
                let impl_ = i.inner_impl();
                if impl_.trait_.def_id().map_or(false, |d| c.traits[&d].is_spotlight) {
                    // Emit the header only once, on the first spotlighted impl.
                    if out.is_empty() {
                        out.push_str(
                            &format!("<h3 class=\"important\">Important traits for {}</h3>\
                                      <code class=\"content\">",
                                     impl_.for_));
                        trait_.push_str(&impl_.for_.to_string());
                    }

                    //use the "where" class here to make it small
                    out.push_str(&format!("<span class=\"where fmt-newline\">{}</span>", impl_));
                    let t_did = impl_.trait_.def_id().unwrap();
                    // Also list each associated type binding of the impl.
                    for it in &impl_.items {
                        if let clean::TypedefItem(ref tydef, _) = it.inner {
                            out.push_str("<span class=\"where fmt-newline\">    ");
                            assoc_type(&mut out, it, &[],
                                       Some(&tydef.type_),
                                       AssocItemLink::GotoSource(t_did, &FxHashSet::default()))?;
                            out.push_str(";</span>");
                        }
                    }
                }
            }
        }
    }

    if !out.is_empty() {
        // Wrap everything in the tooltip scaffolding, prepended in one go.
        out.insert_str(0, &format!("<div class=\"important-traits\"><div class='tooltip'>ⓘ\
                                    <span class='tooltiptext'>Important traits for {}</span></div>\
                                    <div class=\"content hidden\">",
                                   trait_));
        out.push_str("</code></div></div>");
    }

    Ok(out)
}
/// Renders a single impl block: its heading and docs (in `Normal` mode
/// only), every item it defines, and — when it implements a trait — the
/// trait's default items that the impl did not override.
fn render_impl(w: &mut fmt::Formatter<'_>, cx: &Context, i: &Impl, link: AssocItemLink<'_>,
               render_mode: RenderMode, outer_version: Option<&str>,
               show_def_docs: bool, use_absolute: Option<bool>) -> fmt::Result {
    // Deref-forwarded listings (`ForDeref`) skip the impl heading entirely.
    if render_mode == RenderMode::Normal {
        let id = cx.derive_id(match i.inner_impl().trait_ {
            Some(ref t) => format!("impl-{}", small_url_encode(&format!("{:#}", t))),
            None => "impl".to_string(),
        });
        if let Some(use_absolute) = use_absolute {
            write!(w, "<h3 id='{}' class='impl'><code class='in-band'>", id)?;
            fmt_impl_for_trait_page(&i.inner_impl(), w, use_absolute)?;
            if show_def_docs {
                for it in &i.inner_impl().items {
                    if let clean::TypedefItem(ref tydef, _) = it.inner {
                        write!(w, "<span class=\"where fmt-newline\">  ")?;
                        assoc_type(w, it, &vec![], Some(&tydef.type_),
                                   AssocItemLink::Anchor(None))?;
                        write!(w, ";</span>")?;
                    }
                }
            }
            write!(w, "</code>")?;
        } else {
            write!(w, "<h3 id='{}' class='impl'><code class='in-band'>{}</code>",
                id, i.inner_impl()
            )?;
        }
        write!(w, "<a href='#{}' class='anchor'></a>", id)?;
        let since = i.impl_item.stability.as_ref().map(|s| &s.since[..]);
        render_stability_since_raw(w, since, outer_version)?;
        if let Some(l) = (Item { item: &i.impl_item, cx: cx }).src_href() {
            write!(w, "<a class='srclink' href='{}' title='{}'>[src]</a>",
                   l, "goto source code")?;
        }
        write!(w, "</h3>")?;
        if let Some(ref dox) = cx.shared.maybe_collapsed_doc_value(&i.impl_item) {
            let mut ids = cx.id_map.borrow_mut();
            write!(w, "<div class='docblock'>{}</div>",
                   Markdown(&*dox, &i.impl_item.links(), RefCell::new(&mut ids), cx.codes))?;
        }
    }

    /// Renders one item of the impl: its heading (signature, stability,
    /// source link) and its documentation, falling back to the trait's docs
    /// when the impl item itself is undocumented.
    fn doc_impl_item(w: &mut fmt::Formatter<'_>, cx: &Context, item: &clean::Item,
                     link: AssocItemLink<'_>, render_mode: RenderMode,
                     is_default_item: bool, outer_version: Option<&str>,
                     trait_: Option<&clean::Trait>, show_def_docs: bool) -> fmt::Result {
        let item_type = item.type_();
        let name = item.name.as_ref().unwrap();

        // In `ForDeref` mode, only methods callable through the deref are
        // rendered (see `should_render_item`).
        let render_method_item: bool = match render_mode {
            RenderMode::Normal => true,
            RenderMode::ForDeref { mut_: deref_mut_ } => should_render_item(&item, deref_mut_),
        };

        // Undocumented, non-associated trait-impl items are rendered but
        // hidden by default.
        let (is_hidden, extra_class) = if trait_.is_none() ||
                                          item.doc_value().is_some() ||
                                          item.inner.is_associated() {
            (false, "")
        } else {
            (true, " hidden")
        };
        match item.inner {
            clean::MethodItem(clean::Method { ref decl, .. }) |
            clean::TyMethodItem(clean::TyMethod { ref decl, .. }) => {
                // Only render when the method is not static or we allow static methods
                if render_method_item {
                    let id = cx.derive_id(format!("{}.{}", item_type, name));
                    let ns_id = cx.derive_id(format!("{}.{}", name, item_type.name_space()));
                    write!(w, "<h4 id='{}' class=\"{}{}\">", id, item_type, extra_class)?;
                    write!(w, "{}", spotlight_decl(decl)?)?;
                    write!(w, "<code id='{}'>", ns_id)?;
                    render_assoc_item(w, item, link.anchor(&id), ItemType::Impl)?;
                    write!(w, "</code>")?;
                    render_stability_since_raw(w, item.stable_since(), outer_version)?;
                    if let Some(l) = (Item { cx, item }).src_href() {
                        write!(w, "<a class='srclink' href='{}' title='{}'>[src]</a>",
                               l, "goto source code")?;
                    }
                    write!(w, "</h4>")?;
                }
            }
            clean::TypedefItem(ref tydef, _) => {
                let id = cx.derive_id(format!("{}.{}", ItemType::AssociatedType, name));
                let ns_id = cx.derive_id(format!("{}.{}", name, item_type.name_space()));
                write!(w, "<h4 id='{}' class=\"{}{}\">", id, item_type, extra_class)?;
                write!(w, "<code id='{}'>", ns_id)?;
                assoc_type(w, item, &Vec::new(), Some(&tydef.type_), link.anchor(&id))?;
                write!(w, "</code></h4>")?;
            }
            clean::AssociatedConstItem(ref ty, ref default) => {
                let id = cx.derive_id(format!("{}.{}", item_type, name));
                let ns_id = cx.derive_id(format!("{}.{}", name, item_type.name_space()));
                write!(w, "<h4 id='{}' class=\"{}{}\">", id, item_type, extra_class)?;
                write!(w, "<code id='{}'>", ns_id)?;
                assoc_const(w, item, ty, default.as_ref(), link.anchor(&id))?;
                write!(w, "</code>")?;
                render_stability_since_raw(w, item.stable_since(), outer_version)?;
                if let Some(l) = (Item { cx, item }).src_href() {
                    write!(w, "<a class='srclink' href='{}' title='{}'>[src]</a>",
                            l, "goto source code")?;
                }
                write!(w, "</h4>")?;
            }
            clean::AssociatedTypeItem(ref bounds, ref default) => {
                let id = cx.derive_id(format!("{}.{}", item_type, name));
                let ns_id = cx.derive_id(format!("{}.{}", name, item_type.name_space()));
                write!(w, "<h4 id='{}' class=\"{}{}\">", id, item_type, extra_class)?;
                write!(w, "<code id='{}'>", ns_id)?;
                assoc_type(w, item, bounds, default.as_ref(), link.anchor(&id))?;
                write!(w, "</code></h4>")?;
            }
            clean::StrippedItem(..) => return Ok(()),
            _ => panic!("can't make docs for trait item with name {:?}", item.name)
        }

        if render_method_item || render_mode == RenderMode::Normal {
            if !is_default_item {
                if let Some(t) = trait_ {
                    // The trait item may have been stripped so we might not
                    // find any documentation or stability for it.
                    if let Some(it) = t.items.iter().find(|i| i.name == item.name) {
                        // We need the stability of the item from the trait
                        // because impls can't have a stability.
                        document_stability(w, cx, it, is_hidden)?;
                        if item.doc_value().is_some() {
                            document_full(w, item, cx, "", is_hidden)?;
                        } else if show_def_docs {
                            // In case the item isn't documented,
                            // provide short documentation from the trait.
                            document_short(w, cx, it, link, "", is_hidden)?;
                        }
                    }
                } else {
                    document_stability(w, cx, item, is_hidden)?;
                    if show_def_docs {
                        document_full(w, item, cx, "", is_hidden)?;
                    }
                }
            } else {
                document_stability(w, cx, item, is_hidden)?;
                if show_def_docs {
                    document_short(w, cx, item, link, "", is_hidden)?;
                }
            }
        }
        Ok(())
    }

    let traits = &cache().traits;
    let trait_ = i.trait_did().map(|did| &traits[&did]);

    write!(w, "<div class='impl-items'>")?;
    for trait_item in &i.inner_impl().items {
        doc_impl_item(w, cx, trait_item, link, render_mode,
                      false, outer_version, trait_, show_def_docs)?;
    }

    /// Renders the trait's default items that this impl does not override.
    fn render_default_items(w: &mut fmt::Formatter<'_>,
                            cx: &Context,
                            t: &clean::Trait,
                            i: &clean::Impl,
                            render_mode: RenderMode,
                            outer_version: Option<&str>,
                            show_def_docs: bool) -> fmt::Result {
        for trait_item in &t.items {
            let n = trait_item.name.clone();
            // Skip items the impl defines itself; they were rendered above.
            if i.items.iter().find(|m| m.name == n).is_some() {
                continue;
            }
            let did = i.trait_.as_ref().unwrap().def_id().unwrap();
            let assoc_link = AssocItemLink::GotoSource(did, &i.provided_trait_methods);

            doc_impl_item(w, cx, trait_item, assoc_link, render_mode, true,
                          outer_version, None, show_def_docs)?;
        }
        Ok(())
    }

    // If we've implemented a trait, then also emit documentation for all
    // default items which weren't overridden in the implementation block.
    if let Some(t) = trait_ {
        render_default_items(w, cx, t, &i.inner_impl(),
                             render_mode, outer_version, show_def_docs)?;
    }
    write!(w, "</div>")?;
    Ok(())
}
/// Renders the page body for an `existential type` item.
fn item_existential(
    w: &mut fmt::Formatter<'_>,
    cx: &Context,
    it: &clean::Item,
    t: &clean::Existential,
) -> fmt::Result {
    let name = it.name.as_ref().unwrap();
    let where_clause = WhereClause { gens: &t.generics, indent: 0, end_newline: true };
    write!(w, "<pre class='rust existential'>")?;
    render_attributes(w, it)?;
    write!(w, "existential type {}{}{}: {};</pre>",
           name,
           t.generics,
           where_clause,
           bounds(&t.bounds, false))?;

    document(w, cx, it)?;

    // Render any items associated directly to this alias, as otherwise they
    // won't be visible anywhere in the docs. It would be nice to also show
    // associated items from the aliased type (see discussion in #32077), but
    // we need #14072 to make sense of the generics.
    render_assoc_items(w, cx, it, it.def_id, AssocItemRender::All)
}
/// Renders the page body for a trait alias (`trait Foo = Bar + Baz;`).
fn item_trait_alias(w: &mut fmt::Formatter<'_>, cx: &Context, it: &clean::Item,
                    t: &clean::TraitAlias) -> fmt::Result {
    let name = it.name.as_ref().unwrap();
    let where_clause = WhereClause { gens: &t.generics, indent: 0, end_newline: true };
    write!(w, "<pre class='rust trait-alias'>")?;
    render_attributes(w, it)?;
    write!(w, "trait {}{}{} = {};</pre>",
           name,
           t.generics,
           where_clause,
           bounds(&t.bounds, true))?;

    document(w, cx, it)?;

    // Render any items associated directly to this alias, as otherwise they
    // won't be visible anywhere in the docs. It would be nice to also show
    // associated items from the aliased type (see discussion in #32077), but
    // we need #14072 to make sense of the generics.
    render_assoc_items(w, cx, it, it.def_id, AssocItemRender::All)
}
/// Renders the page body for a `type` alias item.
fn item_typedef(w: &mut fmt::Formatter<'_>, cx: &Context, it: &clean::Item,
                t: &clean::Typedef) -> fmt::Result {
    let name = it.name.as_ref().unwrap();
    let where_clause = WhereClause { gens: &t.generics, indent: 0, end_newline: true };
    write!(w, "<pre class='rust typedef'>")?;
    render_attributes(w, it)?;
    write!(w, "type {}{}{} = {};</pre>",
           name,
           t.generics,
           where_clause,
           t.type_)?;

    document(w, cx, it)?;

    // Render any items associated directly to this alias, as otherwise they
    // won't be visible anywhere in the docs. It would be nice to also show
    // associated items from the aliased type (see discussion in #32077), but
    // we need #14072 to make sense of the generics.
    render_assoc_items(w, cx, it, it.def_id, AssocItemRender::All)
}
/// Renders the page body for an `extern` foreign type item.
fn item_foreign_type(w: &mut fmt::Formatter<'_>, cx: &Context, it: &clean::Item) -> fmt::Result {
    writeln!(w, "<pre class='rust foreigntype'>extern {{")?;
    render_attributes(w, it)?;
    let vis = VisSpace(&it.visibility);
    let name = it.name.as_ref().unwrap();
    write!(w, "    {}type {};\n}}</pre>", vis, name)?;

    document(w, cx, it)?;

    render_assoc_items(w, cx, it, it.def_id, AssocItemRender::All)
}
/// Renders the left-hand sidebar for an item's page: the item's location,
/// crate version (on crate pages), per-item-kind link sections, the module
/// breadcrumb, and the script hooks for `sidebar-items.js`.
impl<'a> fmt::Display for Sidebar<'a> {
    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
        let cx = self.cx;
        let it = self.item;
        // For modules, the sidebar refers to the parent, hence the -1.
        let parentlen = cx.current.len() - if it.is_mod() {1} else {0};

        if it.is_struct() || it.is_trait() || it.is_primitive() || it.is_union()
            || it.is_enum() || it.is_mod() || it.is_typedef() {
            write!(fmt, "<p class='location'>{}{}</p>",
                match it.inner {
                    clean::StructItem(..) => "Struct ",
                    clean::TraitItem(..) => "Trait ",
                    clean::PrimitiveItem(..) => "Primitive Type ",
                    clean::UnionItem(..) => "Union ",
                    clean::EnumItem(..) => "Enum ",
                    clean::TypedefItem(..) => "Type Definition ",
                    clean::ForeignTypeItem => "Foreign Type ",
                    clean::ModuleItem(..) => if it.is_crate() {
                        "Crate "
                    } else {
                        "Module "
                    },
                    _ => "",
                },
                it.name.as_ref().unwrap())?;
        }

        if it.is_crate() {
            if let Some(ref version) = cache().crate_version {
                write!(fmt,
                       "<div class='block version'>\
                        <p>Version {}</p>\
                        </div>",
                       version)?;
            }
        }

        write!(fmt, "<div class=\"sidebar-elems\">")?;
        if it.is_crate() {
            write!(fmt, "<a id='all-types' href='all.html'><p>See all {}'s items</p></a>",
                   it.name.as_ref().expect("crates always have a name"))?;
        }
        // Per-kind link sections (fields, variants, methods, ...).
        match it.inner {
            clean::StructItem(ref s) => sidebar_struct(fmt, it, s)?,
            clean::TraitItem(ref t) => sidebar_trait(fmt, it, t)?,
            clean::PrimitiveItem(ref p) => sidebar_primitive(fmt, it, p)?,
            clean::UnionItem(ref u) => sidebar_union(fmt, it, u)?,
            clean::EnumItem(ref e) => sidebar_enum(fmt, it, e)?,
            clean::TypedefItem(ref t, _) => sidebar_typedef(fmt, it, t)?,
            clean::ModuleItem(ref m) => sidebar_module(fmt, it, &m.items)?,
            clean::ForeignTypeItem => sidebar_foreign_type(fmt, it)?,
            _ => (),
        }

        // The sidebar is designed to display sibling functions, modules and
        // other miscellaneous information. since there are lots of sibling
        // items (and that causes quadratic growth in large modules),
        // we refactor common parts into a shared JavaScript file per module.
        // still, we don't move everything into JS because we want to preserve
        // as much HTML as possible in order to allow non-JS-enabled browsers
        // to navigate the documentation (though slightly inefficiently).

        write!(fmt, "<p class='location'>")?;
        for (i, name) in cx.current.iter().take(parentlen).enumerate() {
            if i > 0 {
                write!(fmt, "::<wbr>")?;
            }
            write!(fmt, "<a href='{}index.html'>{}</a>",
                   &cx.root_path()[..(cx.current.len() - i - 1) * 3],
                   *name)?;
        }
        write!(fmt, "</p>")?;

        // Sidebar refers to the enclosing module, not this module.
        let relpath = if it.is_mod() { "../" } else { "" };
        write!(fmt,
               "<script>window.sidebarCurrent = {{\
                   name: '{name}', \
                   ty: '{ty}', \
                   relpath: '{path}'\
                }};</script>",
               name = it.name.as_ref().map(|x| &x[..]).unwrap_or(""),
               ty = it.type_().css_class(),
               path = relpath)?;
        if parentlen == 0 {
            // There is no sidebar-items.js beyond the crate root path
            // FIXME maybe dynamic crate loading can be merged here
        } else {
            write!(fmt, "<script defer src=\"{path}sidebar-items.js\"></script>",
                   path = relpath)?;
        }

        // Closes sidebar-elems div.
        write!(fmt, "</div>")?;

        Ok(())
    }
}
/// Returns `url` itself, or `url-N` for the smallest `N >= 1` not yet in
/// `used_links`, recording the chosen link so sidebar anchors stay unique.
fn get_next_url(used_links: &mut FxHashSet<String>, url: String) -> String {
    if used_links.insert(url.clone()) {
        return url;
    }
    let mut add = 1;
    // `insert` returns false while the candidate is already taken;
    // `!…` replaces the non-idiomatic `== false` comparison.
    while !used_links.insert(format!("{}-{}", url, add)) {
        add += 1;
    }
    format!("{}-{}", url, add)
}
fn get_methods(
i: &clean::Impl,
for_deref: bool,
used_links: &mut FxHashSet<String>,
) -> Vec<String> {
i.items.iter().filter_map(|item| {
match item.name {
// Maybe check with clean::Visibility::Public as well?
Some(ref name) if !name.is_empty() && item.visibility.is_some() && item.is_method() => {
if !for_deref || should_render_item(item, false) {
Some(format!("<a href=\"#{}\">{}</a>",
get_next_url(used_links, format!("method.{}", name)),
name))
} else {
None
}
}
_ => None,
}
}).collect::<Vec<_>>()
}
// The point is to url encode any potential character from a type with genericity.
fn small_url_encode(s: &str) -> String {
s.replace("<", "%3C")
.replace(">", "%3E")
.replace(" ", "%20")
.replace("?", "%3F")
.replace("'", "%27")
.replace("&", "%26")
.replace(",", "%2C")
.replace(":", "%3A")
.replace(";", "%3B")
.replace("[", "%5B")
.replace("]", "%5D")
.replace("\"", "%22")
}
/// Builds the sidebar HTML fragment listing an item's associated items:
/// inherent methods, methods reachable through `Deref`, and its trait /
/// auto-trait / blanket implementations. Returns an empty string when the
/// cache has no impls for the item.
fn sidebar_assoc_items(it: &clean::Item) -> String {
    let mut out = String::new();
    let c = cache();
    if let Some(v) = c.impls.get(&it.def_id) {
        // Tracks anchors handed out so far, so duplicate method names get
        // distinct `method.name-N` fragments (see `get_next_url`).
        let mut used_links = FxHashSet::default();
        {
            // `flat_map` takes its closure by `move`; sharing the set through
            // Rc<RefCell> lets the same set be reused for the Deref section
            // once this scope ends.
            let used_links_bor = Rc::new(RefCell::new(&mut used_links));
            let mut ret = v.iter()
                           .filter(|i| i.inner_impl().trait_.is_none())
                           .flat_map(move |i| get_methods(i.inner_impl(),
                                                          false,
                                                          &mut used_links_bor.borrow_mut()))
                           .collect::<Vec<_>>();
            // We want links' order to be reproducible so we don't use unstable sort.
            ret.sort();
            if !ret.is_empty() {
                out.push_str(&format!("<a class=\"sidebar-title\" href=\"#methods\">Methods\
                                       </a><div class=\"sidebar-links\">{}</div>", ret.join("")));
            }
        }
        if v.iter().any(|i| i.inner_impl().trait_.is_some()) {
            // If the type implements Deref, surface the target type's
            // inherent methods under a "Methods from …" heading.
            if let Some(impl_) = v.iter()
                                  .filter(|i| i.inner_impl().trait_.is_some())
                                  .find(|i| i.inner_impl().trait_.def_id() == c.deref_trait_did) {
                if let Some(target) = impl_.inner_impl().items.iter().filter_map(|item| {
                    match item.inner {
                        clean::TypedefItem(ref t, true) => Some(&t.type_),
                        _ => None,
                    }
                }).next() {
                    // Resolve the Deref target to its own impl list; primitives
                    // are looked up through `primitive_locations`.
                    let inner_impl = target.def_id().or(target.primitive_type().and_then(|prim| {
                        c.primitive_locations.get(&prim).cloned()
                    })).and_then(|did| c.impls.get(&did));
                    if let Some(impls) = inner_impl {
                        out.push_str("<a class=\"sidebar-title\" href=\"#deref-methods\">");
                        out.push_str(&format!("Methods from {}<Target={}>",
                                              Escape(&format!("{:#}",
                                                     impl_.inner_impl().trait_.as_ref().unwrap())),
                                              Escape(&format!("{:#}", target))));
                        out.push_str("</a>");
                        let mut ret = impls.iter()
                                           .filter(|i| i.inner_impl().trait_.is_none())
                                           .flat_map(|i| get_methods(i.inner_impl(),
                                                                     true,
                                                                     &mut used_links))
                                           .collect::<Vec<_>>();
                        // We want links' order to be reproducible so we don't use unstable sort.
                        ret.sort();
                        if !ret.is_empty() {
                            out.push_str(&format!("<div class=\"sidebar-links\">{}</div>",
                                                  ret.join("")));
                        }
                    }
                }
            }
            // Renders one `<a>` per trait impl, deduplicated via `links` and
            // sorted for reproducible output.
            let format_impls = |impls: Vec<&Impl>| {
                let mut links = FxHashSet::default();
                let mut ret = impls.iter()
                    .filter_map(|i| {
                        let is_negative_impl = is_negative_impl(i.inner_impl());
                        if let Some(ref i) = i.inner_impl().trait_ {
                            let i_display = format!("{:#}", i);
                            let out = Escape(&i_display);
                            let encoded = small_url_encode(&format!("{:#}", i));
                            let generated = format!("<a href=\"#impl-{}\">{}{}</a>",
                                                    encoded,
                                                    if is_negative_impl { "!" } else { "" },
                                                    out);
                            if links.insert(generated.clone()) {
                                Some(generated)
                            } else {
                                None
                            }
                        } else {
                            None
                        }
                    })
                    .collect::<Vec<String>>();
                ret.sort();
                ret.join("")
            };
            // Split impls into auto-trait ("synthetic"), blanket, and ordinary
            // ("concrete") groups; each group gets its own sidebar heading.
            let (synthetic, concrete): (Vec<&Impl>, Vec<&Impl>) = v
                .iter()
                .partition::<Vec<_>, _>(|i| i.inner_impl().synthetic);
            let (blanket_impl, concrete): (Vec<&Impl>, Vec<&Impl>) = concrete
                .into_iter()
                .partition::<Vec<_>, _>(|i| i.inner_impl().blanket_impl.is_some());
            let concrete_format = format_impls(concrete);
            let synthetic_format = format_impls(synthetic);
            let blanket_format = format_impls(blanket_impl);
            if !concrete_format.is_empty() {
                out.push_str("<a class=\"sidebar-title\" href=\"#implementations\">\
                              Trait Implementations</a>");
                out.push_str(&format!("<div class=\"sidebar-links\">{}</div>", concrete_format));
            }
            if !synthetic_format.is_empty() {
                out.push_str("<a class=\"sidebar-title\" href=\"#synthetic-implementations\">\
                              Auto Trait Implementations</a>");
                out.push_str(&format!("<div class=\"sidebar-links\">{}</div>", synthetic_format));
            }
            if !blanket_format.is_empty() {
                out.push_str("<a class=\"sidebar-title\" href=\"#blanket-implementations\">\
                              Blanket Implementations</a>");
                out.push_str(&format!("<div class=\"sidebar-links\">{}</div>", blanket_format));
            }
        }
    }
    out
}
/// Renders the sidebar for a struct page: field links (plain structs only)
/// followed by the shared associated-item links.
fn sidebar_struct(fmt: &mut fmt::Formatter<'_>, it: &clean::Item,
                  s: &clean::Struct) -> fmt::Result {
    let mut buf = String::new();
    let field_links = get_struct_fields_name(&s.fields);
    // Tuple and unit structs have no named fields to link to.
    if let doctree::Plain = s.struct_type {
        if !field_links.is_empty() {
            buf.push_str(&format!("<a class=\"sidebar-title\" href=\"#fields\">Fields</a>\
                                   <div class=\"sidebar-links\">{}</div>", field_links));
        }
    }
    buf.push_str(&sidebar_assoc_items(it));
    if buf.is_empty() {
        return Ok(());
    }
    write!(fmt, "<div class=\"block items\">{}</div>", buf)
}
/// For an impl item, returns `(type the impl is for, trait implemented)`,
/// both rendered as plain text; `None` when the item is not a trait impl.
fn extract_for_impl_name(item: &clean::Item) -> Option<(String, String)> {
    if let clean::ItemEnum::ImplItem(ref i) = item.inner {
        i.trait_
            .as_ref()
            .map(|trait_| (format!("{:#}", i.for_), format!("{:#}", trait_)))
    } else {
        None
    }
}
/// Whether the impl is a negative impl (`impl !Trait for T`).
fn is_negative_impl(i: &clean::Impl) -> bool {
    match i.polarity {
        Some(clean::ImplPolarity::Negative) => true,
        _ => false,
    }
}
/// Renders the sidebar for a trait page: associated types and constants,
/// required and provided methods, implementations on foreign types,
/// implementors, auto-implementors (for auto traits), plus the shared
/// associated-item links.
fn sidebar_trait(fmt: &mut fmt::Formatter<'_>, it: &clean::Item,
                 t: &clean::Trait) -> fmt::Result {
    let mut sidebar = String::new();
    // Anchor lists per trait-item kind. `types` and `consts` keep declaration
    // order; `required` and `provided` are sorted below before rendering.
    let types = t.items
                 .iter()
                 .filter_map(|m| {
                     match m.name {
                         Some(ref name) if m.is_associated_type() => {
                             Some(format!("<a href=\"#associatedtype.{name}\">{name}</a>",
                                          name=name))
                         }
                         _ => None,
                     }
                 })
                 .collect::<String>();
    let consts = t.items
                  .iter()
                  .filter_map(|m| {
                      match m.name {
                          Some(ref name) if m.is_associated_const() => {
                              Some(format!("<a href=\"#associatedconstant.{name}\">{name}</a>",
                                           name=name))
                          }
                          _ => None,
                      }
                  })
                  .collect::<String>();
    let mut required = t.items
                        .iter()
                        .filter_map(|m| {
                            match m.name {
                                Some(ref name) if m.is_ty_method() => {
                                    Some(format!("<a href=\"#tymethod.{name}\">{name}</a>",
                                                 name=name))
                                }
                                _ => None,
                            }
                        })
                        .collect::<Vec<String>>();
    let mut provided = t.items
                        .iter()
                        .filter_map(|m| {
                            match m.name {
                                Some(ref name) if m.is_method() => {
                                    Some(format!("<a href=\"#method.{0}\">{0}</a>", name))
                                }
                                _ => None,
                            }
                        })
                        .collect::<Vec<String>>();
    if !types.is_empty() {
        sidebar.push_str(&format!("<a class=\"sidebar-title\" href=\"#associated-types\">\
                                   Associated Types</a><div class=\"sidebar-links\">{}</div>",
                                  types));
    }
    if !consts.is_empty() {
        sidebar.push_str(&format!("<a class=\"sidebar-title\" href=\"#associated-const\">\
                                   Associated Constants</a><div class=\"sidebar-links\">{}</div>",
                                  consts));
    }
    if !required.is_empty() {
        required.sort();
        sidebar.push_str(&format!("<a class=\"sidebar-title\" href=\"#required-methods\">\
                                   Required Methods</a><div class=\"sidebar-links\">{}</div>",
                                  required.join("")));
    }
    if !provided.is_empty() {
        provided.sort();
        sidebar.push_str(&format!("<a class=\"sidebar-title\" href=\"#provided-methods\">\
                                   Provided Methods</a><div class=\"sidebar-links\">{}</div>",
                                  provided.join("")));
    }
    let c = cache();
    if let Some(implementors) = c.implementors.get(&it.def_id) {
        // Impls whose `for_` type is not in `c.paths` live in a foreign crate;
        // those go under "Implementations on Foreign Types".
        let mut res = implementors.iter()
                                  .filter(|i| i.inner_impl().for_.def_id()
                                               .map_or(false, |d| !c.paths.contains_key(&d)))
                                  .filter_map(|i| {
                                      match extract_for_impl_name(&i.impl_item) {
                                          Some((ref name, ref url)) => {
                                              Some(format!("<a href=\"#impl-{}\">{}</a>",
                                                           small_url_encode(url),
                                                           Escape(name)))
                                          }
                                          _ => None,
                                      }
                                  })
                                  .collect::<Vec<String>>();
        if !res.is_empty() {
            res.sort();
            sidebar.push_str(&format!("<a class=\"sidebar-title\" href=\"#foreign-impls\">\
                                       Implementations on Foreign Types</a><div \
                                       class=\"sidebar-links\">{}</div>",
                                      res.join("")));
        }
    }
    sidebar.push_str("<a class=\"sidebar-title\" href=\"#implementors\">Implementors</a>");
    if t.auto {
        sidebar.push_str("<a class=\"sidebar-title\" \
                          href=\"#synthetic-implementors\">Auto Implementors</a>");
    }
    sidebar.push_str(&sidebar_assoc_items(it));
    write!(fmt, "<div class=\"block items\">{}</div>", sidebar)
}
/// Renders the sidebar for a primitive-type page (associated items only).
fn sidebar_primitive(fmt: &mut fmt::Formatter<'_>, it: &clean::Item,
                     _p: &clean::PrimitiveType) -> fmt::Result {
    let links = sidebar_assoc_items(it);
    if links.is_empty() {
        return Ok(());
    }
    write!(fmt, "<div class=\"block items\">{}</div>", links)
}
/// Renders the sidebar for a typedef page (associated items only).
fn sidebar_typedef(fmt: &mut fmt::Formatter<'_>, it: &clean::Item,
                   _t: &clean::Typedef) -> fmt::Result {
    let links = sidebar_assoc_items(it);
    if links.is_empty() {
        return Ok(());
    }
    write!(fmt, "<div class=\"block items\">{}</div>", links)
}
/// Builds the sidebar links for a struct's (or union's) named fields.
///
/// Only `StructFieldItem`s that actually carry a name produce a link; the
/// anchors match the `structfield.<name>` ids emitted on the page body.
fn get_struct_fields_name(fields: &[clean::Item]) -> String {
    fields.iter()
        // One `filter_map` replaces the previous
        // `filter(if let … { true } else { false })` + `filter_map` pair.
        .filter_map(|f| match (&f.inner, &f.name) {
            (clean::StructFieldItem(..), Some(name)) => {
                Some(format!("<a href=\"#structfield.{name}\">\
                              {name}</a>", name=name))
            }
            _ => None,
        })
        .collect()
}
/// Renders the sidebar for a union page: field links plus associated items.
fn sidebar_union(fmt: &mut fmt::Formatter<'_>, it: &clean::Item,
                 u: &clean::Union) -> fmt::Result {
    let mut buf = String::new();
    let field_links = get_struct_fields_name(&u.fields);
    if !field_links.is_empty() {
        buf.push_str(&format!("<a class=\"sidebar-title\" href=\"#fields\">Fields</a>\
                               <div class=\"sidebar-links\">{}</div>", field_links));
    }
    buf.push_str(&sidebar_assoc_items(it));
    if buf.is_empty() {
        return Ok(());
    }
    write!(fmt, "<div class=\"block items\">{}</div>", buf)
}
/// Renders the sidebar for an enum page: variant links plus associated items.
fn sidebar_enum(fmt: &mut fmt::Formatter<'_>, it: &clean::Item,
                e: &clean::Enum) -> fmt::Result {
    let mut buf = String::new();
    let mut variant_links = String::new();
    for v in &e.variants {
        if let Some(ref name) = v.name {
            variant_links.push_str(&format!("<a href=\"#variant.{name}\">{name}\
                                             </a>", name = name));
        }
    }
    if !variant_links.is_empty() {
        buf.push_str(&format!("<a class=\"sidebar-title\" href=\"#variants\">Variants</a>\
                               <div class=\"sidebar-links\">{}</div>", variant_links));
    }
    buf.push_str(&sidebar_assoc_items(it));
    if buf.is_empty() {
        return Ok(());
    }
    write!(fmt, "<div class=\"block items\">{}</div>", buf)
}
/// Maps an `ItemType` to its `(url-anchor-id, human-readable heading)` pair,
/// as used by module pages and the module sidebar.
fn item_ty_to_strs(ty: &ItemType) -> (&'static str, &'static str) {
    match *ty {
        ItemType::ExternCrate |
        ItemType::Import => ("reexports", "Re-exports"),
        ItemType::Module => ("modules", "Modules"),
        ItemType::Struct => ("structs", "Structs"),
        ItemType::Union => ("unions", "Unions"),
        ItemType::Enum => ("enums", "Enums"),
        ItemType::Function => ("functions", "Functions"),
        ItemType::Typedef => ("types", "Type Definitions"),
        ItemType::Static => ("statics", "Statics"),
        ItemType::Constant => ("constants", "Constants"),
        ItemType::Trait => ("traits", "Traits"),
        ItemType::Impl => ("impls", "Implementations"),
        ItemType::TyMethod => ("tymethods", "Type Methods"),
        ItemType::Method => ("methods", "Methods"),
        ItemType::StructField => ("fields", "Struct Fields"),
        ItemType::Variant => ("variants", "Variants"),
        ItemType::Macro => ("macros", "Macros"),
        ItemType::Primitive => ("primitives", "Primitive Types"),
        ItemType::AssociatedType => ("associated-types", "Associated Types"),
        ItemType::AssociatedConst => ("associated-consts", "Associated Constants"),
        ItemType::ForeignType => ("foreign-types", "Foreign Types"),
        ItemType::Keyword => ("keywords", "Keywords"),
        ItemType::Existential => ("existentials", "Existentials"),
        ItemType::ProcAttribute => ("attributes", "Attribute Macros"),
        ItemType::ProcDerive => ("derives", "Derive Macros"),
        ItemType::TraitAlias => ("trait-aliases", "Trait aliases"),
    }
}
/// Renders the sidebar for a module page: one list entry per item category
/// that actually occurs (non-stripped) among the module's items.
fn sidebar_module(fmt: &mut fmt::Formatter<'_>, _it: &clean::Item,
                  items: &[clean::Item]) -> fmt::Result {
    let mut buf = String::new();
    let has_reexports = items.iter()
        .any(|it| it.type_() == ItemType::ExternCrate || it.type_() == ItemType::Import);
    if has_reexports {
        buf.push_str(&format!("<li><a href=\"#{id}\">{name}</a></li>",
                              id = "reexports",
                              name = "Re-exports"));
    }
    // ordering taken from item_module, reorder, where it prioritized elements in a certain order
    // to print its headings
    let order = [ItemType::Primitive, ItemType::Module, ItemType::Macro, ItemType::Struct,
                 ItemType::Enum, ItemType::Constant, ItemType::Static, ItemType::Trait,
                 ItemType::Function, ItemType::Typedef, ItemType::Union, ItemType::Impl,
                 ItemType::TyMethod, ItemType::Method, ItemType::StructField, ItemType::Variant,
                 ItemType::AssociatedType, ItemType::AssociatedConst, ItemType::ForeignType];
    for &myty in order.iter() {
        if !items.iter().any(|it| !it.is_stripped() && it.type_() == myty) {
            continue;
        }
        let (short, name) = item_ty_to_strs(&myty);
        buf.push_str(&format!("<li><a href=\"#{id}\">{name}</a></li>",
                              id = short,
                              name = name));
    }
    if buf.is_empty() {
        return Ok(());
    }
    write!(fmt, "<div class=\"block items\"><ul>{}</ul></div>", buf)
}
/// Renders the sidebar for a foreign-type page (associated items only).
fn sidebar_foreign_type(fmt: &mut fmt::Formatter<'_>, it: &clean::Item) -> fmt::Result {
    let links = sidebar_assoc_items(it);
    if links.is_empty() {
        return Ok(());
    }
    write!(fmt, "<div class=\"block items\">{}</div>", links)
}
impl<'a> fmt::Display for Source<'a> {
    /// Renders a source listing: a gutter of right-aligned line numbers,
    /// then the syntax-highlighted code.
    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
        let Source(s) = *self;
        let lines = s.lines().count();
        // Width of the widest line number keeps the gutter aligned. (Only
        // used when `lines >= 1`, so the zero-line case is irrelevant.)
        let cols = lines.to_string().len();
        write!(fmt, "<pre class=\"line-numbers\">")?;
        for i in 1..=lines {
            write!(fmt, "<span id=\"{0}\">{0:1$}</span>\n", i, cols)?;
        }
        write!(fmt, "</pre>")?;
        write!(fmt, "{}",
               highlight::render_with_highlighting(s, None, None, None))
    }
}
/// Renders a `macro_rules!` item: the highlighted macro source inside a
/// docblock wrapper, followed by its documentation.
fn item_macro(w: &mut fmt::Formatter<'_>, cx: &Context, it: &clean::Item,
              t: &clean::Macro) -> fmt::Result {
    // Highlighting is pure string work, so it can be done up front.
    let highlighted = highlight::render_with_highlighting(&t.source, Some("macro"), None, None);
    wrap_into_docblock(w, |w| w.write_str(&highlighted))?;
    document(w, cx, it)
}
/// Renders a procedural macro item (function-like, attribute, or derive) as a
/// signature-style code block, followed by its documentation. The bytes
/// written are identical to the former multi-`write!` version.
fn item_proc_macro(w: &mut fmt::Formatter<'_>, cx: &Context, it: &clean::Item, m: &clean::ProcMacro)
    -> fmt::Result
{
    let name = it.name.as_ref().expect("proc-macros always have names");
    match m.kind {
        MacroKind::Bang => {
            write!(w, "<pre class='rust macro'>{}!() {{ /* proc-macro */ }}</pre>", name)?;
        }
        MacroKind::Attr => {
            write!(w, "<pre class='rust attr'>#[{}]</pre>", name)?;
        }
        MacroKind::Derive => {
            write!(w, "<pre class='rust derive'>#[derive({})]", name)?;
            // Derives may declare helper attributes; list them inside braces.
            if !m.helpers.is_empty() {
                writeln!(w, "\n{{")?;
                writeln!(w, "    // Attributes available to this derive:")?;
                for attr in &m.helpers {
                    writeln!(w, "    #[{}]", attr)?;
                }
                write!(w, "}}")?;
            }
            write!(w, "</pre>")?;
        }
        _ => {}
    }
    document(w, cx, it)
}
/// Renders the page body for a primitive type: its docs plus every
/// associated item known for it.
fn item_primitive(w: &mut fmt::Formatter<'_>, cx: &Context,
                  it: &clean::Item,
                  _p: &clean::PrimitiveType) -> fmt::Result {
    document(w, cx, it)?;
    render_assoc_items(w, cx, it, it.def_id, AssocItemRender::All)
}
/// Renders the page body for a language keyword: documentation only.
fn item_keyword(w: &mut fmt::Formatter<'_>, cx: &Context,
                it: &clean::Item,
                _p: &str) -> fmt::Result {
    document(w, cx, it)
}
/// Keywords attached to every generated page's `<meta name="keywords">` tag.
// `'static` is implied on a `const` reference and was redundant; dropped.
const BASIC_KEYWORDS: &str = "rust, rustlang, rust-lang";
/// Returns the page keywords: the basic keyword set plus the item's own name.
fn make_item_keywords(it: &clean::Item) -> String {
    let name = it.name.as_ref().unwrap();
    format!("{}, {}", BASIC_KEYWORDS, name)
}
fn get_index_search_type(item: &clean::Item) -> Option<IndexItemFunctionType> {
let (all_types, ret_types) = match item.inner {
clean::FunctionItem(ref f) => (&f.all_types, &f.ret_types),
clean::MethodItem(ref m) => (&m.all_types, &m.ret_types),
clean::TyMethodItem(ref m) => (&m.all_types, &m.ret_types),
_ => return None,
};
let inputs = all_types.iter().map(|arg| {
get_index_type(&arg)
}).filter(|a| a.name.is_some()).collect();
let output = ret_types.iter().map(|arg| {
get_index_type(&arg)
}).filter(|a| a.name.is_some()).collect::<Vec<_>>();
let output = if output.is_empty() {
None
} else {
Some(output)
};
Some(IndexItemFunctionType { inputs, output })
}
/// Converts a `clean::Type` into its search-index representation:
/// lower-cased name plus any generic argument names.
fn get_index_type(clean_type: &clean::Type) -> Type {
    // Return the struct expression directly; the former `let t = …; t`
    // binding was redundant.
    Type {
        name: get_index_type_name(clean_type, true).map(|s| s.to_ascii_lowercase()),
        generics: get_generics(clean_type),
    }
}
/// Returns a list of all paths used in the type.
/// This is used to help deduplicate imported impls
/// for reexported types. If any of the contained
/// types are re-exported, we don't use the corresponding
/// entry from the js file, as inlining will have already
/// picked up the impl
fn collect_paths_for_type(first_ty: clean::Type) -> Vec<String> {
    let mut out = Vec::new();
    // `visited` guards against cycles; `work` is a breadth-first worklist of
    // component types still to be inspected.
    let mut visited = FxHashSet::default();
    let mut work = VecDeque::new();
    let cache = cache();
    work.push_back(first_ty);
    while let Some(ty) = work.pop_front() {
        if !visited.insert(ty.clone()) {
            continue;
        }
        match ty {
            clean::Type::ResolvedPath { did, .. } => {
                // Prefer the locally-known exact path; fall back to the
                // external-crate path table.
                let get_extern = || cache.external_paths.get(&did).map(|s| s.0.clone());
                let fqp = cache.exact_paths.get(&did).cloned().or_else(get_extern);
                match fqp {
                    Some(path) => {
                        out.push(path.join("::"));
                    },
                    _ => {}
                };
            },
            // Compound types contribute the paths of their components.
            clean::Type::Tuple(tys) => {
                work.extend(tys.into_iter());
            },
            clean::Type::Slice(ty) => {
                work.push_back(*ty);
            }
            clean::Type::Array(ty, _) => {
                work.push_back(*ty);
            },
            clean::Type::Unique(ty) => {
                work.push_back(*ty);
            },
            clean::Type::RawPointer(_, ty) => {
                work.push_back(*ty);
            },
            clean::Type::BorrowedRef { type_, .. } => {
                work.push_back(*type_);
            },
            clean::Type::QPath { self_type, trait_, .. } => {
                work.push_back(*self_type);
                work.push_back(*trait_);
            },
            // Primitives, generics, etc. carry no resolvable path.
            _ => {}
        }
    };
    out
}
/// Returns the name under which `clean_type` appears in the search index, or
/// `None` for types the index cannot name.
///
/// `accept_generic` controls whether bare generic parameters (`T`) count as
/// nameable. Panics if a resolved path has zero segments (a malformed type).
fn get_index_type_name(clean_type: &clean::Type, accept_generic: bool) -> Option<String> {
    match *clean_type {
        clean::ResolvedPath { ref path, .. } => {
            // `segments.last()` replaces the former `into_iter().last()` on a
            // borrowed Vec, which walked the whole list to reach the end.
            let path_segment = path.segments.last().unwrap_or_else(|| panic!(
                "get_index_type_name(clean_type: {:?}, accept_generic: {:?}) had length zero path",
                clean_type, accept_generic
            ));
            Some(path_segment.name.clone())
        }
        clean::Generic(ref s) if accept_generic => Some(s.clone()),
        clean::Primitive(ref p) => Some(format!("{:?}", p)),
        clean::BorrowedRef { ref type_, .. } => get_index_type_name(type_, accept_generic),
        // FIXME: add all from clean::Type.
        _ => None
    }
}
/// Lower-cased names of the type's generic arguments, if it has any the
/// search index can name; otherwise `None`.
fn get_generics(clean_type: &clean::Type) -> Option<Vec<String>> {
    let types = clean_type.generics()?;
    let names: Vec<String> = types.iter()
        .filter_map(|t| get_index_type_name(t, false))
        .map(|s| s.to_ascii_lowercase())
        .collect();
    if names.is_empty() {
        None
    } else {
        Some(names)
    }
}
/// Returns a cheap (`Arc`-cloned) handle to the thread-local rustdoc `Cache`.
pub fn cache() -> Arc<Cache> {
    CACHE_KEY.with(|c| c.borrow().clone())
}
#[cfg(test)]
#[test]
// `name_key` splits a name into (alphabetic prefix, numeric-suffix value,
// leading-zero count of the suffix) so names sort "naturally".
fn test_name_key() {
    assert_eq!(name_key("0"), ("", 0, 1));
    assert_eq!(name_key("123"), ("", 123, 0));
    assert_eq!(name_key("Fruit"), ("Fruit", 0, 0));
    assert_eq!(name_key("Fruit0"), ("Fruit", 0, 1));
    assert_eq!(name_key("Fruit0000"), ("Fruit", 0, 4));
    assert_eq!(name_key("Fruit01"), ("Fruit", 1, 1));
    assert_eq!(name_key("Fruit10"), ("Fruit", 10, 0));
    assert_eq!(name_key("Fruit123"), ("Fruit", 123, 0));
}
#[cfg(test)]
#[test]
// Sorting by `name_key` must reproduce the natural (human-expected) order of
// this already-sorted list.
fn test_name_sorting() {
    let names = ["Apple",
                 "Banana",
                 "Fruit", "Fruit0", "Fruit00",
                 "Fruit1", "Fruit01",
                 "Fruit2", "Fruit02",
                 "Fruit20",
                 "Fruit30x",
                 "Fruit100",
                 "Pear"];
    let mut sorted = names.to_owned();
    sorted.sort_by_key(|&s| name_key(s));
    assert_eq!(names, sorted);
}
|
Source
|
exec.sql.go
|
// Code generated by sqlc. DO NOT EDIT.
// source: exec.sql
package querytest
import (
"context"
)
const advisoryLockExec = `-- name: AdvisoryLockExec :exec
SELECT pg_advisory_lock($1)
`
// AdvisoryLockExec acquires a session-level Postgres advisory lock for the
// given key. Generated from the `:exec` query above: the result set is
// discarded and only the error is returned.
func (q *Queries) AdvisoryLockExec(ctx context.Context, pgAdvisoryLock int64) error {
	_, err := q.db.ExecContext(ctx, advisoryLockExec, pgAdvisoryLock)
	return err
}
const advisoryLockExecRows = `-- name: AdvisoryLockExecRows :execrows
SELECT pg_advisory_lock($1)
`
func (q *Queries) AdvisoryLockExecRows(ctx context.Context, pgAdvisoryLock int64) (int64, error) {
result, err := q.db.ExecContext(ctx, advisoryLockExecRows, pgAdvisoryLock)
if err != nil
|
return result.RowsAffected()
}
|
{
return 0, err
}
|
embeddings.py
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.keras._impl.keras import backend as K
from tensorflow.python.keras._impl.keras import constraints
from tensorflow.python.keras._impl.keras import initializers
from tensorflow.python.keras._impl.keras import regularizers
from tensorflow.python.keras._impl.keras.engine import Layer
from tensorflow.python.keras._impl.keras.utils import tf_utils
from tensorflow.python.ops import embedding_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.util.tf_export import tf_export
@tf_export('keras.layers.Embedding')
class Embedding(Layer):
  """Turns positive integers (indexes) into dense vectors of fixed size.

  eg. [[4], [20]] -> [[0.25, 0.1], [0.6, -0.2]]

  This layer can only be used as the first layer in a model.

  Example:

  ```python
  model = Sequential()
  model.add(Embedding(1000, 64, input_length=10))
  # the model will take as input an integer matrix of size (batch,
  # input_length).
  # the largest integer (i.e. word index) in the input should be no larger
  # than 999 (vocabulary size).
  # now model.output_shape == (None, 10, 64), where None is the batch
  # dimension.

  input_array = np.random.randint(1000, size=(32, 10))

  model.compile('rmsprop', 'mse')
  output_array = model.predict(input_array)
  assert output_array.shape == (32, 10, 64)
  ```

  Arguments:
    input_dim: int > 0. Size of the vocabulary,
      i.e. maximum integer index + 1.
    output_dim: int >= 0. Dimension of the dense embedding.
    embeddings_initializer: Initializer for the `embeddings` matrix.
    embeddings_regularizer: Regularizer function applied to
      the `embeddings` matrix.
    activity_regularizer: Regularizer function applied to
      the output of the layer.
    embeddings_constraint: Constraint function applied to
      the `embeddings` matrix.
    mask_zero: Whether or not the input value 0 is a special "padding"
      value that should be masked out.
      This is useful when using recurrent layers
      which may take variable length input.
      If this is `True` then all subsequent layers
      in the model need to support masking or an exception will be raised.
      If mask_zero is set to True, as a consequence, index 0 cannot be
      used in the vocabulary (input_dim should equal size of
      vocabulary + 1).
    input_length: Length of input sequences, when it is constant.
      This argument is required if you are going to connect
      `Flatten` then `Dense` layers upstream
      (without it, the shape of the dense outputs cannot be computed).

  Input shape:
    2D tensor with shape: `(batch_size, sequence_length)`.

  Output shape:
    3D tensor with shape: `(batch_size, sequence_length, output_dim)`.
  """

  def __init__(self,
               input_dim,
               output_dim,
               embeddings_initializer='uniform',
               embeddings_regularizer=None,
               activity_regularizer=None,
               embeddings_constraint=None,
               mask_zero=False,
               input_length=None,
               **kwargs):
    # Derive `input_shape` from `input_length` when the caller did not supply
    # one explicitly, so the layer can be used as the first layer in a model.
    if 'input_shape' not in kwargs:
      if input_length:
        kwargs['input_shape'] = (input_length,)
      else:
        kwargs['input_shape'] = (None,)
    dtype = kwargs.pop('dtype', K.floatx())
    super(Embedding, self).__init__(dtype=dtype, **kwargs)

    self.input_dim = input_dim
    self.output_dim = output_dim
    self.embeddings_initializer = initializers.get(embeddings_initializer)
    self.embeddings_regularizer = regularizers.get(embeddings_regularizer)
    self.activity_regularizer = regularizers.get(activity_regularizer)
    self.embeddings_constraint = constraints.get(embeddings_constraint)
    self.mask_zero = mask_zero
    self.input_length = input_length

  @tf_utils.shape_type_conversion
  def build(self, input_shape):
    # The embedding matrix is the layer's only weight: one row per index.
    self.embeddings = self.add_weight(
        shape=(self.input_dim, self.output_dim),
        initializer=self.embeddings_initializer,
        name='embeddings',
        regularizer=self.embeddings_regularizer,
        constraint=self.embeddings_constraint)
    self.built = True

  def compute_mask(self, inputs, mask=None):
    # With `mask_zero`, index 0 marks padding positions to be masked out.
    if not self.mask_zero:
      return None
    else:
      return math_ops.not_equal(inputs, 0)

  @tf_utils.shape_type_conversion
  def compute_output_shape(self, input_shape):
    if self.input_length is None:
      return input_shape + (self.output_dim,)
    else:
      # input_length can be tuple if input is 3D or higher
      if isinstance(self.input_length, (list, tuple)):
        in_lens = list(self.input_length)
      else:
        in_lens = [self.input_length]
      if len(in_lens) != len(input_shape) - 1:
        # Bug fix: the ValueError used to be constructed but never raised,
        # silently accepting mismatched shapes.
        raise ValueError('"input_length" is %s, but received input has shape %s' %
                         (str(self.input_length), str(input_shape)))
      else:
        for i, (s1, s2) in enumerate(zip(in_lens, input_shape[1:])):
          if s1 is not None and s2 is not None and s1 != s2:
            # Bug fix: `raise` was missing here as well.
            raise ValueError('"input_length" is %s, but received input has shape %s' %
                             (str(self.input_length), str(input_shape)))
          elif s1 is None:
            in_lens[i] = s2
      return (input_shape[0],) + tuple(in_lens) + (self.output_dim,)

  def call(self, inputs):
    # Embedding lookup requires integer indices; cast floats defensively.
    dtype = K.dtype(inputs)
    if dtype != 'int32' and dtype != 'int64':
      inputs = math_ops.cast(inputs, 'int32')
    out = embedding_ops.embedding_lookup(self.embeddings, inputs)
    return out

  def get_config(self):
    config = {
        'input_dim':
            self.input_dim,
        'output_dim':
            self.output_dim,
        'embeddings_initializer':
            initializers.serialize(self.embeddings_initializer),
        'embeddings_regularizer':
            regularizers.serialize(self.embeddings_regularizer),
        'activity_regularizer':
            regularizers.serialize(self.activity_regularizer),
        'embeddings_constraint':
            constraints.serialize(self.embeddings_constraint),
        'mask_zero':
            self.mask_zero,
        'input_length':
            self.input_length
    }
    base_config = super(Embedding, self).get_config()
    return dict(list(base_config.items()) + list(config.items()))
|
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Embedding layer.
|
block.rs
|
// Copyright (c) 2018-2021 The MobileCoin Foundation
use crate::{
tx::{TxOut, TxOutMembershipElement},
BlockContents, BlockContentsHash, BlockID, BlockVersion,
};
use mc_crypto_digestible::{DigestTranscript, Digestible, MerlinTranscript};
use prost::Message;
use serde::{Deserialize, Serialize};
/// The maximum supported block format version for this build of
/// mc-transaction-core. Equal to [`BlockVersion::MAX`].
pub const MAX_BLOCK_VERSION: BlockVersion = BlockVersion::MAX;
/// The index of a block in the blockchain.
pub type BlockIndex = u64;
#[derive(Clone, PartialEq, Eq, Serialize, Deserialize, Digestible, Message)]
/// A block of transactions in the blockchain.
pub struct Block {
    /// Block ID.
    ///
    /// A hash over all the other fields of this struct, produced by
    /// `compute_block_id`; see also `is_block_id_valid`.
    #[prost(message, required, tag = "1")]
    pub id: BlockID,
    /// Block format version.
    #[prost(uint32, tag = "2")]
    pub version: u32,
    /// Id of the previous block.
    #[prost(message, required, tag = "3")]
    pub parent_id: BlockID,
    /// The index of this block in the blockchain.
    #[prost(uint64, tag = "4")]
    pub index: BlockIndex,
    /// The cumulative number of Txos in the blockchain, including this block.
    #[prost(uint64, tag = "5")]
    pub cumulative_txo_count: u64,
    /// Root hash of the membership proofs provided by the untrusted local
    /// system for validation. This captures the state of all TxOuts in the
    /// ledger that this block was validated against.
    #[prost(message, required, tag = "6")]
    pub root_element: TxOutMembershipElement,
    /// Hash of the block's contents.
    #[prost(message, required, tag = "7")]
    pub contents_hash: BlockContentsHash,
}
impl Block {
/// Creates the origin block.
///
/// # Arguments
/// * `outputs` - Outputs "minted" by the origin block.
pub fn new_origin_block(outputs: &[TxOut]) -> Self
|
    /// Creates a new `Block` intermediate in the block chain, from a parent
    /// block. Adds 1 to the parent.index, and adds
    /// block_contents.outputs.len() to the parent.cumulative_txo_count, to
    /// compute values for the next block.
    ///
    /// # Arguments
    /// * `version` - The block format version
    /// * `parent` - The parent block
    /// * `root_element` - The root element for membership proofs
    /// * `block_contents` - The Contents of the block.
    pub fn new_with_parent(
        version: BlockVersion,
        parent: &Block,
        root_element: &TxOutMembershipElement,
        block_contents: &BlockContents,
    ) -> Self {
        Block::new(
            version,
            &parent.id,
            parent.index + 1,
            parent.cumulative_txo_count + block_contents.outputs.len() as u64,
            root_element,
            block_contents,
        )
    }
    /// Creates a new `Block`.
    /// This low-level version doesn't require having the parent block in hand,
    /// and takes all needed metadata for the block header as input.
    ///
    /// # Arguments
    /// * `version` - The block format version.
    /// * `parent_id` - `BlockID` of previous block in the blockchain.
    /// * `index` - The index of this block in the blockchain.
    /// * `cumulative_txo_count` - The cumulative txo count *including this
    ///   block*
    /// * `root_element` - The root element for membership proofs
    /// * `block_contents` - Contents of the block.
    pub fn new(
        version: BlockVersion,
        parent_id: &BlockID,
        index: BlockIndex,
        cumulative_txo_count: u64,
        root_element: &TxOutMembershipElement,
        block_contents: &BlockContents,
    ) -> Self {
        let contents_hash = block_contents.hash();
        // The ID commits to every other header field and, via contents_hash,
        // to the block contents themselves.
        let id = compute_block_id(
            *version,
            parent_id,
            index,
            cumulative_txo_count,
            root_element,
            &contents_hash,
        );
        Self {
            id,
            version: *version,
            parent_id: parent_id.clone(),
            index,
            cumulative_txo_count,
            root_element: root_element.clone(),
            contents_hash,
        }
    }
/// Checks if the block's ID is valid for the block.
/// A block constructed with `new` will be valid by virtue of `calling
/// compute_block_id` on construction. However, when converting between
/// different block representations, you need to validate that the contents
/// of the converted structure is valid.
pub fn is_block_id_valid(&self) -> bool {
let expected_id = compute_block_id(
self.version,
&self.parent_id,
self.index,
self.cumulative_txo_count,
&self.root_element,
&self.contents_hash,
);
self.id == expected_id
}
}
/// Computes the BlockID by hashing the contents of a block.
///
/// The identifier of a block is the result of hashing everything inside a block
/// except the `id` field.
///
/// NOTE: the sequence of `append_to_transcript` calls (and their labels) below
/// defines the block-id; changing either would change every block's ID.
pub fn compute_block_id(
    version: u32,
    parent_id: &BlockID,
    index: BlockIndex,
    cumulative_txo_count: u64,
    root_element: &TxOutMembershipElement,
    contents_hash: &BlockContentsHash,
) -> BlockID {
    let mut transcript = MerlinTranscript::new(b"mobilecoin-block-id");
    version.append_to_transcript(b"version", &mut transcript);
    parent_id.append_to_transcript(b"parent_id", &mut transcript);
    index.append_to_transcript(b"index", &mut transcript);
    cumulative_txo_count.append_to_transcript(b"cumulative_txo_count", &mut transcript);
    root_element.append_to_transcript(b"root_element", &mut transcript);
    contents_hash.append_to_transcript(b"contents_hash", &mut transcript);
    let mut result = [0u8; 32];
    transcript.extract_digest(&mut result);
    BlockID(result)
}
#[cfg(test)]
mod block_tests {
use crate::{
encrypted_fog_hint::EncryptedFogHint,
membership_proofs::Range,
ring_signature::KeyImage,
tokens::Mob,
tx::{TxOut, TxOutMembershipElement, TxOutMembershipHash},
Amount, Block, BlockContents, BlockContentsHash, BlockID, BlockVersion, Token,
};
use alloc::vec::Vec;
use core::convert::TryFrom;
use mc_account_keys::AccountKey;
use mc_crypto_keys::RistrettoPrivate;
use mc_util_from_random::FromRandom;
use rand::{rngs::StdRng, CryptoRng, RngCore, SeedableRng};
    /// Block format version used by every fixture in this test module.
    const BLOCK_VERSION: BlockVersion = BlockVersion::ONE;
fn get_block_contents<RNG: CryptoRng + RngCore>(rng: &mut RNG) -> BlockContents {
let (key_images, outputs) = get_key_images_and_outputs(rng);
BlockContents {
key_images,
outputs,
..Default::default()
}
}
fn get_key_images_and_outputs<RNG: CryptoRng + RngCore>(
rng: &mut RNG,
) -> (Vec<KeyImage>, Vec<TxOut>) {
let recipient = AccountKey::random(rng);
let outputs: Vec<TxOut> = (0..8)
.map(|_i| {
let mut result = TxOut::new(
Amount {
value: rng.next_u64(),
token_id: Mob::ID,
},
&recipient.default_subaddress(),
&RistrettoPrivate::from_random(rng),
EncryptedFogHint::fake_onetime_hint(rng),
)
.unwrap();
result.masked_amount.masked_token_id = Default::default();
result
})
.collect();
let key_images = vec![
KeyImage::from(rng.next_u64()),
KeyImage::from(rng.next_u64()),
KeyImage::from(rng.next_u64()),
];
(key_images, outputs)
}
fn get_block<RNG: CryptoRng + RngCore>(rng: &mut RNG) -> Block {
let bytes = [14u8; 32];
let parent_id = BlockID::try_from(&bytes[..]).unwrap();
let root_element = TxOutMembershipElement {
range: Range::new(0, 15).unwrap(),
hash: TxOutMembershipHash::from([0u8; 32]),
};
let block_contents = get_block_contents(rng);
Block::new(
BLOCK_VERSION,
&parent_id,
3,
400,
&root_element,
&block_contents,
)
}
fn get_block_with_no_memo<RNG: CryptoRng + RngCore>(rng: &mut RNG) -> Block {
let bytes = [14u8; 32];
let parent_id = BlockID::try_from(&bytes[..]).unwrap();
let root_element = TxOutMembershipElement {
range: Range::new(0, 15).unwrap(),
hash: TxOutMembershipHash::from([0u8; 32]),
};
let (key_images, mut outputs) = get_key_images_and_outputs(rng);
for ref mut output in outputs.iter_mut() {
output.e_memo = None;
}
let block_contents = BlockContents {
key_images,
outputs,
..Default::default()
};
Block::new(
BLOCK_VERSION,
&parent_id,
3,
400,
&root_element,
&block_contents,
)
}
#[test]
/// The block returned by `get_block` should have a valid BlockID.
fn test_get_block_has_valid_id() {
let mut rng: StdRng = SeedableRng::from_seed([1u8; 32]);
let block = get_block(&mut rng);
assert!(block.is_block_id_valid());
}
#[test]
/// The block ID should depend on the block version.
fn test_block_id_includes_version() {
let mut rng: StdRng = SeedableRng::from_seed([1u8; 32]);
let mut block = get_block(&mut rng);
block.version += 1;
assert!(!block.is_block_id_valid());
}
#[test]
/// The block ID should depend on the parent_id.
fn test_block_id_includes_parent_id() {
let mut rng: StdRng = SeedableRng::from_seed([1u8; 32]);
let mut block = get_block(&mut rng);
let mut bytes = [0u8; 32];
rng.fill_bytes(&mut bytes);
let wrong_parent_id = BlockID(bytes);
block.parent_id = wrong_parent_id;
assert!(!block.is_block_id_valid());
}
#[test]
/// The block ID should depend on the block's index.
fn test_block_id_includes_block_index() {
let mut rng: StdRng = SeedableRng::from_seed([1u8; 32]);
let mut block = get_block(&mut rng);
block.index += 1;
assert!(!block.is_block_id_valid());
}
#[test]
/// The block ID should depend on the root element.
fn test_block_id_includes_root_element() {
let mut rng: StdRng = SeedableRng::from_seed([1u8; 32]);
let mut block = get_block(&mut rng);
let wrong_root_element = TxOutMembershipElement {
range: Range::new(13, 17).unwrap(),
hash: Default::default(),
};
block.root_element = wrong_root_element;
assert!(!block.is_block_id_valid());
}
#[test]
/// The block ID should depend on the content_hash.
fn test_block_id_includes_content_hash() {
let mut rng: StdRng = SeedableRng::from_seed([1u8; 32]);
let mut block = get_block(&mut rng);
let mut bytes = [0u8; 32];
rng.fill_bytes(&mut bytes);
let wrong_content_hash = BlockContentsHash(bytes);
block.contents_hash = wrong_content_hash;
assert!(!block.is_block_id_valid());
}
#[test]
#[ignore]
// TODO: Block::new should return an error if `tx_hashes` contains duplicates.
fn test_block_errors_on_duplicate_tx_hashes() {
unimplemented!()
}
#[test]
/// The block ID and block contents hash do not change as the code evolves.
/// This test was written by writing a failed assert and then copying the
/// actual block id into the test. This should hopefully catches cases where
/// we add/change Block/BlockContents and accidentally break id
/// calculation of old blocks.
fn test_hashing_is_consistent() {
let mut rng: StdRng = SeedableRng::from_seed([1u8; 32]);
//Check hash with memo
let block = get_block(&mut rng);
assert_eq!(
block.id.as_ref(),
&[
118, 205, 187, 34, 207, 104, 52, 137, 97, 124, 79, 205, 112, 204, 146, 217, 128,
178, 169, 214, 231, 120, 46, 237, 17, 93, 59, 136, 101, 131, 197, 217
]
);
let block_contents = get_block_contents(&mut rng);
assert_eq!(
block_contents.hash().as_ref(),
&[
130, 252, 161, 182, 34, 248, 219, 175, 99, 76, 204, 54, 204, 35, 147, 41, 168, 222,
68, 11, 76, 106, 243, 173, 136, 27, 208, 27, 85, 199, 193, 241
]
);
//Check hash without memo
let block_with_no_memo = get_block_with_no_memo(&mut rng);
assert_eq!(
block_with_no_memo.id.as_ref(),
&[
243, 102, 219, 76, 169, 151, 159, 65, 84, 34, 178, 32, 207, 95, 133, 127, 68, 161,
140, 254, 120, 243, 90, 232, 156, 40, 132, 101, 203, 160, 12, 159
]
);
assert_eq!(
block_with_no_memo.contents_hash.as_ref(),
&[
69, 203, 184, 52, 204, 228, 5, 91, 161, 228, 220, 116, 182, 23, 169, 32, 76, 104,
121, 186, 195, 20, 142, 138, 69, 155, 193, 215, 226, 117, 134, 74
]
);
}
}
|
{
let version = 0; // The origin block is always 0
let parent_id = BlockID::default();
let index: BlockIndex = 0;
let cumulative_txo_count = outputs.len() as u64;
let root_element = TxOutMembershipElement::default();
// The origin block does not contain anything but TxOuts.
let block_contents = BlockContents {
outputs: outputs.to_vec(),
..Default::default()
};
let contents_hash = block_contents.hash();
let id = compute_block_id(
version,
&parent_id,
index,
cumulative_txo_count,
&root_element,
&contents_hash,
);
Self {
id,
version,
parent_id,
index,
cumulative_txo_count,
root_element,
contents_hash,
}
}
|
main.rs
|
use actix_web::{error, get, web, App, HttpResponse, HttpServer};
use config::ConfigError;
use deadpool_postgres::{Client, Pool, PoolError};
use dotenv::dotenv;
use serde::{Deserialize, Serialize};
use uuid::Uuid;
/// Runtime configuration, deserialized from environment variables
/// (see `Config::from_env`).
#[derive(Debug, Deserialize)]
struct Config {
    /// Address the HTTP server binds to, e.g. "127.0.0.1:8080".
    listen: String,
    /// Connection settings for the deadpool-postgres pool.
    pg: deadpool_postgres::Config,
}
|
impl Config {
    /// Builds the configuration by merging all environment variables.
    ///
    /// NOTE(review): uses the pre-0.11 `config` crate API
    /// (`Config::new` / `merge` / `try_into`) — confirm against the pinned
    /// `config` version.
    fn from_env() -> Result<Self, ConfigError> {
        let mut cfg = ::config::Config::new();
        cfg.merge(::config::Environment::new())?;
        cfg.try_into()
    }
}
/// One row of the `event` table (id, title); serialized to JSON in responses.
#[derive(Serialize, Deserialize)]
struct Event {
    id: Uuid,
    title: String,
}
#[derive(failure::Fail, Debug)]
enum Error {
#[fail(display = "An internal error occured. Please try again later.")]
PoolError(PoolError),
}
impl From<PoolError> for Error {
fn from(error: PoolError) -> Self {
Self::PoolError(error)
}
}
impl error::ResponseError for Error {}
/// Loads every event (id, title) from the database.
///
/// Checks out a client from the pool, prepares the statement, and maps each
/// returned row onto an `Event`.
async fn event_list(pool: &Pool) -> Result<Vec<Event>, PoolError> {
    let client: Client = pool.get().await?;
    let stmt = client.prepare("SELECT id, title FROM event").await?;
    let mut events = Vec::new();
    for row in client.query(&stmt, &[]).await? {
        events.push(Event {
            id: row.get(0),
            title: row.get(1),
        });
    }
    Ok(events)
}
/// GET /v1.0/event.list — returns all events as a JSON array.
#[get("/v1.0/event.list")]
async fn index(db_pool: web::Data<Pool>) -> Result<HttpResponse, Error> {
    let events = event_list(&db_pool).await?;
    Ok(HttpResponse::Ok().json(events))
}
/// Entry point: loads `.env` + environment config, builds the Postgres pool,
/// and serves the HTTP API.
#[actix_rt::main]
async fn main() -> std::io::Result<()> {
    dotenv().ok();
    // Startup-time failures (bad/missing config) abort the process by design.
    let config = Config::from_env().unwrap();
    let pool = config.pg.create_pool(tokio_postgres::NoTls).unwrap();
    // Each worker factory invocation clones the pool into app data.
    let server = HttpServer::new(move || App::new().data(pool.clone()).service(index))
        .bind(&config.listen)?
        .run();
    println!("Server running at http://{}/", &config.listen);
    println!(
        "Try the following URLs: http://{}/v1.0/event.list",
        &config.listen,
    );
    server.await
}
| |
fr.js
|
/*
Copyright (c) 2003-2018, CKSource - Frederico Knabben. All rights reserved.
For licensing, see LICENSE.md or https://ckeditor.com/legal/ckeditor-oss-license
*/
|
Ntilde:"N caron majuscule",Ograve:"O accent grave majuscule",Oacute:"O accent aigu majuscule",Ocirc:"O accent circonflexe majuscule",Otilde:"O caron majuscule",Ouml:"O tréma majuscule",times:"Symbole de multiplication",Oslash:"O barré majuscule",Ugrave:"U accent grave majuscule",Uacute:"U accent aigu majuscule",Ucirc:"U accent circonflexe majuscule",Uuml:"U tréma majuscule",Yacute:"Y accent aigu majuscule",THORN:"Lettre islandaise thorn majuscule",szlig:"Lettre minuscule allemande S dur",agrave:"A accent grave minuscule",
aacute:"A accent aigu minuscule",acirc:"A accent circonflexe minuscule",atilde:"A tilde minuscule",auml:"A tréma minuscule",aring:"A rond minuscule",aelig:"Ligature Æ minuscule",ccedil:"C cédille minuscule",egrave:"E accent grave minuscule",eacute:"E accent aigu minuscule",ecirc:"E accent circonflexe minuscule",euml:"E tréma minuscule",igrave:"I accent grave minuscule",iacute:"I accent aigu minuscule",icirc:"I accent circonflexe minuscule",iuml:"i minuscule tréma",eth:"Lettre minuscule islandaise ED",
ntilde:"N caron minuscule",ograve:"O minuscule accent grave",oacute:"O accent aigu minuscule",ocirc:"O accent circonflexe minuscule",otilde:"O tilde minuscule",ouml:"O tréma minuscule",divide:"Symbole de division",oslash:"O barré minuscule",ugrave:"U accent grave minuscule",uacute:"U accent aigu minuscule",ucirc:"U accent circonflexe minuscule",uuml:"U tréma minuscule",yacute:"Y accent aigu minuscule",thorn:"Lettre islandaise thorn minuscule",yuml:"Y tréma minuscule",OElig:"Ligature Œ majuscule",
oelig:"Ligature Œ minuscule",372:"W accent circonflexe majuscule",374:"Y accent circonflexe majuscule",373:"W accent circonflexe minuscule",375:"Y accent circonflexe minuscule",sbquo:"Guillemet simple fermant inférieur",8219:"Guillemet-virgule supérieur culbuté",bdquo:"Guillemet-virgule double inférieur",hellip:"Points de suspension",trade:"Symbole de marque commerciale",9658:"Flèche noire pointant vers la droite",bull:"Gros point médian",rarr:"Flèche vers la droite",rArr:"Double flèche vers la droite",
hArr:"Double flèche vers la gauche",diams:"Losange noir",asymp:"Environ égal"});
|
CKEDITOR.plugins.setLang("specialchar","fr",{euro:"Symbole euro",lsquo:"Guillemet simple ouvrant",rsquo:"Guillemet simple fermant",ldquo:"Guillemet double ouvrant",rdquo:"Guillemet double fermant",ndash:"Tiret demi-cadratin",mdash:"Tiret cadratin",iexcl:"Point d'exclamation inversé",cent:"Symbole cent",pound:"Symbole Livre sterling",curren:"Symbole monétaire",yen:"Symbole yen",brvbar:"Barre verticale scindée",sect:"Signe de section",uml:"Tréma",copy:"Symbole Copyright",ordf:"Indicateur ordinal féminin",
laquo:"Guillemet français ouvrant",not:"Crochet de négation",reg:"Symbole de marque déposée",macr:"Macron",deg:"Symbole degré",sup2:"Exposant 2",sup3:"Exposant 3",acute:"Accent aigu",micro:"Symbole micro",para:"Symbole pied-de-mouche",middot:"Point médian",cedil:"Cédille",sup1:"Exposant 1",ordm:"Indicateur ordinal masculin",raquo:"Guillemet français fermant",frac14:"Fraction un quart",frac12:"Fraction un demi",frac34:"Fraction trois quarts",iquest:"Point d'interrogation inversé",Agrave:"A accent grave majuscule",
Aacute:"A accent aigu majuscule",Acirc:"A accent circonflexe majuscule",Atilde:"A caron majuscule",Auml:"A tréma majuscule",Aring:"A rond majuscule",AElig:"Ligature Æ majuscule",Ccedil:"C cédille majuscule",Egrave:"E accent grave majuscule",Eacute:"E accent aigu majuscule",Ecirc:"E accent circonflexe majuscule",Euml:"E tréma majuscule",Igrave:"I accent grave majuscule",Iacute:"I accent aigu majuscule",Icirc:"I accent circonflexe majuscule",Iuml:"I tréma majuscule",ETH:"Lettre majuscule islandaise ED",
|
main.go
|
// Copyright 2016-2018, Pulumi Corporation.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package main
import (
"github.com/pulumi/pulumi-terraform/pkg/tfbridge"
influxdb "github.com/pulumi/pulumi-influxdb"
"github.com/pulumi/pulumi-influxdb/pkg/version"
)
func
|
() {
// Modify the path to point to the new provider
tfbridge.Main("influxdb", version.Version, influxdb.Provider())
}
|
main
|
api.xpack.ml.delete_model_snapshot.go
|
// Licensed to Elasticsearch B.V under one or more agreements.
// Elasticsearch B.V. licenses this file to you under the Apache 2.0 License.
// See the LICENSE file in the project root for more information.
//
// Code generated from specification version 7.7.0: DO NOT EDIT
package esapi
import (
"context"
"net/http"
"strings"
)
func newMLDeleteModelSnapshotFunc(t Transport) MLDeleteModelSnapshot
|
// ----- API Definition -------------------------------------------------------
// MLDeleteModelSnapshot - Deletes an existing model snapshot.
//
// See full documentation at https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-delete-snapshot.html.
//
type MLDeleteModelSnapshot func(snapshot_id string, job_id string, o ...func(*MLDeleteModelSnapshotRequest)) (*Response, error)
// MLDeleteModelSnapshotRequest configures the ML Delete Model Snapshot API request.
//
type MLDeleteModelSnapshotRequest struct {
	// Required path parameters identifying the snapshot to delete.
	JobID string
	SnapshotID string
	// Common query-string flags shared by all esapi requests.
	Pretty bool
	Human bool
	ErrorTrace bool
	FilterPath []string
	// Additional HTTP headers to attach to the request.
	Header http.Header
	// Request context; set via WithContext.
	ctx context.Context
}
// Do executes the request and returns response or error.
//
// Do executes the request and returns response or error.
//
func (r MLDeleteModelSnapshotRequest) Do(ctx context.Context, transport Transport) (*Response, error) {
	var (
		method string
		path strings.Builder
		params map[string]string
	)
	method = "DELETE"
	// Build /_ml/anomaly_detectors/{job_id}/model_snapshots/{snapshot_id},
	// pre-sizing the builder to avoid reallocation.
	path.Grow(1 + len("_ml") + 1 + len("anomaly_detectors") + 1 + len(r.JobID) + 1 + len("model_snapshots") + 1 + len(r.SnapshotID))
	path.WriteString("/")
	path.WriteString("_ml")
	path.WriteString("/")
	path.WriteString("anomaly_detectors")
	path.WriteString("/")
	path.WriteString(r.JobID)
	path.WriteString("/")
	path.WriteString("model_snapshots")
	path.WriteString("/")
	path.WriteString(r.SnapshotID)
	// Collect the common query-string options that were set on the request.
	params = make(map[string]string)
	if r.Pretty {
		params["pretty"] = "true"
	}
	if r.Human {
		params["human"] = "true"
	}
	if r.ErrorTrace {
		params["error_trace"] = "true"
	}
	if len(r.FilterPath) > 0 {
		params["filter_path"] = strings.Join(r.FilterPath, ",")
	}
	req, err := newRequest(method, path.String(), nil)
	if err != nil {
		return nil, err
	}
	if len(params) > 0 {
		q := req.URL.Query()
		for k, v := range params {
			q.Set(k, v)
		}
		req.URL.RawQuery = q.Encode()
	}
	if len(r.Header) > 0 {
		// Fast path: when the request has no headers yet, the caller's map is
		// assigned directly (shared, not copied); otherwise values are merged.
		if len(req.Header) == 0 {
			req.Header = r.Header
		} else {
			for k, vv := range r.Header {
				for _, v := range vv {
					req.Header.Add(k, v)
				}
			}
		}
	}
	if ctx != nil {
		req = req.WithContext(ctx)
	}
	res, err := transport.Perform(req)
	if err != nil {
		return nil, err
	}
	// The body is passed through unread; the caller is responsible for closing it.
	response := Response{
		StatusCode: res.StatusCode,
		Body: res.Body,
		Header: res.Header,
	}
	return &response, nil
}
// WithContext sets the request context.
//
// WithContext sets the request context, later passed to Do by the
// generated entry function.
//
func (f MLDeleteModelSnapshot) WithContext(v context.Context) func(*MLDeleteModelSnapshotRequest) {
	return func(r *MLDeleteModelSnapshotRequest) {
		r.ctx = v
	}
}
// WithPretty makes the response body pretty-printed.
//
// WithPretty makes the response body pretty-printed (adds ?pretty=true).
//
func (f MLDeleteModelSnapshot) WithPretty() func(*MLDeleteModelSnapshotRequest) {
	return func(r *MLDeleteModelSnapshotRequest) {
		r.Pretty = true
	}
}
// WithHuman makes statistical values human-readable.
//
// WithHuman makes statistical values human-readable (adds ?human=true).
//
func (f MLDeleteModelSnapshot) WithHuman() func(*MLDeleteModelSnapshotRequest) {
	return func(r *MLDeleteModelSnapshotRequest) {
		r.Human = true
	}
}
// WithErrorTrace includes the stack trace for errors in the response body.
//
// WithErrorTrace includes the stack trace for errors in the response body
// (adds ?error_trace=true).
//
func (f MLDeleteModelSnapshot) WithErrorTrace() func(*MLDeleteModelSnapshotRequest) {
	return func(r *MLDeleteModelSnapshotRequest) {
		r.ErrorTrace = true
	}
}
// WithFilterPath filters the properties of the response body.
//
// WithFilterPath filters the properties of the response body; the values
// are joined with commas into the filter_path query parameter.
//
func (f MLDeleteModelSnapshot) WithFilterPath(v ...string) func(*MLDeleteModelSnapshotRequest) {
	return func(r *MLDeleteModelSnapshotRequest) {
		r.FilterPath = v
	}
}
// WithHeader adds the headers to the HTTP request.
//
// WithHeader adds the headers to the HTTP request. Values are appended
// (http.Header.Add), so repeated calls accumulate rather than overwrite.
//
func (f MLDeleteModelSnapshot) WithHeader(h map[string]string) func(*MLDeleteModelSnapshotRequest) {
	return func(r *MLDeleteModelSnapshotRequest) {
		if r.Header == nil {
			r.Header = make(http.Header)
		}
		for k, v := range h {
			r.Header.Add(k, v)
		}
	}
}
// WithOpaqueID adds the X-Opaque-Id header to the HTTP request.
//
// WithOpaqueID adds the X-Opaque-Id header to the HTTP request.
// Uses Set, so a previous value is replaced.
//
func (f MLDeleteModelSnapshot) WithOpaqueID(s string) func(*MLDeleteModelSnapshotRequest) {
	return func(r *MLDeleteModelSnapshotRequest) {
		if r.Header == nil {
			r.Header = make(http.Header)
		}
		r.Header.Set("X-Opaque-Id", s)
	}
}
|
{
return func(snapshot_id string, job_id string, o ...func(*MLDeleteModelSnapshotRequest)) (*Response, error) {
var r = MLDeleteModelSnapshotRequest{SnapshotID: snapshot_id, JobID: job_id}
for _, f := range o {
f(&r)
}
return r.Do(r.ctx, t)
}
}
|
masterchef_apr_fetcher.py
|
from abc import abstractmethod
from .apr_fetcher import APRFetcher
from typing import Dict, List, Union, Any
from .dapp_apr_fetcher import DappAPRFetcher
from .utils.utils import (
calculate_lp_token_price,
get_block_average_time,
get_token_price_from_dexs,
open_contract,
usdt_address,
platform_name_mapping,
decimals_mapping,
symbol_mapping
)
class MasterchefAPRFetcher(DappAPRFetcher):
"""
Interface for data-fetching based APR fetcher
"""
    @abstractmethod
    def masterchef_address(self):
        # On-chain address of the masterchef contract; provided by subclasses.
        raise NotImplementedError()
    @abstractmethod
    def dapp_token_address_field(self):
        # Name of the masterchef view function that returns the reward-token
        # address (used via getattr in dapp_token_address).
        raise NotImplementedError()
    @abstractmethod
    def dapp_token_per_block_or_per_second_field(self, per_block: bool) -> str:
        # Name of the emission-rate view function; `per_block` selects between
        # the per-block and per-second variants. May return None/"" when the
        # per-second variant does not exist (see dapp_token_per_year).
        raise NotImplementedError()
@abstractmethod
def _total_staked(self, pool_info):
raise NotImplementedError()
@abstractmethod
def _pool_address(self, pool_info):
raise NotImplementedError()
@abstractmethod
def _alloc_point(self, pool_info):
raise NotImplementedError()
    def dapp_token_address(self, web3) -> str:
        # Reads the reward-token address from the masterchef contract.
        # NOTE(review): opens the contract with self._web3/self._blockchain and
        # ignores the `web3` argument — confirm whether that is intentional.
        masterchef_contract = open_contract(self._web3, self._blockchain, self.masterchef_address())
        return getattr(masterchef_contract.functions, self.dapp_token_address_field())().call()
    def dapp_pools_infos(self, web3) -> List[Dict[str, Union[str, float]]]:
        """One dict per masterchef pool: total_staked, pool_address, alloc_point.

        Iterates poolLength() pools and delegates field extraction to the
        subclass hooks, passing both the pool index and the poolInfo tuple.
        """
        masterchef_contract = open_contract(self._web3, self._blockchain, self.masterchef_address())
        d = []
        for i in range(masterchef_contract.functions.poolLength().call()):
            pool_info = masterchef_contract.functions.poolInfo(i).call()
            d.append({
                "total_staked": self._total_staked(i, pool_info),
                "pool_address": self._pool_address(i, pool_info),
                "alloc_point": self._alloc_point(i, pool_info),
            })
        return d
def dapp_token_per_year(self, web3) -> float:
field_per_second = self.dapp_token_per_block_or_per_second_field(per_block=False)
masterchef_contract = open_contract(self._web3, self._blockchain, self.masterchef_address())
token_contract = open_contract(web3, self._blockchain, self.dapp_token_address(web3))
decimals = token_contract.functions.decimals().call()
if field_per_second is None or field_per_second == "":
average_time_per_block_seconds = get_block_average_time(web3, span=100)
block_per_seconds = 1.0 / average_time_per_block_seconds
block_per_year = block_per_seconds * 3600 * 24 * 365
token_per_block = getattr(masterchef_contract.functions, self.dapp_token_per_block_field(per_block=True))().call()
annual_token_emission = block_per_year * (token_per_block/(10**decimals))
else:
annual_token_emission = getattr(masterchef_contract.functions, field_per_second)().call() * 10**(-decimals) * 3600 * 24 * 365
|
return total_alloc
def dapp_token_price(self, web3) -> float:
return get_token_price_from_dexs(web3, self._blockchain, self.dapp_token_address(web3))
|
return annual_token_emission
def dapp_token_total_alloc(self, web3) -> int:
total_alloc = sum([p["alloc_point"] for p in self.dapp_pools_infos(web3)])
|
accessTypes.py
|
"""
The roseguarden project
Copyright (C) 2018-2020 Marcus Drobisch,
This program is free software: you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free Software
Foundation, either version 3 of the License, or (at your option) any later
version.
This program is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License along with
this program. If not, see <http://www.gnu.org/licenses/>.
"""
__authors__ = ["Marcus Drobisch"]
__contact__ = "[email protected]"
__credits__ = []
__license__ = "GPLv3"
from enum import Enum
class SpaceAccessType(Enum):
    """How a user's access to a space is granted/accounted.

    The enum values are the human-readable labels used for this access type.
    """
    NO_ACCESS = "No access"
    UNLIMITED = "Unlimited"
    USER_BUDGET = "User Budget (days)"
    GROUP_BUDGET = "Group Budget (days)"
    AUTO_CHARGED_MONTHLY_BUDGET = "Auto-charged budget (monthly)"
    AUTO_CHARGED_WEEKLY_BUDGET = "Auto-charged budget (weekly)"
def getAccessSpacesOfNode(node):
    """Return the spaces accessible through `node`.

    Stub implementation (reconstructed from the split definition): no spaces
    are resolved yet, so an empty list is returned for every node.
    """
    return []
def checkUserAccessToSpace(user, space):
    # Stub: access evaluation is not implemented yet; deny by default.
    return False
def setDefaultAccessTypeUserProperties(user, access_type):
    # Stub: applying access-type defaults to a user is not implemented yet.
    pass
|
getAccessSpacesOfNode
|
reader_test.go
|
package rtpdump
import (
"bytes"
"errors"
"io"
"net"
"reflect"
"testing"
"time"
)
func
|
(t *testing.T) {
validPreamble := []byte("#!rtpplay1.0 224.2.0.1/3456\n")
for _, test := range []struct {
Name string
Data []byte
WantHeader Header
WantPackets []Packet
WantErr error
}{
{
Name: "empty",
Data: nil,
WantErr: errMalformed,
},
{
Name: "hashbang missing ip/port",
Data: append(
[]byte("#!rtpplay1.0 \n"),
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
),
WantErr: errMalformed,
},
{
Name: "hashbang missing port",
Data: append(
[]byte("#!rtpplay1.0 0.0.0.0\n"),
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
),
WantErr: errMalformed,
},
{
Name: "valid empty file",
Data: append(
validPreamble,
0x00, 0x00, 0x00, 0x01,
0x00, 0x00, 0x00, 0x00,
0x01, 0x01, 0x01, 0x01,
0x22, 0xB8, 0x00, 0x00,
),
WantHeader: Header{
Start: time.Unix(1, 0).UTC(),
Source: net.IPv4(1, 1, 1, 1),
Port: 8888,
},
},
{
Name: "malformed packet header",
Data: append(
validPreamble,
// header
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
// packet header
0x00,
),
WantHeader: Header{
Start: time.Unix(0, 0).UTC(),
Source: net.IPv4(0, 0, 0, 0),
Port: 0,
},
WantErr: errMalformed,
},
{
Name: "short packet payload",
Data: append(
validPreamble,
// header
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
// packet header len=1048575
0xFF, 0xFF, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
// packet payload
0x00,
),
WantHeader: Header{
Start: time.Unix(0, 0).UTC(),
Source: net.IPv4(0, 0, 0, 0),
Port: 0,
},
WantErr: errMalformed,
},
{
Name: "empty packet payload",
Data: append(
validPreamble,
// header
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
// packet header len=0
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
),
WantHeader: Header{
Start: time.Unix(0, 0).UTC(),
Source: net.IPv4(0, 0, 0, 0),
Port: 0,
},
WantErr: errMalformed,
},
{
Name: "valid rtcp packet",
Data: append(
validPreamble,
// header
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
// packet header len=20, pLen=0, off=1
0x00, 0x14, 0x00, 0x00,
0x00, 0x00, 0x00, 0x01,
// packet payload (BYE)
0x81, 0xcb, 0x00, 0x0c,
0x90, 0x2f, 0x9e, 0x2e,
0x03, 0x46, 0x4f, 0x4f,
),
WantHeader: Header{
Start: time.Unix(0, 0).UTC(),
Source: net.IPv4(0, 0, 0, 0),
Port: 0,
},
WantPackets: []Packet{
{
Offset: time.Millisecond,
IsRTCP: true,
Payload: []byte{
0x81, 0xcb, 0x00, 0x0c,
0x90, 0x2f, 0x9e, 0x2e,
0x03, 0x46, 0x4f, 0x4f,
},
},
},
WantErr: nil,
},
{
Name: "truncated rtcp packet",
Data: append(
validPreamble,
// header
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
// packet header len=9, pLen=0, off=1
0x00, 0x09, 0x00, 0x00,
0x00, 0x00, 0x00, 0x01,
// invalid payload
0x81,
),
WantHeader: Header{
Start: time.Unix(0, 0).UTC(),
Source: net.IPv4(0, 0, 0, 0),
Port: 0,
},
WantPackets: []Packet{
{
Offset: time.Millisecond,
IsRTCP: true,
Payload: []byte{0x81},
},
},
},
{
Name: "two valid packets",
Data: append(
validPreamble,
// header
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
// packet header len=20, pLen=0, off=1
0x00, 0x14, 0x00, 0x00,
0x00, 0x00, 0x00, 0x01,
// packet payload (BYE)
0x81, 0xcb, 0x00, 0x0c,
0x90, 0x2f, 0x9e, 0x2e,
0x03, 0x46, 0x4f, 0x4f,
// packet header len=33, pLen=0, off=2
0x00, 0x21, 0x00, 0x19,
0x00, 0x00, 0x00, 0x02,
// packet payload (RTP)
0x90, 0x60, 0x69, 0x8f,
0xd9, 0xc2, 0x93, 0xda,
0x1c, 0x64, 0x27, 0x82,
0x00, 0x01, 0x00, 0x01,
0xFF, 0xFF, 0xFF, 0xFF,
0x98, 0x36, 0xbe, 0x88,
0x9e,
),
WantHeader: Header{
Start: time.Unix(0, 0).UTC(),
Source: net.IPv4(0, 0, 0, 0),
Port: 0,
},
WantPackets: []Packet{
{
Offset: time.Millisecond,
IsRTCP: true,
Payload: []byte{
0x81, 0xcb, 0x00, 0x0c,
0x90, 0x2f, 0x9e, 0x2e,
0x03, 0x46, 0x4f, 0x4f,
},
},
{
Offset: 2 * time.Millisecond,
IsRTCP: false,
Payload: []byte{
0x90, 0x60, 0x69, 0x8f,
0xd9, 0xc2, 0x93, 0xda,
0x1c, 0x64, 0x27, 0x82,
0x00, 0x01, 0x00, 0x01,
0xFF, 0xFF, 0xFF, 0xFF,
0x98, 0x36, 0xbe, 0x88,
0x9e,
},
},
},
WantErr: nil,
},
} {
r, hdr, err := NewReader(bytes.NewReader(test.Data))
if err != nil {
if got, want := err, test.WantErr; !errors.Is(got, want) {
t.Fatalf("NewReader(%s) err=%v want %v", test.Name, got, want)
}
continue
}
if got, want := hdr, test.WantHeader; !reflect.DeepEqual(got, want) {
t.Fatalf("%q Header = %#v, want %#v", test.Name, got, want)
}
var nextErr error
var packets []Packet
for {
pkt, err := r.Next()
if err == io.EOF {
break
}
if err != nil {
nextErr = err
break
}
packets = append(packets, pkt)
}
if got, want := nextErr, test.WantErr; !errors.Is(got, want) {
t.Fatalf("%s err=%v want %v", test.Name, got, want)
}
if got, want := packets, test.WantPackets; !reflect.DeepEqual(got, want) {
t.Fatalf("%q packets=%#v, want %#v", test.Name, got, want)
}
}
}
|
TestReader
|
MRIAThreads.py
|
from PyQt5 import QtCore
import os, tempfile
from scipy.misc import toimage
import brukerWriter as bw
import utils
from FilesTreeWidget import *
__all__ = ["FilesTreeThread", "SaveThread"]
class FilesTreeThread(QtCore.QThread):
    """Worker thread that builds or updates the parent's files tree.

    mode is forwarded to tree.manageTree ("create" by default); dirnames, when
    non-empty, overrides the parent's current directory as the scan target.
    """
    def __init__(self, parent = None, mode = "create", dirnames = ""):
        super().__init__()
        self.parent = parent
        # Failure counter; not modified here — presumably updated by callers.
        self.fail = 0
        self.mode = mode
        self.dirnames = dirnames
    def run(self):
        # Executed in the worker thread once start() is called.
        if not self.dirnames:
            self.parent.tree.manageTree(self.parent.curDir, self.mode)
        else:
            #for dirname in self.dirnames:
            self.parent.tree.manageTree(self.dirnames, self.mode)
# Raised inside SaveThread to abort a save operation mid-way (user cancel).
class CANCELThread(Exception):
    pass
class SaveThread(QtCore.QThread):
    """Thread that saves experiments in image, XML, or text format.

    If self.trigger == "all", each checked experiment is saved as a single
    file inside a folder named after the experiment; if self.trigger ==
    "single", only the current experiment is saved, without creating a folder.
    """
    # Signals used to drive the progress dialog from the worker thread.
    progressText = QtCore.pyqtSignal(str)
    progress = QtCore.pyqtSignal(int)
    suggestedTypes = ["Image", "XML", "Text"]
    def __init__(self, parent, savepath, saveType, form = "", filename = ""):
        super().__init__()
        self.saveType = saveType
        if self.saveType not in self.suggestedTypes:
            # Fixed message typo: "Uncorrect" -> "Incorrect".
            raise CANCELThread("Incorrect function type")
        self.parent = parent
        self.SaveDir = savepath
        # XML output always gets the "xml" extension regardless of `form`.
        self.form = "xml" if self.saveType=="XML" else form
        self.trigger = "all"
        self.cancelThread = False
        self.filename = filename
    def _SaveAllChecked(self):
        """Save every checked experiment into per-experiment folders.

        First sums the image counts of all checked experiments (for progress
        percentage), then writes each experiment either as individual images
        or as one text/XML file, emitting progress signals throughout.
        Raises CANCELThread when self.cancelThread is set.
        """
        completed = 0
        data = self.parent.tree.ImageData
        checkedItemList = []
        self.parent.tree.findCheckedItems(self.parent.tree.invisibleRootItem(), checkedItemList)
        allDim = 0
        self.progressText.emit(self.tr("Data size counting"))
        # Total number of images across all checked experiments.
        for expNumItem in checkedItemList:
            allDim += int(utils.num_pattern.findall(expNumItem.text(0))[1])
        for expNumItem in checkedItemList:
            exp_name = self.parent.tree.getExpNameItem(expNumItem).text(0)
            exp_num = utils.num_pattern.findall(expNumItem.text(0))[0]
            # Output goes under the temporary folder created by run().
            saveDir = os.path.join(self.tmp_folder.name, exp_name)
            utils.checkdir(saveDir)
            if self.saveType == "Image":
                saveDir = os.path.join(saveDir, exp_num)
                utils.checkdir(saveDir)
            if self.saveType != "Image":
                # One file per experiment; image mode names files per slice below.
                fname = '{0}{1}Experiment_{2}.{3}'.format(saveDir,
                                                          os.sep,
                                                          exp_num,
                                                          self.form)
            img_data = data[exp_name][exp_num]["data"]
            for i in range(img_data.Dimension[0]):
                if self.cancelThread:
                    raise CANCELThread()
                if self.saveType == "Image":
                    fname = '{0}{1}Image_{2}.{3}'.format(saveDir,
                                                         os.sep,
                                                         i+1,
                                                         self.form)
                    # NOTE(review): "Writting" typo is user-visible but is also a
                    # translation key — fix together with the translation files.
                    self.progressText.emit(
                        self.tr("Writting Image_{0}.{1} to the folder /{2}/{3}").format(
                            i+1,
                            self.form,
                            exp_name,
                            exp_num))
                    toimage(img_data.IntenseData[i,:,:],
                            cmin=img_data.min_val, cmax=img_data.max_val).save(fname)
                else:
                    self.progressText.emit(
                        self.tr("Writting Image {0}\{1} to the Experiment_{2}.{3}").format(
                            i+1,
                            img_data.Dimension[0],
                            exp_num,
                            self.form))
                    # Dispatch to bw.SingleWriteToXMLFile / SingleWriteToTextFile;
                    # the final flag marks the first slice (file header).
                    eval("bw.SingleWriteTo{}File".format(self.saveType))(fname,
                                                                         img_data,
                                                                         i,
                                                                         i==0)
                completed += 100/allDim
                self.progress.emit(completed)
    def _SaveSingle(self):
        """
        Saving current experiment number

        Writes the currently selected experiment either as one image per slice
        or as a single text/XML file named self.filename, emitting progress
        signals. Raises CANCELThread when self.cancelThread is set.
        """
        completed = 0
        allDim = self.parent.scroll.maximum()
        saveDir = self.tmp_folder.name
        img_data = self.parent.tree.ImageData[self.parent.curExpName][self.parent.curExpNum]["data"]
        # add ".xml" postfix if it's not presented for XML files
        if self.saveType == "XML":
            try:
                self.filename = re.search(r".+\.xml$", self.filename).group()
            except AttributeError:
                self.filename += ".xml"
        fname = '{0}{1}{2}'.format(saveDir,
                                   os.sep,
                                   self.filename)
        for i in range(allDim):
            if self.cancelThread:
                raise CANCELThread()
            if self.saveType == "Image":
                # Image mode: one numbered file per slice.
                fname = '{0}{1}{2}_{3}.{4}'.format(saveDir,
                                                   os.sep,
                                                   self.filename,
                                                   i+1,
                                                   self.form)
                self.progressText.emit(
                    self.tr("Writting {0}_{1}.{2}").format(self.filename,
                                                           i+1,
                                                           self.form))
                toimage(img_data.IntenseData[i,:,:],
                        cmin=img_data.min_val, cmax=img_data.max_val).save(fname)
            else:
                self.progressText.emit(
                    self.tr("Writting Image {0}\{1} to the {2}").format(i+1,
                                                                        allDim + 1,
                                                                        self.filename))
                # Dispatch to bw.SingleWriteToXMLFile / SingleWriteToTextFile;
                # the final flag marks the first slice (file header).
                eval("bw.SingleWriteTo{}File".format(self.saveType))(fname,
                                                                     img_data,
                                                                     i,
                                                                     i==0)
            completed += 100/allDim
            self.progress.emit(completed)
def run(self):
try:
utils.checkdir(self.SaveDir)
# create a temporary folder
self.tmp_folder = tempfile.TemporaryDirectory(suffix = ".TMP",
prefix="_MRIAssimilator_",
dir = self.SaveDir)
if self.trigger == "all":
|
elif self.trigger == "single":
self._SaveSingle()
except CANCELThread:
self.quit()
|
self._SaveAllChecked()
|
572. Subtree of Another Tree.go
|
package binary_tree
func isSubtree(root *TreeNode, subRoot *TreeNode) bool {
if root == nil && subRoot == nil {
return true
}
if root == nil || subRoot == nil {
return false
}
mid := isSameTree(root, subRoot) // 中
if mid {
return true
}
left := isSubtree(root.Left, subRoot)
if left {
return true
}
right := isSubtree(root.Right, subRoot)
if right {
|
return false
}
|
return true
}
|
module_bar.py
|
#!/usr/bin/env python
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Auxiliary module for testing gflags.py.
The purpose of this module is to define a few flags. We want to make
sure the unit tests for gflags.py involve more than one module.
"""
__author__ = '[email protected] (Alex Salcianu)'
import gflags
from gflags import _helpers
FLAGS = gflags.FLAGS
def DefineFlags(flag_values=FLAGS):
  """Defines some flags.

  (Reconstructed: the FIM split separated the function name from its
  signature and body.)

  Args:
    flag_values: The FlagValues object we want to register the flags
      with.
  """
  # The 'tmod_bar_' prefix (short for 'test_module_bar') ensures there
  # is no name clash with the existing flags.
  gflags.DEFINE_boolean('tmod_bar_x', True, 'Boolean flag.',
                        flag_values=flag_values)
  gflags.DEFINE_string('tmod_bar_y', 'default', 'String flag.',
                       flag_values=flag_values)
  gflags.DEFINE_boolean('tmod_bar_z', False,
                        'Another boolean flag from module bar.',
                        flag_values=flag_values)
  gflags.DEFINE_integer('tmod_bar_t', 4, 'Sample int flag.',
                        flag_values=flag_values)
  gflags.DEFINE_integer('tmod_bar_u', 5, 'Sample int flag.',
                        flag_values=flag_values)
  gflags.DEFINE_integer('tmod_bar_v', 6, 'Sample int flag.',
                        flag_values=flag_values)
def RemoveOneFlag(flag_name, flag_values=FLAGS):
  """Deletes a single flag definition, if it exists.

  Note: if the flag is not defined in gflags.FLAGS, this function does
  not do anything (in particular, it does not raise any exception).

  Motivation: We use this function for cleanup *after* a test: if
  there was a failure during a test and not all flags were declared,
  we do not want the cleanup code to crash.

  Args:
    flag_name: A string, the name of the flag to delete.
    flag_values: The FlagValues object we remove the flag from.
  """
  known_flags = flag_values.FlagDict()
  if flag_name in known_flags:
    delattr(flag_values, flag_name)
def NamesOfDefinedFlags():
  """Returns: List of names of the flags declared in this module."""
  return ['tmod_bar_%s' % suffix for suffix in ['x', 'y', 'z', 't', 'u', 'v']]
def RemoveFlags(flag_values=FLAGS):
  """Undoes DefineFlags(): removes every flag this module declares.

  Args:
    flag_values: The FlagValues object we remove the flags from.
  """
  for name in NamesOfDefinedFlags():
    RemoveOneFlag(name, flag_values=flag_values)
def GetModuleName():
  """Uses GetCallingModule() to return the name of this module.

  For checking that _GetCallingModule works as expected.

  Returns:
    A string, the name of this module.
  """
  return _helpers.GetCallingModule()
def ExecuteCode(code, global_dict):
  """Executes some code in a given global environment.

  For testing of _GetCallingModule.

  Args:
    code: A string, the code to be executed.
    global_dict: A dictionary, the global environment that code should
      be executed in.
  """
  # Indeed, using exec generates a lint warning. But some user code
  # actually uses exec, and we have to test for it ...
  # NOTE(review): exec is presumably called directly in this frame so that
  # _GetCallingModule attributes the executed code correctly — do not wrap
  # it in a helper; confirm against _helpers.GetCallingModule.
  exec(code, global_dict)  # pylint: disable=exec-used
def DisclaimKeyFlags():
  """Disclaims flags declared in this module."""
  # Thin wrapper over the gflags module-level call; exists so tests can
  # trigger the disclaim from this module's frame.
  gflags.DISCLAIM_key_flags()
|
DefineFlags
|
read-ds18b20.py
|
import sys
from w1thermsensor import W1ThermSensor

# Read one temperature sample from a DS18B20 1-wire sensor.
# Requires exactly one argument: the sensor's 1-wire id.
if len(sys.argv) == 2:
    sensor_id = sys.argv[1]
else:
    # Fixed typos in the usage text ("an DS18B20 wiht" -> "a DS18B20 with").
    print('usage: sudo ' + sys.argv[0] + ' <sensor id>')
    print('example: sudo ' + sys.argv[0] + ' 00000588806a - Read from a DS18B20 with id 00000588806a')
    sys.exit(1)

sensor = W1ThermSensor(W1ThermSensor.THERM_SENSOR_DS18B20, sensor_id)
temperature_in_celsius = sensor.get_temperature()
print('Temp={0:0.1f}*'.format(temperature_in_celsius))
|
#!/usr/bin/python
|
|
util.rs
|
use serde::{Serialize, Deserialize};
/// Coarse category tag used to classify items (Shared/Master/Server/Init).
/// `Debug`, `Clone` and `Copy` added for parity with the sibling enums in
/// this module (`VarReaderTypes`, `InDataTypes`); purely additive, so it is
/// backward compatible.
#[derive(Debug, Clone, Copy, Eq, PartialEq)]
pub enum Category {
    Shared,
    Master,
    Server,
    Init,
}
/// A value read from a variable, tagged with the primitive representation
/// it was read as (bool / i32 / i64 / f64).
#[derive(Debug, Serialize, Deserialize, Copy, Clone, PartialEq, PartialOrd)]
pub enum VarReaderTypes {
    Bool(bool),
    I32(i32),
    I64(i64),
    F64(f64)
}
/// Type tag for incoming data. Serialized in lowercase ("bool", "i32",
/// "i64", "f64") via the serde rename_all attribute below.
#[derive(Debug, Serialize, Deserialize, Copy, Clone)]
#[serde(rename_all = "lowercase")]
pub enum InDataTypes {
    Bool,
    I32,
    I64,
    F64
}
/// Decimal digits of a non-negative integer, stored least-significant
/// first (index 0 is the ones place).
pub struct NumberDigits {
    digits: Vec<i32>,
}

impl NumberDigits {
    /// Extracts the decimal digits of `value`. Zero or negative input
    /// yields an empty digit list (every `get` then returns 0).
    pub fn new(value: i32) -> Self {
        let mut digits = Vec::new();
        let mut value = value;
        while value > 0 {
            digits.push(value % 10);
            value /= 10;
        }
        Self { digits }
    }

    // Returns a 0 to simulate padding if the value is missing.
    // Reads least-significant digit first.
    pub fn get(&self, index: usize) -> i32 {
        // Idiomatic bounds handling: checked slice access instead of a
        // manual `index + 1 > len` comparison.
        self.digits.get(index).copied().unwrap_or(0)
    }
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn test_number_digits()
|
}
|
{
let digits = NumberDigits::new(503);
// Get 1s place
assert_eq!(digits.get(0), 3);
assert_eq!(digits.get(1), 0);
assert_eq!(digits.get(2), 5);
// Simulate padding in thousands place
assert_eq!(digits.get(3), 0);
}
|
index.tsx
|
import { useAtom } from 'jotai';
import './styles.css';
import logo from '../../assets/images/logo192.png';
import { IS_LOADING } from '../../atoms/spinner';
import { CEP_DEFAULT } from '../../atoms/cep';
import { ADDRESS_DEFAULT } from '../../atoms/address';
import Button from '../button';
import getAddress from '../../services/address';
export default function
|
() {
const [cep, setCep] = useAtom(CEP_DEFAULT);
const [address, setAddress] = useAtom(ADDRESS_DEFAULT);
const [headerLoader, setHeaderLoader] = useAtom(IS_LOADING);
const getAddressResult = async () => {
setHeaderLoader(true);
await getAddress({ cep }).then((result) => {
setCep(result.cep);
setAddress(result);
setHeaderLoader(false);
});
};
return (
<header className='header'>
<img src={logo} className='logo' alt='logo' />
<Button label='Update Content state' method={getAddressResult} />
</header>
);
}
|
Header
|
else-if-brace-style-closing-next-line.rs
|
// rustfmt-single_line_if_else_max_width: 0
// rustfmt-else_if_brace_style: ClosingNextLine
fn main()
|
{
if false {
();
();
}
if false
// lone if comment
{
();
();
}
let a = if 0 > 1 {
unreachable!()
}
else {
0x0
};
if true {
();
}
else if false {
();
();
}
else {
();
();
();
}
if true
// else-if-chain if comment
{
();
}
else if false
// else-if-chain else-if comment
{
();
();
}
else
// else-if-chain else comment
{
();
();
();
}
}
|
|
googlecode_upload.py
|
#!/usr/bin/env python
#
# Copyright 2006, 2007 Google Inc. All Rights Reserved.
# Author: [email protected] (David Anderson)
#
# Script for uploading files to a Google Code project.
#
# This is intended to be both a useful script for people who want to
# streamline project uploads and a reference implementation for
# uploading files to Google Code projects.
#
# To upload a file to Google Code, you need to provide a path to the
# file on your local machine, a small summary of what the file is, a
# project name, and a valid account that is a member or owner of that
# project. You can optionally provide a list of labels that apply to
# the file. The file will be uploaded under the same name that it has
# in your local filesystem (that is, the "basename" or last path
# component). Run the script with '--help' to get the exact syntax
# and available options.
#
# Note that the upload script requests that you enter your
# googlecode.com password. This is NOT your Gmail account password!
# This is the password you use on googlecode.com for committing to
# Subversion and uploading files. You can find your password by going
# to http://code.google.com/hosting/settings when logged in with your
# Gmail account. If you have already committed to your project's
# Subversion repository, the script will automatically retrieve your
# credentials from there (unless disabled, see the output of '--help'
# for details).
#
# If you are looking at this script as a reference for implementing
# your own Google Code file uploader, then you should take a look at
# the upload() function, which is the meat of the uploader. You
# basically need to build a multipart/form-data POST request with the
# right fields and send it to https://PROJECT.googlecode.com/files .
# Authenticate the request using HTTP Basic authentication, as is
# shown below.
#
# Licensed under the terms of the Apache Software License 2.0:
# http://www.apache.org/licenses/LICENSE-2.0
#
# Questions, comments, feature requests and patches are most welcome.
# Please direct all of these to the Google Code users group:
# http://groups.google.com/group/google-code-hosting
"""Google Code file uploader script.
"""
__author__ = '[email protected] (David Anderson)'
import httplib
import os.path
import optparse
import getpass
import base64
import sys
def get_svn_config_dir():
  """Return user's Subversion configuration directory."""
  try:
    from win32com.shell.shell import SHGetFolderPath
    import win32com.shell.shellcon
  except ImportError:
    # No win32api available: fall back to ~. This is right on unix, and
    # returns not entirely unreasonable results on Windows.
    return os.path.expanduser('~/.subversion')
  # On Windows with win32api, resolve the per-user APPDATA folder.
  appdata = SHGetFolderPath(0, win32com.shell.shellcon.CSIDL_APPDATA, 0, 0)
  return os.path.join(appdata.encode('utf-8'), 'Subversion')
def get_svn_auth(project_name, config_dir):
  """Return (username, password) for project_name in config_dir."""
  # (None, None) is the "no credentials found" answer for every exit path.
  empty = (None, None)
  try:
    from svn.core import SVN_AUTH_CRED_SIMPLE, svn_config_read_auth_data
    from svn.core import SubversionException
  except ImportError:
    return empty
  realm = ('<https://%s.googlecode.com:443> Google Code Subversion Repository'
           % project_name)
  # auth may be none even if no exception is raised, e.g. if config_dir does
  # not exist, or exists but has no entry for realm.
  try:
    auth = svn_config_read_auth_data(SVN_AUTH_CRED_SIMPLE, realm, config_dir)
  except SubversionException:
    auth = None
  if auth is None:
    return empty
  try:
    return (auth['username'], auth['password'])
  except KeyError:
    # Entry exists but is missing the expected keys.
    return empty
def upload(file, project_name, user_name, password, summary, labels=None):
  """Upload a file to a Google Code project's file server.

  Args:
    file: The local path to the file.
    project_name: The name of your project on Google Code.
    user_name: Your Google account name.
    password: The googlecode.com password for your account.
              Note that this is NOT your global Google Account password!
    summary: A small description for the file.
    labels: an optional list of label strings with which to tag the file.

  Returns: a tuple:
    http_status: 201 if the upload succeeded, something else if an
                 error occured.
    http_reason: The human-readable string associated with http_status
    file_url: If the upload succeeded, the URL of the file on Google
              Code, None otherwise.
  """
  # The login is the user part of [email protected]. If the login provided
  # is in the full user@domain form, strip it down.
  if user_name.endswith('@gmail.com'):
    user_name = user_name[:user_name.index('@gmail.com')]
  form_fields = [('summary', summary)]
  if labels is not None:
    # Labels arrive as raw strings; strip surrounding whitespace from each.
    form_fields.extend([('label', l.strip()) for l in labels])
  content_type, body = encode_upload_request(form_fields, file)
  upload_host = '%s.googlecode.com' % project_name
  upload_uri = '/files'
  # HTTP Basic auth. NOTE(review): b64encode on a str is Python 2 only;
  # this script as a whole targets Python 2.
  auth_token = base64.b64encode('%s:%s'% (user_name, password))
  headers = {
    'Authorization': 'Basic %s' % auth_token,
    'User-Agent': 'Googlecode.com uploader v0.9.4',
    'Content-Type': content_type,
    }
  server = httplib.HTTPSConnection(upload_host)
  server.request('POST', upload_uri, body, headers)
  resp = server.getresponse()
  # status/reason/Location were materialized by getresponse(), so they stay
  # readable after the connection is closed.
  server.close()
  if resp.status == 201:
    location = resp.getheader('Location', None)
  else:
    location = None
  return resp.status, resp.reason, location
def encode_upload_request(fields, file_path):
  """Encode the given fields and file into a multipart form body.

  fields is a sequence of (name, value) pairs. file is the path of
  the file to upload. The file will be uploaded to Google Code with
  the same file name.

  Returns: (content_type, body) ready for httplib.HTTP instance
  """
  # Fixed boundary marker; assumes it never appears inside the payload.
  BOUNDARY = '----------Googlecode_boundary_reindeer_flotilla'
  CRLF = '\r\n'
  body = []
  # Add the metadata about the upload first
  for key, value in fields:
    body.extend(
      ['--' + BOUNDARY,
       'Content-Disposition: form-data; name="%s"' % key,
       '',
       value,
       ])
  # Now add the file itself
  file_name = os.path.basename(file_path)
  # The whole file is read into memory; fine for the small release
  # artifacts this script targets. NOTE(review): mixing the binary file
  # content into a str join is Python 2 semantics.
  f = open(file_path, 'rb')
  file_content = f.read()
  f.close()
  body.extend(
    ['--' + BOUNDARY,
     'Content-Disposition: form-data; name="filename"; filename="%s"'
     % file_name,
     # The upload server determines the mime-type, no need to set it.
     'Content-Type: application/octet-stream',
     '',
     file_content,
     ])
  # Finalize the form body
  body.extend(['--' + BOUNDARY + '--', ''])
  return 'multipart/form-data; boundary=%s' % BOUNDARY, CRLF.join(body)
def
|
(file_path, project_name, summary, labels=None,
config_dir=None, user_name=None, tries=3):
"""Find credentials and upload a file to a Google Code project's file server.
file_path, project_name, summary, and labels are passed as-is to upload.
If config_dir is None, try get_svn_config_dir(); if it is 'none', skip
trying the Subversion configuration entirely. If user_name is not None, use
it for the first attempt; prompt for subsequent attempts.
Args:
file_path: The local path to the file.
project_name: The name of your project on Google Code.
summary: A small description for the file.
labels: an optional list of label strings with which to tag the file.
config_dir: Path to Subversion configuration directory, 'none', or None.
user_name: Your Google account name.
tries: How many attempts to make.
"""
if config_dir != 'none':
# Try to load username/password from svn config for first try.
if config_dir is None:
config_dir = get_svn_config_dir()
(svn_username, password) = get_svn_auth(project_name, config_dir)
if user_name is None:
# If username was not supplied by caller, use svn config.
user_name = svn_username
else:
# Just initialize password for the first try.
password = None
while tries > 0:
if user_name is None:
# Read username if not specified or loaded from svn config, or on
# subsequent tries.
sys.stdout.write('Please enter your googlecode.com username: ')
sys.stdout.flush()
user_name = sys.stdin.readline().rstrip()
if password is None:
# Read password if not loaded from svn config, or on subsequent tries.
print 'Please enter your googlecode.com password.'
print '** Note that this is NOT your Gmail account password! **'
print 'It is the password you use to access Subversion repositories,'
print 'and can be found here: http://code.google.com/hosting/settings'
password = getpass.getpass()
status, reason, url = upload(file_path, project_name, user_name, password,
summary, labels)
# Returns 403 Forbidden instead of 401 Unauthorized for bad
# credentials as of 2007-07-17.
if status in [httplib.FORBIDDEN, httplib.UNAUTHORIZED]:
# Rest for another try.
user_name = password = None
tries = tries - 1
else:
# We're done.
break
return status, reason, url
def main():
  """Parses command-line options, uploads the single given file.

  Returns the process exit status: 0 on success, 1 on failure.
  """
  parser = optparse.OptionParser(usage='googlecode-upload.py -s SUMMARY '
                                 '-p PROJECT [options] FILE')
  parser.add_option('--config-dir', dest='config_dir', metavar='DIR',
                    help='read svn auth data from DIR'
                         ' ("none" means not to use svn auth data)')
  parser.add_option('-s', '--summary', dest='summary',
                    help='Short description of the file')
  parser.add_option('-p', '--project', dest='project',
                    help='Google Code project name')
  parser.add_option('-u', '--user', dest='user',
                    help='Your Google Code username')
  parser.add_option('-l', '--labels', dest='labels',
                    help='An optional list of labels to attach to the file')
  options, args = parser.parse_args()
  # Validate required options and the single positional FILE argument;
  # parser.error() prints the message and exits with status 2.
  if not options.summary:
    parser.error('File summary is missing.')
  elif not options.project:
    parser.error('Project name is missing.')
  elif len(args) < 1:
    parser.error('File to upload not provided.')
  elif len(args) > 1:
    parser.error('Only one file may be specified.')
  file_path = args[0]
  # Labels are passed as one comma-separated string on the command line.
  if options.labels:
    labels = options.labels.split(',')
  else:
    labels = None
  status, reason, url = upload_find_auth(file_path, options.project,
                                         options.summary, labels,
                                         options.config_dir, options.user)
  if url:
    print 'The file was uploaded successfully.'
    print 'URL: %s' % url
    return 0
  else:
    print 'An error occurred. Your file was not uploaded.'
    print 'Google Code upload server said: %s (%s)' % (reason, status)
    return 1
if __name__ == '__main__':
sys.exit(main())
|
upload_find_auth
|
pw.ts
|
/**
* Palau subdivision code.
*
* ISO 3166-1 alpha-2: PW
*
* @link http://www.geonames.org/PW/administrative-division-palau.html
*/
export const PW: string[] = [
'002', // Aimeliik
|
'100', // Kayangel
'150', // Koror
'212', // Melekeok
'214', // Ngaraard
'218', // Ngarchelong
'222', // Ngardmau
'224', // Ngatpang
'226', // Ngchesar
'227', // Ngeremlengui
'228', // Ngiwal
'350', // Peleliu
'370' // Sonsorol
];
|
'004', // Airai
'010', // Angaur
'050', // Hatohobei
|
utils.rs
|
use crate::*;
// -----------------------------------------------------------------------------
/// Wraps parser `f` so that any whitespace items following its match are
/// consumed and returned alongside the parsed value.
pub(crate) fn ws<'a, O, F>(f: F) -> impl Fn(Span<'a>) -> IResult<Span<'a>, (O, Vec<WhiteSpace>)>
where
    F: Fn(Span<'a>) -> IResult<Span<'a>, O>,
{
    move |input: Span<'a>| {
        let (rest, value) = f(input)?;
        let (rest, trailing) = many0(white_space)(rest)?;
        Ok((rest, (value, trailing)))
    }
}
/// Like `ws`, but consumes no trailing whitespace: the whitespace list in
/// the result is always empty.
pub(crate) fn no_ws<'a, O, F>(f: F) -> impl Fn(Span<'a>) -> IResult<Span<'a>, (O, Vec<WhiteSpace>)>
where
    F: Fn(Span<'a>) -> IResult<Span<'a>, O>,
{
    move |input: Span<'a>| f(input).map(|(rest, value)| (rest, (value, Vec::new())))
}
/// Parses the exact token `t`, consuming any trailing whitespace.
#[cfg(not(feature = "trace"))]
pub(crate) fn symbol<'a>(t: &'a str) -> impl Fn(Span<'a>) -> IResult<Span<'a>, Symbol> {
    move |s: Span<'a>| map(ws(map(tag(t), into_locate)), |nodes| Symbol { nodes })(s)
}
/// Trace-enabled variant of `symbol`: identical parse, but reports
/// entry/exit (with nesting depth) through nom_tracable.
#[cfg(feature = "trace")]
pub(crate) fn symbol<'a>(t: &'a str) -> impl Fn(Span<'a>) -> IResult<Span<'a>, Symbol> {
    move |s: Span<'a>| {
        let (depth, s) = nom_tracable::forward_trace(s, &format!("symbol(\"{}\")", t));
        let body = || {
            let (s, x) = map(ws(map(tag(t), into_locate)), |x| Symbol { nodes: x })(s)?;
            Ok((s, x))
        };
        // Run the real parser, then report the result at the same depth.
        let ret = body();
        nom_tracable::backward_trace(ret, &format!("symbol(\"{}\")", t), depth)
    }
}
/// Parses the exact token `t` WITHOUT consuming trailing whitespace
/// (contrast with `symbol`).
#[cfg(not(feature = "trace"))]
pub(crate) fn symbol_exact<'a>(t: &'a str) -> impl Fn(Span<'a>) -> IResult<Span<'a>, Symbol> {
    move |s: Span<'a>| map(no_ws(map(tag(t), into_locate)), |nodes| Symbol { nodes })(s)
}
/// Trace-enabled variant of `symbol_exact`.
/// NOTE(review): the trace label says "symbol", not "symbol_exact" — looks
/// like a copy/paste from `symbol`; the label only affects trace output,
/// so it is left unchanged here. Confirm intent before renaming.
#[cfg(feature = "trace")]
pub(crate) fn symbol_exact<'a>(t: &'a str) -> impl Fn(Span<'a>) -> IResult<Span<'a>, Symbol> {
    move |s: Span<'a>| {
        let (depth, s) = nom_tracable::forward_trace(s, &format!("symbol(\"{}\")", t));
        let body = || {
            let (s, x) = map(no_ws(map(tag(t), into_locate)), |x| Symbol { nodes: x })(s)?;
            Ok((s, x))
        };
        let ret = body();
        nom_tracable::backward_trace(ret, &format!("symbol(\"{}\")", t), depth)
    }
}
/// Parses the word `t` as a keyword, consuming trailing whitespace.
/// Unlike `symbol`, the match must not be followed by an identifier
/// character, so e.g. "int" does not match inside "integer".
#[cfg(not(feature = "trace"))]
pub(crate) fn keyword<'a>(t: &'a str) -> impl Fn(Span<'a>) -> IResult<Span<'a>, Keyword> {
    move |s: Span<'a>| {
        let (s, x) = map(
            ws(alt((
                // `t` as the very last token of the input (nothing to peek at)...
                all_consuming(map(tag(t), into_locate)),
                // ...or `t` followed by a non-identifier char (peeked, not consumed).
                terminated(map(tag(t), into_locate), peek(none_of(AZ09_))),
            ))),
            |x| Keyword { nodes: x },
        )(s)?;
        Ok((s, x))
    }
}
/// Trace-enabled variant of `keyword`: same parse, wrapped in
/// nom_tracable entry/exit reporting.
#[cfg(feature = "trace")]
pub(crate) fn keyword<'a>(t: &'a str) -> impl Fn(Span<'a>) -> IResult<Span<'a>, Keyword> {
    move |s: Span<'a>| {
        let (depth, s) = nom_tracable::forward_trace(s, &format!("keyword(\"{}\")", t));
        let body = || {
            let (s, x) = map(
                ws(alt((
                    // End-of-input match, or `t` followed by a non-identifier char.
                    all_consuming(map(tag(t), into_locate)),
                    terminated(map(tag(t), into_locate), peek(none_of(AZ09_))),
                ))),
                |x| Keyword { nodes: x },
            )(s)?;
            Ok((s, x))
        };
        let ret = body();
        nom_tracable::backward_trace(ret, &format!("keyword(\"{}\")", t), depth)
    }
}
/// Parses `f` wrapped in parentheses: `( f )`.
#[cfg(not(feature = "trace"))]
pub(crate) fn paren<'a, O, F>(f: F) -> impl Fn(Span<'a>) -> IResult<Span<'a>, Paren<O>>
where
    F: Fn(Span<'a>) -> IResult<Span<'a>, O>,
{
    move |s: Span<'a>| {
        let (s, open) = symbol("(")(s)?;
        let (s, inner) = f(s)?;
        let (s, close) = symbol(")")(s)?;
        Ok((s, Paren { nodes: (open, inner, close) }))
    }
}
/// Trace-enabled variant of `paren`.
#[cfg(feature = "trace")]
pub(crate) fn paren<'a, O, F>(f: F) -> impl Fn(Span<'a>) -> IResult<Span<'a>, Paren<O>>
where
    F: Fn(Span<'a>) -> IResult<Span<'a>, O>,
{
    move |s: Span<'a>| {
        let (depth, s) = nom_tracable::forward_trace(s, "paren");
        let body = || {
            let (s, a) = symbol("(")(s)?;
            let (s, b) = f(s)?;
            let (s, c) = symbol(")")(s)?;
            Ok((s, Paren { nodes: (a, b, c) }))
        };
        let ret = body();
        nom_tracable::backward_trace(ret, "paren", depth)
    }
}
/// Like `paren`, but the closing `)` is matched with `symbol_exact`, so no
/// whitespace is consumed after it.
#[cfg(not(feature = "trace"))]
pub(crate) fn paren_exact<'a, O, F>(f: F) -> impl Fn(Span<'a>) -> IResult<Span<'a>, Paren<O>>
where
    F: Fn(Span<'a>) -> IResult<Span<'a>, O>,
{
    move |s: Span<'a>| {
        let (s, open) = symbol("(")(s)?;
        let (s, inner) = f(s)?;
        let (s, close) = symbol_exact(")")(s)?;
        Ok((s, Paren { nodes: (open, inner, close) }))
    }
}
/// Trace-enabled variant of `paren_exact`.
/// NOTE(review): the trace label is "paren", not "paren_exact" — appears
/// copied from `paren`; trace output only, left unchanged.
#[cfg(feature = "trace")]
pub(crate) fn paren_exact<'a, O, F>(f: F) -> impl Fn(Span<'a>) -> IResult<Span<'a>, Paren<O>>
where
    F: Fn(Span<'a>) -> IResult<Span<'a>, O>,
{
    move |s: Span<'a>| {
        let (depth, s) = nom_tracable::forward_trace(s, "paren");
        let body = || {
            let (s, a) = symbol("(")(s)?;
            let (s, b) = f(s)?;
            let (s, c) = symbol_exact(")")(s)?;
            Ok((s, Paren { nodes: (a, b, c) }))
        };
        let ret = body();
        nom_tracable::backward_trace(ret, "paren", depth)
    }
}
/// Parses `f` wrapped in square brackets: `[ f ]`.
#[cfg(not(feature = "trace"))]
pub(crate) fn bracket<'a, O, F>(f: F) -> impl Fn(Span<'a>) -> IResult<Span<'a>, Bracket<O>>
where
    F: Fn(Span<'a>) -> IResult<Span<'a>, O>,
{
    move |s: Span<'a>| {
        let (s, open) = symbol("[")(s)?;
        let (s, inner) = f(s)?;
        let (s, close) = symbol("]")(s)?;
        Ok((s, Bracket { nodes: (open, inner, close) }))
    }
}
/// Trace-enabled variant of `bracket`.
#[cfg(feature = "trace")]
pub(crate) fn bracket<'a, O, F>(f: F) -> impl Fn(Span<'a>) -> IResult<Span<'a>, Bracket<O>>
where
    F: Fn(Span<'a>) -> IResult<Span<'a>, O>,
{
    move |s: Span<'a>| {
        let (depth, s) = nom_tracable::forward_trace(s, "bracket");
        let body = || {
            let (s, a) = symbol("[")(s)?;
            let (s, b) = f(s)?;
            let (s, c) = symbol("]")(s)?;
            Ok((s, Bracket { nodes: (a, b, c) }))
        };
        let ret = body();
        nom_tracable::backward_trace(ret, "bracket", depth)
    }
}
#[cfg(not(feature = "trace"))]
pub(crate) fn brace<'a, O, F>(f: F) -> impl Fn(Span<'a>) -> IResult<Span<'a>, Brace<O>>
where
F: Fn(Span<'a>) -> IResult<Span<'a>, O>,
|
/// Trace-enabled variant of `brace`: parses `{ f }` with nom_tracable
/// entry/exit reporting.
#[cfg(feature = "trace")]
pub(crate) fn brace<'a, O, F>(f: F) -> impl Fn(Span<'a>) -> IResult<Span<'a>, Brace<O>>
where
    F: Fn(Span<'a>) -> IResult<Span<'a>, O>,
{
    move |s: Span<'a>| {
        let (depth, s) = nom_tracable::forward_trace(s, "brace");
        let body = || {
            let (s, a) = symbol("{")(s)?;
            let (s, b) = f(s)?;
            let (s, c) = symbol("}")(s)?;
            Ok((s, Brace { nodes: (a, b, c) }))
        };
        let ret = body();
        nom_tracable::backward_trace(ret, "brace", depth)
    }
}
#[cfg(not(feature = "trace"))]
pub(crate) fn apostrophe_brace<'a, O, F>(
f: F,
) -> impl Fn(Span<'a>) -> IResult<Span<'a>, ApostropheBrace<O>>
where
F: Fn(Span<'a>) -> IResult<Span<'a>, O>,
{
move |s: Span<'a>| {
let (s, a) = symbol("'{")(s)?;
let (s, b) = f(s)?;
let (s, c) = symbol("}")(s)?;
Ok((s, ApostropheBrace { nodes: (a, b, c) }))
}
}
/// Trace-enabled variant of `apostrophe_brace`.
#[cfg(feature = "trace")]
pub(crate) fn apostrophe_brace<'a, O, F>(
    f: F,
) -> impl Fn(Span<'a>) -> IResult<Span<'a>, ApostropheBrace<O>>
where
    F: Fn(Span<'a>) -> IResult<Span<'a>, O>,
{
    move |s: Span<'a>| {
        let (depth, s) = nom_tracable::forward_trace(s, "apostrophe_brace");
        let body = || {
            let (s, a) = symbol("'{")(s)?;
            let (s, b) = f(s)?;
            let (s, c) = symbol("}")(s)?;
            Ok((s, ApostropheBrace { nodes: (a, b, c) }))
        };
        let ret = body();
        nom_tracable::backward_trace(ret, "apostrophe_brace", depth)
    }
}
/// Parses a separated list `g (f g)*`: an initial element via `g`, then
/// zero or more (separator, element) pairs via `f` then `g`.
/// A trailing separator whose following element fails to parse is NOT
/// consumed: `s` is only advanced once BOTH the separator and the next
/// element succeed (the inner `break` abandons the partial match).
pub(crate) fn list<'a, O1, O2, F, G>(
    f: F,
    g: G,
) -> impl Fn(Span<'a>) -> IResult<Span<'a>, List<O1, O2>>
where
    F: Fn(Span<'a>) -> IResult<Span<'a>, O1>,
    G: Fn(Span<'a>) -> IResult<Span<'a>, O2>,
{
    move |s: Span<'a>| {
        let (s, a) = g(s)?;
        let mut s = s;
        let mut ret = Vec::new();
        while let Ok((t, b)) = f(s) {
            if let Ok((u, c)) = g(t) {
                // Commit: advance past separator + element as one unit.
                s = u;
                ret.push((b, c));
            } else {
                break;
            }
        }
        Ok((s, List { nodes: (a, ret) }))
    }
}
/// Runs three parsers in sequence and returns their results as a tuple.
pub(crate) fn triple<'a, O1, O2, O3, F, G, H>(
    f: F,
    g: G,
    h: H,
) -> impl Fn(Span<'a>) -> IResult<Span<'a>, (O1, O2, O3)>
where
    F: Fn(Span<'a>) -> IResult<Span<'a>, O1>,
    G: Fn(Span<'a>) -> IResult<Span<'a>, O2>,
    H: Fn(Span<'a>) -> IResult<Span<'a>, O3>,
{
    move |s: Span<'a>| {
        let (s, first) = f(s)?;
        let (s, second) = g(s)?;
        let (s, third) = h(s)?;
        Ok((s, (first, second, third)))
    }
}
// -----------------------------------------------------------------------------
pub(crate) fn eof(s: Span) -> IResult<Span, Span> {
use nom::InputLength;
if s.input_len() == 0 {
Ok((s, s))
} else {
Err(Err::Error(make_error(s, ErrorKind::Eof)))
}
}
/// Parses one whitespace item: a run of spaces/newlines or a comment.
/// Outside a directive body, a compiler directive (other than `resetall`)
/// also counts as whitespace; inside one (see `in_directive`) directives
/// are not consumed — presumably to avoid re-entering directive parsing
/// while already inside a directive (confirm against the directive parser).
/// The `peek(char(..))` guards are cheap first-character dispatch before
/// the heavier sub-parsers run.
#[tracable_parser]
#[packrat_parser]
pub(crate) fn white_space(s: Span) -> IResult<Span, WhiteSpace> {
    if in_directive() {
        alt((
            map(multispace1, |x: Span| {
                WhiteSpace::Space(Box::new(into_locate(x)))
            }),
            map(preceded(peek(char('/')), comment), |x| {
                WhiteSpace::Comment(Box::new(x))
            }),
        ))(s)
    } else {
        alt((
            map(multispace1, |x: Span| {
                WhiteSpace::Space(Box::new(into_locate(x)))
            }),
            map(preceded(peek(char('/')), comment), |x| {
                WhiteSpace::Comment(Box::new(x))
            }),
            map(
                preceded(peek(char('`')), compiler_directive_without_resetall),
                |x| WhiteSpace::CompilerDirective(Box::new(x)),
            ),
        ))(s)
    }
}
// Thread-local stack acting as a nesting counter: one `()` is pushed for
// each directive currently being parsed (see begin_directive /
// end_directive below); `in_directive` checks whether it is non-empty.
thread_local!(
    static IN_DIRECTIVE: core::cell::RefCell<Vec<()>> = {
        core::cell::RefCell::new(Vec::new())
    }
);
/// True while at least one `begin_directive` has not been matched by an
/// `end_directive`.
pub(crate) fn in_directive() -> bool {
    IN_DIRECTIVE.with(|stack| !stack.borrow().is_empty())
}

/// Marks entry into a directive body (pushes one nesting level).
pub(crate) fn begin_directive() {
    IN_DIRECTIVE.with(|stack| stack.borrow_mut().push(()));
}

/// Marks exit from a directive body (pops one nesting level; no-op when
/// the stack is already empty).
pub(crate) fn end_directive() {
    IN_DIRECTIVE.with(|stack| {
        stack.borrow_mut().pop();
    });
}
// -----------------------------------------------------------------------------
/// Keyword-set selector matching a `begin_keywords` version string (IEEE
/// 1364/1800 editions, see `begin_keywords`), plus `Directive` for the
/// directive keyword table. `Debug` derive added for diagnostics; purely
/// additive and backward compatible.
#[derive(Clone, Copy, Debug)]
pub(crate) enum VersionSpecifier {
    Ieee1364_1995,
    Ieee1364_2001,
    Ieee1364_2001Noconfig,
    Ieee1364_2005,
    Ieee1800_2005,
    Ieee1800_2009,
    Ieee1800_2012,
    Ieee1800_2017,
    Directive,
}
// Thread-local stack of active keyword-set versions; `begin_keywords`
// pushes, `end_keywords` pops, `current_version` reads the top.
thread_local!(
    static CURRENT_VERSION: core::cell::RefCell<Vec<VersionSpecifier>> = {
        core::cell::RefCell::new(Vec::new())
    }
);
/// Activates the keyword set named by `version` (a `begin_keywords`
/// argument). Unrecognized version strings are silently ignored, exactly
/// as before.
pub(crate) fn begin_keywords(version: &str) {
    let selected = match version {
        "1364-1995" => Some(VersionSpecifier::Ieee1364_1995),
        "1364-2001" => Some(VersionSpecifier::Ieee1364_2001),
        "1364-2001-noconfig" => Some(VersionSpecifier::Ieee1364_2001Noconfig),
        "1364-2005" => Some(VersionSpecifier::Ieee1364_2005),
        "1800-2005" => Some(VersionSpecifier::Ieee1800_2005),
        "1800-2009" => Some(VersionSpecifier::Ieee1800_2009),
        "1800-2012" => Some(VersionSpecifier::Ieee1800_2012),
        "1800-2017" => Some(VersionSpecifier::Ieee1800_2017),
        "directive" => Some(VersionSpecifier::Directive),
        _ => None,
    };
    if let Some(v) = selected {
        CURRENT_VERSION.with(|stack| stack.borrow_mut().push(v));
    }
}
/// Deactivates the innermost keyword set (no-op if none is active).
pub(crate) fn end_keywords() {
    CURRENT_VERSION.with(|stack| {
        stack.borrow_mut().pop();
    });
}
/// Returns the keyword-set version currently in effect (the innermost
/// `begin_keywords` not yet closed by `end_keywords`), or `None` when no
/// keyword set is active.
pub(crate) fn current_version() -> Option<VersionSpecifier> {
    // `VersionSpecifier` is `Copy`, so `.copied()` replaces the manual
    // `match { Some(x) => Some(*x), None => None }`.
    CURRENT_VERSION.with(|stack| stack.borrow().last().copied())
}
// -----------------------------------------------------------------------------
/// Re-joins two spans into one, returning `None` when the underlying
/// string fragments cannot be joined. The result reuses `a`'s byte
/// offset, line number and extra data, since the joined text starts
/// where `a` starts.
pub(crate) fn concat<'a>(a: Span<'a>, b: Span<'a>) -> Option<Span<'a>> {
    // SAFETY-NOTE(review): presumably str_concat::concat requires the two
    // fragments to be adjacent slices of the same allocation and reports
    // failure via Err (mapped to None here) — confirm against the
    // str_concat crate docs before touching this.
    let c = unsafe { str_concat::concat(a.fragment(), b.fragment()) };
    if let Ok(c) = c {
        let ret = unsafe {
            Span::new_from_raw_offset(a.location_offset(), a.location_line(), c, a.extra)
        };
        Some(ret)
    } else {
        None
    }
}
/// True when `s` exactly matches a keyword of the active standard version
/// (per `current_version`). With no active `begin_keywords`, the IEEE
/// 1800-2017 keyword set is used as the default.
pub(crate) fn is_keyword(s: &Span) -> bool {
    let keywords = match current_version() {
        Some(VersionSpecifier::Ieee1364_1995) => KEYWORDS_1364_1995,
        Some(VersionSpecifier::Ieee1364_2001) => KEYWORDS_1364_2001,
        Some(VersionSpecifier::Ieee1364_2001Noconfig) => KEYWORDS_1364_2001_NOCONFIG,
        Some(VersionSpecifier::Ieee1364_2005) => KEYWORDS_1364_2005,
        Some(VersionSpecifier::Ieee1800_2005) => KEYWORDS_1800_2005,
        Some(VersionSpecifier::Ieee1800_2009) => KEYWORDS_1800_2009,
        Some(VersionSpecifier::Ieee1800_2012) => KEYWORDS_1800_2012,
        Some(VersionSpecifier::Ieee1800_2017) => KEYWORDS_1800_2017,
        Some(VersionSpecifier::Directive) => KEYWORDS_DIRECTIVE,
        None => KEYWORDS_1800_2017,
    };
    // Manual scan-and-return loop replaced with Iterator::any; keyword
    // tables are small, so a linear scan is fine.
    keywords.iter().any(|k| s.fragment() == k)
}
/// Converts a located span into the crate's own `Locate` record
/// (byte offset, source line, and fragment length).
pub(crate) fn into_locate(s: Span) -> Locate {
    let offset = s.location_offset();
    let line = s.location_line();
    let len = s.fragment().len();
    Locate { offset, line, len }
}
|
{
move |s: Span<'a>| {
let (s, a) = symbol("{")(s)?;
let (s, b) = f(s)?;
let (s, c) = symbol("}")(s)?;
Ok((s, Brace { nodes: (a, b, c) }))
}
}
|
test_iter.rs
|
mod test_iter {
use abyssiniandb::{DbBytes, DbMap, DbMapKeyType, DbString, DbU64};
use std::collections::BTreeMap;
//
// Asserts that a freshly-opened (empty) map reports empty/zero and that
// its iterator yields nothing. The commented-out lines cover iterator
// flavors not currently exercised.
fn iter_test_map_empty_iter<T: DbMap<K>, K: DbMapKeyType>(db_map: &mut T) {
    assert_eq!(db_map.is_empty().unwrap(), true);
    assert_eq!(db_map.len().unwrap(), 0);
    //
    //assert_eq!(db_map.drain().next(), None);
    //assert_eq!(db_map.keys().next(), None);
    //assert_eq!(db_map.values().next(), None);
    //assert_eq!(db_map.values_mut().next(), None);
    assert_eq!(db_map.iter().next(), None);
    //assert_eq!(db_map.iter_mut().next(), None);
    //assert_eq!(db_map.into_iter().next(), None);
}
//
// Inserts five string keys and checks the iterated pairs. Results are
// collected into a BTreeMap first so the assertions can rely on sorted
// key order regardless of the map's native iteration order.
fn basic_test_map_string<T: DbMap<DbString>>(db_map: &mut T) {
    // insert
    db_map.put_string("key01", "value1").unwrap();
    db_map.put_string("key02", "value2").unwrap();
    db_map.put_string("key03", "value3").unwrap();
    db_map.put_string("key04", "value4").unwrap();
    db_map.put_string("key05", "value5").unwrap();
    assert_eq!(db_map.len().unwrap(), 5);
    // iterator
    let btmap: BTreeMap<DbString, Vec<u8>> = db_map.iter_mut().collect();
    let mut iter = btmap.into_iter();
    //let mut iter = db_map.iter_mut();
    assert_eq!(iter.next(), Some(("key01".into(), "value1".into())));
    assert_eq!(iter.next(), Some(("key02".into(), "value2".into())));
    assert_eq!(iter.next(), Some(("key03".into(), "value3".into())));
    assert_eq!(iter.next(), Some(("key04".into(), "value4".into())));
    assert_eq!(iter.next(), Some(("key05".into(), "value5".into())));
    assert_eq!(iter.next(), None);
    //
    db_map.sync_data().unwrap();
}
// Same as basic_test_map_string but with 100 generated keys, plus a
// second pass that iterates with a for-loop.
fn medium_test_map_string<T: DbMap<DbString>>(db_map: &mut T) {
    const LOOP_MAX: u64 = 100;
    // insert
    for i in 0..LOOP_MAX {
        let key = format!("key{:02}", i);
        let value = format!("value{}", i);
        db_map.put_string(&key, &value).unwrap();
    }
    assert_eq!(db_map.len().unwrap(), LOOP_MAX);
    // iterator (sorted via BTreeMap; "key{:02}" zero-padding keeps the
    // lexicographic order equal to the numeric order)
    let btmap: BTreeMap<DbString, Vec<u8>> = db_map.iter_mut().collect();
    let mut iter = btmap.into_iter();
    //let mut iter = db_map.iter_mut();
    for i in 0..LOOP_MAX {
        let key = format!("key{:02}", i);
        let value = format!("value{}", i);
        assert_eq!(iter.next(), Some((key.into(), value.as_bytes().to_vec())));
    }
    assert_eq!(iter.next(), None);
    //
    // iter on loop
    let btmap: BTreeMap<DbString, Vec<u8>> = db_map.iter_mut().collect();
    let iter = btmap.into_iter();
    //let mut iter = db_map.iter_mut();
    let mut i: i32 = 0;
    for (k, v) in iter {
        let key = format!("key{:02}", i);
        let value = format!("value{}", i);
        assert_eq!(k, key.into());
        assert_eq!(v, value.as_bytes().to_vec());
        i += 1;
    }
    //
    // into iter on loop
    //let mut iter = db_map.into_iter();
    //
    //db_map.sync_data().unwrap();
}
// Like basic_test_map_string, but keyed by u64 values (DbU64).
fn basic_test_map_dbint<T: DbMap<DbU64>>(db_map: &mut T) {
    // insert
    db_map.put_string(&12301, "value1").unwrap();
    db_map.put_string(&12302, "value2").unwrap();
    db_map.put_string(&12303, "value3").unwrap();
    db_map.put_string(&12304, "value4").unwrap();
    db_map.put_string(&12305, "value5").unwrap();
    assert_eq!(db_map.len().unwrap(), 5);
    // iterator (sorted via BTreeMap so the order is deterministic)
    let btmap: BTreeMap<DbU64, Vec<u8>> = db_map.iter_mut().collect();
    let mut iter = btmap.into_iter();
    //let mut iter = db_map.iter_mut();
    assert_eq!(iter.next(), Some((12301.into(), b"value1".to_vec())));
    assert_eq!(iter.next(), Some((12302.into(), b"value2".to_vec())));
    assert_eq!(iter.next(), Some((12303.into(), b"value3".to_vec())));
    assert_eq!(iter.next(), Some((12304.into(), b"value4".to_vec())));
    assert_eq!(iter.next(), Some((12305.into(), b"value5".to_vec())));
    assert_eq!(iter.next(), None);
    //
    db_map.sync_data().unwrap();
}
// 100-key variant of basic_test_map_dbint; keys are 12300 + i.
fn medium_test_map_dbint<T: DbMap<DbU64>>(db_map: &mut T) {
    const LOOP_MAX: u64 = 100;
    // insert
    for i in 0..LOOP_MAX {
        // `i` is already u64 here; the cast is a no-op kept for symmetry
        // with the i32 loop counter below.
        let key = 12300u64 + i as u64;
        let value = format!("value{}", i);
        db_map.put_string(&key, &value).unwrap();
    }
    assert_eq!(db_map.len().unwrap(), LOOP_MAX);
    // iterator (sorted via BTreeMap so the order is deterministic)
    let btmap: BTreeMap<DbU64, Vec<u8>> = db_map.iter_mut().collect();
    let mut iter = btmap.into_iter();
    //let mut iter = db_map.iter_mut();
    for i in 0..LOOP_MAX {
        let key = 12300u64 + i as u64;
        let value = format!("value{}", i);
        assert_eq!(iter.next(), Some((key.into(), value.as_bytes().to_vec())));
    }
    assert_eq!(iter.next(), None);
    //
    // iter on loop
    let btmap: BTreeMap<DbU64, Vec<u8>> = db_map.iter_mut().collect();
    let iter = btmap.into_iter();
    //let mut iter = db_map.iter_mut();
    let mut i: i32 = 0;
    for (k, v) in iter {
        let key = 12300u64 + i as u64;
        let value = format!("value{}", i);
        assert_eq!(k, key.into());
        assert_eq!(v, value.as_bytes().to_vec());
        i += 1;
    }
    //
    // into iter on loop
    //let mut iter = db_map.into_iter();
    //
    //db_map.sync_data().unwrap();
}
fn basic_test_map_bytes<T: DbMap<DbBytes>>(db_map: &mut T) {
// insert
db_map.put_string(b"key01".into(), "value1").unwrap();
db_map.put_string(b"key02".into(), "value2").unwrap();
db_map.put_string(b"key03".into(), "value3").unwrap();
db_map.put_string(b"key04".into(), "value4").unwrap();
db_map.put_string(b"key05".into(), "value5").unwrap();
assert_eq!(db_map.len().unwrap(), 5);
// iterator
let btmap: BTreeMap<DbBytes, Vec<u8>> = db_map.iter_mut().collect();
let mut iter = btmap.into_iter();
//let mut iter = db_map.iter_mut();
assert_eq!(iter.next(), Some((b"key01".into(), b"value1".to_vec())));
assert_eq!(iter.next(), Some((b"key02".into(), b"value2".to_vec())));
assert_eq!(iter.next(), Some((b"key03".into(), b"value3".to_vec())));
assert_eq!(iter.next(), Some((b"key04".into(), b"value4".to_vec())));
assert_eq!(iter.next(), Some((b"key05".into(), b"value5".to_vec())));
assert_eq!(iter.next(), None);
//
|
// 100-key variant for byte-string keys (DbBytes); keys are built from the
// same "key{:02}" strings as the DbString tests.
fn medium_test_map_bytes<T: DbMap<DbBytes>>(db_map: &mut T) {
    const LOOP_MAX: u64 = 100;
    // insert
    for i in 0..LOOP_MAX {
        let key = format!("key{:02}", i);
        let value = format!("value{}", i);
        db_map.put_string(&key, &value).unwrap();
    }
    assert_eq!(db_map.len().unwrap(), LOOP_MAX);
    // iterator (sorted via BTreeMap; zero-padded keys sort numerically)
    let btmap: BTreeMap<DbBytes, Vec<u8>> = db_map.iter_mut().collect();
    let mut iter = btmap.into_iter();
    //let mut iter = db_map.iter_mut();
    for i in 0..LOOP_MAX {
        let key = format!("key{:02}", i);
        let value = format!("value{}", i);
        assert_eq!(iter.next(), Some((key.into(), value.as_bytes().to_vec())));
    }
    assert_eq!(iter.next(), None);
    //
    // iter on loop
    let btmap: BTreeMap<DbBytes, Vec<u8>> = db_map.iter_mut().collect();
    let iter = btmap.into_iter();
    //let mut iter = db_map.iter_mut();
    let mut i: i32 = 0;
    for (k, v) in iter {
        let key = format!("key{:02}", i);
        let value = format!("value{}", i);
        assert_eq!(k, key.into());
        assert_eq!(v, value.as_bytes().to_vec());
        i += 1;
    }
    //
    // into iter on loop
    //let mut iter = db_map.into_iter();
    //
    //db_map.sync_data().unwrap();
}
////
#[test]
fn test_file_map_string() {
    // Start from a clean slate: drop any database left over from a prior run.
    let path = "target/tmp/test_iter-s.abyssiniandb";
    let _ = std::fs::remove_dir_all(path);
    let db = abyssiniandb::open_file(path).unwrap();
    let mut map = db.db_map_string("some_string_1").unwrap();
    // An empty map must yield an empty iterator.
    iter_test_map_empty_iter(&mut map);
    // Then exercise small and medium workloads against the same map.
    basic_test_map_string(&mut map);
    medium_test_map_string(&mut map);
}
#[test]
fn test_file_map_dbu64() {
    // Start from a clean slate: drop any database left over from a prior run.
    let path = "target/tmp/test_iter-u.abyssiniandb";
    let _ = std::fs::remove_dir_all(path);
    let db = abyssiniandb::open_file(path).unwrap();
    let mut map = db.db_map_u64("some_u64_1").unwrap();
    // An empty map must yield an empty iterator.
    iter_test_map_empty_iter(&mut map);
    // Then exercise small and medium workloads against the same map.
    basic_test_map_dbint(&mut map);
    medium_test_map_dbint(&mut map);
}
#[test]
fn test_file_map_bytes() {
    // Start from a clean slate: drop any database left over from a prior run.
    let path = "target/tmp/test_iter-b.abyssiniandb";
    let _ = std::fs::remove_dir_all(path);
    let db = abyssiniandb::open_file(path).unwrap();
    let mut map = db.db_map_bytes("some_bytes_1").unwrap();
    // An empty map must yield an empty iterator.
    iter_test_map_empty_iter(&mut map);
    // Then exercise small and medium workloads against the same map.
    basic_test_map_bytes(&mut map);
    medium_test_map_bytes(&mut map);
}
}
|
db_map.sync_data().unwrap();
}
|
ingest.go
|
package config
const (
// Keep 1024 chunks in cache; keeps 2G if chunks are 2MB.
defaultLinkCacheSize = 1024
// Multihashes are 128 bytes so 16384 results in a 2MB chunk when full.
defaultLinkedChunkSize = 16384
defaultPubSubTopic = "indexer/ingest"
)
type PublisherKind string
const (
DTSyncPublisherKind PublisherKind = "dtsync"
HttpPublisherKind PublisherKind = "http"
)
// Ingest configures settings related to the ingestion protocol.
type Ingest struct {
	// LinkCacheSize is the maximum number of links that the cache can store
	// before LRU eviction. If a single linked list has more links than the
	// cache can hold, the cache is resized to be able to hold all links.
	LinkCacheSize int
	// LinkedChunkSize is the number of multihashes in each chunk in the
	// advertised entries linked list. If multihashes are 128 bytes, then
	// setting LinkedChunkSize = 16384 will result in blocks of about 2Mb when
	// full.
	LinkedChunkSize int
	// PubSubTopic used to advertise ingestion announcements.
	PubSubTopic string
	// PurgeLinkCache tells whether to purge the link cache on daemon startup.
	PurgeLinkCache bool
	// HttpPublisher configures the go-legs httpsync publisher.
	HttpPublisher HttpPublisher
	// PublisherKind specifies which legs.Publisher implementation to use.
	PublisherKind PublisherKind
}
// NewIngest instantiates a new Ingest configuration with default values.
func NewIngest() Ingest
|
// PopulateDefaults replaces zero-values in the config with default values.
func (c *Ingest) PopulateDefaults() {
	if c.LinkCacheSize == 0 {
		c.LinkCacheSize = defaultLinkCacheSize
	}
	if c.LinkedChunkSize == 0 {
		c.LinkedChunkSize = defaultLinkedChunkSize
	}
	if c.PubSubTopic == "" {
		c.PubSubTopic = defaultPubSubTopic
	}
	// Keep defaulting consistent with NewIngest: an unset PublisherKind
	// means the dtsync publisher.
	if c.PublisherKind == "" {
		c.PublisherKind = DTSyncPublisherKind
	}
}
|
{
return Ingest{
LinkCacheSize: defaultLinkCacheSize,
LinkedChunkSize: defaultLinkedChunkSize,
PubSubTopic: defaultPubSubTopic,
HttpPublisher: NewHttpPublisher(),
PublisherKind: DTSyncPublisherKind,
}
}
|
read_value_test.py
|
# Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import json
import unittest
import mock
from dashboard.pinpoint.models.quest import read_value
from tracing.value import histogram_set
from tracing.value import histogram as histogram_module
from tracing.value.diagnostics import generic_set
from tracing.value.diagnostics import reserved_infos
_BASE_ARGUMENTS_HISTOGRAMS = {'benchmark': 'speedometer'}
_BASE_ARGUMENTS_GRAPH_JSON = {
'chart': 'chart_name',
'trace': 'trace_name',
}
class ReadHistogramsJsonValueQuestTest(unittest.TestCase):
  """Tests FromDict() construction of ReadHistogramsJsonValue quests."""

  def testMinimumArguments(self):
    """Only the benchmark is given; everything else takes its default."""
    quest = read_value.ReadHistogramsJsonValue.FromDict(
        _BASE_ARGUMENTS_HISTOGRAMS)
    self.assertEqual(
        quest, read_value.ReadHistogramsJsonValue('chartjson-output.json'))

  def testAllArguments(self):
    """Every optional argument is forwarded to the constructor."""
    arguments = dict(_BASE_ARGUMENTS_HISTOGRAMS)
    arguments.update(
        chart='timeToFirst',
        tir_label='pcv1-cold',
        trace='trace_name',
        statistic='avg')
    quest = read_value.ReadHistogramsJsonValue.FromDict(arguments)
    expected = read_value.ReadHistogramsJsonValue(
        'chartjson-output.json', 'timeToFirst', 'pcv1-cold', 'trace_name',
        'avg')
    self.assertEqual(quest, expected)

  def testPerformanceTestSuite(self):
    """The performance_test_suite target changes the results file path."""
    arguments = dict(_BASE_ARGUMENTS_HISTOGRAMS)
    arguments['target'] = 'performance_test_suite'
    quest = read_value.ReadHistogramsJsonValue.FromDict(arguments)
    self.assertEqual(
        quest,
        read_value.ReadHistogramsJsonValue('speedometer/perf_results.json'))

  def testPerformanceTestSuiteWindows(self):
    """On Windows the results path uses a backslash separator."""
    arguments = dict(_BASE_ARGUMENTS_HISTOGRAMS)
    arguments['dimensions'] = [{'key': 'os', 'value': 'Windows-10'}]
    arguments['target'] = 'performance_test_suite'
    quest = read_value.ReadHistogramsJsonValue.FromDict(arguments)
    self.assertEqual(
        quest,
        read_value.ReadHistogramsJsonValue('speedometer\\perf_results.json'))
class ReadGraphJsonValueQuestTest(unittest.TestCase):
  """Tests FromDict() construction of ReadGraphJsonValue quests."""

  def testAllArguments(self):
    quest = read_value.ReadGraphJsonValue.FromDict(_BASE_ARGUMENTS_GRAPH_JSON)
    self.assertEqual(
        quest, read_value.ReadGraphJsonValue('chart_name', 'trace_name'))

  def testMissingChart(self):
    """'chart' is mandatory; leaving it out raises TypeError."""
    arguments = dict(_BASE_ARGUMENTS_GRAPH_JSON)
    arguments.pop('chart')
    with self.assertRaises(TypeError):
      read_value.ReadGraphJsonValue.FromDict(arguments)

  def testMissingTrace(self):
    """'trace' is mandatory; leaving it out raises TypeError."""
    arguments = dict(_BASE_ARGUMENTS_GRAPH_JSON)
    arguments.pop('trace')
    with self.assertRaises(TypeError):
      read_value.ReadGraphJsonValue.FromDict(arguments)
class _ReadValueExecutionTest(unittest.TestCase):
  """Base class that stubs out isolate retrieval for execution tests."""

  def setUp(self):
    patcher = mock.patch('dashboard.services.isolate.Retrieve')
    self._retrieve = patcher.start()
    self.addCleanup(patcher.stop)

  def SetOutputFileContents(self, contents):
    """First retrieval returns the file map, second the serialized contents."""
    file_map = '{"files": {"chartjson-output.json": {"h": "output json hash"}}}'
    self._retrieve.side_effect = (file_map, json.dumps(contents))

  def assertReadValueError(self, execution):
    self.assertTrue(execution.completed)
    self.assertTrue(execution.failed)
    self.assertIsInstance(execution.exception, basestring)
    # The traceback's final line should carry the ReadValueError.
    self.assertTrue(
        execution.exception.splitlines()[-1].startswith('ReadValueError'))

  def assertReadValueSuccess(self, execution):
    self.assertTrue(execution.completed)
    self.assertFalse(execution.failed)
    self.assertEqual(execution.result_arguments, {})

  def assertRetrievedOutputJson(self):
    self.assertEqual(self._retrieve.mock_calls, [
        mock.call('server', 'output hash'),
        mock.call('server', 'output json hash'),
    ])
class ReadHistogramsJsonValueTest(_ReadValueExecutionTest):
def testReadHistogramsJsonValue(self):
  """Happy path: matching hist name/tir_label/story yields all samples."""
  hist = histogram_module.Histogram('hist', 'count')
  hist.AddSample(0)
  hist.AddSample(1)
  hist.AddSample(2)
  histograms = histogram_set.HistogramSet([hist])
  histograms.AddSharedDiagnostic(
      reserved_infos.STORY_TAGS.name,
      generic_set.GenericSet(['group:tir_label']))
  histograms.AddSharedDiagnostic(
      reserved_infos.STORIES.name,
      generic_set.GenericSet(['story']))
  self.SetOutputFileContents(histograms.AsDicts())
  quest = read_value.ReadHistogramsJsonValue(
      'chartjson-output.json', hist.name, 'tir_label', 'story')
  execution = quest.Start(None, 'server', 'output hash')
  execution.Poll()
  self.assertReadValueSuccess(execution)
  self.assertEqual(execution.result_values, (0, 1, 2))
  self.assertRetrievedOutputJson()
def testReadHistogramsJsonValueStatistic(self):
  """With statistic='avg', only the mean of the samples is returned."""
  hist = histogram_module.Histogram('hist', 'count')
  hist.AddSample(0)
  hist.AddSample(1)
  hist.AddSample(2)
  histograms = histogram_set.HistogramSet([hist])
  histograms.AddSharedDiagnostic(
      reserved_infos.STORY_TAGS.name,
      generic_set.GenericSet(['group:tir_label']))
  histograms.AddSharedDiagnostic(
      reserved_infos.STORIES.name,
      generic_set.GenericSet(['story']))
  self.SetOutputFileContents(histograms.AsDicts())
  quest = read_value.ReadHistogramsJsonValue(
      'chartjson-output.json', hist.name,
      'tir_label', 'story', statistic='avg')
  execution = quest.Start(None, 'server', 'output hash')
  execution.Poll()
  self.assertReadValueSuccess(execution)
  # avg of (0, 1, 2) is 1.
  self.assertEqual(execution.result_values, (1,))
  self.assertRetrievedOutputJson()
def testReadHistogramsJsonValueStatisticNoSamples(self):
hist = histogram_module.Histogram('hist', 'count')
histograms = histogram_set.HistogramSet([hist])
histograms.AddSharedDiagnostic(
|
generic_set.GenericSet(['story']))
self.SetOutputFileContents(histograms.AsDicts())
quest = read_value.ReadHistogramsJsonValue(
'chartjson-output.json', hist.name,
'tir_label', 'story', statistic='avg')
execution = quest.Start(None, 'server', 'output hash')
execution.Poll()
self.assertReadValueError(execution)
def testReadHistogramsJsonValueMultipleHistograms(self):
  """Samples from all same-named histograms are concatenated; others ignored."""
  hist = histogram_module.Histogram('hist', 'count')
  hist.AddSample(0)
  hist.AddSample(1)
  hist.AddSample(2)
  hist2 = histogram_module.Histogram('hist', 'count')
  hist2.AddSample(0)
  hist2.AddSample(1)
  hist2.AddSample(2)
  # hist3 has a different name and must not contribute to the result.
  hist3 = histogram_module.Histogram('some_other_histogram', 'count')
  hist3.AddSample(3)
  hist3.AddSample(4)
  hist3.AddSample(5)
  histograms = histogram_set.HistogramSet([hist, hist2, hist3])
  histograms.AddSharedDiagnostic(
      reserved_infos.STORY_TAGS.name,
      generic_set.GenericSet(['group:tir_label']))
  histograms.AddSharedDiagnostic(
      reserved_infos.STORIES.name,
      generic_set.GenericSet(['story']))
  self.SetOutputFileContents(histograms.AsDicts())
  quest = read_value.ReadHistogramsJsonValue(
      'chartjson-output.json', hist.name, 'tir_label', 'story')
  execution = quest.Start(None, 'server', 'output hash')
  execution.Poll()
  self.assertReadValueSuccess(execution)
  self.assertEqual(execution.result_values, (0, 1, 2, 0, 1, 2))
  self.assertRetrievedOutputJson()
def testReadHistogramsTraceUrls(self):
  """Trace URLs from all histograms are surfaced, deduplicated, in details."""
  hist = histogram_module.Histogram('hist', 'count')
  hist.AddSample(0)
  hist.diagnostics[reserved_infos.TRACE_URLS.name] = (
      generic_set.GenericSet(['trace_url1', 'trace_url2']))
  hist2 = histogram_module.Histogram('hist2', 'count')
  hist2.diagnostics[reserved_infos.TRACE_URLS.name] = (
      generic_set.GenericSet(['trace_url3']))
  # hist3 repeats trace_url2; it should appear only once in the output.
  hist3 = histogram_module.Histogram('hist3', 'count')
  hist3.diagnostics[reserved_infos.TRACE_URLS.name] = (
      generic_set.GenericSet(['trace_url2']))
  histograms = histogram_set.HistogramSet([hist, hist2, hist3])
  self.SetOutputFileContents(histograms.AsDicts())
  quest = read_value.ReadHistogramsJsonValue(
      'chartjson-output.json', hist_name=hist.name)
  execution = quest.Start(None, 'server', 'output hash')
  execution.Poll()
  self.assertReadValueSuccess(execution)
  self.assertEqual(execution.result_values, (0,))
  self.assertEqual(
      {
          'completed': True,
          'exception': None,
          'details': [
              {
                  'key': 'trace',
                  'value': 'trace_url1',
                  'url': 'trace_url1',
              },
              {
                  'key': 'trace',
                  'value': 'trace_url2',
                  'url': 'trace_url2',
              },
              {
                  'key': 'trace',
                  'value': 'trace_url3',
                  'url': 'trace_url3',
              },
          ],
      },
      execution.AsDict())
  self.assertRetrievedOutputJson()
def testReadHistogramsDiagnosticRefSkipTraceUrls(self):
  """Trace-URL diagnostics carrying an explicit guid (refs) are skipped."""
  hist = histogram_module.Histogram('hist', 'count')
  hist.AddSample(0)
  hist.diagnostics[reserved_infos.TRACE_URLS.name] = (
      generic_set.GenericSet(['trace_url1', 'trace_url2']))
  hist2 = histogram_module.Histogram('hist2', 'count')
  hist2.diagnostics[reserved_infos.TRACE_URLS.name] = (
      generic_set.GenericSet(['trace_url3']))
  # Setting a guid turns this diagnostic into a reference, which must be
  # excluded from the details — only trace_url1/trace_url2 should remain.
  hist2.diagnostics[reserved_infos.TRACE_URLS.name].guid = 'foo'
  histograms = histogram_set.HistogramSet([hist, hist2])
  self.SetOutputFileContents(histograms.AsDicts())
  quest = read_value.ReadHistogramsJsonValue(
      'chartjson-output.json', hist_name=hist.name)
  execution = quest.Start(None, 'server', 'output hash')
  execution.Poll()
  self.assertReadValueSuccess(execution)
  self.assertEqual(execution.result_values, (0,))
  self.assertEqual(
      {
          'completed': True,
          'exception': None,
          'details': [
              {
                  'key': 'trace',
                  'value': 'trace_url1',
                  'url': 'trace_url1',
              },
              {
                  'key': 'trace',
                  'value': 'trace_url2',
                  'url': 'trace_url2',
              },
          ],
      },
      execution.AsDict())
  self.assertRetrievedOutputJson()
def testReadHistogramsJsonValueWithNoTirLabel(self):
  """A tir_label match succeeds even when no story diagnostic is present."""
  hist = histogram_module.Histogram('hist', 'count')
  hist.AddSample(0)
  hist.AddSample(1)
  hist.AddSample(2)
  histograms = histogram_set.HistogramSet([hist])
  histograms.AddSharedDiagnostic(
      reserved_infos.STORY_TAGS.name,
      generic_set.GenericSet(['group:tir_label']))
  self.SetOutputFileContents(histograms.AsDicts())
  quest = read_value.ReadHistogramsJsonValue(
      'chartjson-output.json', hist_name=hist.name, tir_label='tir_label')
  execution = quest.Start(None, 'server', 'output hash')
  execution.Poll()
  self.assertReadValueSuccess(execution)
  self.assertEqual(execution.result_values, (0, 1, 2))
  self.assertRetrievedOutputJson()
def testReadHistogramsJsonValueWithNoStory(self):
  """A story match succeeds even when no tir_label diagnostic is present."""
  hist = histogram_module.Histogram('hist', 'count')
  hist.AddSample(0)
  hist.AddSample(1)
  hist.AddSample(2)
  histograms = histogram_set.HistogramSet([hist])
  histograms.AddSharedDiagnostic(
      reserved_infos.STORIES.name,
      generic_set.GenericSet(['story']))
  self.SetOutputFileContents(histograms.AsDicts())
  quest = read_value.ReadHistogramsJsonValue(
      'chartjson-output.json', hist_name=hist.name, story='story')
  execution = quest.Start(None, 'server', 'output hash')
  execution.Poll()
  self.assertReadValueSuccess(execution)
  self.assertEqual(execution.result_values, (0, 1, 2))
  self.assertRetrievedOutputJson()
def testReadHistogramsJsonValueSummary(self):
  """Without a story filter, samples from every story are aggregated."""
  samples = []
  hists = []
  for i in xrange(10):
    hist = histogram_module.Histogram('hist', 'count')
    hist.AddSample(0)
    hist.AddSample(1)
    hist.AddSample(2)
    hist.diagnostics[reserved_infos.STORIES.name] = (
        generic_set.GenericSet(['story%d' % i]))
    hists.append(hist)
    samples.extend(hist.sample_values)
  histograms = histogram_set.HistogramSet(hists)
  histograms.AddSharedDiagnostic(
      reserved_infos.STORY_TAGS.name,
      generic_set.GenericSet(['group:tir_label']))
  self.SetOutputFileContents(histograms.AsDicts())
  quest = read_value.ReadHistogramsJsonValue(
      'chartjson-output.json', hist_name=hists[0].name, tir_label='tir_label')
  execution = quest.Start(None, 'server', 'output hash')
  execution.Poll()
  self.assertReadValueSuccess(execution)
  self.assertEqual(execution.result_values, tuple(samples))
  self.assertRetrievedOutputJson()
def testReadHistogramsJsonValueWithMissingFile(self):
  """An isolate without the results file fails with ReadValueError."""
  self._retrieve.return_value = '{"files": {}}'
  quest = read_value.ReadHistogramsJsonValue(
      'chartjson-output.json', hist_name='metric', tir_label='test')
  execution = quest.Start(None, 'server', 'output hash')
  execution.Poll()
  self.assertReadValueError(execution)
def testReadHistogramsJsonValueEmptyHistogramSet(self):
  """An empty histogram set fails with ReadValueError."""
  self.SetOutputFileContents([])
  quest = read_value.ReadHistogramsJsonValue(
      'chartjson-output.json', hist_name='metric', tir_label='test')
  execution = quest.Start(None, 'server', 'output hash')
  execution.Poll()
  self.assertReadValueError(execution)
def testReadHistogramsJsonValueWithMissingHistogram(self):
  """A hist_name not present in the set fails with ReadValueError."""
  hist = histogram_module.Histogram('hist', 'count')
  histograms = histogram_set.HistogramSet([hist])
  self.SetOutputFileContents(histograms.AsDicts())
  quest = read_value.ReadHistogramsJsonValue(
      'chartjson-output.json', hist_name='does_not_exist')
  execution = quest.Start(None, 'server', 'output hash')
  execution.Poll()
  self.assertReadValueError(execution)
def testReadHistogramsJsonValueWithNoValues(self):
  """A histogram with zero samples fails with ReadValueError."""
  hist = histogram_module.Histogram('hist', 'count')
  histograms = histogram_set.HistogramSet([hist])
  self.SetOutputFileContents(histograms.AsDicts())
  quest = read_value.ReadHistogramsJsonValue(
      'chartjson-output.json', hist_name='chart')
  execution = quest.Start(None, 'server', 'output hash')
  execution.Poll()
  self.assertReadValueError(execution)
def testReadHistogramsJsonValueTirLabelWithNoValues(self):
  """A tir_label filter that matches nothing fails with ReadValueError."""
  hist = histogram_module.Histogram('hist', 'count')
  histograms = histogram_set.HistogramSet([hist])
  self.SetOutputFileContents(histograms.AsDicts())
  quest = read_value.ReadHistogramsJsonValue(
      'chartjson-output.json', hist_name='chart', tir_label='tir_label')
  execution = quest.Start(None, 'server', 'output hash')
  execution.Poll()
  self.assertReadValueError(execution)
def testReadHistogramsJsonValueStoryWithNoValues(self):
  """A story filter that matches nothing fails with ReadValueError."""
  hist = histogram_module.Histogram('hist', 'count')
  histograms = histogram_set.HistogramSet([hist])
  self.SetOutputFileContents(histograms.AsDicts())
  quest = read_value.ReadHistogramsJsonValue(
      'chartjson-output.json', hist_name='chart', story='story')
  execution = quest.Start(None, 'server', 'output hash')
  execution.Poll()
  self.assertReadValueError(execution)
class ReadGraphJsonValueTest(_ReadValueExecutionTest):
  """Execution tests for the ReadGraphJsonValue quest."""

  def testReadGraphJsonValue(self):
    """The chart/trace value is parsed out of the graph JSON as a float."""
    contents = {'chart': {'traces': {'trace': ['126444.869721', '0.0']}}}
    self.SetOutputFileContents(contents)
    quest = read_value.ReadGraphJsonValue('chart', 'trace')
    execution = quest.Start(None, 'server', 'output hash')
    execution.Poll()
    self.assertReadValueSuccess(execution)
    self.assertEqual(execution.result_values, (126444.869721,))
    self.assertRetrievedOutputJson()

  def testReadGraphJsonValueWithMissingFile(self):
    self._retrieve.return_value = '{"files": {}}'
    execution = read_value.ReadGraphJsonValue('metric', 'test').Start(
        None, 'server', 'output hash')
    execution.Poll()
    self.assertReadValueError(execution)

  def testReadGraphJsonValueWithMissingChart(self):
    self.SetOutputFileContents({})
    execution = read_value.ReadGraphJsonValue('metric', 'test').Start(
        None, 'server', 'output hash')
    execution.Poll()
    self.assertReadValueError(execution)

  def testReadGraphJsonValueWithMissingTrace(self):
    self.SetOutputFileContents({'chart': {'traces': {}}})
    execution = read_value.ReadGraphJsonValue('metric', 'test').Start(
        None, 'server', 'output hash')
    execution.Poll()
    self.assertReadValueError(execution)
|
reserved_infos.STORY_TAGS.name,
generic_set.GenericSet(['group:tir_label']))
histograms.AddSharedDiagnostic(
reserved_infos.STORIES.name,
|
insidersExtensionPrompt.ts
|
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
'use strict';
import { inject, injectable } from 'inversify';
import { sendTelemetryEvent } from '../../telemetry';
import { EventName } from '../../telemetry/constants';
import { IApplicationShell, ICommandManager } from '../application/types';
import { traceDecorators } from '../logger';
import { IPersistentState, IPersistentStateFactory } from '../types';
import { Common, DataScienceSurveyBanner, ExtensionChannels } from '../utils/localize';
import { noop } from '../utils/misc';
import { IExtensionChannelService, IInsiderExtensionPrompt } from './types';
export const insidersPromptStateKey = 'INSIDERS_PROMPT_STATE_KEY';
@injectable()
export class
|
implements IInsiderExtensionPrompt {
public readonly hasUserBeenNotified: IPersistentState<boolean>;
constructor(
    @inject(IApplicationShell) private readonly appShell: IApplicationShell,
    @inject(IExtensionChannelService) private readonly insidersDownloadChannelService: IExtensionChannelService,
    @inject(ICommandManager) private readonly cmdManager: ICommandManager,
    @inject(IPersistentStateFactory) private readonly persistentStateFactory: IPersistentStateFactory
) {
    // Globally persisted flag (default false) so the insiders prompt is only shown once per user.
    this.hasUserBeenNotified = this.persistentStateFactory.createGlobalPersistentState(insidersPromptStateKey, false);
}
@traceDecorators.error('Error in prompting to install insiders')
/**
 * Shows a one-time prompt offering to switch to the insiders channel
 * (weekly or daily), records the choice in telemetry, and updates the
 * download channel accordingly.
 */
public async notifyToInstallInsiders(): Promise<void> {
    const prompts = [ExtensionChannels.yesWeekly(), ExtensionChannels.yesDaily(), DataScienceSurveyBanner.bannerLabelNo()];
    // Telemetry labels are index-aligned with the `prompts` array above.
    const telemetrySelections: ['Yes, weekly', 'Yes, daily', 'No, thanks'] = ['Yes, weekly', 'Yes, daily', 'No, thanks'];
    const selection = await this.appShell.showInformationMessage(ExtensionChannels.promptMessage(), ...prompts);
    sendTelemetryEvent(EventName.INSIDERS_PROMPT, undefined, { selection: selection ? telemetrySelections[prompts.indexOf(selection)] : undefined });
    // Mark as notified even when the prompt is dismissed, so it never reappears.
    await this.hasUserBeenNotified.updateValue(true);
    if (!selection) {
        return;
    }
    if (selection === ExtensionChannels.yesWeekly()) {
        await this.insidersDownloadChannelService.updateChannel('weekly');
    } else if (selection === ExtensionChannels.yesDaily()) {
        await this.insidersDownloadChannelService.updateChannel('daily');
    }
}
@traceDecorators.error('Error in prompting to reload')
/**
 * Asks the user to reload the window (needed for a channel switch to take
 * effect) and reloads if they accept; the choice is sent to telemetry.
 */
public async promptToReload(): Promise<void> {
    const selection = await this.appShell.showInformationMessage(ExtensionChannels.reloadToUseInsidersMessage(), Common.reload());
    sendTelemetryEvent(EventName.INSIDERS_RELOAD_PROMPT, undefined, { selection: selection ? 'Reload' : undefined });
    if (!selection) {
        return;
    }
    if (selection === Common.reload()) {
        // Fire-and-forget: the window is about to reload anyway.
        this.cmdManager.executeCommand('workbench.action.reloadWindow').then(noop);
    }
}
}
|
InsidersExtensionPrompt
|
morse.go
|
package lib
import (
"bytes"
"errors"
"io"
"strings"
)
// Morse is a interface that you encode and decode
// with your input.
type Morse interface {
Encode(r io.Reader) (string, error)
Decode(r io.Reader) (string, error)
}
// NewMorse returns a new morse struct.
func NewMorse() Morse {
return &morse{}
}
type morse struct{}
// Encode returns encoded morse code for the given input.
//
// Letters within a word are separated by a single space and words by " / ".
// Characters with no morse mapping are silently dropped; words left empty
// after that are skipped entirely.
func (m *morse) Encode(r io.Reader) (string, error) {
	buf := new(bytes.Buffer)
	if _, err := buf.ReadFrom(r); err != nil {
		// Fixed typo in the error message: "occured" -> "occurred".
		return "", errors.New("Error occurred while reading the input.")
	}
	data := strings.ToUpper(strings.TrimSpace(buf.String()))
	if data == "" {
		return "", errors.New("You have to give an input.")
	}
	var resultCodes []string
	for _, word := range strings.Split(data, " ") {
		var wordCodes []string
		for _, letter := range word {
			if code, ok := letterToCode[letter]; ok {
				wordCodes = append(wordCodes, code)
			}
		}
		// Skip words that contained no encodable characters.
		// (Removed the dead foundLetterCount counter, which was incremented
		// but never read.)
		if len(wordCodes) == 0 {
			continue
		}
		resultCodes = append(resultCodes, strings.Join(wordCodes, " "))
	}
	return strings.Join(resultCodes, " / "), nil
}
// Decode returns decoded morse code result for the given input.
func (m *morse) Decode(r io.Reader) (string, error) {
buf := new(bytes.Buffer)
_, err := buf.ReadFrom(r)
if err != nil {
return "", errors.New("Error occured while reading the input.")
}
data := buf.String()
if data == "" {
return "", errors.New("You have to give an input.")
}
resultCodes := strings.Split(data, "/")
var result []string
for _, resultCode := range resultCodes {
var word string
resultCode = strings.TrimSpace(resultCode)
for _, letterCode := range strings.Split(resultCode, " ") {
if letter, ok := codeToLetter[letterCode]; ok
|
}
result = append(result, word)
}
v := strings.Join(result, " ")
return v, nil
}
// Letters for long and short morse signals.
const (
S = "." // Short signal.
L = "-" // Long signal.
)
// Morse Codes
//
// Sources:
// - https://morsecode.world/international/morse.html
// - https://morsedecoder.com/tr/
var letterToCode = map[rune]string{
'A': S + L,
'B': L + S + S + S,
'C': L + S + L + S,
'D': L + S + S,
'E': S,
'F': S + S + L + S,
'G': L + L + S,
'H': S + S + S + S,
'I': S + S,
'J': S + L + L + L,
'K': L + S + L,
'L': S + L + S + S,
'M': L + L,
'N': L + S,
'O': L + L + L,
'P': S + L + L + S,
'Q': L + L + S + L,
'R': S + L + S,
'S': S + S + S,
'T': L,
'U': S + S + L,
'V': S + S + S + L,
'W': S + L + L,
'X': L + S + S + L,
'Y': L + S + L + L,
'Z': L + L + S + S,
'Ç': L + S + L + S + S, // Turkish Special Letters
'Ğ': L + L + S + L + S, //
'İ': S + L + S + S + L, //
'Ö': L + L + L + S, //
'Ş': S + L + L + S + S, //
'Ü': S + S + L + L, //
'Á': S + L + L + S + L, // Continental (Gerke)
'Ä': S + L + S + L, //
'É': S + S + L + S + S, //
'Ñ': L + L + S + L + L, //
'1': S + L + L + L + L,
'2': S + S + L + L + L,
'3': S + S + S + L + L,
'4': S + S + S + S + L,
'5': S + S + S + S + S,
'6': L + S + S + S + S,
'7': L + L + S + S + S,
'8': L + L + L + S + S,
'9': L + L + L + L + S,
'0': L + L + L + L + L,
'.': S + L + S + L + S + L,
':': L + L + L + S + S + S,
',': L + L + S + S + L + L,
';': L + S + L + S + L,
'?': S + S + L + L + S + S,
'=': L + S + S + S + L,
'\'': S + L + L + L + L + S,
'/': L + S + S + L + S,
'!': L + S + L + S + L + L,
'-': L + S + S + S + S + L,
'_': S + S + L + L + S + L,
'"': S + L + S + S + L + S,
'(': L + S + L + L + S,
')': L + S + L + L + S + L,
'$': S + S + S + L + S + S + L,
'&': S + L + S + S + S,
'@': S + L + L + S + L + S,
'+': S + L + S + L + S,
}
// codeToLetter is auto-created while initializing the package.
var codeToLetter map[string]rune
func init() {
codeToLetter = make(map[string]rune)
for k, v := range letterToCode {
codeToLetter[v] = k
}
}
|
{
word += string(letter)
}
|
utils.rs
|
use super::writer::Close;
use std::io;
use std::io::Write;
// Forward `close` through a mutable reference so a `&mut W` can be used
// anywhere a `Close` writer is expected.
impl<T: Close + ?Sized> Close for &mut T {
    #[inline(always)]
    fn close(&mut self) -> io::Result<()> {
        Close::close(*self)
    }
}
// An in-memory buffer has nothing to finalize; closing is just a flush
// (which is a no-op for `Vec<u8>`, so this always succeeds).
impl Close for Vec<u8> {
    #[inline(always)]
    fn close(&mut self) -> io::Result<()> {
        self.flush()
    }
}
// `io::Sink` discards all data; closing reduces to a (no-op) flush.
impl Close for io::Sink {
    #[inline(always)]
    fn close(&mut self) -> io::Result<()> {
        self.flush()
    }
}
// Delegate to the boxed writer so `Box<W>` closes exactly like `W` itself.
impl<W: Close + ?Sized> Close for Box<W> {
    #[inline(always)]
    fn close(&mut self) -> io::Result<()> {
        self.as_mut().close()
    }
}
impl<W: Write + Close> Close for io::BufWriter<W> {
    /// Flushes any buffered bytes first, then closes the wrapped writer.
    /// If the flush fails, the inner writer is not closed.
    #[inline]
    fn close(&mut self) -> io::Result<()> {
        self.flush()?;
        self.get_mut().close()
    }
}
impl<W: Write + Close> Close for io::LineWriter<W> {
    /// Flushes any buffered line data first, then closes the wrapped writer.
    /// If the flush fails, the inner writer is not closed.
    #[inline]
    fn close(&mut self) -> io::Result<()> {
        self.flush()?;
        self.get_mut().close()
    }
}
/// NopCloser wraps a writer and implements the `Close` trait by
/// performing a `flush` when the `close` method is called. It should
/// only be used to wrap a writer which does not implement the `Close`
/// trait.
///
/// # Examples
///
/// ```
/// use std::{io, io::Write};
/// use sio::{Key, Nonce, Aad, EncWriter, CHACHA20_POLY1305, NopCloser};
///
/// // Load your secret keys from a secure location or derive
/// // them using a secure (password-based) key-derivation-function, like Argon2id.
/// // Obviously, don't use this all-zeros key for anything real.
/// let key: Key<CHACHA20_POLY1305> = Key::new([0; Key::<CHACHA20_POLY1305>::SIZE]);
///
/// // Make sure you use an unique key-nonce combination!
/// // Reusing a nonce value for the same secret key breaks
/// // the security of the encryption algorithm.
/// let nonce = Nonce::new([0; Nonce::<CHACHA20_POLY1305>::SIZE]);
///
/// // You must be able to re-generate this aad to decrypt
/// // the ciphertext again. Usually, it's stored together with
/// // the encrypted data.
/// let aad = Aad::from("Some authenticated but not encrypted data".as_bytes());
///
/// let plaintext = "Some example plaintext".as_bytes();
///
/// let mut ciphertext: Vec<u8> = Vec::default(); // Store the ciphertext in memory.
/// let mut writer = EncWriter::new(
/// NopCloser::wrap(io::stdout()), // Without wrapping STDOUT the code would not compile.
/// &key,
/// nonce,
/// aad,
/// );
///
/// writer.write_all(plaintext).expect("There could be your error handling");
///
/// // Complete the encryption process explicitly.
/// writer.close().expect("There could be your error handling");
/// ```
pub struct NopCloser<W: Write>(W);
impl<W: Write> NopCloser<W> {
    /// Wraps a writer.
    ///
    /// The returned `NopCloser` implements `Close` by flushing only,
    /// without performing any real close operation on `w`.
    #[inline(always)]
    pub fn wrap(w: W) -> Self {
        Self(w)
    }
}
// Allow any writer to be converted into a `NopCloser` via `.into()`;
// equivalent to calling `NopCloser::wrap`.
impl<W: Write> From<W> for NopCloser<W> {
    #[inline(always)]
    fn from(w: W) -> Self {
        Self::wrap(w)
    }
}
impl<W: Write> Write for NopCloser<W> {
#[inline(always)]
fn
|
(&mut self, buf: &[u8]) -> io::Result<usize> {
self.0.write(buf)
}
#[inline(always)]
fn flush(&mut self) -> io::Result<()> {
self.0.flush()
}
}
// The whole point of `NopCloser`: "closing" only flushes the inner writer,
// never finalizes it.
impl<W: Write> Close for NopCloser<W> {
    #[inline(always)]
    fn close(&mut self) -> io::Result<()> {
        self.flush()
    }
}
// Expose a shared reference to the wrapped writer.
impl<W: Write> AsRef<W> for NopCloser<W> {
    #[inline(always)]
    fn as_ref(&self) -> &W {
        &self.0
    }
}
// Expose a mutable reference to the wrapped writer.
impl<W: Write> AsMut<W> for NopCloser<W> {
    #[inline(always)]
    fn as_mut(&mut self) -> &mut W {
        &mut self.0
    }
}
|
write
|
ardrive_factory.js
|
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.arDriveAnonymousFactory = exports.arDriveFactory = void 0;
const arweave_1 = __importDefault(require("arweave"));
const ardrive_community_oracle_1 = require("./community/ardrive_community_oracle");
const arfsdao_1 = require("./arfs/arfsdao");
const arfsdao_anonymous_1 = require("./arfs/arfsdao_anonymous");
const constants_1 = require("./utils/constants");
const ardrive_1 = require("./ardrive");
const ardrive_anonymous_1 = require("./ardrive_anonymous");
const types_1 = require("./types");
const wallet_dao_1 = require("./wallet_dao");
const arfs_upload_planner_1 = require("./arfs/arfs_upload_planner");
const arfs_tag_settings_1 = require("./arfs/arfs_tag_settings");
const ar_data_price_network_estimator_1 = require("./pricing/ar_data_price_network_estimator");
const defaultArweave = arweave_1.default.init({
host: 'arweave.net',
//host: 'arweave.dev', // Arweave Dev Gateway
port: 443,
protocol: 'https',
timeout: 600000
});
function
|
({ wallet, arweave = defaultArweave, priceEstimator = new ar_data_price_network_estimator_1.ARDataPriceNetworkEstimator(), communityOracle = new ardrive_community_oracle_1.ArDriveCommunityOracle(arweave), dryRun = false, feeMultiple = new types_1.FeeMultiple(1.0), appName = constants_1.DEFAULT_APP_NAME, appVersion = constants_1.DEFAULT_APP_VERSION, walletDao = new wallet_dao_1.WalletDAO(arweave, appName, appVersion), shouldBundle = true, arFSTagSettings = new arfs_tag_settings_1.ArFSTagSettings({ appName, appVersion }), uploadPlanner = new arfs_upload_planner_1.ArFSUploadPlanner({
shouldBundle,
feeMultiple,
priceEstimator,
arFSTagSettings
}), arfsDao = new arfsdao_1.ArFSDAO(wallet, arweave, dryRun, appName, appVersion, arFSTagSettings) }) {
return new ardrive_1.ArDrive(wallet, walletDao, arfsDao, communityOracle, appName, appVersion, priceEstimator, feeMultiple, dryRun, arFSTagSettings, uploadPlanner);
}
exports.arDriveFactory = arDriveFactory;
// Builds a read-only ArDrive client: anonymous usage needs no wallet,
// just an anonymous ArFS DAO over the given (or default) Arweave gateway.
function arDriveAnonymousFactory({ arweave = defaultArweave }) {
    const anonymousDao = new arfsdao_anonymous_1.ArFSDAOAnonymous(arweave);
    return new ardrive_anonymous_1.ArDriveAnonymous(anonymousDao);
}
exports.arDriveAnonymousFactory = arDriveAnonymousFactory;
|
arDriveFactory
|
8.1.py
|
# definition for music_func goes here
def music_func(music, group, singer):
    """Print the favorite music genre, group, and lead vocalist, one per line."""
    lines = (
        ("The best kind of music is", music),
        ("The best music group is", group),
        ("The best lead vocalist is", singer),
    )
    for label, value in lines:
        print(label, value)
def
|
():
music, group, singer = '', '', ''
while music != 'quit':
try:
music, group, singer = input().split(',')
music_func(music, group, singer)
except (EOFError, ValueError):
music, group, singer = 'Classic Rock', 'The Beatles', 'Freddie Mercury'
music_func(music, group, singer)
quit()
main()
|
main
|
qasm_snapshot.py
|
# This code is part of Qiskit.
#
# (C) Copyright IBM 2018, 2019.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
"""
QasmSimulator Integration Tests for Snapshot instructions
"""
import logging
import itertools as it
import numpy as np
from qiskit import QuantumCircuit
from qiskit.compiler import assemble
from qiskit.quantum_info import DensityMatrix, Pauli, Operator
from qiskit.providers.aer import QasmSimulator
from qiskit.providers.aer import AerError
from test.terra.reference.ref_snapshot_state import (
snapshot_state_circuits_deterministic, snapshot_state_counts_deterministic,
snapshot_state_pre_measure_statevector_deterministic,
snapshot_state_post_measure_statevector_deterministic,
snapshot_state_circuits_nondeterministic,
|
snapshot_state_post_measure_statevector_nondeterministic)
from test.terra.reference.ref_snapshot_probabilities import (
snapshot_probabilities_circuits, snapshot_probabilities_counts,
snapshot_probabilities_labels_qubits,
snapshot_probabilities_post_meas_probs,
snapshot_probabilities_pre_meas_probs)
from test.terra.reference.ref_snapshot_expval import (
snapshot_expval_circuits, snapshot_expval_counts, snapshot_expval_labels,
snapshot_expval_post_meas_values, snapshot_expval_pre_meas_values)
class QasmSnapshotStatevectorTests:
    """QasmSimulator snapshot statevector tests.

    Mixin for a unittest TestCase: the concrete class must provide the
    standard ``assert*`` helpers and ``compare_counts``.
    """
    SIMULATOR = QasmSimulator()
    # Simulation methods that implement the statevector snapshot; any other
    # method is expected to report a failed job.
    SUPPORTED_QASM_METHODS = [
        'automatic', 'statevector', 'statevector_gpu', 'statevector_thrust',
        'matrix_product_state'
    ]
    BACKEND_OPTS = {}

    def statevector_snapshots(self, data, label):
        """Format snapshots as a list of Numpy arrays."""
        snaps = data.get("snapshots", {}).get("statevector", {}).get(label, [])
        statevecs = []
        for snap in snaps:
            self.assertIsInstance(snap, np.ndarray)
            statevecs.append(snap)
        return statevecs

    def test_snapshot_statevector_pre_measure_det(self):
        """Test snapshot statevector before deterministic final measurement"""
        shots = 10
        label = "snap"
        counts_targets = snapshot_state_counts_deterministic(shots)
        statevec_targets = snapshot_state_pre_measure_statevector_deterministic(
        )
        circuits = snapshot_state_circuits_deterministic(label,
                                                         'statevector',
                                                         post_measure=False)
        qobj = assemble(circuits, self.SIMULATOR, shots=shots)
        job = self.SIMULATOR.run(qobj, backend_options=self.BACKEND_OPTS)
        result = job.result()
        success = getattr(result, 'success', False)
        method = self.BACKEND_OPTS.get('method', 'automatic')
        if method not in QasmSnapshotStatevectorTests.SUPPORTED_QASM_METHODS:
            self.assertFalse(success)
        else:
            self.assertTrue(success)
            self.compare_counts(result, circuits, counts_targets, delta=0)
            # Check snapshots: exactly one snapshot per circuit pre-measure.
            for j, circuit in enumerate(circuits):
                data = result.data(circuit)
                snaps = self.statevector_snapshots(data, label)
                # BUG FIX: was `assertTrue(len(snaps), 1)` — the second
                # argument of assertTrue is only a failure message, so the
                # count was never compared against 1.
                self.assertEqual(len(snaps), 1)
                target = statevec_targets[j]
                value = snaps[0]
                self.assertTrue(np.allclose(value, target))

    def test_snapshot_statevector_pre_measure_nondet(self):
        """Test snapshot statevector before non-deterministic final measurement"""
        shots = 100
        label = "snap"
        counts_targets = snapshot_state_counts_nondeterministic(shots)
        statevec_targets = snapshot_state_pre_measure_statevector_nondeterministic(
        )
        circuits = snapshot_state_circuits_nondeterministic(label,
                                                            'statevector',
                                                            post_measure=False)
        qobj = assemble(circuits, self.SIMULATOR, shots=shots)
        job = self.SIMULATOR.run(qobj, backend_options=self.BACKEND_OPTS)
        result = job.result()
        success = getattr(result, 'success', False)
        method = self.BACKEND_OPTS.get('method', 'automatic')
        if method not in QasmSnapshotStatevectorTests.SUPPORTED_QASM_METHODS:
            self.assertFalse(success)
        else:
            self.assertTrue(success)
            self.compare_counts(result,
                                circuits,
                                counts_targets,
                                delta=0.2 * shots)
            # Check snapshots
            for j, circuit in enumerate(circuits):
                data = result.data(circuit)
                snaps = self.statevector_snapshots(data, label)
                # BUG FIX: was `assertTrue(len(snaps), 1)` (count unchecked).
                self.assertEqual(len(snaps), 1)
                target = statevec_targets[j]
                value = snaps[0]
                self.assertTrue(np.allclose(value, target))

    def test_snapshot_statevector_post_measure_det(self):
        """Test snapshot statevector after deterministic final measurement"""
        shots = 10
        label = "snap"
        counts_targets = snapshot_state_counts_deterministic(shots)
        statevec_targets = snapshot_state_post_measure_statevector_deterministic(
        )
        circuits = snapshot_state_circuits_deterministic(label,
                                                         'statevector',
                                                         post_measure=True)
        qobj = assemble(circuits, self.SIMULATOR, memory=True, shots=shots)
        job = self.SIMULATOR.run(qobj, backend_options=self.BACKEND_OPTS)
        result = job.result()
        success = getattr(result, 'success', False)
        method = self.BACKEND_OPTS.get('method', 'automatic')
        if method not in QasmSnapshotStatevectorTests.SUPPORTED_QASM_METHODS:
            # Silence the expected error log from the failing job.
            logging.getLogger().setLevel(logging.CRITICAL)
            self.assertFalse(success)
        else:
            self.assertTrue(success)
            self.compare_counts(result, circuits, counts_targets, delta=0)
            # Check snapshots: one per shot, keyed by the measured memory.
            for i, circuit in enumerate(circuits):
                data = result.data(circuit)
                snaps = self.statevector_snapshots(data, label)
                for j, mem in enumerate(data['memory']):
                    target = statevec_targets[i].get(mem)
                    self.assertTrue(np.allclose(snaps[j], target))

    def test_snapshot_statevector_post_measure_nondet(self):
        """Test snapshot statevector after non-deterministic final measurement"""
        shots = 100
        label = "snap"
        counts_targets = snapshot_state_counts_nondeterministic(shots)
        statevec_targets = snapshot_state_post_measure_statevector_nondeterministic(
        )
        circuits = snapshot_state_circuits_nondeterministic(label,
                                                            'statevector',
                                                            post_measure=True)
        qobj = assemble(circuits, self.SIMULATOR, memory=True, shots=shots)
        job = self.SIMULATOR.run(qobj, backend_options=self.BACKEND_OPTS)
        result = job.result()
        success = getattr(result, 'success', False)
        method = self.BACKEND_OPTS.get('method', 'automatic')
        if method not in QasmSnapshotStatevectorTests.SUPPORTED_QASM_METHODS:
            self.assertFalse(success)
        else:
            self.assertTrue(success)
            self.compare_counts(result,
                                circuits,
                                counts_targets,
                                delta=0.2 * shots)
            # Check snapshots
            for i, circuit in enumerate(circuits):
                data = result.data(circuit)
                snaps = self.statevector_snapshots(data, label)
                for j, mem in enumerate(data['memory']):
                    target = statevec_targets[i].get(mem)
                    self.assertTrue(np.allclose(snaps[j], target))
class QasmSnapshotStabilizerTests:
    """QasmSimulator method snapshot stabilizer tests.

    Mixin for a unittest TestCase: the concrete class must provide the
    standard ``assert*`` helpers and ``compare_counts``.
    """
    SIMULATOR = QasmSimulator()
    # Only these simulation methods implement the stabilizer snapshot; any
    # other method is expected to report a failed job.
    SUPPORTED_QASM_METHODS = ['automatic', 'stabilizer']
    BACKEND_OPTS = {}
    @staticmethod
    def stabilizer_snapshots(data, label):
        """Get stabilizer snapshots"""
        return data.get("snapshots", {}).get("stabilizer", {}).get(label, [])
    @staticmethod
    def stabilizes_statevector(stabilizer, statevector):
        """Return True if two stabilizer states are equal."""
        # Get stabilizer and destabilizers and convert to sets
        for stab in stabilizer:
            # A leading '-' marks a negative-phase generator.
            if stab[0] == '-':
                pauli_mat = -1 * Pauli.from_label(stab[1:]).to_matrix()
            else:
                pauli_mat = Pauli.from_label(stab).to_matrix()
            # <psi|P|psi> must be +1 for every stabilizer generator P.
            val = statevector.conj().dot(pauli_mat.dot(statevector))
            if not np.isclose(val, 1):
                return False
        return True
    def test_snapshot_stabilizer_pre_measure_det(self):
        """Test snapshot stabilizer before deterministic final measurement"""
        shots = 10
        label = "snap"
        counts_targets = snapshot_state_counts_deterministic(shots)
        statevec_targets = snapshot_state_pre_measure_statevector_deterministic(
        )
        circuits = snapshot_state_circuits_deterministic(label,
                                                         'stabilizer',
                                                         post_measure=False)
        qobj = assemble(circuits, self.SIMULATOR, shots=shots)
        job = self.SIMULATOR.run(qobj, backend_options=self.BACKEND_OPTS)
        result = job.result()
        success = getattr(result, 'success', False)
        method = self.BACKEND_OPTS.get('method', 'automatic')
        # Unsupported methods must fail the job rather than return bad data.
        if method not in QasmSnapshotStabilizerTests.SUPPORTED_QASM_METHODS:
            self.assertFalse(success)
        else:
            self.assertTrue(success)
            self.compare_counts(result, circuits, counts_targets, delta=0)
            # Check snapshots: exactly one snapshot per circuit pre-measure.
            for j, circuit in enumerate(circuits):
                data = result.data(circuit)
                snaps = self.stabilizer_snapshots(data, label)
                self.assertEqual(len(snaps), 1)
                statevec = statevec_targets[j]
                stabilizer = snaps[0]
                self.assertTrue(
                    self.stabilizes_statevector(stabilizer, statevec))
    def test_snapshot_stabilizer_pre_measure_nondet(self):
        """Test snapshot stabilizer before non-deterministic final measurement"""
        shots = 100
        label = "snap"
        counts_targets = snapshot_state_counts_nondeterministic(shots)
        statevec_targets = snapshot_state_pre_measure_statevector_nondeterministic(
        )
        circuits = snapshot_state_circuits_nondeterministic(label,
                                                            'stabilizer',
                                                            post_measure=False)
        qobj = assemble(circuits, self.SIMULATOR, shots=shots)
        job = self.SIMULATOR.run(qobj, backend_options=self.BACKEND_OPTS)
        result = job.result()
        success = getattr(result, 'success', False)
        method = self.BACKEND_OPTS.get('method', 'automatic')
        if method not in QasmSnapshotStabilizerTests.SUPPORTED_QASM_METHODS:
            self.assertFalse(success)
        else:
            self.assertTrue(success)
            # Non-deterministic counts: allow sampling noise within 20%.
            self.compare_counts(result,
                                circuits,
                                counts_targets,
                                delta=0.2 * shots)
            # Check snapshots
            for j, circuit in enumerate(circuits):
                data = result.data(circuit)
                snaps = self.stabilizer_snapshots(data, label)
                self.assertEqual(len(snaps), 1)
                statevec = statevec_targets[j]
                stabilizer = snaps[0]
                self.assertTrue(
                    self.stabilizes_statevector(stabilizer, statevec))
    def test_snapshot_stabilizer_post_measure_det(self):
        """Test snapshot stabilizer after deterministic final measurement"""
        shots = 10
        label = "snap"
        counts_targets = snapshot_state_counts_deterministic(shots)
        statevec_targets = snapshot_state_post_measure_statevector_deterministic(
        )
        circuits = snapshot_state_circuits_deterministic(label,
                                                         'stabilizer',
                                                         post_measure=True)
        # memory=True so each shot's classical outcome is returned and can be
        # matched against its per-shot snapshot below.
        qobj = assemble(circuits, self.SIMULATOR, memory=True, shots=shots)
        job = self.SIMULATOR.run(qobj, backend_options=self.BACKEND_OPTS)
        result = job.result()
        success = getattr(result, 'success', False)
        method = self.BACKEND_OPTS.get('method', 'automatic')
        if method not in QasmSnapshotStabilizerTests.SUPPORTED_QASM_METHODS:
            self.assertFalse(success)
        else:
            self.assertTrue(success)
            self.compare_counts(result, circuits, counts_targets, delta=0)
            # Check snapshots: one per shot, matched via the memory outcome.
            for i, circuit in enumerate(circuits):
                data = result.data(circuit)
                snaps = self.stabilizer_snapshots(data, label)
                for j, mem in enumerate(data['memory']):
                    statevec = statevec_targets[i].get(mem)
                    stabilizer = snaps[j]
                    self.assertTrue(
                        self.stabilizes_statevector(stabilizer, statevec))
    def test_snapshot_stabilizer_post_measure_nondet(self):
        """Test snapshot stabilizer after non-deterministic final measurement"""
        shots = 100
        label = "snap"
        counts_targets = snapshot_state_counts_nondeterministic(shots)
        statevec_targets = snapshot_state_post_measure_statevector_nondeterministic(
        )
        circuits = snapshot_state_circuits_nondeterministic(label,
                                                            'stabilizer',
                                                            post_measure=True)
        qobj = assemble(circuits, self.SIMULATOR, memory=True, shots=shots)
        job = self.SIMULATOR.run(qobj, backend_options=self.BACKEND_OPTS)
        result = job.result()
        success = getattr(result, 'success', False)
        method = self.BACKEND_OPTS.get('method', 'automatic')
        if method not in QasmSnapshotStabilizerTests.SUPPORTED_QASM_METHODS:
            self.assertFalse(success)
        else:
            self.assertTrue(success)
            self.compare_counts(result,
                                circuits,
                                counts_targets,
                                delta=0.2 * shots)
            # Check snapshots
            for i, circuit in enumerate(circuits):
                data = result.data(circuit)
                snaps = self.stabilizer_snapshots(data, label)
                for j, mem in enumerate(data['memory']):
                    statevec = statevec_targets[i].get(mem)
                    stabilizer = snaps[j]
                    self.assertTrue(
                        self.stabilizes_statevector(stabilizer, statevec))
class QasmSnapshotDensityMatrixTests:
    """QasmSimulator snapshot density matrix tests.

    Mixin for a unittest TestCase: the concrete class must provide the
    standard ``assert*`` helpers and ``compare_counts``.
    """
    SIMULATOR = QasmSimulator()
    # Simulation methods that implement the density-matrix snapshot.
    SUPPORTED_QASM_METHODS = [
        'automatic', 'density_matrix', 'density_matrix_gpu',
        'density_matrix_thrust'
    ]
    BACKEND_OPTS = {}

    def density_snapshots(self, data, label):
        """Return snapshots as a dict mapping memory value -> ndarray.

        DOC FIX: the old docstring said "list of Numpy arrays", but this
        method builds and returns a dict keyed by the memory hex string.
        """
        # Check snapshot entry exists in data
        snaps = data.get("snapshots", {}).get("density_matrix",
                                              {}).get(label, [])
        # Convert nested lists to numpy arrays
        output = {}
        for snap_dict in snaps:
            memory = snap_dict['memory']
            self.assertIsInstance(snap_dict['value'], np.ndarray)
            output[memory] = snap_dict['value']
        return output

    def test_snapshot_density_matrix_pre_measure_det(self):
        """Test snapshot density matrix before deterministic final measurement"""
        shots = 10
        label = "snap"
        counts_targets = snapshot_state_counts_deterministic(shots)
        statevec_targets = snapshot_state_pre_measure_statevector_deterministic(
        )
        circuits = snapshot_state_circuits_deterministic(label,
                                                         'density_matrix',
                                                         post_measure=False)
        qobj = assemble(circuits, self.SIMULATOR, shots=shots)
        job = self.SIMULATOR.run(qobj, backend_options=self.BACKEND_OPTS)
        result = job.result()
        success = getattr(result, 'success', False)
        method = self.BACKEND_OPTS.get('method', 'automatic')
        if method not in QasmSnapshotDensityMatrixTests.SUPPORTED_QASM_METHODS:
            self.assertFalse(success)
        else:
            self.assertTrue(success)
            self.compare_counts(result, circuits, counts_targets, delta=0)
            # Check snapshots: exactly one snapshot per circuit pre-measure.
            for j, circuit in enumerate(circuits):
                data = result.data(circuit)
                snaps = self.density_snapshots(data, label)
                # BUG FIX: was `assertTrue(len(snaps), 1)` — the second
                # argument of assertTrue is only a failure message, so the
                # count was never compared against 1.
                self.assertEqual(len(snaps), 1)
                # The expected density matrix is |psi><psi|.
                target = np.outer(statevec_targets[j],
                                  statevec_targets[j].conj())
                # Pre-measurement all memory bits should be 0
                value = snaps.get('0x0')
                self.assertTrue(np.allclose(value, target))

    def test_snapshot_density_matrix_pre_measure_nondet(self):
        """Test snapshot density matrix before non-deterministic final measurement"""
        shots = 100
        label = "snap"
        counts_targets = snapshot_state_counts_nondeterministic(shots)
        statevec_targets = snapshot_state_pre_measure_statevector_nondeterministic(
        )
        circuits = snapshot_state_circuits_nondeterministic(label,
                                                            'density_matrix',
                                                            post_measure=False)
        qobj = assemble(circuits, self.SIMULATOR, shots=shots)
        job = self.SIMULATOR.run(qobj, backend_options=self.BACKEND_OPTS)
        result = job.result()
        success = getattr(result, 'success', False)
        method = self.BACKEND_OPTS.get('method', 'automatic')
        if method not in QasmSnapshotDensityMatrixTests.SUPPORTED_QASM_METHODS:
            self.assertFalse(success)
        else:
            self.assertTrue(success)
            self.compare_counts(result,
                                circuits,
                                counts_targets,
                                delta=0.2 * shots)
            # Check snapshots
            for j, circuit in enumerate(circuits):
                data = result.data(circuit)
                snaps = self.density_snapshots(data, label)
                # BUG FIX: was `assertTrue(len(snaps), 1)` (count unchecked).
                self.assertEqual(len(snaps), 1)
                target = np.outer(statevec_targets[j],
                                  statevec_targets[j].conj())
                value = snaps.get('0x0')
                self.assertTrue(np.allclose(value, target))

    def test_snapshot_density_matrix_post_measure_det(self):
        """Test snapshot density matrix after deterministic final measurement"""
        shots = 10
        label = "snap"
        counts_targets = snapshot_state_counts_deterministic(shots)
        statevec_targets = snapshot_state_post_measure_statevector_deterministic(
        )
        circuits = snapshot_state_circuits_deterministic(label,
                                                         'density_matrix',
                                                         post_measure=True)
        qobj = assemble(circuits, self.SIMULATOR, memory=True, shots=shots)
        job = self.SIMULATOR.run(qobj, backend_options=self.BACKEND_OPTS)
        result = job.result()
        success = getattr(result, 'success', False)
        method = self.BACKEND_OPTS.get('method', 'automatic')
        if method not in QasmSnapshotDensityMatrixTests.SUPPORTED_QASM_METHODS:
            self.assertFalse(success)
        else:
            self.assertTrue(success)
            self.compare_counts(result, circuits, counts_targets, delta=0)
            # Check snapshots: match each memory outcome to its snapshot.
            for i, circuit in enumerate(circuits):
                data = result.data(circuit)
                snaps = self.density_snapshots(data, label)
                for j, mem in enumerate(data['memory']):
                    target = statevec_targets[i].get(mem)
                    target = np.outer(target, target.conj())
                    value = snaps.get(mem)
                    self.assertTrue(np.allclose(value, target))

    def test_snapshot_density_matrix_post_measure_nondet(self):
        """Test snapshot density matrix after non-deterministic final measurement"""
        shots = 100
        label = "snap"
        counts_targets = snapshot_state_counts_nondeterministic(shots)
        statevec_targets = snapshot_state_post_measure_statevector_nondeterministic(
        )
        circuits = snapshot_state_circuits_nondeterministic(label,
                                                            'density_matrix',
                                                            post_measure=True)
        qobj = assemble(circuits, self.SIMULATOR, memory=True, shots=shots)
        job = self.SIMULATOR.run(qobj, backend_options=self.BACKEND_OPTS)
        result = job.result()
        success = getattr(result, 'success', False)
        method = self.BACKEND_OPTS.get('method', 'automatic')
        if method not in QasmSnapshotDensityMatrixTests.SUPPORTED_QASM_METHODS:
            self.assertFalse(success)
        else:
            self.assertTrue(success)
            self.compare_counts(result,
                                circuits,
                                counts_targets,
                                delta=0.2 * shots)
            # Check snapshots
            for i, circuit in enumerate(circuits):
                data = result.data(circuit)
                snaps = self.density_snapshots(data, label)
                for j, mem in enumerate(data['memory']):
                    target = statevec_targets[i].get(mem)
                    target = np.outer(target, target.conj())
                    value = snaps.get(mem)
                    self.assertTrue(np.allclose(value, target))
class QasmSnapshotProbabilitiesTests:
    """QasmSimulator snapshot probabilities tests.

    Mixin for a unittest TestCase: the concrete class must provide the
    standard ``assert*`` helpers, ``assertDictAlmostEqual`` and
    ``compare_counts``.
    """
    SIMULATOR = QasmSimulator()
    # All major simulation methods support the probabilities snapshot.
    SUPPORTED_QASM_METHODS = [
        'automatic',
        'statevector',
        'statevector_gpu',
        'statevector_thrust',
        'stabilizer',
        'density_matrix',
        'density_matrix_gpu',
        'density_matrix_thrust',
        'matrix_product_state',
    ]
    BACKEND_OPTS = {}

    @staticmethod
    def probability_snapshots(data, labels):
        """Format snapshots as nested dicts: label -> {memory: probs}."""
        # Check snapshot entry exists in data
        output = {}
        for label in labels:
            snaps = data.get("snapshots", {}).get("probabilities",
                                                  {}).get(label, [])
            output[label] = {
                snap_dict['memory']: snap_dict['value']
                for snap_dict in snaps
            }
        return output

    def test_snapshot_probabilities_pre_measure(self):
        """Test snapshot probabilities before final measurement"""
        shots = 1000
        labels = list(snapshot_probabilities_labels_qubits().keys())
        counts_targets = snapshot_probabilities_counts(shots)
        prob_targets = snapshot_probabilities_pre_meas_probs()
        circuits = snapshot_probabilities_circuits(post_measure=False)
        qobj = assemble(circuits, self.SIMULATOR, shots=shots)
        job = self.SIMULATOR.run(qobj, backend_options=self.BACKEND_OPTS)
        result = job.result()
        success = getattr(result, 'success', False)
        method = self.BACKEND_OPTS.get('method', 'automatic')
        if method not in QasmSnapshotProbabilitiesTests.SUPPORTED_QASM_METHODS:
            self.assertFalse(success)
        else:
            self.assertTrue(success)
            self.compare_counts(result,
                                circuits,
                                counts_targets,
                                delta=0.1 * shots)
            # Check snapshots: one snapshot per label pre-measure.
            for j, circuit in enumerate(circuits):
                data = result.data(circuit)
                all_snapshots = self.probability_snapshots(data, labels)
                for label in labels:
                    snaps = all_snapshots.get(label, {})
                    # BUG FIX: was `assertTrue(len(snaps), 1)` — the second
                    # argument of assertTrue is only a failure message, so
                    # the count was never compared against 1.
                    self.assertEqual(len(snaps), 1)
                    for memory, value in snaps.items():
                        target = prob_targets[j].get(label, {}).get(memory, {})
                        self.assertDictAlmostEqual(value, target, delta=1e-7)

    def test_snapshot_probabilities_post_measure(self):
        """Test snapshot probabilities after final measurement"""
        shots = 1000
        labels = list(snapshot_probabilities_labels_qubits().keys())
        counts_targets = snapshot_probabilities_counts(shots)
        prob_targets = snapshot_probabilities_post_meas_probs()
        circuits = snapshot_probabilities_circuits(post_measure=True)
        qobj = assemble(circuits, self.SIMULATOR, shots=shots)
        job = self.SIMULATOR.run(qobj, backend_options=self.BACKEND_OPTS)
        result = job.result()
        success = getattr(result, 'success', False)
        method = self.BACKEND_OPTS.get('method', 'automatic')
        if method not in QasmSnapshotProbabilitiesTests.SUPPORTED_QASM_METHODS:
            self.assertFalse(success)
        else:
            self.assertTrue(success)
            self.compare_counts(result,
                                circuits,
                                counts_targets,
                                delta=0.1 * shots)
            # Check snapshots: post-measure there may be several memory
            # outcomes per label, so no count assertion here.
            for j, circuit in enumerate(circuits):
                data = result.data(circuit)
                all_snapshots = self.probability_snapshots(data, labels)
                for label in labels:
                    snaps = all_snapshots.get(label, {})
                    for memory, value in snaps.items():
                        target = prob_targets[j].get(label, {}).get(memory, {})
                        self.assertDictAlmostEqual(value, target, delta=1e-7)
class QasmSnapshotExpValPauliTests:
    """QasmSimulator snapshot pauli expectation value tests.

    Mixin for a unittest TestCase: the concrete class must provide the
    standard ``assert*`` helpers and ``compare_counts``.
    """
    SIMULATOR = QasmSimulator()
    SUPPORTED_QASM_METHODS = [
        'automatic', 'statevector', 'statevector_gpu', 'statevector_thrust',
        'density_matrix', 'density_matrix_gpu', 'density_matrix_thrust',
        'matrix_product_state', 'stabilizer'
    ]
    BACKEND_OPTS = {}

    @staticmethod
    def expval_snapshots(data, labels):
        """Format snapshots as nested dicts: label -> {memory: value}."""
        # Check snapshot entry exists in data
        output = {}
        for label in labels:
            snaps = data.get("snapshots", {}).get("expectation_value",
                                                  {}).get(label, [])
            # Convert list into dict
            inner = {}
            for snap_dict in snaps:
                val = snap_dict['value']
                inner[snap_dict['memory']] = val
            output[label] = inner
        return output

    def test_snapshot_expval_pauli_pre_measure(self):
        """Test snapshot expectation value (pauli) before final measurement"""
        shots = 1000
        labels = snapshot_expval_labels()
        counts_targets = snapshot_expval_counts(shots)
        value_targets = snapshot_expval_pre_meas_values()
        circuits = snapshot_expval_circuits(pauli=True, post_measure=False)
        qobj = assemble(circuits, self.SIMULATOR, shots=shots)
        job = self.SIMULATOR.run(qobj, backend_options=self.BACKEND_OPTS)
        result = job.result()
        success = getattr(result, 'success', False)
        method = self.BACKEND_OPTS.get('method', 'automatic')
        if method not in QasmSnapshotExpValPauliTests.SUPPORTED_QASM_METHODS:
            self.assertFalse(success)
        else:
            self.assertTrue(success)
            self.compare_counts(result,
                                circuits,
                                counts_targets,
                                delta=0.1 * shots)
            # Check snapshots: exactly one snapshot per label pre-measure.
            for j, circuit in enumerate(circuits):
                data = result.data(circuit)
                all_snapshots = self.expval_snapshots(data, labels)
                for label in labels:
                    snaps = all_snapshots.get(label, {})
                    # BUG FIX: was `assertTrue(len(snaps), 1)` — the second
                    # argument of assertTrue is only a failure message, so
                    # the count was never compared against 1.
                    self.assertEqual(len(snaps), 1)
                    for memory, value in snaps.items():
                        target = value_targets[j].get(label,
                                                      {}).get(memory, {})
                        self.assertAlmostEqual(value, target, delta=1e-7)

    def test_snapshot_expval_pauli_post_measure(self):
        """Test snapshot expectation value (pauli) after final measurement"""
        shots = 1000
        labels = snapshot_expval_labels()
        counts_targets = snapshot_expval_counts(shots)
        value_targets = snapshot_expval_post_meas_values()
        circuits = snapshot_expval_circuits(pauli=True, post_measure=True)
        qobj = assemble(circuits, self.SIMULATOR, shots=shots)
        job = self.SIMULATOR.run(qobj, backend_options=self.BACKEND_OPTS)
        result = job.result()
        success = getattr(result, 'success', False)
        method = self.BACKEND_OPTS.get('method', 'automatic')
        if method not in QasmSnapshotExpValPauliTests.SUPPORTED_QASM_METHODS:
            self.assertFalse(success)
        else:
            self.assertTrue(success)
            self.compare_counts(result,
                                circuits,
                                counts_targets,
                                delta=0.1 * shots)
            # Check snapshots
            for j, circuit in enumerate(circuits):
                data = result.data(circuit)
                all_snapshots = self.expval_snapshots(data, labels)
                for label in labels:
                    snaps = all_snapshots.get(label, {})
                    # BUG FIX: was `assertTrue(len(snaps), 1)` (the "1" was
                    # only a message). Post-measure there can be several
                    # memory outcomes per label, so only assert non-empty —
                    # which matches the original's effective behavior.
                    self.assertTrue(snaps)
                    for memory, value in snaps.items():
                        target = value_targets[j].get(label,
                                                      {}).get(memory, {})
                        self.assertAlmostEqual(value, target, delta=1e-7)
class QasmSnapshotExpvalPauliNCTests:
    """QasmSimulator snapshot pauli expectation value tests on random states."""

    SIMULATOR = QasmSimulator()
    SUPPORTED_QASM_METHODS = [
        'automatic', 'statevector', 'statevector_gpu', 'statevector_thrust',
        'density_matrix', 'density_matrix_gpu', 'density_matrix_thrust',
        'matrix_product_state',
    ]
    BACKEND_OPTS = {}

    def general_test(self, pauli, num_qubits=None, seed=None):
        """General test case"""
        pauli_qubits = list(range(len(pauli)))
        if num_qubits is None:
            num_qubits = len(pauli_qubits)

        # Prepare a random N-qubit product state from seeded u3 rotations.
        random_gen = np.random.default_rng(seed)
        angles = random_gen.uniform(-1, 1, size=(num_qubits, 3))
        prep_circ = QuantumCircuit(num_qubits)
        for qubit, rotation in enumerate(angles):
            prep_circ.u3(*rotation, qubit)

        # Exact reference value: Tr[rho . P] on the prepared state.
        state = DensityMatrix.from_instruction(prep_circ)
        pauli_op = Operator.from_label(pauli)
        target = np.trace(Operator(state).compose(pauli_op, pauli_qubits).data)

        # Simulate the same circuit with an expectation-value snapshot.
        snap_circ = prep_circ.copy()
        snap_circ.snapshot_expectation_value('final', [(1, pauli)],
                                             pauli_qubits)
        result = self.SIMULATOR.run(
            assemble(snap_circ), backend_options=self.BACKEND_OPTS).result()
        self.assertTrue(getattr(result, 'success', False))
        snapshots = result.data(0).get('snapshots', {})
        self.assertIn('expectation_value', snapshots)
        self.assertIn('final', snapshots['expectation_value'])
        measured = snapshots['expectation_value']['final'][0]['value']
        self.assertAlmostEqual(measured, target)

    def _test_all_paulis(self, length, seed):
        """Run the general test for every Pauli string of a given length."""
        for combo in it.product(['I', 'X', 'Y', 'Z'], repeat=length):
            pauli = ''.join(reversed(combo))
            with self.subTest(msg='Pauli {}'.format(pauli)):
                self.general_test(pauli, num_qubits=3, seed=seed)

    def test_pauli1(self):
        """Test all 1-qubit Pauli snapshots."""
        self._test_all_paulis(1, 100)

    def test_pauli2(self):
        """Test all 2-qubit Pauli snapshots."""
        self._test_all_paulis(2, 100)

    def test_pauli3(self):
        """Test all 3-qubit Pauli snapshots."""
        self._test_all_paulis(3, 100)
class QasmSnapshotExpValMatrixTests:
    """QasmSimulator snapshot matrix expectation value tests.

    Mixin for a unittest TestCase: the concrete class must provide the
    standard ``assert*`` helpers and ``compare_counts``.
    """
    SIMULATOR = QasmSimulator()
    SUPPORTED_QASM_METHODS = [
        'automatic', 'statevector', 'statevector_gpu', 'statevector_thrust',
        'matrix_product_state'
    ]
    BACKEND_OPTS = {}

    @staticmethod
    def expval_snapshots(data, labels):
        """Format snapshots as nested dicts: label -> {memory: value}."""
        # Check snapshot entry exists in data
        output = {}
        for label in labels:
            snaps = data.get("snapshots", {}).get("expectation_value",
                                                  {}).get(label, [])
            # Convert list into dict
            inner = {}
            for snap_dict in snaps:
                inner[snap_dict['memory']] = snap_dict['value']
            output[label] = inner
        return output

    def test_snapshot_expval_matrix_pre_measure(self):
        """Test snapshot expectation value (matrix) before final measurement"""
        shots = 1000
        labels = snapshot_expval_labels()
        counts_targets = snapshot_expval_counts(shots)
        value_targets = snapshot_expval_pre_meas_values()
        circuits = snapshot_expval_circuits(pauli=False, post_measure=False)
        qobj = assemble(circuits, self.SIMULATOR, shots=shots)
        job = self.SIMULATOR.run(qobj, backend_options=self.BACKEND_OPTS)
        result = job.result()
        success = getattr(result, 'success', False)
        method = self.BACKEND_OPTS.get('method', 'automatic')
        if method not in QasmSnapshotExpValMatrixTests.SUPPORTED_QASM_METHODS:
            self.assertFalse(success)
        else:
            self.assertTrue(success)
            self.compare_counts(result,
                                circuits,
                                counts_targets,
                                delta=0.1 * shots)
            # Check snapshots: exactly one snapshot per label pre-measure.
            for j, circuit in enumerate(circuits):
                data = result.data(circuit)
                all_snapshots = self.expval_snapshots(data, labels)
                for label in labels:
                    snaps = all_snapshots.get(label, {})
                    # BUG FIX: was `assertTrue(len(snaps), 1)` — the second
                    # argument of assertTrue is only a failure message, so
                    # the count was never compared against 1.
                    self.assertEqual(len(snaps), 1)
                    for memory, value in snaps.items():
                        target = value_targets[j].get(label,
                                                      {}).get(memory, {})
                        self.assertAlmostEqual(value, target, delta=1e-7)

    def test_snapshot_expval_matrix_post_measure(self):
        """Test snapshot expectation value (matrix) after final measurement"""
        shots = 1000
        labels = snapshot_expval_labels()
        counts_targets = snapshot_expval_counts(shots)
        value_targets = snapshot_expval_post_meas_values()
        circuits = snapshot_expval_circuits(pauli=False, post_measure=True)
        qobj = assemble(circuits, self.SIMULATOR, shots=shots)
        job = self.SIMULATOR.run(qobj, backend_options=self.BACKEND_OPTS)
        result = job.result()
        success = getattr(result, 'success', False)
        method = self.BACKEND_OPTS.get('method', 'automatic')
        if method not in QasmSnapshotExpValMatrixTests.SUPPORTED_QASM_METHODS:
            self.assertFalse(success)
        else:
            self.assertTrue(success)
            self.compare_counts(result,
                                circuits,
                                counts_targets,
                                delta=0.1 * shots)
            # Check snapshots
            for j, circuit in enumerate(circuits):
                data = result.data(circuit)
                all_snapshots = self.expval_snapshots(data, labels)
                for label in labels:
                    snaps = all_snapshots.get(label, {})
                    # BUG FIX: was `assertTrue(len(snaps), 1)` (the "1" was
                    # only a message). Post-measure there can be several
                    # memory outcomes per label, so only assert non-empty —
                    # matching the original's effective behavior.
                    self.assertTrue(snaps)
                    for memory, value in snaps.items():
                        target = value_targets[j].get(label,
                                                      {}).get(memory, {})
                        self.assertAlmostEqual(value, target, delta=1e-7)
|
snapshot_state_counts_nondeterministic,
snapshot_state_pre_measure_statevector_nondeterministic,
|
App.js
|
import React from "react";
import Developer from "./components/Developer.js";
function
|
() {
return (
<div>
<header>
<h1>Ma première app React</h1>
</header>
<h2>Développeurs :</h2>
<Developer />
</div>
);
}
export default App;
|
App
|
views.py
|
from global_decorators import render_to
from accounts.models import UserProfile, FLAG_ADMIN, FLAG_OP, FLAG_VIP
from admin.decorators import admin_only
from django.contrib import messages
from django.http import HttpResponseRedirect
from admin.users.forms import AdminProfileForm
from django.contrib.auth.models import User
from django.views.decorators.http import require_POST
@admin_only
@render_to('accounts_list.html')
def index(request):
    # Admin listing of every user profile.
    # NOTE: `render_to` uses the returned mapping as the template context and
    # `locals()` exposes every local name (including `request`), so the
    # variable names below are part of the template contract — do not rename.
    title = "Accounts"
    icon = "user"
    accounts = UserProfile.objects.all()
    return locals()
@admin_only
@render_to("accounts_edit.html")
def edit(request, user_id):
    # Admin form page for editing a user's profile and permission flags.
    # NOTE: `render_to` uses the returned mapping as the template context and
    # `locals()` exposes every local name, so the variable names below are
    # part of the template contract — do not rename.
    try:
        profile = UserProfile.objects.get(user_id=user_id)
    except UserProfile.DoesNotExist:
        messages.error(request, "Unable to find a user with ID %s" % user_id)
        return HttpResponseRedirect('/admin/users/')
    title = "Edit %s" % profile.user.username
    icon = "users"
    # Seed the form with related auth.User fields and the derived flag
    # booleans, which are not direct columns on UserProfile.
    form = AdminProfileForm(instance=profile, initial={'username': profile.user.username,
                                                       'first_name': profile.user.first_name,
                                                       'last_name': profile.user.last_name,
                                                       'admin': profile.admin,
                                                       'op': profile.operator,
                                                       'vip': profile.vip})
    form.helper.form_action = "/admin/users/%s/save/" % user_id
    return locals()
@admin_only
@render_to("accounts_delete.html")
def delete(request, user_id):
    # Admin confirmation page for deleting a user; performs the delete on
    # POST with axn=delete, bails out on axn=cancel, otherwise renders the
    # confirmation template.
    # NOTE: `locals()` is the template context, so do not rename locals.
    try:
        profile = UserProfile.objects.get(user_id=user_id)
    except UserProfile.DoesNotExist:
        messages.error(request, "Unable to find a user with ID %s" % user_id)
        return HttpResponseRedirect('/admin/users/')
    # NOTE(review): page title says "Edit" on the delete view — possibly
    # intentional template reuse; confirm against accounts_delete.html.
    title = "Edit %s" % profile.user.username
    icon = "users"
    if request.POST.get('axn') == 'delete':
        # do it! Deleting the auth.User cascades to the profile row.
        profile.user.delete()
        messages.success(request, "The user %s has been deleted!" % profile.user.username)
        return HttpResponseRedirect("/admin/users/")
    elif request.POST.get('axn') == 'cancel':
        return HttpResponseRedirect("/admin/users/")
    return locals()
@admin_only
@require_POST
@render_to("accounts_edit.html")
def save(request, user_id):
try:
profile = UserProfile.objects.get(user_id=user_id)
except UserProfile.DoesNotExist:
messages.error(request, "Unable to find a user with ID %s" % user_id)
return HttpResponseRedirect('/admin/users/')
title = "Edit %s" % profile.user.username
icon = "users"
if request.POST.get('save_changes'):
form = AdminProfileForm(data=request.POST, files=request.FILES, instance=profile)
form.helper.form_action = "/admin/users/%s/save/" % user_id
if form.is_valid():
if request.POST.get('username', profile.user.username) != profile.user.username:
# change username
try:
|
except User.DoesNotExist:
profile.user.username = request.POST.get('username')
profile.user.save()
form.save()
# flags
if request.POST.get('admin', 'off') == 'on' and not profile.admin:
profile.set_flag(FLAG_ADMIN)
if request.POST.get('op', 'off') == 'on' and not profile.operator:
profile.set_flag(FLAG_OP)
if request.POST.get('vip', 'off') == 'on' and not profile.vip:
profile.set_flag(FLAG_VIP)
profile.save()
# first/last name
profile.user.first_name = request.POST.get('first_name', profile.user.first_name)
profile.user.last_name = request.POST.get('last_name', profile.user.last_name)
profile.user.save()
messages.success(request, 'The profile has been updated!')
else:
messages.error(request, 'Unable to save changes to the profile!')
return locals()
return HttpResponseRedirect("/admin/users/")
|
check = User.objects.get(username=request.POST.get('username'))
messages.error(request, 'That username has already been taken!')
return locals()
|
get_enum_attribute_action.py
|
# stdlib
from typing import Dict
from typing import Optional
# third party
from google.protobuf.reflection import GeneratedProtocolMessageType
from nacl.signing import VerifyKey
# syft absolute
import syft as sy
# relative
from ..... import lib
from .....proto.core.node.common.action.get_enum_attribute_pb2 import (
GetEnumAttributeAction as GetEnumAttributeAction_PB,
)
from ....common.serde.serializable import serializable
from ....common.uid import UID
from ....io.address import Address
from ....store.storeable_object import StorableObject
from ...abstract.node import AbstractNode
from .common import ImmediateActionWithoutReply
from .run_class_method_action import RunClassMethodAction
@serializable()
class EnumAttributeAction(ImmediateActionWithoutReply):
def __init__(
self,
path: str,
id_at_location: UID,
address: Address,
msg_id: Optional[UID] = None,
):
super().__init__(address, msg_id=msg_id)
self.id_at_location = id_at_location
self.path = path
    def intersect_keys(
        self,
        left: Dict[VerifyKey, Optional[UID]],
        right: Dict[VerifyKey, Optional[UID]],
    ) -> Dict[VerifyKey, Optional[UID]]:
        # Delegate to RunClassMethodAction so permission-key intersection
        # behaves identically across action types.
        return RunClassMethodAction.intersect_keys(left, right)
def
|
(self, node: AbstractNode, verify_key: VerifyKey) -> None:
enum_attribute = node.lib_ast.query(self.path)
result = enum_attribute.solve_get_enum_attribute().value
result = lib.python.primitive_factory.PrimitiveFactory.generate_primitive(
value=result, id=self.id_at_location
)
result = StorableObject(
id=self.id_at_location,
data=result,
)
node.store[self.id_at_location] = result
def _object2proto(self) -> GetEnumAttributeAction_PB:
"""Returns a protobuf serialization of self.
As a requirement of all objects which inherit from Serializable,
this method transforms the current object into the corresponding
Protobuf object so that it can be further serialized.
:return: returns a protobuf object
:rtype: GetOrSetPropertyAction_PB
.. note::
This method is purely an internal method. Please use serialize(object) or one of
the other public serialization methods if you wish to serialize an
object.
"""
return GetEnumAttributeAction_PB(
path=self.path,
id_at_location=sy.serialize(self.id_at_location),
address=sy.serialize(self.address),
msg_id=sy.serialize(self.id),
)
@staticmethod
def _proto2object(
proto: GetEnumAttributeAction_PB,
) -> "EnumAttributeAction":
"""Creates a ObjectWithID from a protobuf
As a requirement of all objects which inherit from Serializable,
this method transforms a protobuf object into an instance of this class.
:return: returns an instance of GetOrSetPropertyAction
:rtype: GetOrSetPropertyAction
.. note::
This method is purely an internal method. Please use syft.deserialize()
if you wish to deserialize an object.
"""
return EnumAttributeAction(
path=proto.path,
id_at_location=sy.deserialize(blob=proto.id_at_location),
address=sy.deserialize(blob=proto.address),
msg_id=sy.deserialize(blob=proto.msg_id),
)
@staticmethod
def get_protobuf_schema() -> GeneratedProtocolMessageType:
"""Return the type of protobuf object which stores a class of this type
As a part of serialization and deserialization, we need the ability to
lookup the protobuf object type directly from the object type. This
static method allows us to do this.
Importantly, this method is also used to create the reverse lookup ability within
the metaclass of Serializable. In the metaclass, it calls this method and then
it takes whatever type is returned from this method and adds an attribute to it
with the type of this class attached to it. See the MetaSerializable class for details.
:return: the type of protobuf object which corresponds to this class.
:rtype: GeneratedProtocolMessageType
"""
return GetEnumAttributeAction_PB
|
execute_action
|
builder.rs
|
use std::any::Any;
use std::cell::{Cell, RefCell};
use std::collections::BTreeSet;
use std::collections::HashMap;
use std::env;
use std::fmt::Debug;
use std::fs;
use std::hash::Hash;
use std::ops::Deref;
use std::path::{Path, PathBuf};
use std::process::Command;
use std::time::{Duration, Instant};
use crate::cache::{Cache, Interned, INTERNER};
use crate::check;
use crate::compile;
use crate::dist;
use crate::doc;
use crate::flags::Subcommand;
use crate::install;
use crate::native;
use crate::test;
use crate::tool;
use crate::util::{self, add_lib_path, exe, libdir};
use crate::{Build, DocTests, Mode, GitRepo};
pub use crate::Compiler;
use petgraph::graph::NodeIndex;
use petgraph::Graph;
/// Per-invocation driver for running build steps.
///
/// Wraps the global [`Build`] together with the state for one CLI run:
/// the subcommand kind, the selected paths, a step cache, and a petgraph
/// graph recording which steps ran.
pub struct Builder<'a> {
    /// The global build configuration and environment.
    pub build: &'a Build,
    /// Requested stage; `build.config.stage.unwrap_or(2)` (see `new`/`get_help`).
    pub top_stage: u32,
    /// Which subcommand (build/check/test/...) this builder executes.
    pub kind: Kind,
    /// Cache of step results — presumably memoizes `ensure` calls; the
    /// cache internals are defined in `crate::cache`.
    cache: Cache,
    /// Type-erased stack of steps — likely the currently-executing chain;
    /// only constructed here, used elsewhere (TODO confirm).
    stack: RefCell<Vec<Box<dyn Any>>>,
    /// Accumulator for time attributed to dependencies of the current step.
    time_spent_on_dependencies: Cell<Duration>,
    /// Paths passed on the command line that select which rules run.
    pub paths: Vec<PathBuf>,
    /// Maps step names to their node in `graph`.
    graph_nodes: RefCell<HashMap<String, NodeIndex>>,
    /// Dependency graph of executed steps; cloned out by `execute_cli`.
    graph: RefCell<Graph<String, bool>>,
    /// Graph node of the step currently executing, if any.
    parent: Cell<Option<NodeIndex>>,
}
/// Lets a `Builder` transparently expose the fields and methods of the
/// underlying [`Build`] (used pervasively below, e.g. `self.config`).
impl<'a> Deref for Builder<'a> {
    type Target = Build;
    fn deref(&self) -> &Self::Target {
        self.build
    }
}
/// A single unit of work in the build, registered via
/// `StepDescription::from::<S>()` and dispatched by `StepDescription::run`.
pub trait Step: 'static + Clone + Debug + PartialEq + Eq + Hash {
    /// The output type of this rule — e.g. a `PathBuf` when directories are
    /// created, or a `Compiler` once it's been assembled.
    type Output: Clone;
    /// Whether this rule runs when no paths are passed on the command line
    /// (checked together with `ShouldRun::is_really_default` in
    /// `StepDescription::run`).
    const DEFAULT: bool = false;
    /// Run this rule for all hosts without cross compiling.
    const ONLY_HOSTS: bool = false;
    /// Primary function to execute this rule. Can call `builder.ensure(...)`
    /// with other steps to run those.
    fn run(self, builder: &Builder) -> Self::Output;
    /// When bootstrap is passed a set of paths, this controls whether this rule
    /// will execute. However, it does not get called in a "default" context
    /// when we are not passed any paths; in that case, make_run is called
    /// directly.
    fn should_run(run: ShouldRun) -> ShouldRun;
    /// Build up a "root" rule, either as a default rule or from a path passed
    /// to us.
    ///
    /// When path is `None`, we are executing in a context where no paths were
    /// passed. When `./x.py build` is run, for example, this rule could get
    /// called if it is in the correct list below with a path of `None`.
    fn make_run(_run: RunConfig) {
        // It is reasonable to not have an implementation of make_run for rules
        // who do not want to get called from the root context. This means that
        // they are likely dependencies (e.g., sysroot creation) or similar, and
        // as such calling them from ./x.py isn't logical.
        unimplemented!()
    }
}
/// Arguments handed to a step's `make_run` when it is invoked as a root
/// rule (built per (host, target) pair in `StepDescription::maybe_run`).
pub struct RunConfig<'a> {
    pub builder: &'a Builder<'a>,
    /// Host triple for this invocation.
    pub host: Interned<String>,
    /// Target triple for this invocation.
    pub target: Interned<String>,
    /// Representative path from the matched pathset (see `PathSet::path`).
    pub path: PathBuf,
}
/// Type-erased record of one `Step` implementation so heterogeneous steps
/// can be stored in a single list (see `StepDescription::from`).
struct StepDescription {
    /// Mirror of `S::DEFAULT`: run when no CLI paths are given.
    default: bool,
    /// Mirror of `S::ONLY_HOSTS`: restrict targets to the host list.
    only_hosts: bool,
    should_run: fn(ShouldRun) -> ShouldRun,
    make_run: fn(RunConfig),
    /// Type name of the step, used in diagnostics and assertions.
    name: &'static str,
}
/// The command-line paths that select a rule.
#[derive(Debug, Clone, PartialOrd, Ord, PartialEq, Eq)]
pub enum PathSet {
    /// A set of individual paths/aliases; matching any one selects the rule.
    Set(BTreeSet<PathBuf>),
    /// A test-suite root; any path starting with it selects the rule
    /// (see `ShouldRun::is_suite_path`).
    Suite(PathBuf),
}
impl PathSet {
    /// A pathset matching nothing; used by `ShouldRun::never`.
    fn empty() -> PathSet {
        PathSet::Set(BTreeSet::new())
    }

    /// A pathset containing exactly one path.
    fn one<P: Into<PathBuf>>(path: P) -> PathSet {
        PathSet::Set(::std::iter::once(path.into()).collect())
    }

    /// Does `needle` select this pathset? A `Set` matches if any member
    /// ends with `needle`; a `Suite` matches if the suite root does.
    fn has(&self, needle: &Path) -> bool {
        match self {
            PathSet::Suite(suite) => suite.ends_with(needle),
            PathSet::Set(set) => set.iter().any(|p| p.ends_with(needle)),
        }
    }

    /// A representative path for this pathset: the first member of a `Set`
    /// (falling back to the source root when empty), or the suite root.
    fn path(&self, builder: &Builder) -> PathBuf {
        match self {
            PathSet::Suite(path) => path.clone(),
            PathSet::Set(set) => match set.iter().next() {
                Some(first) => first.clone(),
                None => builder.build.src.to_path_buf(),
            },
        }
    }
}
impl StepDescription {
    /// Builds the type-erased description for step type `S`.
    fn from<S: Step>() -> StepDescription {
        StepDescription {
            default: S::DEFAULT,
            only_hosts: S::ONLY_HOSTS,
            should_run: S::should_run,
            make_run: S::make_run,
            // SAFETY: `type_name` is an intrinsic at this toolchain vintage;
            // it only yields a 'static diagnostic string for `S`.
            name: unsafe { ::std::intrinsics::type_name::<S>() },
        }
    }

    /// Invokes `make_run` for every (host, target) pair this rule applies
    /// to, unless `pathset` is excluded via `--exclude`.
    fn maybe_run(&self, builder: &Builder, pathset: &PathSet) {
        if builder.config.exclude.iter().any(|e| pathset.has(e)) {
            eprintln!("Skipping {:?} because it is excluded", pathset);
            return;
        } else if !builder.config.exclude.is_empty() {
            // Excludes exist but didn't match; log why this still runs.
            eprintln!(
                "{:?} not skipped for {:?} -- not in {:?}",
                pathset, self.name, builder.config.exclude
            );
        }
        let hosts = &builder.hosts;
        // Determine the targets participating in this rule.
        let targets = if self.only_hosts {
            if !builder.config.run_host_only {
                return; // don't run anything
            } else {
                &builder.hosts
            }
        } else {
            &builder.targets
        };
        for host in hosts {
            for target in targets {
                let run = RunConfig {
                    builder,
                    path: pathset.path(builder),
                    host: *host,
                    target: *target,
                };
                (self.make_run)(run);
            }
        }
    }

    /// Dispatch entry point. With no CLI paths, runs every defaulted rule
    /// whose `is_really_default` condition holds; otherwise each CLI path
    /// must match at least one rule (suite match takes priority) or we panic.
    fn run(v: &[StepDescription], builder: &Builder, paths: &[PathBuf]) {
        let should_runs = v
            .iter()
            .map(|desc| (desc.should_run)(ShouldRun::new(builder)))
            .collect::<Vec<_>>();
        // sanity checks on rules
        for (desc, should_run) in v.iter().zip(&should_runs) {
            assert!(
                !should_run.paths.is_empty(),
                "{:?} should have at least one pathset",
                desc.name
            );
        }
        if paths.is_empty() {
            for (desc, should_run) in v.iter().zip(should_runs) {
                if desc.default && should_run.is_really_default {
                    for pathset in &should_run.paths {
                        desc.maybe_run(builder, pathset);
                    }
                }
            }
        } else {
            for path in paths {
                // strip CurDir prefix if present
                let path = match path.strip_prefix(".") {
                    Ok(p) => p,
                    Err(_) => path,
                };
                let mut attempted_run = false;
                for (desc, should_run) in v.iter().zip(&should_runs) {
                    if let Some(suite) = should_run.is_suite_path(path) {
                        attempted_run = true;
                        desc.maybe_run(builder, suite);
                    } else if let Some(pathset) = should_run.pathset_for_path(path) {
                        attempted_run = true;
                        desc.maybe_run(builder, pathset);
                    }
                }
                if !attempted_run {
                    panic!("Error: no rules matched {}.", path.display());
                }
            }
        }
    }
}
/// Accumulates, for one step type, the pathsets that select it and whether
/// it should run in the no-paths "default" mode.
#[derive(Clone)]
pub struct ShouldRun<'a> {
    pub builder: &'a Builder<'a>,
    // use a BTreeSet to maintain sort order
    paths: BTreeSet<PathSet>,
    // If this is a default rule, this is an additional constraint placed on
    // its run. Generally something like compiler docs being enabled.
    is_really_default: bool,
}
impl<'a> ShouldRun<'a> {
    /// Fresh accumulator with no paths registered; default rules run
    /// unconditionally until `default_condition` says otherwise.
    fn new(builder: &'a Builder) -> ShouldRun<'a> {
        ShouldRun {
            builder,
            paths: BTreeSet::new(),
            is_really_default: true, // by default no additional conditions
        }
    }

    /// Gate this rule's participation in default runs on `cond`
    /// (e.g. a config flag like compiler docs being enabled).
    pub fn default_condition(mut self, cond: bool) -> Self {
        self.is_really_default = cond;
        self
    }

    /// Registers the paths of every in-tree crate reachable from `name` as a
    /// SINGLE pathset. Unlike `krate`, matching any of them selects the whole
    /// rule at once rather than per-crate.
    pub fn all_krates(mut self, name: &str) -> Self {
        let mut krate_paths = BTreeSet::new();
        for krate in self.builder.in_tree_crates(name) {
            krate_paths.insert(PathBuf::from(&krate.path));
        }
        self.paths.insert(PathSet::Set(krate_paths));
        self
    }

    /// Registers one single-path pathset per in-tree crate reachable from
    /// `name`, so each crate can be selected individually.
    pub fn krate(mut self, name: &str) -> Self {
        for krate in self.builder.in_tree_crates(name) {
            let single = PathSet::one(&krate.path);
            self.paths.insert(single);
        }
        self
    }

    /// Registers a single, non-aliased path.
    pub fn path(self, path: &str) -> Self {
        self.paths(&[path])
    }

    /// Registers several paths as aliases for the same job (one pathset).
    pub fn paths(mut self, paths: &[&str]) -> Self {
        let alias_set: BTreeSet<PathBuf> = paths.iter().map(PathBuf::from).collect();
        self.paths.insert(PathSet::Set(alias_set));
        self
    }

    /// Returns the registered suite pathset that `path` falls under, if any.
    pub fn is_suite_path(&self, path: &Path) -> Option<&PathSet> {
        self.paths.iter().find(|pathset| {
            if let PathSet::Suite(suite_root) = pathset {
                path.starts_with(suite_root)
            } else {
                false
            }
        })
    }

    /// Registers a test-suite root; anything under it selects this rule.
    pub fn suite_path(mut self, suite: &str) -> Self {
        self.paths.insert(PathSet::Suite(PathBuf::from(suite)));
        self
    }

    /// Registers an empty pathset: satisfies the "at least one pathset"
    /// sanity check while matching no CLI path. Allows being explicit about
    /// why `should_run` returns the value passed to it.
    pub fn never(mut self) -> ShouldRun<'a> {
        self.paths.insert(PathSet::empty());
        self
    }

    /// Returns the first registered pathset that `path` selects, if any.
    fn pathset_for_path(&self, path: &Path) -> Option<&PathSet> {
        self.paths.iter().find(|ps| ps.has(path))
    }
}
/// The subcommand being executed, mapped from the CLI in `Builder::new`
/// and `Builder::get_help`.
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub enum Kind {
    Build,
    Check,
    Test,
    Bench,
    Dist,
    Doc,
    Install,
}
impl<'a> Builder<'a> {
    /// Registry of all rules for a given subcommand kind, in dispatch order.
    /// `StepDescription::run` walks this list to match CLI paths / defaults.
    fn get_step_descriptions(kind: Kind) -> Vec<StepDescription> {
        // Expands each listed step type into a `StepDescription::from::<T>()`.
        macro_rules! describe {
            ($($rule:ty),+ $(,)*) => {{
                vec![$(StepDescription::from::<$rule>()),+]
            }};
        }
        match kind {
            Kind::Build => describe!(
                compile::Std,
                compile::Test,
                compile::Rustc,
                compile::CodegenBackend,
                compile::StartupObjects,
                tool::BuildManifest,
                tool::Rustbook,
                tool::ErrorIndex,
                tool::UnstableBookGen,
                tool::Tidy,
                tool::Linkchecker,
                tool::CargoTest,
                tool::Compiletest,
                tool::RemoteTestServer,
                tool::RemoteTestClient,
                tool::RustInstaller,
                tool::Cargo,
                tool::Rls,
                tool::Rustdoc,
                tool::Clippy,
                native::Llvm,
                tool::Rustfmt,
                tool::Miri,
                native::Lld
            ),
            Kind::Check => describe!(
                check::Std,
                check::Test,
                check::Rustc,
                check::CodegenBackend,
                check::Rustdoc
            ),
            Kind::Test => describe!(
                test::Tidy,
                test::Ui,
                test::RunPass,
                test::CompileFail,
                test::RunFail,
                test::RunPassValgrind,
                test::MirOpt,
                test::Codegen,
                test::CodegenUnits,
                test::Incremental,
                test::Debuginfo,
                test::UiFullDeps,
                test::RunPassFullDeps,
                test::Rustdoc,
                test::Pretty,
                test::RunPassPretty,
                test::RunFailPretty,
                test::RunPassValgrindPretty,
                test::Crate,
                test::CrateLibrustc,
                test::CrateRustdoc,
                test::Linkcheck,
                test::Cargotest,
                test::Cargo,
                test::Rls,
                test::ErrorIndex,
                test::Distcheck,
                test::RunMakeFullDeps,
                test::Nomicon,
                test::Reference,
                test::RustdocBook,
                test::RustByExample,
                test::TheBook,
                test::UnstableBook,
                test::RustcBook,
                test::Rustfmt,
                test::Miri,
                test::Clippy,
                test::CompiletestTest,
                test::RustdocJS,
                test::RustdocTheme,
                // Run bootstrap close to the end as it's unlikely to fail
                test::Bootstrap,
                // Run run-make last, since these won't pass without make on Windows
                test::RunMake,
                test::RustdocUi
            ),
            Kind::Bench => describe!(test::Crate, test::CrateLibrustc),
            Kind::Doc => describe!(
                doc::UnstableBook,
                doc::UnstableBookGen,
                doc::TheBook,
                doc::Standalone,
                doc::Std,
                doc::Test,
                doc::WhitelistedRustc,
                doc::Rustc,
                doc::Rustdoc,
                doc::ErrorIndex,
                doc::Nomicon,
                doc::Reference,
                doc::RustdocBook,
                doc::RustByExample,
                doc::RustcBook,
                doc::CargoBook,
                doc::EditionGuide,
            ),
            Kind::Dist => describe!(
                dist::Docs,
                dist::RustcDocs,
                dist::Mingw,
                dist::Rustc,
                dist::DebuggerScripts,
                dist::Std,
                dist::Analysis,
                dist::Src,
                dist::PlainSourceTarball,
                dist::Cargo,
                dist::Rls,
                dist::Rustfmt,
                dist::Clippy,
                dist::Miri,
                dist::LlvmTools,
                dist::Lldb,
                dist::Extended,
                dist::HashSign
            ),
            Kind::Install => describe!(
                install::Docs,
                install::Std,
                install::Cargo,
                install::Rls,
                install::Rustfmt,
                install::Clippy,
                install::Miri,
                install::Analysis,
                install::Src,
                install::Rustc
            ),
        }
    }
    /// Renders the "Available paths:" help text for `subcommand` by
    /// collecting every concrete path registered by that subcommand's rules.
    /// Returns `None` for subcommands not mapped here (note that `check` and
    /// `clean` are absent from the match — TODO confirm that's intentional).
    pub fn get_help(build: &Build, subcommand: &str) -> Option<String> {
        let kind = match subcommand {
            "build" => Kind::Build,
            "doc" => Kind::Doc,
            "test" => Kind::Test,
            "bench" => Kind::Bench,
            "dist" => Kind::Dist,
            "install" => Kind::Install,
            _ => return None,
        };
        // Throwaway Builder: only used to query the rules' pathsets.
        let builder = Builder {
            build,
            top_stage: build.config.stage.unwrap_or(2),
            kind,
            cache: Cache::new(),
            stack: RefCell::new(Vec::new()),
            time_spent_on_dependencies: Cell::new(Duration::new(0, 0)),
            paths: vec![],
            graph_nodes: RefCell::new(HashMap::new()),
            graph: RefCell::new(Graph::new()),
            parent: Cell::new(None),
        };
        let builder = &builder;
        // Fold every rule's registrations into one ShouldRun accumulator.
        let mut should_run = ShouldRun::new(builder);
        for desc in Builder::get_step_descriptions(builder.kind) {
            should_run = (desc.should_run)(should_run);
        }
        let mut help = String::from("Available paths:\n");
        for pathset in should_run.paths {
            // Only explicit path sets are listed; suites are skipped.
            if let PathSet::Set(set) = pathset {
                set.iter().for_each(|path| {
                    help.push_str(
                        format!(" ./x.py {} {}\n", subcommand, path.display()).as_str(),
                    )
                })
            }
        }
        Some(help)
    }
    /// Constructs the `Builder` for the parsed CLI command.
    ///
    /// Panics on `Subcommand::Clean` (clean is handled before a Builder is
    /// needed — TODO confirm against the caller) and refuses to `dist` with
    /// miri enabled.
    pub fn new(build: &Build) -> Builder {
        // Map the subcommand onto a Kind plus its selected paths.
        let (kind, paths) = match build.config.cmd {
            Subcommand::Build { ref paths } => (Kind::Build, &paths[..]),
            Subcommand::Check { ref paths } => (Kind::Check, &paths[..]),
            Subcommand::Doc { ref paths } => (Kind::Doc, &paths[..]),
            Subcommand::Test { ref paths, .. } => (Kind::Test, &paths[..]),
            Subcommand::Bench { ref paths, .. } => (Kind::Bench, &paths[..]),
            Subcommand::Dist { ref paths } => (Kind::Dist, &paths[..]),
            Subcommand::Install { ref paths } => (Kind::Install, &paths[..]),
            Subcommand::Clean { .. } => panic!(),
        };
        let builder = Builder {
            build,
            // Stage defaults to 2 when not set explicitly.
            top_stage: build.config.stage.unwrap_or(2),
            kind,
            cache: Cache::new(),
            stack: RefCell::new(Vec::new()),
            time_spent_on_dependencies: Cell::new(Duration::new(0, 0)),
            paths: paths.to_owned(),
            graph_nodes: RefCell::new(HashMap::new()),
            graph: RefCell::new(Graph::new()),
            parent: Cell::new(None),
        };
        if kind == Kind::Dist {
            assert!(
                !builder.config.test_miri,
                "Do not distribute with miri enabled.\n\
                The distributed libraries would include all MIR (increasing binary size).
                The distributed MIR would include validation statements."
            );
        }
        builder
    }
    /// Runs the steps selected by the parsed CLI (`self.kind` + `self.paths`)
    /// and returns a clone of the recorded step dependency graph.
    pub fn execute_cli(&self) -> Graph<String, bool> {
        self.run_step_descriptions(&Builder::get_step_descriptions(self.kind), &self.paths);
        self.graph.borrow().clone()
    }
pub fn default_doc(&self, paths: Option<&[PathBuf]>) {
let paths = paths.unwrap_or(&[]);
self.run_step_descriptions(&Builder::get_step_descriptions(Kind::Doc), paths);
}
    /// Thin forwarder to `StepDescription::run` with `self` as the builder.
    fn run_step_descriptions(&self, v: &[StepDescription], paths: &[PathBuf]) {
        StepDescription::run(v, self, paths);
    }
    /// Obtain a compiler at a given stage and for a given host. Explicitly does
    /// not take `Compiler` since all `Compiler` instances are meant to be
    /// obtained through this function, since it ensures that they are valid
    /// (i.e., built and assembled).
    pub fn compiler(&self, stage: u32, host: Interned<String>) -> Compiler {
        // Delegates assembly to the `compile::Assemble` step.
        self.ensure(compile::Assemble {
            target_compiler: Compiler { stage, host },
        })
    }
    /// Returns the sysroot path for `compiler`, running the
    /// `compile::Sysroot` step to produce it.
    pub fn sysroot(&self, compiler: Compiler) -> Interned<PathBuf> {
        self.ensure(compile::Sysroot { compiler })
    }
    /// Returns the libdir where the standard library and other artifacts are
    /// found for a compiler's sysroot.
    pub fn sysroot_libdir(
        &self,
        compiler: Compiler,
        target: Interned<String>,
    ) -> Interned<PathBuf> {
        // Private step so the wipe-and-recreate below goes through `ensure`
        // per (compiler, target) pair rather than on every plain call.
        #[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
        struct Libdir {
            compiler: Compiler,
            target: Interned<String>,
        }
        impl Step for Libdir {
            type Output = Interned<PathBuf>;
            fn should_run(run: ShouldRun) -> ShouldRun {
                // Never selectable from the CLI; only runs via `ensure` below.
                run.never()
            }
            fn run(self, builder: &Builder) -> Interned<PathBuf> {
                let compiler = self.compiler;
                let config = &builder.build.config;
                // Stage >= 1 honors a configured relative libdir;
                // otherwise fall back to plain "lib".
                let lib = if compiler.stage >= 1 && config.libdir_relative().is_some() {
                    builder.build.config.libdir_relative().unwrap()
                } else {
                    Path::new("lib")
                };
                let sysroot = builder
                    .sysroot(self.compiler)
                    .join(lib)
                    .join("rustlib")
                    .join(self.target)
                    .join("lib");
                // Start from a clean directory; removal error (e.g. it
                // didn't exist) is deliberately ignored.
                let _ = fs::remove_dir_all(&sysroot);
                t!(fs::create_dir_all(&sysroot));
                INTERNER.intern_path(sysroot)
            }
        }
        self.ensure(Libdir { compiler, target })
    }
    /// Directory holding the codegen backend dylibs for `compiler`: the
    /// host libdir with its final component replaced by the configured
    /// backends directory name.
    pub fn sysroot_codegen_backends(&self, compiler: Compiler) -> PathBuf {
        self.sysroot_libdir(compiler, compiler.host)
            .with_file_name(self.config.rust_codegen_backends_dir.clone())
    }
    /// Returns the compiler's libdir where it stores the dynamic libraries that
    /// it itself links against.
    ///
    /// For example this returns `<sysroot>/lib` on Unix and `<sysroot>/bin` on
    /// Windows.
    pub fn rustc_libdir(&self, compiler: Compiler) -> PathBuf {
        // The stage0 snapshot compiler ships with its own libdir.
        if compiler.is_snapshot(self) {
            self.rustc_snapshot_libdir()
        } else {
            self.sysroot(compiler).join(libdir(&compiler.host))
        }
    }
    /// Adds the compiler's directory of dynamic libraries to `cmd`'s dynamic
    /// library lookup path.
    pub fn add_rustc_lib_path(&self, compiler: Compiler, cmd: &mut Command) {
        // Windows doesn't need dylib path munging because the dlls for the
        // compiler live next to the compiler and the system will find them
        // automatically.
        if cfg!(windows) {
            return;
        }
        add_lib_path(vec![self.rustc_libdir(compiler)], cmd);
    }
    /// Get a path to the compiler specified.
    ///
    /// The stage0 snapshot returns the downloaded `initial_rustc`; later
    /// stages resolve `bin/rustc` inside that compiler's sysroot.
    pub fn rustc(&self, compiler: Compiler) -> PathBuf {
        if compiler.is_snapshot(self) {
            self.initial_rustc.clone()
        } else {
            self.sysroot(compiler)
                .join("bin")
                .join(exe("rustc", &compiler.host))
        }
    }
    /// Get the paths to all of the compiler's codegen backends.
    ///
    /// Yields nothing when the backends directory can't be read: the
    /// `read_dir` error is swallowed by `into_iter().flatten()`, and
    /// unreadable entries are dropped by `filter_map(Result::ok)`.
    fn codegen_backends(&self, compiler: Compiler) -> impl Iterator<Item = PathBuf> {
        fs::read_dir(self.sysroot_codegen_backends(compiler))
            .into_iter()
            .flatten()
            .filter_map(Result::ok)
            .map(|entry| entry.path())
    }
    /// Ensures the rustdoc tool is built for `host` and returns its path.
    pub fn rustdoc(&self, host: Interned<String>) -> PathBuf {
        self.ensure(tool::Rustdoc { host })
    }
    /// Builds a `Command` invoking the bootstrap rustdoc shim for `host`,
    /// with the environment the shim expects (stage, sysroot, libdir,
    /// channel, the real rustdoc path, and `RUSTC_BOOTSTRAP=1`).
    pub fn rustdoc_cmd(&self, host: Interned<String>) -> Command {
        // The shim binary; it dispatches to RUSTDOC_REAL set below.
        let mut cmd = Command::new(&self.out.join("bootstrap/debug/rustdoc"));
        let compiler = self.compiler(self.top_stage, host);
        cmd.env("RUSTC_STAGE", compiler.stage.to_string())
            .env("RUSTC_SYSROOT", self.sysroot(compiler))
            .env(
                "RUSTDOC_LIBDIR",
                self.sysroot_libdir(compiler, self.config.build),
            )
            .env("CFG_RELEASE_CHANNEL", &self.config.channel)
            .env("RUSTDOC_REAL", self.rustdoc(host))
            .env("RUSTDOC_CRATE_VERSION", self.rust_version())
            .env("RUSTC_BOOTSTRAP", "1");
        // Remove make-related flags that can cause jobserver problems.
        cmd.env_remove("MAKEFLAGS");
        cmd.env_remove("MFLAGS");
        if let Some(linker) = self.linker(host) {
            cmd.env("RUSTC_TARGET_LINKER", linker);
        }
        cmd
    }
/// Prepares an invocation of `cargo` to be run.
///
/// This will create a `Command` that represents a pending execution of
/// Cargo. This cargo will be configured to use `compiler` as the actual
/// rustc compiler, its output will be scoped by `mode`'s output directory,
/// it will pass the `--target` flag for the specified `target`, and will be
/// executing the Cargo command `cmd`.
pub fn cargo(
&self,
compiler: Compiler,
mode: Mode,
target: Interned<String>,
cmd: &str,
) -> Command {
let mut cargo = Command::new(&self.initial_cargo);
let out_dir = self.stage_out(compiler, mode);
// command specific path, we call clear_if_dirty with this
let mut my_out = match cmd {
"build" => self.cargo_out(compiler, mode, target),
// This is the intended out directory for crate documentation.
"doc" | "rustdoc" => self.crate_doc_out(target),
_ => self.stage_out(compiler, mode),
};
// This is for the original compiler, but if we're forced to use stage 1, then
// std/test/rustc stamps won't exist in stage 2, so we need to get those from stage 1, since
// we copy the libs forward.
let cmp = if self.force_use_stage1(compiler, target) {
self.compiler(1, compiler.host)
} else {
compiler
};
let libstd_stamp = match cmd {
"check" => check::libstd_stamp(self, cmp, target),
_ => compile::libstd_stamp(self, cmp, target),
};
let libtest_stamp = match cmd {
"check" => check::libtest_stamp(self, cmp, target),
_ => compile::libstd_stamp(self, cmp, target),
};
let librustc_stamp = match cmd {
"check" => check::librustc_stamp(self, cmp, target),
_ => compile::librustc_stamp(self, cmp, target),
};
if cmd == "doc" || cmd == "rustdoc" {
if mode == Mode::Rustc || mode == Mode::ToolRustc || mode == Mode::Codegen {
// This is the intended out directory for compiler documentation.
my_out = self.compiler_doc_out(target);
}
let rustdoc = self.rustdoc(compiler.host);
self.clear_if_dirty(&my_out, &rustdoc);
} else if cmd != "test" {
match mode {
Mode::Std => {
self.clear_if_dirty(&my_out, &self.rustc(compiler));
for backend in self.codegen_backends(compiler) {
self.clear_if_dirty(&my_out, &backend);
}
},
Mode::Test => {
self.clear_if_dirty(&my_out, &libstd_stamp);
},
Mode::Rustc => {
self.clear_if_dirty(&my_out, &self.rustc(compiler));
self.clear_if_dirty(&my_out, &libstd_stamp);
self.clear_if_dirty(&my_out, &libtest_stamp);
},
Mode::Codegen => {
self.clear_if_dirty(&my_out, &librustc_stamp);
},
Mode::ToolBootstrap => { },
Mode::ToolStd => {
self.clear_if_dirty(&my_out, &libstd_stamp);
},
Mode::ToolTest => {
self.clear_if_dirty(&my_out, &libstd_stamp);
self.clear_if_dirty(&my_out, &libtest_stamp);
},
Mode::ToolRustc => {
self.clear_if_dirty(&my_out, &libstd_stamp);
self.clear_if_dirty(&my_out, &libtest_stamp);
self.clear_if_dirty(&my_out, &librustc_stamp);
},
}
}
cargo
.env("CARGO_TARGET_DIR", out_dir)
.arg(cmd);
// See comment in librustc_llvm/build.rs for why this is necessary, largely llvm-config
// needs to not accidentally link to libLLVM in stage0/lib.
cargo.env("REAL_LIBRARY_PATH_VAR", &util::dylib_path_var());
if let Some(e) = env::var_os(util::dylib_path_var()) {
cargo.env("REAL_LIBRARY_PATH", e);
}
if cmd != "install" {
cargo.arg("--target")
.arg(target);
} else {
assert_eq!(target, compiler.host);
}
// Set a flag for `check` so that certain build scripts can do less work
// (e.g., not building/requiring LLVM).
if cmd == "check" {
cargo.env("RUST_CHECK", "1");
}
cargo.arg("-j").arg(self.jobs().to_string());
// Remove make-related flags to ensure Cargo can correctly set things up
cargo.env_remove("MAKEFLAGS");
cargo.env_remove("MFLAGS");
// FIXME: Temporary fix for https://github.com/rust-lang/cargo/issues/3005
// Force cargo to output binaries with disambiguating hashes in the name
let metadata = if compiler.stage == 0 {
// Treat stage0 like special channel, whether it's a normal prior-
// release rustc or a local rebuild with the same version, so we
// never mix these libraries by accident.
"bootstrap"
} else {
&self.config.channel
};
cargo.env("__CARGO_DEFAULT_LIB_METADATA", &metadata);
let stage;
if compiler.stage == 0 && self.local_rebuild {
// Assume the local-rebuild rustc already has stage1 features.
stage = 1;
} else {
stage = compiler.stage;
}
let mut extra_args = env::var(&format!("RUSTFLAGS_STAGE_{}", stage)).unwrap_or_default();
if stage != 0 {
let s = env::var("RUSTFLAGS_STAGE_NOT_0").unwrap_or_default();
if !extra_args.is_empty() {
extra_args.push_str(" ");
}
extra_args.push_str(&s);
}
if !extra_args.is_empty() {
cargo.env(
"RUSTFLAGS",
format!(
"{} {}",
env::var("RUSTFLAGS").unwrap_or_default(),
extra_args
),
);
}
let want_rustdoc = self.doc_tests != DocTests::No;
// We synthetically interpret a stage0 compiler used to build tools as a
// "raw" compiler in that it's the exact snapshot we download. Normally
// the stage0 build means it uses libraries build by the stage0
// compiler, but for tools we just use the precompiled libraries that
// we've downloaded
let use_snapshot = mode == Mode::ToolBootstrap;
assert!(!use_snapshot || stage == 0 || self.local_rebuild);
let maybe_sysroot = self.sysroot(compiler);
let sysroot = if use_snapshot {
self.rustc_snapshot_sysroot()
} else {
&maybe_sysroot
};
let libdir = sysroot.join(libdir(&compiler.host));
// Customize the compiler we're running. Specify the compiler to cargo
// as our shim and then pass it some various options used to configure
// how the actual compiler itself is called.
//
// These variables are primarily all read by
// src/bootstrap/bin/{rustc.rs,rustdoc.rs}
cargo
.env("RUSTBUILD_NATIVE_DIR", self.native_dir(target))
.env("RUSTC", self.out.join("bootstrap/debug/rustc"))
.env("RUSTC_REAL", self.rustc(compiler))
.env("RUSTC_STAGE", stage.to_string())
.env(
"RUSTC_DEBUG_ASSERTIONS",
self.config.rust_debug_assertions.to_string(),
)
.env("RUSTC_SYSROOT", &sysroot)
.env("RUSTC_LIBDIR", &libdir)
.env("RUSTC_RPATH", self.config.rust_rpath.to_string())
.env("RUSTDOC", self.out.join("bootstrap/debug/rustdoc"))
.env(
"RUSTDOC_REAL",
if cmd == "doc" || cmd == "rustdoc" || (cmd == "test" && want_rustdoc) {
self.rustdoc(compiler.host)
} else {
PathBuf::from("/path/to/nowhere/rustdoc/not/required")
},
)
.env("TEST_MIRI", self.config.test_miri.to_string())
.env("RUSTC_ERROR_METADATA_DST", self.extended_error_dir());
if let Some(host_linker) = self.linker(compiler.host) {
cargo.env("RUSTC_HOST_LINKER", host_linker);
}
if let Some(target_linker) = self.linker(target) {
cargo.env("RUSTC_TARGET_LINKER", target_linker);
}
if let Some(ref error_format) = self.config.rustc_error_format {
cargo.env("RUSTC_ERROR_FORMAT", error_format);
}
if cmd != "build" && cmd != "check" && cmd != "rustc" && want_rustdoc {
cargo.env("RUSTDOC_LIBDIR", self.sysroot_libdir(compiler, self.config.build));
}
if mode.is_tool() {
// Tools like cargo and rls don't get debuginfo by default right now, but this can be
// enabled in the config. Adding debuginfo makes them several times larger.
if self.config.rust_debuginfo_tools {
cargo.env("RUSTC_DEBUGINFO", self.config.rust_debuginfo.to_string());
cargo.env(
"RUSTC_DEBUGINFO_LINES",
self.config.rust_debuginfo_lines.to_string(),
);
}
} else {
cargo.env("RUSTC_DEBUGINFO", self.config.rust_debuginfo.to_string());
cargo.env(
"RUSTC_DEBUGINFO_LINES",
self.config.rust_debuginfo_lines.to_string(),
);
cargo.env("RUSTC_FORCE_UNSTABLE", "1");
// Currently the compiler depends on crates from crates.io, and
// then other crates can depend on the compiler (e.g., proc-macro
// crates). Let's say, for example that rustc itself depends on the
// bitflags crate. If an external crate then depends on the
// bitflags crate as well, we need to make sure they don't
// conflict, even if they pick the same version of bitflags. We'll
// want to make sure that e.g., a plugin and rustc each get their
// own copy of bitflags.
// Cargo ensures that this works in general through the -C metadata
// flag. This flag will frob the symbols in the binary to make sure
// they're different, even though the source code is the exact
// same. To solve this problem for the compiler we extend Cargo's
// already-passed -C metadata flag with our own. Our rustc.rs
// wrapper around the actual rustc will detect -C metadata being
// passed and frob it with this extra string we're passing in.
cargo.env("RUSTC_METADATA_SUFFIX", "rustc");
}
if let Some(x) = self.crt_static(target) {
cargo.env("RUSTC_CRT_STATIC", x.to_string());
}
if let Some(x) = self.crt_static(compiler.host) {
cargo.env("RUSTC_HOST_CRT_STATIC", x.to_string());
}
if let Some(map) = self.build.debuginfo_map(GitRepo::Rustc) {
cargo.env("RUSTC_DEBUGINFO_MAP", map);
}
// Enable usage of unstable features
cargo.env("RUSTC_BOOTSTRAP", "1");
self.add_rust_test_threads(&mut cargo);
// Almost all of the crates that we compile as part of the bootstrap may
// have a build script, including the standard library. To compile a
// build script, however, it itself needs a standard library! This
// introduces a bit of a pickle when we're compiling the standard
// library itself.
//
// To work around this we actually end up using the snapshot compiler
// (stage0) for compiling build scripts of the standard library itself.
// The stage0 compiler is guaranteed to have a libstd available for use.
//
// For other crates, however, we know that we've already got a standard
// library up and running, so we can use the normal compiler to compile
// build scripts in that situation.
//
// If LLVM support is disabled we need to use the snapshot compiler to compile
// build scripts, as the new compiler doesn't support executables.
if mode == Mode::Std || !self.config.llvm_enabled {
cargo
.env("RUSTC_SNAPSHOT", &self.initial_rustc)
.env("RUSTC_SNAPSHOT_LIBDIR", self.rustc_snapshot_libdir());
} else {
cargo
.env("RUSTC_SNAPSHOT", self.rustc(compiler))
.env("RUSTC_SNAPSHOT_LIBDIR", self.rustc_libdir(compiler));
}
if self.config.incremental {
cargo.env("CARGO_INCREMENTAL", "1");
} else {
// Don't rely on any default setting for incr. comp. in Cargo
cargo.env("CARGO_INCREMENTAL", "0");
}
if let Some(ref on_fail) = self.config.on_fail {
cargo.env("RUSTC_ON_FAIL", on_fail);
}
if self.config.print_step_timings {
cargo.env("RUSTC_PRINT_STEP_TIMINGS", "1");
}
if self.config.backtrace_on_ice {
cargo.env("RUSTC_BACKTRACE_ON_ICE", "1");
}
cargo.env("RUSTC_VERBOSE", self.verbosity.to_string());
// in std, we want to avoid denying warnings for stage 0 as that makes cfg's painful.
if self.config.deny_warnings && !(mode == Mode::Std && stage == 0) {
cargo.env("RUSTC_DENY_WARNINGS", "1");
}
// Throughout the build Cargo can execute a number of build scripts
// compiling C/C++ code and we need to pass compilers, archivers, flags, etc
// obtained previously to those build scripts.
// Build scripts use either the `cc` crate or `configure/make` so we pass
// the options through environment variables that are fetched and understood by both.
//
// FIXME: the guard against msvc shouldn't need to be here
if target.contains("msvc") {
if let Some(ref cl) = self.config.llvm_clang_cl {
cargo.env("CC", cl).env("CXX", cl);
}
} else {
let ccache = self.config.ccache.as_ref();
let ccacheify = |s: &Path| {
let ccache = match ccache {
Some(ref s) => s,
None => return s.display().to_string(),
};
// FIXME: the cc-rs crate only recognizes the literal strings
// `ccache` and `sccache` when doing caching compilations, so we
// mirror that here. It should probably be fixed upstream to
// accept a new env var or otherwise work with custom ccache
// vars.
match &ccache[..] {
"ccache" | "sccache" => format!("{} {}", ccache, s.display()),
_ => s.display().to_string(),
}
};
let cc = ccacheify(&self.cc(target));
cargo.env(format!("CC_{}", target), &cc).env("CC", &cc);
let cflags = self.cflags(target, GitRepo::Rustc).join(" ");
cargo
.env(format!("CFLAGS_{}", target), cflags.clone())
.env("CFLAGS", cflags.clone());
if let Some(ar) = self.ar(target) {
let ranlib = format!("{} s", ar.display());
cargo
.env(format!("AR_{}", target), ar)
.env("AR", ar)
.env(format!("RANLIB_{}", target), ranlib.clone())
.env("RANLIB", ranlib);
}
if let Ok(cxx) = self.cxx(target) {
let cxx = ccacheify(&cxx);
cargo
.env(format!("CXX_{}", target), &cxx)
.env("CXX", &cxx)
.env(format!("CXXFLAGS_{}", target), cflags.clone())
.env("CXXFLAGS", cflags);
}
}
if (cmd == "build" || cmd == "rustc")
&& mode == Mode::Std
&& self.config.extended
&& compiler.is_final_stage(self)
{
cargo.env("RUSTC_SAVE_ANALYSIS", "api".to_string());
}
// For `cargo doc` invocations, make rustdoc print the Rust version into the docs
cargo.env("RUSTDOC_CRATE_VERSION", self.rust_version());
// Environment variables *required* throughout the build
//
// FIXME: should update code to not require this env var
cargo.env("CFG_COMPILER_HOST_TRIPLE", target);
// Set this for all builds to make sure doc builds also get it.
cargo.env("CFG_RELEASE_CHANNEL", &self.config.channel);
// This one's a bit tricky. As of the time of this writing the compiler
// links to the `winapi` crate on crates.io. This crate provides raw
// bindings to Windows system functions, sort of like libc does for
// Unix. This crate also, however, provides "import libraries" for the
// MinGW targets. There's an import library per dll in the windows
// distribution which is what's linked to. These custom import libraries
// are used because the winapi crate can reference Windows functions not
// present in the MinGW import libraries.
//
// For example MinGW may ship libdbghelp.a, but it may not have
// references to all the functions in the dbghelp dll. Instead the
// custom import library for dbghelp in the winapi crates has all this
// information.
//
// Unfortunately for us though the import libraries are linked by
// default via `-ldylib=winapi_foo`. That is, they're linked with the
// `dylib` type with a `winapi_` prefix (so the winapi ones don't
// conflict with the system MinGW ones). This consequently means that
// the binaries we ship of things like rustc_codegen_llvm (aka the rustc_codegen_llvm
// DLL) when linked against *again*, for example with procedural macros
// or plugins, will trigger the propagation logic of `-ldylib`, passing
// `-lwinapi_foo` to the linker again. This isn't actually available in
// our distribution, however, so the link fails.
//
// To solve this problem we tell winapi to not use its bundled import
// libraries. This means that it will link to the system MinGW import
// libraries by default, and the `-ldylib=foo` directives will still get
// passed to the final linker, but they'll look like `-lfoo` which can
// be resolved because MinGW has the import library. The downside is we
// don't get newer functions from Windows, but we don't use any of them
// anyway.
if !mode.is_tool() {
cargo.env("WINAPI_NO_BUNDLED_LIBRARIES", "1");
}
for _ in 1..self.verbosity {
cargo.arg("-v");
}
match (mode, self.config.rust_codegen_units_std, self.config.rust_codegen_units) {
(Mode::Std, Some(n), _) |
(Mode::Test, Some(n), _) |
(_, _, Some(n)) => {
cargo.env("RUSTC_CODEGEN_UNITS", n.to_string());
}
_ => {
// Don't set anything
}
}
if self.config.rust_optimize {
// FIXME: cargo bench/install do not accept `--release`
if cmd != "bench" && cmd != "install" {
cargo.arg("--release");
}
}
if self.config.locked_deps {
cargo.arg("--locked");
}
if self.config.vendor || self.is_sudo {
cargo.arg("--frozen");
}
self.ci_env.force_coloring_in_ci(&mut cargo);
cargo
}
/// Ensure that a given step is built, returning its output. This will
/// cache the step, so it is safe (and good!) to call this as often as
/// needed to ensure that all dependencies are built.
///
/// Panics if a dependency cycle is detected, i.e. `step` is already on
/// the in-progress stack.
pub fn ensure<S: Step>(&'a self, step: S) -> S::Output {
    {
        let mut stack = self.stack.borrow_mut();
        for stack_step in stack.iter() {
            // should skip any stack entry that isn't this very step
            if stack_step
                .downcast_ref::<S>()
                .map_or(true, |stack_step| *stack_step != step)
            {
                continue;
            }
            // `step` is already being built further up the stack:
            // report the cycle together with the whole in-progress stack.
            let mut out = String::new();
            out += &format!("\n\nCycle in build detected when adding {:?}\n", step);
            for el in stack.iter().rev() {
                out += &format!("\t{:?}\n", el);
            }
            // Fix: `panic!(out)` with a non-literal argument is deprecated and
            // becomes a hard error in the 2021 edition; use a format string.
            panic!("{}", out);
        }
        if let Some(out) = self.cache.get(&step) {
            // Cache hit: record a "cached" edge in the step graph and return.
            self.verbose(&format!("{}c {:?}", " ".repeat(stack.len()), step));
            {
                let mut graph = self.graph.borrow_mut();
                let parent = self.parent.get();
                let us = *self
                    .graph_nodes
                    .borrow_mut()
                    .entry(format!("{:?}", step))
                    .or_insert_with(|| graph.add_node(format!("{:?}", step)));
                if let Some(parent) = parent {
                    graph.add_edge(parent, us, false);
                }
            }
            return out;
        }
        self.verbose(&format!("{}> {:?}", " ".repeat(stack.len()), step));
        stack.push(Box::new(step.clone()));
    }
    let prev_parent = self.parent.get();
    {
        // Record a "real work" edge and make this step the parent of any
        // steps it `ensure`s while running.
        let mut graph = self.graph.borrow_mut();
        let parent = self.parent.get();
        let us = *self
            .graph_nodes
            .borrow_mut()
            .entry(format!("{:?}", step))
            .or_insert_with(|| graph.add_node(format!("{:?}", step)));
        self.parent.set(Some(us));
        if let Some(parent) = parent {
            graph.add_edge(parent, us, true);
        }
    }
    let (out, dur) = {
        // Time just this step, excluding time spent inside dependencies
        // that it ensures (tracked via `time_spent_on_dependencies`).
        let start = Instant::now();
        let zero = Duration::new(0, 0);
        let parent = self.time_spent_on_dependencies.replace(zero);
        let out = step.clone().run(self);
        let dur = start.elapsed();
        let deps = self.time_spent_on_dependencies.replace(parent + dur);
        (out, dur - deps)
    };
    self.parent.set(prev_parent);
    if self.config.print_step_timings && dur > Duration::from_millis(100) {
        println!(
            "[TIMING] {:?} -- {}.{:03}",
            step,
            dur.as_secs(),
            dur.subsec_nanos() / 1_000_000
        );
    }
    {
        // Pop this step back off the in-progress stack; it must be on top.
        let mut stack = self.stack.borrow_mut();
        let cur_step = stack.pop().expect("step stack empty");
        assert_eq!(cur_step.downcast_ref(), Some(&step));
    }
    self.verbose(&format!(
        "{}< {:?}",
        " ".repeat(self.stack.borrow().len()),
        step
    ));
    self.cache.put(step, out.clone());
    out
}
}
#[cfg(test)]
mod __test {
use super::*;
use crate::config::Config;
use std::thread;
/// Build a dry-run test `Config` whose build triple is "A", with the
/// given extra `host` and `target` triples appended.
fn configure(host: &[&str], target: &[&str]) -> Config {
    let mut config = Config::default_opts();
    // don't save toolstates
    config.save_toolstates = None;
    config.run_host_only = true;
    config.dry_run = true;
    // try to avoid spurious failures in dist where we create/delete each others file
    let dir = config.out.join("tmp-rustbuild-tests").join(
        &thread::current()
            .name()
            .unwrap_or("unknown")
            .replace(":", "-"),
    );
    t!(fs::create_dir_all(&dir));
    config.out = dir;
    config.build = INTERNER.intern_str("A");
    // Fix: the previous `vec![config.build].clone()` cloned a brand-new
    // temporary vector; the clone was pure waste and has been removed.
    config.hosts = vec![config.build]
        .into_iter()
        .chain(host.iter().map(|s| INTERNER.intern_str(s)))
        .collect::<Vec<_>>();
    // Targets are the hosts plus any explicitly requested target triples.
    config.targets = config
        .hosts
        .clone()
        .into_iter()
        .chain(target.iter().map(|s| INTERNER.intern_str(s)))
        .collect::<Vec<_>>();
    config
}
/// Project a vector of pairs down to its first components, preserving order.
fn first<A, B>(v: Vec<(A, B)>) -> Vec<A> {
    let mut firsts = Vec::with_capacity(v.len());
    for (a, _) in v {
        firsts.push(a);
    }
    firsts
}
#[test]
fn dist_baseline() {
    // Plain `dist` with no extra hosts/targets: everything is produced
    // for the build triple "A" only, at the default stage 2.
    let build = Build::new(configure(&[], &[]));
    let mut builder = Builder::new(&build);
    builder.run_step_descriptions(&Builder::get_step_descriptions(Kind::Dist), &[]);
    let a = INTERNER.intern_str("A");
    assert_eq!(
        first(builder.cache.all::<dist::Docs>()),
        &[dist::Docs { stage: 2, host: a },]
    );
    assert_eq!(
        first(builder.cache.all::<dist::Mingw>()),
        &[dist::Mingw { host: a },]
    );
    assert_eq!(
        first(builder.cache.all::<dist::Rustc>()),
        &[dist::Rustc {
            compiler: Compiler { host: a, stage: 2 }
        },]
    );
    assert_eq!(
        first(builder.cache.all::<dist::Std>()),
        &[dist::Std {
            compiler: Compiler { host: a, stage: 2 },
            target: a,
        },]
    );
    assert_eq!(first(builder.cache.all::<dist::Src>()), &[dist::Src]);
}
#[test]
fn dist_with_targets() {
    // Extra target "B": docs and std gain B entries, but rustc is still
    // only dist'ed for the build triple A.
    let build = Build::new(configure(&[], &["B"]));
    let mut builder = Builder::new(&build);
    builder.run_step_descriptions(&Builder::get_step_descriptions(Kind::Dist), &[]);
    let a = INTERNER.intern_str("A");
    let b = INTERNER.intern_str("B");
    assert_eq!(
        first(builder.cache.all::<dist::Docs>()),
        &[
            dist::Docs { stage: 2, host: a },
            dist::Docs { stage: 2, host: b },
        ]
    );
    assert_eq!(
        first(builder.cache.all::<dist::Mingw>()),
        &[dist::Mingw { host: a }, dist::Mingw { host: b },]
    );
    assert_eq!(
        first(builder.cache.all::<dist::Rustc>()),
        &[dist::Rustc {
            compiler: Compiler { host: a, stage: 2 }
        },]
    );
    assert_eq!(
        first(builder.cache.all::<dist::Std>()),
        &[
            dist::Std {
                compiler: Compiler { host: a, stage: 2 },
                target: a,
            },
            dist::Std {
                compiler: Compiler { host: a, stage: 2 },
                target: b,
            },
        ]
    );
    assert_eq!(first(builder.cache.all::<dist::Src>()), &[dist::Src]);
}
#[test]
fn dist_with_hosts() {
    // Extra host "B": rustc is additionally dist'ed for B and docs follow
    // the hosts, but std is still built only by the stage-2 A compiler.
    let build = Build::new(configure(&["B"], &[]));
    let mut builder = Builder::new(&build);
    builder.run_step_descriptions(&Builder::get_step_descriptions(Kind::Dist), &[]);
    let a = INTERNER.intern_str("A");
    let b = INTERNER.intern_str("B");
    assert_eq!(
        first(builder.cache.all::<dist::Docs>()),
        &[
            dist::Docs { stage: 2, host: a },
            dist::Docs { stage: 2, host: b },
        ]
    );
    assert_eq!(
        first(builder.cache.all::<dist::Mingw>()),
        &[dist::Mingw { host: a }, dist::Mingw { host: b },]
    );
    assert_eq!(
        first(builder.cache.all::<dist::Rustc>()),
        &[
            dist::Rustc {
                compiler: Compiler { host: a, stage: 2 }
            },
            dist::Rustc {
                compiler: Compiler { host: b, stage: 2 }
            },
        ]
    );
    assert_eq!(
        first(builder.cache.all::<dist::Std>()),
        &[
            dist::Std {
                compiler: Compiler { host: a, stage: 2 },
                target: a,
            },
            dist::Std {
                compiler: Compiler { host: a, stage: 2 },
                target: b,
            },
        ]
    );
    assert_eq!(first(builder.cache.all::<dist::Src>()), &[dist::Src]);
}
#[test]
fn dist_with_targets_and_hosts() {
    // Host "B" plus target "C": rustc is dist'ed for both hosts, while
    // docs/std also cover the target-only triple C.
    let build = Build::new(configure(&["B"], &["C"]));
    let mut builder = Builder::new(&build);
    builder.run_step_descriptions(&Builder::get_step_descriptions(Kind::Dist), &[]);
    let a = INTERNER.intern_str("A");
    let b = INTERNER.intern_str("B");
    let c = INTERNER.intern_str("C");
    assert_eq!(
        first(builder.cache.all::<dist::Docs>()),
        &[
            dist::Docs { stage: 2, host: a },
            dist::Docs { stage: 2, host: b },
            dist::Docs { stage: 2, host: c },
        ]
    );
    assert_eq!(
        first(builder.cache.all::<dist::Mingw>()),
        &[
            dist::Mingw { host: a },
            dist::Mingw { host: b },
            dist::Mingw { host: c },
        ]
    );
    assert_eq!(
        first(builder.cache.all::<dist::Rustc>()),
        &[
            dist::Rustc {
                compiler: Compiler { host: a, stage: 2 }
            },
            dist::Rustc {
                compiler: Compiler { host: b, stage: 2 }
            },
        ]
    );
    assert_eq!(
        first(builder.cache.all::<dist::Std>()),
        &[
            dist::Std {
                compiler: Compiler { host: a, stage: 2 },
                target: a,
            },
            dist::Std {
                compiler: Compiler { host: a, stage: 2 },
                target: b,
            },
            dist::Std {
                compiler: Compiler { host: a, stage: 2 },
                target: c,
            },
        ]
    );
    assert_eq!(first(builder.cache.all::<dist::Src>()), &[dist::Src]);
}
#[test]
fn dist_with_target_flag() {
    // With an explicit --target flag (run_host_only = false) we only
    // build target artifacts: no rustc dist and no source tarball.
    let mut config = configure(&["B"], &["C"]);
    config.run_host_only = false; // as-if --target=C was passed
    let build = Build::new(config);
    let mut builder = Builder::new(&build);
    builder.run_step_descriptions(&Builder::get_step_descriptions(Kind::Dist), &[]);
    let a = INTERNER.intern_str("A");
    let b = INTERNER.intern_str("B");
    let c = INTERNER.intern_str("C");
    assert_eq!(
        first(builder.cache.all::<dist::Docs>()),
        &[
            dist::Docs { stage: 2, host: a },
            dist::Docs { stage: 2, host: b },
            dist::Docs { stage: 2, host: c },
        ]
    );
    assert_eq!(
        first(builder.cache.all::<dist::Mingw>()),
        &[
            dist::Mingw { host: a },
            dist::Mingw { host: b },
            dist::Mingw { host: c },
        ]
    );
    // No compilers are dist'ed when only targets were requested.
    assert_eq!(first(builder.cache.all::<dist::Rustc>()), &[]);
    assert_eq!(
        first(builder.cache.all::<dist::Std>()),
        &[
            dist::Std {
                compiler: Compiler { host: a, stage: 2 },
                target: a,
            },
            dist::Std {
                compiler: Compiler { host: a, stage: 2 },
                target: b,
            },
            dist::Std {
                compiler: Compiler { host: a, stage: 2 },
                target: c,
            },
        ]
    );
    assert_eq!(first(builder.cache.all::<dist::Src>()), &[]);
}
#[test]
fn dist_with_same_targets_and_hosts() {
    // "B" requested both as host and as target: dist output matches the
    // hosts case, and the compile steps are deduplicated accordingly.
    let build = Build::new(configure(&["B"], &["B"]));
    let mut builder = Builder::new(&build);
    builder.run_step_descriptions(&Builder::get_step_descriptions(Kind::Dist), &[]);
    let a = INTERNER.intern_str("A");
    let b = INTERNER.intern_str("B");
    assert_eq!(
        first(builder.cache.all::<dist::Docs>()),
        &[
            dist::Docs { stage: 2, host: a },
            dist::Docs { stage: 2, host: b },
        ]
    );
    assert_eq!(
        first(builder.cache.all::<dist::Mingw>()),
        &[dist::Mingw { host: a }, dist::Mingw { host: b },]
    );
    assert_eq!(
        first(builder.cache.all::<dist::Rustc>()),
        &[
            dist::Rustc {
                compiler: Compiler { host: a, stage: 2 }
            },
            dist::Rustc {
                compiler: Compiler { host: b, stage: 2 }
            },
        ]
    );
    assert_eq!(
        first(builder.cache.all::<dist::Std>()),
        &[
            dist::Std {
                compiler: Compiler { host: a, stage: 2 },
                target: a,
            },
            dist::Std {
                compiler: Compiler { host: a, stage: 2 },
                target: b,
            },
        ]
    );
    assert_eq!(first(builder.cache.all::<dist::Src>()), &[dist::Src]);
    // Also verify the exact set of compile steps that were scheduled.
    assert_eq!(
        first(builder.cache.all::<compile::Std>()),
        &[
            compile::Std {
                compiler: Compiler { host: a, stage: 0 },
                target: a,
            },
            compile::Std {
                compiler: Compiler { host: a, stage: 1 },
                target: a,
            },
            compile::Std {
                compiler: Compiler { host: a, stage: 2 },
                target: a,
            },
            compile::Std {
                compiler: Compiler { host: a, stage: 1 },
                target: b,
            },
            compile::Std {
                compiler: Compiler { host: a, stage: 2 },
                target: b,
            },
        ]
    );
    assert_eq!(
        first(builder.cache.all::<compile::Test>()),
        &[
            compile::Test {
                compiler: Compiler { host: a, stage: 0 },
                target: a,
            },
            compile::Test {
                compiler: Compiler { host: a, stage: 1 },
                target: a,
            },
            compile::Test {
                compiler: Compiler { host: a, stage: 2 },
                target: a,
            },
            compile::Test {
                compiler: Compiler { host: a, stage: 1 },
                target: b,
            },
            compile::Test {
                compiler: Compiler { host: a, stage: 2 },
                target: b,
            },
        ]
    );
    assert_eq!(
        first(builder.cache.all::<compile::Assemble>()),
        &[
            compile::Assemble {
                target_compiler: Compiler { host: a, stage: 0 },
            },
            compile::Assemble {
                target_compiler: Compiler { host: a, stage: 1 },
            },
            compile::Assemble {
                target_compiler: Compiler { host: a, stage: 2 },
            },
            compile::Assemble {
                target_compiler: Compiler { host: b, stage: 2 },
            },
        ]
    );
}
#[test]
fn build_default() {
    // Default `build` with host B and target C: rustc is built for both
    // hosts (A and B), while test/std additionally cover target C.
    let build = Build::new(configure(&["B"], &["C"]));
    let mut builder = Builder::new(&build);
    builder.run_step_descriptions(&Builder::get_step_descriptions(Kind::Build), &[]);
    let a = INTERNER.intern_str("A");
    let b = INTERNER.intern_str("B");
    let c = INTERNER.intern_str("C");
    assert!(!builder.cache.all::<compile::Std>().is_empty());
    assert!(!builder.cache.all::<compile::Assemble>().is_empty());
    assert_eq!(
        first(builder.cache.all::<compile::Rustc>()),
        &[
            compile::Rustc {
                compiler: Compiler { host: a, stage: 0 },
                target: a,
            },
            compile::Rustc {
                compiler: Compiler { host: a, stage: 1 },
                target: a,
            },
            compile::Rustc {
                compiler: Compiler { host: a, stage: 2 },
                target: a,
            },
            compile::Rustc {
                compiler: Compiler { host: b, stage: 2 },
                target: a,
            },
            compile::Rustc {
                compiler: Compiler { host: a, stage: 0 },
                target: b,
            },
            compile::Rustc {
                compiler: Compiler { host: a, stage: 1 },
                target: b,
            },
            compile::Rustc {
                compiler: Compiler { host: a, stage: 2 },
                target: b,
            },
            compile::Rustc {
                compiler: Compiler { host: b, stage: 2 },
                target: b,
            },
        ]
    );
    assert_eq!(
        first(builder.cache.all::<compile::Test>()),
        &[
            compile::Test {
                compiler: Compiler { host: a, stage: 0 },
                target: a,
            },
            compile::Test {
                compiler: Compiler { host: a, stage: 1 },
                target: a,
            },
            compile::Test {
                compiler: Compiler { host: a, stage: 2 },
                target: a,
            },
            compile::Test {
                compiler: Compiler { host: b, stage: 2 },
                target: a,
            },
            compile::Test {
                compiler: Compiler { host: a, stage: 0 },
                target: b,
            },
            compile::Test {
                compiler: Compiler { host: a, stage: 1 },
                target: b,
            },
            compile::Test {
                compiler: Compiler { host: a, stage: 2 },
                target: b,
            },
            compile::Test {
                compiler: Compiler { host: b, stage: 2 },
                target: b,
            },
            compile::Test {
                compiler: Compiler { host: a, stage: 2 },
                target: c,
            },
            compile::Test {
                compiler: Compiler { host: b, stage: 2 },
                target: c,
            },
        ]
    );
}
#[test]
fn build_with_target_flag() {
    // `build` with an explicit --target flag (run_host_only = false):
    // rustc is only built for the build triple's stages, while compilers
    // are still assembled for host B and tests cover target C.
    let mut config = configure(&["B"], &["C"]);
    config.run_host_only = false;
    let build = Build::new(config);
    let mut builder = Builder::new(&build);
    builder.run_step_descriptions(&Builder::get_step_descriptions(Kind::Build), &[]);
    let a = INTERNER.intern_str("A");
    let b = INTERNER.intern_str("B");
    let c = INTERNER.intern_str("C");
    assert!(!builder.cache.all::<compile::Std>().is_empty());
    assert_eq!(
        first(builder.cache.all::<compile::Assemble>()),
        &[
            compile::Assemble {
                target_compiler: Compiler { host: a, stage: 0 },
            },
            compile::Assemble {
                target_compiler: Compiler { host: a, stage: 1 },
            },
            compile::Assemble {
                target_compiler: Compiler { host: b, stage: 1 },
            },
            compile::Assemble {
                target_compiler: Compiler { host: a, stage: 2 },
            },
            compile::Assemble {
                target_compiler: Compiler { host: b, stage: 2 },
            },
        ]
    );
    assert_eq!(
        first(builder.cache.all::<compile::Rustc>()),
        &[
            compile::Rustc {
                compiler: Compiler { host: a, stage: 0 },
                target: a,
            },
            compile::Rustc {
                compiler: Compiler { host: a, stage: 1 },
                target: a,
            },
            compile::Rustc {
                compiler: Compiler { host: a, stage: 0 },
                target: b,
            },
            compile::Rustc {
                compiler: Compiler { host: a, stage: 1 },
                target: b,
            },
        ]
    );
    assert_eq!(
        first(builder.cache.all::<compile::Test>()),
        &[
            compile::Test {
                compiler: Compiler { host: a, stage: 0 },
                target: a,
            },
            compile::Test {
                compiler: Compiler { host: a, stage: 1 },
                target: a,
            },
            compile::Test {
                compiler: Compiler { host: a, stage: 2 },
                target: a,
            },
            compile::Test {
                compiler: Compiler { host: b, stage: 2 },
                target: a,
            },
            compile::Test {
                compiler: Compiler { host: a, stage: 0 },
                target: b,
            },
            compile::Test {
                compiler: Compiler { host: a, stage: 1 },
                target: b,
            },
            compile::Test {
                compiler: Compiler { host: a, stage: 2 },
                target: b,
            },
            compile::Test {
                compiler: Compiler { host: b, stage: 2 },
                target: b,
            },
            compile::Test {
                compiler: Compiler { host: a, stage: 2 },
                target: c,
            },
            compile::Test {
                compiler: Compiler { host: b, stage: 2 },
                target: c,
            },
        ]
    );
}
#[test]
fn test_with_no_doc_stage0() {
    // `test --stage 0 src/libstd` with doc tests disabled: only the
    // libstd crate-test step runs, with the stage-0 compiler, and no
    // compiler artifacts get built at all.
    let mut config = configure(&[], &[]);
    config.stage = Some(0);
    config.cmd = Subcommand::Test {
        paths: vec!["src/libstd".into()],
        test_args: vec![],
        rustc_args: vec![],
        fail_fast: true,
        doc_tests: DocTests::No,
        bless: false,
        compare_mode: None,
    };
    let build = Build::new(config);
    let mut builder = Builder::new(&build);
    let host = INTERNER.intern_str("A");
    builder.run_step_descriptions(
        &[StepDescription::from::<test::Crate>()],
        &["src/libstd".into()],
    );
    // Ensure we don't build any compiler artifacts.
    assert!(!builder.cache.contains::<compile::Rustc>());
    assert_eq!(
        first(builder.cache.all::<test::Crate>()),
        &[test::Crate {
            compiler: Compiler { host, stage: 0 },
            target: host,
            mode: Mode::Std,
            test_kind: test::TestKind::Test,
            krate: INTERNER.intern_str("std"),
        },]
    );
}
#[test]
fn
|
() {
let mut config = configure(&[], &[]);
config.exclude = vec![
"src/test/run-pass".into(),
"src/tools/tidy".into(),
];
config.cmd = Subcommand::Test {
paths: Vec::new(),
test_args: Vec::new(),
rustc_args: Vec::new(),
fail_fast: true,
doc_tests: DocTests::No,
bless: false,
compare_mode: None,
};
let build = Build::new(config);
let builder = Builder::new(&build);
builder.run_step_descriptions(&Builder::get_step_descriptions(Kind::Test), &[]);
// Ensure we have really excluded run-pass & tidy
assert!(!builder.cache.contains::<test::RunPass>());
assert!(!builder.cache.contains::<test::Tidy>());
// Ensure other tests are not affected.
assert!(builder.cache.contains::<test::RunPassFullDeps>());
assert!(builder.cache.contains::<test::RustdocUi>());
}
}
|
test_exclude
|
response.py
|
# -*- coding: utf-8 -*-
import json
import logging
|
from rest_framework.views import APIView, exception_handler
from common import error_code
from django.utils.translation import gettext as _
logger = logging.getLogger("api-request")
def custom_exception_handler(exc, context):
    """Normalise DRF exceptions into the project's response envelope.

    Delegates to DRF's default ``exception_handler`` and then rewrites
    ``response.data`` into ``{"return_code": ..., "return_msg": ...}``
    using the codes and Chinese messages declared in ``common.error_code``.
    Every failed request is logged with its route, payload and response.

    NOTE(review): this function references ``exceptions`` (rest_framework)
    which must be imported at module level -- confirm the import exists.
    The indentation below was reconstructed; verify nesting against VCS.
    """
    response = exception_handler(exc, context)
    request = context['request']
    if response is not None:
        data = response.data
        response.data = {}
        if 'detail' in data and not isinstance(data['detail'], (list, dict)):
            if isinstance(data['detail'], str):
                # NOTE(review): self-assignment is a no-op; kept as-is.
                data['detail'] = data['detail']
                if hasattr(error_code, data['detail']):
                    # The detail string is itself an error-code name.
                    response.data['return_code'] = getattr(error_code, data['detail'])
                    response.data['return_msg'] = _(getattr(error_code.ZhError, data['detail']))
                    logger.error("route:%(route)s\trequest:%(request)s\treturn:%(return)s\tdata:%(data)s" % {
                        "route": json.dumps(request.build_absolute_uri()), "request": json.dumps(request.data),
                        "return": json.dumps(response.data), "data": data})
            elif isinstance(data['detail'], exceptions.ErrorDetail):
                # Map the DRF error code (upper-cased) onto a project code.
                code = str(data['detail'].code).upper()
                if hasattr(error_code, code):
                    response.data['return_code'] = getattr(error_code, code)
                    response.data['return_msg'] = _(getattr(error_code.ZhError, code))
                    logger.error("route:%(route)s\trequest:%(request)s\treturn:%(return)s\tdata:%(data)s" % {
                        "route": json.dumps(request.build_absolute_uri()), "request": json.dumps(request.data),
                        "return": json.dumps(response.data), "data": data})
                else:
                    # Unknown code: fall back to the generic system error.
                    response.data['return_code'] = getattr(error_code, 'SYSTEM_ERROR')
                    response.data['return_msg'] = _(data['detail'])
                    logger.error("route:%(route)s\trequest:%(request)s\treturn:%(return)s\tdata:%(data)s" % {
                        "route": json.dumps(request.build_absolute_uri()), "request": json.dumps(request.data),
                        "return": json.dumps(response.data), "data": data})
            else:
                # Detail is neither a plain str nor an ErrorDetail.
                response.data['return_code'] = getattr(error_code, 'SYSTEM_ERROR')
                response.data['return_msg'] = _(data['detail'])
                logger.error("route:%(route)s\trequest:%(request)s\treturn:%(return)s\tdata:%(data)s" % {
                    "route": json.dumps(request.build_absolute_uri()), "request": json.dumps(request.data),
                    "return": json.dumps(response.data), "data": data})
        else:
            if isinstance(exc, exceptions.ValidationError):
                # Field-level validation failure: surface the raw detail dict.
                response.data['return_code'] = getattr(error_code, 'ERROR_CHECK_PARAM')
                # response.data['return_msgEn'] = getattr(error_code.EnglishError, 'ERROR_CHECK_PARAM')
                response.data['return_msg'] = _(getattr(error_code.ZhError, 'ERROR_CHECK_PARAM'))
                response.data['data'] = data
                try:
                    # Best-effort logging: never let logging break the response.
                    logger.error("route:%(route)s\trequest:%(request)s\treturn:%(return)s\tdata:%(data)s" % {
                        "route": json.dumps(request.build_absolute_uri()), "request": json.dumps(request.data),
                        "return": json.dumps(response.data), "data": data})
                    pass
                except Exception as e:
                    pass
            else:
                response.data['return_code'] = getattr(error_code, 'UNDIFINED_ERROR')
                response.data['return_msg'] = _(getattr(error_code.ZhError, 'UNDIFINED_ERROR'))
                response.data['data'] = data
                try:
                    # Best-effort logging: never let logging break the response.
                    logger.critical("route:%(route)s\trequest:%(request)s\treturn:%(return)s\tdata:%(data)s" % {
                        "route": json.dumps(request.build_absolute_uri()), "request": json.dumps(request.data),
                        "return": json.dumps(response.data), "data": data})
                except Exception as e:
                    pass
    else:
        # DRF could not build a response for this exception type at all.
        logger.critical("route:%(route)s\trequest:%(request)s\treturn:%(return)s\tdata:%(data)s" % {
            "route": json.dumps(request.build_absolute_uri()), "request": json.dumps(request.data), "return": "None",
            "data": repr(exc)})
    return response
class Version2APIView(APIView):
    """APIView that routes all exceptions through custom_exception_handler."""

    def get_exception_handler(self):
        # Override DRF's default so every error is normalised to the
        # project's return_code/return_msg envelope.
        return custom_exception_handler
class BaseAPIView(Version2APIView):
    """Base view that wraps successful responses in the standard envelope.

    Successful (2xx) responses are rewritten to
    ``{"return_code": 0, "return_msg": <成功>, "data": ...}``; errors are
    normalised by the exception handler inherited from Version2APIView.
    """

    def finalize_response(self, request, response, *args, **kwargs):
        """Render the response, then wrap any 2xx payload as JSON."""
        response = super(BaseAPIView, self).finalize_response(request, response, *args, **kwargs)
        if hasattr(response, 'render') and callable(response.render):
            response.render()
            if 200 <= response.status_code < 300:
                if response.get('Content-Type', "").lower() == 'application/json':
                    # Already JSON: re-wrap the decoded payload, preserving key order.
                    response.content = json.dumps({"return_code": 0, "return_msg": _('成功'),
                                                   "data": json.loads(response.content, object_pairs_hook=OrderedDict)})
                else:
                    # BUG FIX: response.content is bytes under Python 3, so the old
                    # str(response.content).lower() produced "b'success'" and could
                    # never equal "success"; json.dumps/_() also fail on bytes.
                    # Decode once, then compare and serialise the text.
                    content = response.content
                    if isinstance(content, bytes):
                        content = content.decode('utf-8', errors='replace')
                    if content.lower() != "success":
                        response.content = json.dumps(
                            {"return_code": 0, "return_msg": _('成功'), "data": _(content)})
                    else:
                        response.content = json.dumps({"return_code": 0, "return_msg": _('成功')})
                    response['Content-Type'] = 'application/json'
        return response

    def encode_error(self, error_no):
        """Build an error envelope for the given error-code name.

        Falls back to SYSTEM_ERROR (using ``error_no`` itself as the
        message) when the name is not declared in ``common.error_code``.
        """
        if hasattr(error_code, error_no):
            return {
                'return_code': getattr(error_code, error_no),
                'return_msg': _(getattr(error_code.ZhError, error_no))
            }
        else:
            return {
                'return_code': getattr(error_code, 'SYSTEM_ERROR'),
                'return_msg': _(error_no)
            }
|
from collections import OrderedDict
from rest_framework import exceptions
|
block.go
|
package merge
import (
"fmt"
"github.com/protolambda/zrnt/eth2/beacon/common"
"github.com/protolambda/zrnt/eth2/beacon/phase0"
"github.com/protolambda/ztyp/codec"
"github.com/protolambda/ztyp/tree"
. "github.com/protolambda/ztyp/view"
)
// SignedBeaconBlock is a merge-fork beacon block together with the
// proposer's signature over it.
type SignedBeaconBlock struct {
	Message   BeaconBlock         `json:"message" yaml:"message"`
	Signature common.BLSSignature `json:"signature" yaml:"signature"`
}

// Compile-time assertion that SignedBeaconBlock can build block envelopes.
var _ common.EnvelopeBuilder = (*SignedBeaconBlock)(nil)

// Envelope packages the signed block into a common.BeaconBlockEnvelope,
// computing the block root for the given spec and fork digest.
func (b *SignedBeaconBlock) Envelope(spec *common.Spec, digest common.ForkDigest) *common.BeaconBlockEnvelope {
	return &common.BeaconBlockEnvelope{
		ForkDigest:    digest,
		Slot:          b.Message.Slot,
		ProposerIndex: b.Message.ProposerIndex,
		ParentRoot:    b.Message.ParentRoot,
		StateRoot:     b.Message.StateRoot,
		SignedBlock:   b,
		BlockRoot:     b.Message.HashTreeRoot(spec, tree.GetHashFn()),
		Signature:     b.Signature,
	}
}

// Deserialize reads the signed block as an SSZ container.
func (b *SignedBeaconBlock) Deserialize(spec *common.Spec, dr *codec.DecodingReader) error {
	return dr.Container(spec.Wrap(&b.Message), &b.Signature)
}

// Serialize writes the signed block as an SSZ container.
func (b *SignedBeaconBlock) Serialize(spec *common.Spec, w *codec.EncodingWriter) error {
	return w.Container(spec.Wrap(&b.Message), &b.Signature)
}

// ByteLength returns the SSZ-encoded size of the signed block.
func (b *SignedBeaconBlock) ByteLength(spec *common.Spec) uint64 {
	return codec.ContainerLength(spec.Wrap(&b.Message), &b.Signature)
}

// FixedLength returns 0, marking the type as variable-length in SSZ.
func (a *SignedBeaconBlock) FixedLength(*common.Spec) uint64 {
	return 0
}

// HashTreeRoot computes the SSZ hash-tree-root of the signed block.
func (b *SignedBeaconBlock) HashTreeRoot(spec *common.Spec, hFn tree.HashFn) common.Root {
	return hFn.HashTreeRoot(spec.Wrap(&b.Message), b.Signature)
}

// SignedHeader reduces the signed block to its signed-header form,
// replacing the full body with the header (which carries the body root).
func (block *SignedBeaconBlock) SignedHeader(spec *common.Spec) *common.SignedBeaconBlockHeader {
	return &common.SignedBeaconBlockHeader{
		Message:   *block.Message.Header(spec),
		Signature: block.Signature,
	}
}
// BeaconBlock is the unsigned merge-fork beacon block.
type BeaconBlock struct {
	Slot          common.Slot           `json:"slot" yaml:"slot"`
	ProposerIndex common.ValidatorIndex `json:"proposer_index" yaml:"proposer_index"`
	ParentRoot    common.Root           `json:"parent_root" yaml:"parent_root"`
	StateRoot     common.Root           `json:"state_root" yaml:"state_root"`
	Body          BeaconBlockBody       `json:"body" yaml:"body"`
}

// Deserialize reads the block as an SSZ container.
func (b *BeaconBlock) Deserialize(spec *common.Spec, dr *codec.DecodingReader) error {
	return dr.Container(&b.Slot, &b.ProposerIndex, &b.ParentRoot, &b.StateRoot, spec.Wrap(&b.Body))
}

// Serialize writes the block as an SSZ container.
func (b *BeaconBlock) Serialize(spec *common.Spec, w *codec.EncodingWriter) error {
	return w.Container(&b.Slot, &b.ProposerIndex, &b.ParentRoot, &b.StateRoot, spec.Wrap(&b.Body))
}

// ByteLength returns the SSZ-encoded size of the block.
func (b *BeaconBlock) ByteLength(spec *common.Spec) uint64 {
	return codec.ContainerLength(&b.Slot, &b.ProposerIndex, &b.ParentRoot, &b.StateRoot, spec.Wrap(&b.Body))
}

// FixedLength returns 0, marking the type as variable-length in SSZ.
func (a *BeaconBlock) FixedLength(*common.Spec) uint64 {
	return 0
}

// HashTreeRoot computes the SSZ hash-tree-root of the block.
func (b *BeaconBlock) HashTreeRoot(spec *common.Spec, hFn tree.HashFn) common.Root {
	return hFn.HashTreeRoot(b.Slot, b.ProposerIndex, b.ParentRoot, b.StateRoot, spec.Wrap(&b.Body))
}

// BeaconBlockType describes the BeaconBlock SSZ container for the given spec.
func BeaconBlockType(spec *common.Spec) *ContainerTypeDef {
	return ContainerType("BeaconBlock", []FieldDef{
		{"slot", common.SlotType},
		{"proposer_index", common.ValidatorIndexType},
		{"parent_root", RootType},
		{"state_root", RootType},
		{"body", BeaconBlockBodyType(spec)},
	})
}
func SignedBeaconBlockType(spec *common.Spec) *ContainerTypeDef
|
// Header reduces the block to its header form, replacing the full body
// with that body's hash-tree-root.
func (block *BeaconBlock) Header(spec *common.Spec) *common.BeaconBlockHeader {
	return &common.BeaconBlockHeader{
		Slot:          block.Slot,
		ProposerIndex: block.ProposerIndex,
		ParentRoot:    block.ParentRoot,
		StateRoot:     block.StateRoot,
		BodyRoot:      block.Body.HashTreeRoot(spec, tree.GetHashFn()),
	}
}
// BeaconBlockBody holds the operations of a merge-fork block, i.e. the
// phase0 operations plus the merge execution payload.
type BeaconBlockBody struct {
	RandaoReveal common.BLSSignature `json:"randao_reveal" yaml:"randao_reveal"`
	Eth1Data     common.Eth1Data     `json:"eth1_data" yaml:"eth1_data"`
	Graffiti     common.Root         `json:"graffiti" yaml:"graffiti"`

	ProposerSlashings phase0.ProposerSlashings `json:"proposer_slashings" yaml:"proposer_slashings"`
	AttesterSlashings phase0.AttesterSlashings `json:"attester_slashings" yaml:"attester_slashings"`
	Attestations      phase0.Attestations      `json:"attestations" yaml:"attestations"`
	Deposits          phase0.Deposits          `json:"deposits" yaml:"deposits"`
	VoluntaryExits    phase0.VoluntaryExits    `json:"voluntary_exits" yaml:"voluntary_exits"`

	ExecutionPayload common.ExecutionPayload `json:"execution_payload" yaml:"execution_payload"`
}

// Deserialize reads the body as an SSZ container.
func (b *BeaconBlockBody) Deserialize(spec *common.Spec, dr *codec.DecodingReader) error {
	return dr.Container(
		&b.RandaoReveal, &b.Eth1Data,
		&b.Graffiti, spec.Wrap(&b.ProposerSlashings),
		spec.Wrap(&b.AttesterSlashings), spec.Wrap(&b.Attestations),
		spec.Wrap(&b.Deposits), spec.Wrap(&b.VoluntaryExits),
		spec.Wrap(&b.ExecutionPayload),
	)
}

// Serialize writes the body as an SSZ container.
func (b *BeaconBlockBody) Serialize(spec *common.Spec, w *codec.EncodingWriter) error {
	return w.Container(
		&b.RandaoReveal, &b.Eth1Data,
		&b.Graffiti, spec.Wrap(&b.ProposerSlashings),
		spec.Wrap(&b.AttesterSlashings), spec.Wrap(&b.Attestations),
		spec.Wrap(&b.Deposits), spec.Wrap(&b.VoluntaryExits),
		spec.Wrap(&b.ExecutionPayload),
	)
}

// ByteLength returns the SSZ-encoded size of the body.
func (b *BeaconBlockBody) ByteLength(spec *common.Spec) uint64 {
	return codec.ContainerLength(
		&b.RandaoReveal, &b.Eth1Data,
		&b.Graffiti, spec.Wrap(&b.ProposerSlashings),
		spec.Wrap(&b.AttesterSlashings), spec.Wrap(&b.Attestations),
		spec.Wrap(&b.Deposits), spec.Wrap(&b.VoluntaryExits),
		spec.Wrap(&b.ExecutionPayload),
	)
}

// FixedLength returns 0, marking the type as variable-length in SSZ.
func (a *BeaconBlockBody) FixedLength(*common.Spec) uint64 {
	return 0
}

// HashTreeRoot computes the SSZ hash-tree-root of the body.
// NOTE(review): fields are passed by value here while Serialize takes
// pointers -- presumably both satisfy the HTR interface; confirm upstream.
func (b *BeaconBlockBody) HashTreeRoot(spec *common.Spec, hFn tree.HashFn) common.Root {
	return hFn.HashTreeRoot(
		b.RandaoReveal, &b.Eth1Data,
		b.Graffiti, spec.Wrap(&b.ProposerSlashings),
		spec.Wrap(&b.AttesterSlashings), spec.Wrap(&b.Attestations),
		spec.Wrap(&b.Deposits), spec.Wrap(&b.VoluntaryExits),
		spec.Wrap(&b.ExecutionPayload),
	)
}

// CheckLimits validates the per-operation count limits from the spec,
// returning an error naming the first list that is too long.
func (b BeaconBlockBody) CheckLimits(spec *common.Spec) error {
	if x := uint64(len(b.ProposerSlashings)); x > spec.MAX_PROPOSER_SLASHINGS {
		return fmt.Errorf("too many proposer slashings: %d", x)
	}
	if x := uint64(len(b.AttesterSlashings)); x > spec.MAX_ATTESTER_SLASHINGS {
		return fmt.Errorf("too many attester slashings: %d", x)
	}
	if x := uint64(len(b.Attestations)); x > spec.MAX_ATTESTATIONS {
		return fmt.Errorf("too many attestations: %d", x)
	}
	if x := uint64(len(b.Deposits)); x > spec.MAX_DEPOSITS {
		return fmt.Errorf("too many deposits: %d", x)
	}
	if x := uint64(len(b.VoluntaryExits)); x > spec.MAX_VOLUNTARY_EXITS {
		return fmt.Errorf("too many voluntary exits: %d", x)
	}
	// TODO: also check sum of byte size, sanity check block size.
	if x := len(b.ExecutionPayload.Transactions); x > common.MAX_EXECUTION_TRANSACTIONS {
		return fmt.Errorf("too many transactions: %d", x)
	}
	return nil
}

// BeaconBlockBodyType describes the BeaconBlockBody SSZ container for the spec.
func BeaconBlockBodyType(spec *common.Spec) *ContainerTypeDef {
	return ContainerType("BeaconBlockBody", []FieldDef{
		{"randao_reveal", common.BLSSignatureType},
		{"eth1_data", common.Eth1DataType}, // Eth1 data vote
		{"graffiti", common.Bytes32Type},   // Arbitrary data
		// Operations
		{"proposer_slashings", phase0.BlockProposerSlashingsType(spec)},
		{"attester_slashings", phase0.BlockAttesterSlashingsType(spec)},
		{"attestations", phase0.BlockAttestationsType(spec)},
		{"deposits", phase0.BlockDepositsType(spec)},
		{"voluntary_exits", phase0.BlockVoluntaryExitsType(spec)},
		// Merge
		{"execution_payload", common.ExecutionPayloadType},
	})
}
|
{
return ContainerType("SignedBeaconBlock", []FieldDef{
{"message", BeaconBlockType(spec)},
{"signature", common.BLSSignatureType},
})
}
|
config.js
|
/// <reference path="../js/plugins/blueimp-gallery/js/blueimp-gallery.min.js" />
function
|
($stateProvider, $urlRouterProvider, $ocLazyLoadProvider, IdleProvider, KeepaliveProvider) {
// Configure Idle settings
IdleProvider.idle(5); // in seconds
IdleProvider.timeout(120); // in seconds
$urlRouterProvider.otherwise("/dashboards/dashboard_1");
$ocLazyLoadProvider.config({
// Set to true if you want to see what and when is dynamically loaded
debug: false
});
//$compileProvider.aHrefSanitizationWhitelist(/^\s*(https?|file|ftp|blob):|data:image\//);
$stateProvider
.state('dashboards', {
abstract: true,
url: "/dashboards",
templateUrl: "views/common/content.html",
})
.state('dashboards.dashboard_1', {
url: "/dashboard_1",
templateUrl: "views/dashboard_1.html",
resolve: {
loadPlugin: function ($ocLazyLoad) {
return $ocLazyLoad.load([
{
serie: true,
name: 'angular-flot',
files: [ 'js/plugins/flot/jquery.flot.js', 'js/plugins/flot/jquery.flot.time.js', 'js/plugins/flot/jquery.flot.tooltip.min.js', 'js/plugins/flot/jquery.flot.spline.js', 'js/plugins/flot/jquery.flot.resize.js', 'js/plugins/flot/jquery.flot.pie.js', 'js/plugins/flot/curvedLines.js', 'js/plugins/flot/angular-flot.js', ]
},
{
name: 'angles',
files: ['js/plugins/chartJs/angles.js', 'js/plugins/chartJs/Chart.min.js']
},
{
name: 'angular-peity',
files: ['js/plugins/peity/jquery.peity.min.js', 'js/plugins/peity/angular-peity.js']
}
]);
}
}
})
.state('dashboards.dashboard_2', {
url: "/dashboard_2",
templateUrl: "views/dashboard_2.html",
data: { pageTitle: 'Dashboard 2' },
resolve: {
loadPlugin: function ($ocLazyLoad) {
return $ocLazyLoad.load([
{
serie: true,
name: 'angular-flot',
files: [ 'js/plugins/flot/jquery.flot.js', 'js/plugins/flot/jquery.flot.time.js', 'js/plugins/flot/jquery.flot.tooltip.min.js', 'js/plugins/flot/jquery.flot.spline.js', 'js/plugins/flot/jquery.flot.resize.js', 'js/plugins/flot/jquery.flot.pie.js', 'js/plugins/flot/curvedLines.js', 'js/plugins/flot/angular-flot.js' ]
},
{
serie: true,
files: ['js/plugins/jvectormap/jquery-jvectormap-2.0.2.min.js', 'js/plugins/jvectormap/jquery-jvectormap-2.0.2.css']
},
{
serie: true,
files: ['js/plugins/jvectormap/jquery-jvectormap-world-mill-en.js']
},
{
name: 'ui.checkbox',
files: ['js/bootstrap/angular-bootstrap-checkbox.js']
}
]);
}
}
})
.state('dashboards.dashboard_3', {
url: "/dashboard_3",
templateUrl: "views/dashboard_3.html",
data: { pageTitle: 'Dashboard 3' },
resolve: {
loadPlugin: function ($ocLazyLoad) {
return $ocLazyLoad.load([
{
name: 'angles',
files: ['js/plugins/chartJs/angles.js', 'js/plugins/chartJs/Chart.min.js']
},
{
name: 'angular-peity',
files: ['js/plugins/peity/jquery.peity.min.js', 'js/plugins/peity/angular-peity.js']
},
{
name: 'ui.checkbox',
files: ['js/bootstrap/angular-bootstrap-checkbox.js']
}
]);
}
}
})
.state('dashboards_top', {
abstract: true,
url: "/dashboards_top",
templateUrl: "views/common/content_top_navigation.html",
})
.state('dashboards_top.dashboard_4', {
url: "/dashboard_4",
templateUrl: "views/dashboard_4.html",
data: { pageTitle: 'Dashboard 4' },
resolve: {
loadPlugin: function ($ocLazyLoad) {
return $ocLazyLoad.load([
{
name: 'angles',
files: ['js/plugins/chartJs/angles.js', 'js/plugins/chartJs/Chart.min.js']
},
{
name: 'angular-peity',
files: ['js/plugins/peity/jquery.peity.min.js', 'js/plugins/peity/angular-peity.js']
},
{
serie: true,
name: 'angular-flot',
files: [ 'js/plugins/flot/jquery.flot.js', 'js/plugins/flot/jquery.flot.time.js', 'js/plugins/flot/jquery.flot.tooltip.min.js', 'js/plugins/flot/jquery.flot.spline.js', 'js/plugins/flot/jquery.flot.resize.js', 'js/plugins/flot/jquery.flot.pie.js', 'js/plugins/flot/curvedLines.js', 'js/plugins/flot/angular-flot.js', ]
}
]);
}
}
})
.state('dashboards.dashboard_helpDesk', {
url: "/dashboard_helpDesk",
templateUrl: "views/dashboard_helpDesk.html",
data: { pageTitle: 'dashboard HelpDesk' },
resolve: {
loadPlugin: function ($ocLazyLoad) {
return $ocLazyLoad.load([
{
name: 'angles',
files: ['js/plugins/chartJs/angles.js', 'js/plugins/chartJs/Chart.min.js']
},
{
name: 'angular-peity',
files: ['js/plugins/peity/jquery.peity.min.js', 'js/plugins/peity/angular-peity.js']
},
{
serie: true,
name: 'angular-flot',
files: ['js/plugins/flot/jquery.flot.js', 'js/plugins/flot/jquery.flot.time.js', 'js/plugins/flot/jquery.flot.tooltip.min.js', 'js/plugins/flot/jquery.flot.spline.js', 'js/plugins/flot/jquery.flot.resize.js', 'js/plugins/flot/jquery.flot.pie.js', 'js/plugins/flot/curvedLines.js', 'js/plugins/flot/angular-flot.js',]
}
]);
}
}
})
.state('dashboards.dashboard_5', {
url: "/dashboard_5",
templateUrl: "views/dashboard_5.html",
data: { pageTitle: 'Dashboard 5' },
resolve: {
loadPlugin: function ($ocLazyLoad) {
return $ocLazyLoad.load([
{
serie: true,
name: 'angular-flot',
files: [ 'js/plugins/flot/jquery.flot.js', 'js/plugins/flot/jquery.flot.time.js', 'js/plugins/flot/jquery.flot.tooltip.min.js', 'js/plugins/flot/jquery.flot.spline.js', 'js/plugins/flot/jquery.flot.resize.js', 'js/plugins/flot/jquery.flot.pie.js', 'js/plugins/flot/curvedLines.js', 'js/plugins/flot/angular-flot.js', ]
},
{
files: ['js/plugins/sparkline/jquery.sparkline.min.js']
}
]);
}
}
})
.state('layouts', {
url: "/layouts",
templateUrl: "views/layouts.html",
data: { pageTitle: 'Layouts' },
})
.state('forms.file_upload', {
url: "/file_upload",
templateUrl: "views/form_file_upload.html",
data: { pageTitle: 'File upload' },
resolve: {
loadPlugin: function ($ocLazyLoad) {
return $ocLazyLoad.load([
{
files: ['css/plugins/dropzone/basic.css','css/plugins/dropzone/dropzone.css','js/plugins/dropzone/dropzone.js']
},
{
files: ['js/plugins/jasny/jasny-bootstrap.min.js', 'css/plugins/jasny/jasny-bootstrap.min.css' ]
}
]);
}
}
})
.state('app', {
abstract: true,
url: "/app",
templateUrl: "views/common/content.html",
})
.state('app.contacts', {
url: "/contacts",
templateUrl: "views/contacts.html",
data: { pageTitle: 'Contacts' }
})
.state('app.contacts_2', {
url: "/contacts_2",
templateUrl: "views/contacts_2.html",
data: { pageTitle: 'Contacts 2' }
})
.state('app.set', {
url: "/set/:setIdx",
templateUrl: "views/set.html",
params: { setIdx: null },
data: { pageTitle: 'Set' }
})
.state('app.setsng', {
url: "/setsng",
templateUrl: "views/setsng.html",
params: {
viewLayout: null, cat: {}, coll: {}, source : 'none'
},
data: { pageTitle: 'Setsng' }
})
.state('app.catalog', {
url: "/catalog",
templateUrl: "views/catalog.html",
params: {
catalog: null
},
data: { pageTitle: 'Catalog' }
})
.state('app.catalogs', {
url: "/catalogs",
templateUrl: "views/catalogs.html",
params: {
viewLayout: null
},
data: { pageTitle: 'Catalogs' }
})
.state('pages', {
abstract: true,
url: "/pages",
templateUrl: "views/common/content.html"
})
.state('pages.search_results', {
url: "/search_results",
templateUrl: "views/search_results.html",
data: { pageTitle: 'Search results' }
})
.state('pages.empy_page', {
url: "/empy_page",
templateUrl: "views/empty_page.html",
data: { pageTitle: 'Empty page' }
})
.state('logins', {
url: "/logins",
templateUrl: "views/login.html",
data: { pageTitle: 'Login', specialClass: 'gray-bg' }
})
.state('login_two_columns', {
url: "/login_two_columns",
templateUrl: "views/login_two_columns.html",
data: { pageTitle: 'Login two columns', specialClass: 'gray-bg' }
})
.state('register', {
url: "/register",
templateUrl: "views/register.html",
data: { pageTitle: 'Register', specialClass: 'gray-bg' }
})
.state('lockscreen', {
url: "/lockscreen",
templateUrl: "views/lockscreen.html",
data: { pageTitle: 'Lockscreen', specialClass: 'gray-bg' }
})
.state('forgot_password', {
url: "/forgot_password",
templateUrl: "views/forgot_password.html",
data: { pageTitle: 'Forgot password', specialClass: 'gray-bg' }
})
.state('errorOne', {
url: "/errorOne",
templateUrl: "views/errorOne.html",
data: { pageTitle: '404', specialClass: 'gray-bg' }
})
.state('errorTwo', {
url: "/errorTwo",
templateUrl: "views/errorTwo.html",
data: { pageTitle: '500', specialClass: 'gray-bg' }
})
.state('ui', {
abstract: true,
url: "/ui",
templateUrl: "views/common/content.html",
})
.state('ui.Collections', {
url: "/Collections",
templateUrl: "views/Collections.html",
data: { pageTitle: 'Collection' }
})
.state('ui.collection', {
url: "/collection",
templateUrl: "views/collection.html",
data: { pageTitle: 'Collection' }
})
.state('tables', {
abstract: true,
url: "/tables",
templateUrl: "views/common/content.html"
})
.state('tables.static_table', {
url: "/static_table",
templateUrl: "views/table_basic.html",
data: { pageTitle: 'Static table' },
resolve: {
loadPlugin: function ($ocLazyLoad) {
return $ocLazyLoad.load([
{
name: 'angular-peity',
files: ['js/plugins/peity/jquery.peity.min.js', 'js/plugins/peity/angular-peity.js']
},
{
files: ['css/plugins/iCheck/custom.css','js/plugins/iCheck/icheck.min.js']
}
]);
}
}
})
.state('tables.data_tables', {
url: "/data_tables",
templateUrl: "views/table_data_tables.html",
data: { pageTitle: 'Data Tables' },
resolve: {
loadPlugin: function ($ocLazyLoad) {
return $ocLazyLoad.load([
{
serie: true,
files: ['js/plugins/dataTables/datatables.min.js','css/plugins/dataTables/datatables.min.css']
},
{
serie: true,
name: 'datatables',
files: ['js/plugins/dataTables/angular-datatables.min.js']
},
{
serie: true,
name: 'datatables.buttons',
files: ['js/plugins/dataTables/angular-datatables.buttons.min.js']
}
]);
}
}
})
.state('tables.foo_table', {
url: "/foo_table",
templateUrl: "views/foo_table.html",
data: { pageTitle: 'Foo Table' },
resolve: {
loadPlugin: function ($ocLazyLoad) {
return $ocLazyLoad.load([
{
files: ['js/plugins/footable/footable.all.min.js', 'css/plugins/footable/footable.core.css']
},
{
name: 'ui.footable',
files: ['js/plugins/footable/angular-footable.js']
}
]);
}
}
})
.state('tables.nggrid', {
url: "/nggrid",
templateUrl: "views/nggrid.html",
data: { pageTitle: 'ng Grid' },
resolve: {
loadPlugin: function ($ocLazyLoad) {
return $ocLazyLoad.load([
{
name: 'ngGrid',
files: ['js/plugins/nggrid/ng-grid-2.0.3.min.js']
},
{
insertBefore: '#loadBefore',
files: ['js/plugins/nggrid/ng-grid.css']
}
]);
}
}
})
.state('commerce', {
abstract: true,
url: "/commerce",
templateUrl: "views/common/content.html",
resolve: {
loadPlugin: function ($ocLazyLoad) {
return $ocLazyLoad.load([
{
files: ['js/plugins/footable/footable.all.min.js', 'css/plugins/footable/footable.core.css']
},
{
name: 'ui.footable',
files: ['js/plugins/footable/angular-footable.js']
}
]);
}
}
})
.state('commerce.products_grid', {
url: "/products_grid",
templateUrl: "views/ecommerce_products_grid.html",
data: { pageTitle: 'E-commerce grid' }
})
.state('commerce.product_list', {
url: "/product_list",
templateUrl: "views/ecommerce_product_list.html",
data: { pageTitle: 'E-commerce product list' }
})
.state('commerce.orders', {
url: "/orders",
templateUrl: "views/ecommerce_orders.html",
data: { pageTitle: 'E-commerce orders' }
})
.state('commerce.product', {
url: "/product",
templateUrl: "views/ecommerce_product.html",
data: { pageTitle: 'Product edit' },
resolve: {
loadPlugin: function ($ocLazyLoad) {
return $ocLazyLoad.load([
{
files: ['css/plugins/summernote/summernote.css','css/plugins/summernote/summernote-bs3.css','js/plugins/summernote/summernote.min.js']
},
{
name: 'summernote',
files: ['css/plugins/summernote/summernote.css','css/plugins/summernote/summernote-bs3.css','js/plugins/summernote/summernote.min.js','js/plugins/summernote/angular-summernote.min.js']
}
]);
}
}
})
.state('commerce.product_details', {
url: "/product_details",
templateUrl: "views/ecommerce_product_details.html",
data: { pageTitle: 'E-commerce Product detail' },
resolve: {
loadPlugin: function ($ocLazyLoad) {
return $ocLazyLoad.load([
{
files: ['css/plugins/slick/slick.css','css/plugins/slick/slick-theme.css','js/plugins/slick/slick.min.js']
},
{
name: 'slick',
files: ['js/plugins/slick/angular-slick.min.js']
}
]);
}
}
})
.state('commerce.payments', {
url: "/payments",
templateUrl: "views/ecommerce_payments.html",
data: { pageTitle: 'E-commerce payments' }
})
.state('commerce.cart', {
url: "/cart",
templateUrl: "views/ecommerce_cart.html",
data: { pageTitle: 'Shopping cart' }
})
.state('gallery', {
abstract: true,
url: "/gallery",
templateUrl: "views/common/content.html"
})
.state('gallery.basic_gallery', {
url: "/basic_gallery",
templateUrl: "views/basic_gallery.html",
data: { pageTitle: 'Collection' },
resolve: {
loadPlugin: function ($ocLazyLoad) {
return $ocLazyLoad.load([
{
files: ['lib/blueimp-gallery/js/blueimp-gallery.min.js','lib/blueimp-gallery/css/blueimp-gallery.min.css']
}
]);
}
}
})
.state('gallery.bootstrap_carousel', {
url: "/bootstrap_carousel",
templateUrl: "views/carousel.html",
data: { pageTitle: 'Bootstrap carousel' }
})
.state('gallery.slick_gallery', {
url: "/slick_gallery",
templateUrl: "views/slick.html",
data: { pageTitle: 'Slick carousel' },
resolve: {
loadPlugin: function ($ocLazyLoad) {
return $ocLazyLoad.load([
{
files: ['css/plugins/slick/slick.css','css/plugins/slick/slick-theme.css','js/plugins/slick/slick.min.js']
},
{
name: 'slick',
files: ['js/plugins/slick/angular-slick.min.js']
}
]);
}
}
})
.state('css_animations', {
url: "/css_animations",
templateUrl: "views/css_animation.html",
data: { pageTitle: 'CSS Animations' },
resolve: {
loadPlugin: function ($ocLazyLoad) {
return $ocLazyLoad.load([
{
reconfig: true,
serie: true,
files: ['js/plugins/rickshaw/vendor/d3.v3.js','js/plugins/rickshaw/rickshaw.min.js']
},
{
reconfig: true,
name: 'angular-rickshaw',
files: ['js/plugins/rickshaw/angular-rickshaw.js']
}
]);
}
}
})
.state('landing', {
url: "/landing",
templateUrl: "views/landing.html",
data: { pageTitle: 'Landing page', specialClass: 'landing-page' },
resolve: {
loadPlugin: function ($ocLazyLoad) {
return $ocLazyLoad.load([
{
files: ['js/plugins/wow/wow.min.js']
}
]);
}
}
})
.state('outlook', {
url: "/outlook",
templateUrl: "views/outlook.html",
data: { pageTitle: 'Outlook view', specialClass: 'fixed-sidebar' }
})
.state('off_canvas', {
url: "/off_canvas",
templateUrl: "views/off_canvas.html",
data: { pageTitle: 'Off canvas menu', specialClass: 'canvas-menu' }
});
}
// Register the ui-router state configuration (the `config` function defined
// earlier in this file) on the 'inspinia' module, then publish $state and
// $stateParams onto $rootScope so any template can read the current routing
// state directly (a common ui-router convenience pattern).
angular
.module('inspinia')
.config(config)
.run(function ($rootScope, $state, $stateParams) {
$rootScope.$state = $state;
$rootScope.$stateParams = $stateParams;
});
|
config
|
tankAI.py
|
"""
The player's AI code
Functions here are called by clock.py to run the AI code
"""
import random
import math
from clientLogic.logging import logPrint
from clientLogic import clientData, commands
def onConnect():
    """Hook invoked once when the client first connects, before the tank spawns."""
    # Publish a short description of this client; the server shows it to
    # other players/viewers. The text is reproduced exactly as before.
    info_text = ("Python player instance running the example AI.\n" +
                 "Fork me at https://github.com/JoelEager/pyTanks.Player")
    commands.setInfo(info_text)
def onSpawn():
    """
    Called when the tank spawns in a new game

    This example AI keeps no per-game state, so there is nothing to reset here;
    the hook exists so a real AI can re-initialize itself on each spawn.
    """
    pass
def onTick(elapsedTime):
    """
    Called once every frame while the tank is alive

    :param elapsedTime: The time elapsed, in seconds, since the last frame
    """
    gs = clientData.gameState

    # Collided (server cleared the moving flag) so turn a random multiple of
    # 45 degrees and try to get moving again
    if not gs.myTank.moving:
        commands.turn((math.pi / 4) * random.randint(0, 7))
        commands.go()
        logPrint("Turned and starting moving", 2)

    # Shooting logic: fire on roughly 1 in 5 of the ticks where shooting is allowed
    if gs.myTank.canShoot and random.randint(0, 4) == 0:
        # Select a random living tank as the target
        random.shuffle(gs.tanks)
        for target in gs.tanks:
            if target.alive:
                # atan2 picks the correct quadrant directly, replacing the old
                # atan + "pi - angle" mirroring, and it also handles a
                # vertically aligned target (deltaX == 0) instead of aborting
                # the whole tick as the previous early return did.
                # NOTE(review): result matches the old math modulo 2*pi —
                # assumes commands.fire() accepts any angle in radians; confirm.
                angle = math.atan2(gs.myTank.y - target.y, target.x - gs.myTank.x)
                commands.fire(angle)
                logPrint("Fired", 2)
                break
|
|
person-editor.component.spec.ts
|
import { async, ComponentFixture, TestBed } from '@angular/core/testing';
import { PersonEditorComponent } from './person-editor.component';
describe('PersonEditorComponent', () => {
let component: PersonEditorComponent;
let fixture: ComponentFixture<PersonEditorComponent>;
|
TestBed.configureTestingModule({
declarations: [ PersonEditorComponent ]
})
.compileComponents();
}));
beforeEach(() => {
fixture = TestBed.createComponent(PersonEditorComponent);
component = fixture.componentInstance;
fixture.detectChanges();
});
it('should be created', () => {
expect(component).toBeTruthy();
});
});
|
beforeEach(async(() => {
|
pcr.rs
|
// Copyright 2021 Contributors to the Parsec project.
// SPDX-License-Identifier: Apache-2.0
mod bank;
mod data;
use crate::{structures::PcrSelectionList, Context, Result};
pub use bank::PcrBank;
pub use data::PcrData;
/// Function that reads all the PCRs in a selection list and returns
/// the result as PCR data.
///
/// # Example
///
/// ```rust
/// # use tss_esapi::{Context, TctiNameConf};
/// # // Create context
/// # let mut context =
/// # Context::new(
/// # TctiNameConf::from_environment_variable().expect("Failed to get TCTI"),
/// # ).expect("Failed to create Context");
/// #
/// use tss_esapi::{
/// interface_types::algorithm::HashingAlgorithm,
/// structures::{PcrSelectionListBuilder, PcrSlot},
/// };
/// // Create PCR selection list with slots in a bank
/// // that is going to be read.
/// let pcr_selection_list = PcrSelectionListBuilder::new()
/// .with_selection(HashingAlgorithm::Sha256,
/// &[
/// PcrSlot::Slot0,
/// PcrSlot::Slot1,
/// PcrSlot::Slot2,
/// PcrSlot::Slot3,
/// PcrSlot::Slot4,
/// PcrSlot::Slot5,
/// PcrSlot::Slot6,
/// PcrSlot::Slot7,
/// PcrSlot::Slot8,
/// PcrSlot::Slot9,
/// PcrSlot::Slot10,
/// PcrSlot::Slot11,
/// PcrSlot::Slot12,
/// PcrSlot::Slot13,
/// PcrSlot::Slot14,
/// PcrSlot::Slot15,
/// PcrSlot::Slot16,
/// PcrSlot::Slot17,
/// PcrSlot::Slot18,
/// PcrSlot::Slot19,
/// PcrSlot::Slot20,
/// PcrSlot::Slot21,
/// ])
/// .build();
/// let _pcr_data = tss_esapi::abstraction::pcr::read_all(&mut context, pcr_selection_list)
/// .expect("pcr::read_all failed");
/// ```
pub fn read_all(
context: &mut Context,
mut pcr_selection_list: PcrSelectionList,
) -> Result<PcrData>
|
{
let mut pcr_data = PcrData::new();
while !pcr_selection_list.is_empty() {
let (_, pcrs_read, pcr_digests) = context.pcr_read(&pcr_selection_list)?;
pcr_data.add(&pcrs_read, &pcr_digests)?;
pcr_selection_list.subtract(&pcrs_read)?;
}
Ok(pcr_data)
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.