file_name (large_string, lengths 4–140) | prefix (large_string, lengths 0–39k) | suffix (large_string, lengths 0–36.1k) | middle (large_string, lengths 0–29.4k) | fim_type (large_string, 4 classes) |
---|---|---|---|---|
name.rs | #![macro_use]
use std::{
cell::RefCell,
collections::{HashMap, HashSet},
fmt,
string::String,
};
/// An interned, freshenable identifier.
/// Generally, one creates names with `n()` (short for `Name::global()`);
/// two names created this way with the same spelling will be treated as the same name.
/// Hygiene comes from freshening (implemented in `alpha.rs`, invoked in `walk_mode.rs`).
/// If a name is created in an unusual way that might cause it to collide,
/// `Name::gensym()` ensures uniqueness.
/// Only names that were copied or cloned from the original will compare equal.
#[derive(PartialEq, Eq, Clone, Copy, Hash)]
pub struct Name {
id: usize,
}
pub struct Spelling {
// No two different variables have the same `unique` spelling. Tomatoes may have been added:
unique: String,
// The original spelling that the programmer chose.
orig: String,
}
thread_local! {
// From `Spelling.unique` to `id`s:
static id_map: RefCell<HashMap<String, usize>> = RefCell::new(HashMap::new());
// From `id`s to `Spelling`s
static spellings: RefCell<Vec<Spelling>> = RefCell::new(vec![]);
static printables: RefCell<HashMap<usize, String>> = RefCell::new(HashMap::new());
// The values of `printables`, for lookup purposes.
static printables_used: RefCell<HashSet<String>> = RefCell::new(HashSet::new());
// Should we do "naive" freshening for testing purposes?
static fake_freshness: RefCell<bool> = RefCell::new(false);
}
impl crate::runtime::reify::Reifiable for Name {
fn ty_name() -> Name { n("Name") }
fn reify(&self) -> crate::runtime::eval::Value { val!(ast(at * self)) }
fn reflect(v: &crate::runtime::eval::Value) -> Name {
extract!((v) crate::runtime::eval::Value::AbstractSyntax = (ref ast)
=> ast.to_name())
}
}
impl std::cmp::PartialOrd for Name {
fn partial_cmp(&self, other: &Name) -> Option<std::cmp::Ordering> {
Some(self.orig_sp().cmp(&other.orig_sp()))
}
}
impl std::cmp::Ord for Name {
fn cmp(&self, other: &Name) -> std::cmp::Ordering { self.orig_sp().cmp(&other.orig_sp()) }
}
// These are for isolating tests of alpha-equivalence from each other.
pub fn enable_fake_freshness(ff: bool) {
fake_freshness.with(|fake_freshness_| {
*fake_freshness_.borrow_mut() = ff;
})
}
// only available on nightly:
// impl !Send for Name {}
impl Name {
/// Two names that are unequal to each other will have different "spelling"s.
/// Tomatoes (🍅) may have been added to the end to ensure uniqueness.
pub fn sp(self) -> String { spellings.with(|us| us.borrow()[self.id].unique.clone()) }
/// The "original spelling" of a name; the string that was used to define it. These may collide.
pub fn orig_sp(self) -> String { spellings.with(|us| us.borrow()[self.id].orig.clone()) }
/// This extracts the "original" `Name`, prior to any freshening.
/// This is probably not ever the *right* thing to do, but may be needed as a workaround.
pub fn unhygienic_orig(self) -> Name {
spellings.with(|us| Name::new(&us.borrow()[self.id].orig, false))
}
/// Printable names are unique, like names from `sp()`, but generated lazily.
/// So, if the compiler freshens some name a bunch of times, producing a tomato-filled mess,
/// but only prints one version of the name, it gets to print an unadorned name.
/// If absolutely necessary to avoid collision, carrots (🥕) are added to the end.
pub fn print(self) -> String {
printables.with(|printables_| {
printables_used.with(|printables_used_| {
printables_
.borrow_mut()
.entry(self.id)
.or_insert_with(|| {
let mut print_version = self.orig_sp();
while printables_used_.borrow().contains(&print_version) {
// Graffiti seen at Berkeley: "EⒶT YOUR VEGETABLES 🥕"
print_version = format!("{}🥕", print_version);
}
printables_used_.borrow_mut().insert(print_version.clone());
print_version.clone()
})
.clone()
})
})
}
pub fn global(s: &str) -> Name { Name::new(s, false) }
pub fn gensym(s: &str) -> Name { Name::new(s, true) }
pub fn freshen(self) -> Name { Name::new(&self.orig_sp(), true) }
fn new(orig_spelling: &str, freshen: bool) -> Name {
let fake_freshness_ = fake_freshness.with(|ff| *ff.borrow());
id_map.with(|id_map_| {
let mut unique_spelling = orig_spelling.to_owned();
// Find a fresh version by adding tomatoes, if requested:
while freshen && id_map_.borrow().contains_key(&unique_spelling) {
unique_spelling = format!("{}🍅", unique_spelling);
}
if freshen && fake_freshness_ {
// Forget doing it right; only add exactly one tomato:
unique_spelling = format!("{}🍅", orig_spelling);
}
let claim_id = || {
spellings.with(|spellings_| {
let new_id = spellings_.borrow().len();
spellings_.borrow_mut().push(Spelling {
unique: unique_spelling.clone(),
orig: orig_spelling.to_owned(),
});
new_id
})
};
// If we're faking freshness, make the freshened name findable. Otherwise...
let id = if freshen && !fake_freshness_ {
claim_id() // ...don't put it in the table
} else {
*id_map_.borrow_mut().entry(unique_spelling.clone()).or_insert_with(claim_id)
};
Name { id: id }
})
}
pub fn is(self, s: &str) -> bool { self.sp() == s }
pub fn is_name(self, n: Name) -> bool { self.sp() == n.sp() }
}
impl From<&str> for Name {
fn from(s: &str) -> Name { Name::global(s) }
}
impl From<&String> for Name {
fn from(s: &String) -> Name { Name::global(&*s) }
}
// TODO: move to `ast_walk`
// TODO: using `lazy_static!` (with or without gensym) makes some tests fail. Why?
/// Special name for negative `ast_walk`ing
pub fn negative_ret_val() -> Name { Name::global("⋄") }
impl fmt::Debug for Name {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "«{}»", self.sp()) }
}
impl fmt::Display for Name {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.print()) }
}
pub fn n(s: &str) -> Name { Name::global(s) }
#[test]
fn name_interning() {
// This test fails under tarpaulin; why? It must be related to `thread_local!` somehow...
let a = n("a");
assert_eq!(a, a);
assert_eq!(a, n("a"));
assert_ne!(a, a.freshen());
assert_eq!(a, a.freshen().unhygienic_orig());
assert_ne!(a, n("x🍅"));
assert_ne!(a.freshen(), a.freshen());
assert_ne!(n("a"), n("y"));
enable_fake_freshness(true);
let x = n("x");
assert_eq!(x, x);
assert_eq!(x, n("x"));
assert_ne!(x, x.freshen());
// ... but now the freshened version of `x` is accessible (and doesn't avoid existing names)
assert_eq!(x.freshen(), n("x🍅"));
assert_eq!(x.freshen(), x.freshen());
| // Printable versions are first-come, first-served
assert_eq!(a.freshen().print(), "a");
assert_eq!(a.print(), "a🥕");
} | random_line_split |
|
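Each row of this dump flattens one source file into `prefix`, `suffix`, and `middle` cells plus a `fim_type` label drawn from the 4 classes (`random_line_split`, `identifier_name`, `identifier_body`, `conditional_block`). A minimal sketch of how a consumer might reassemble a row and build a fill-in-the-middle prompt from it; the `<fim_*>` sentinel strings are an illustrative assumption, not something the dataset prescribes:

```rust
/// One deserialized row, mirroring the five columns in the header above.
struct FimRow {
    file_name: String,
    prefix: String,
    suffix: String,
    middle: String,
    fim_type: String, // one of the 4 classes listed above
}

impl FimRow {
    /// The original file is always `prefix + middle + suffix`.
    fn reconstruct(&self) -> String {
        format!("{}{}{}", self.prefix, self.middle, self.suffix)
    }

    /// PSM-style prompt; the sentinel tokens are hypothetical placeholders —
    /// substitute whatever your model's tokenizer actually uses.
    fn psm_prompt(&self) -> String {
        format!("<fim_prefix>{}<fim_suffix>{}<fim_middle>", self.prefix, self.suffix)
    }
}

fn main() {
    let row = FimRow {
        file_name: "name.rs".into(),
        prefix: "fn main() { prin".into(),
        suffix: "\n}".into(),
        middle: "tln!(\"hi\");".into(),
        fim_type: "random_line_split".into(),
    };
    assert_eq!(row.reconstruct(), "fn main() { println!(\"hi\");\n}");
    println!("{}", row.psm_prompt());
}
```

Since `prefix + middle + suffix` always yields the original file, the concatenation doubles as a cheap integrity check when loading the data.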
name.rs | #![macro_use]
use std::{
cell::RefCell,
collections::{HashMap, HashSet},
fmt,
string::String,
};
/// An interned, freshenable identifier.
/// Generally, one creates names with `n()` (short for `Name::global()`);
/// two names created this way with the same spelling will be treated as the same name.
/// Hygiene comes from freshening (implemented in `alpha.rs`, invoked in `walk_mode.rs`).
/// If a name is created in an unusual way that might cause it to collide,
/// `Name::gensym()` ensures uniqueness.
/// Only names that were copied or cloned from the original will compare equal.
#[derive(PartialEq, Eq, Clone, Copy, Hash)]
pub struct Name {
id: usize,
}
pub struct Spelling {
// No two different variables have the same `unique` spelling. Tomatoes may have been added:
unique: String,
// The original spelling that the programmer chose.
orig: String,
}
thread_local! {
// From `Spelling.unique` to `id`s:
static id_map: RefCell<HashMap<String, usize>> = RefCell::new(HashMap::new());
// From `id`s to `Spelling`s
static spellings: RefCell<Vec<Spelling>> = RefCell::new(vec![]);
static printables: RefCell<HashMap<usize, String>> = RefCell::new(HashMap::new());
// The values of `printables`, for lookup purposes.
static printables_used: RefCell<HashSet<String>> = RefCell::new(HashSet::new());
// Should we do "naive" freshening for testing purposes?
static fake_freshness: RefCell<bool> = RefCell::new(false);
}
impl crate::runtime::reify::Reifiable for Name {
fn ty_name() -> Name { n("Name") }
fn reify(&self) -> crate::runtime::eval::Value { val!(ast(at * self)) }
fn reflect(v: &crate::runtime::eval::Value) -> Name {
extract!((v) crate::runtime::eval::Value::AbstractSyntax = (ref ast)
=> ast.to_name())
}
}
impl std::cmp::PartialOrd for Name {
fn partial_cmp(&self, other: &Name) -> Option<std::cmp::Ordering> {
Some(self.orig_sp().cmp(&other.orig_sp()))
}
}
impl std::cmp::Ord for Name {
fn cmp(&self, other: &Name) -> std::cmp::Ordering { self.orig_sp().cmp(&other.orig_sp()) }
}
// These are for isolating tests of alpha-equivalence from each other.
pub fn enable_fake_freshness(ff: bool) {
fake_freshness.with(|fake_freshness_| {
*fake_freshness_.borrow_mut() = ff;
})
}
// only available on nightly:
// impl !Send for Name {}
impl Name {
/// Two names that are unequal to each other will have different "spelling"s.
/// Tomatoes (🍅) may have been added to the end to ensure uniqueness.
pub fn sp(self) -> String { spellings.with(|us| us.borrow()[self.id].unique.clone()) }
/// The "original spelling" of a name; the string that was used to define it. These may collide.
pub fn orig_sp(self) -> String { spellings.with(|us| us.borrow()[self.id].orig.clone()) }
/// This extracts the "original" `Name`, prior to any freshening.
/// This is probably not ever the *right* thing to do, but may be needed as a workaround.
pub fn unhygienic_orig(self) -> Name {
spellings.with(|us| Name::new(&us.borrow()[self.id].orig, false))
}
/// Printable names are unique, like names from `sp()`, but generated lazily.
/// So, if the compiler freshens some name a bunch of times, producing a tomato-filled mess,
/// but only prints one version of the name, it gets to print an unadorned name.
/// If absolutely necessary to avoid collision, carrots (🥕) are added to the end.
pub fn print(self) -> String {
printables.with(|printables_| {
printables_used.with(|printables_used_| {
printables_
.borrow_mut()
.entry(self.id)
.or_insert_with(|| {
let mut print_version = self.orig_sp();
while printables_used_.borrow().contains(&print_version) {
// Graffiti seen at Berkeley: "EⒶT YOUR VEGETABLES 🥕"
print_version = format!("{}🥕", print_version);
}
printables_used_.borrow_mut().insert(print_version.clone());
print_version.clone()
})
.clone()
})
})
}
pub fn global(s: &str) -> Name { Name::new(s, false) }
pub fn gensym(s: &str) -> Name { Name::new(s, true) }
pub fn freshen(self) -> Name { Name::new(&self.orig_sp(), true) }
fn new(orig_spelling: &str, freshen: bool) -> Name {
let fake_freshness_ = fake_freshness.with(|ff| *ff.borrow());
id_map.with(|id_map_| {
let mut unique_spelling = orig_spelling.to_owned();
// Find a fresh version by adding tomatoes, if requested:
while freshen && id_map_.borrow().contains_key(&unique_spelling) {
unique_spelling = format!("{}🍅", unique_spelling);
}
if freshen && fake_freshness_ {
// Forget doing it right; only add exactly one tomato:
unique_spelling = format!("{}🍅", orig_spelling);
}
let claim_id = || {
spellings.with(|spellings_| {
let new_id = spellings_.borrow().len();
spellings_.borrow_mut().push(Spelling {
unique: unique_spelling.clone(),
orig: orig_spelling.to_owned(),
});
new_id
})
};
// If we're faking freshness, make the freshened name findable. Otherwise...
let id = if freshen && !fake_freshness_ {
cl | *id_map_.borrow_mut().entry(unique_spelling.clone()).or_insert_with(claim_id)
};
Name { id: id }
})
}
pub fn is(self, s: &str) -> bool { self.sp() == s }
pub fn is_name(self, n: Name) -> bool { self.sp() == n.sp() }
}
impl From<&str> for Name {
fn from(s: &str) -> Name { Name::global(s) }
}
impl From<&String> for Name {
fn from(s: &String) -> Name { Name::global(&*s) }
}
// TODO: move to `ast_walk`
// TODO: using `lazy_static!` (with or without gensym) makes some tests fail. Why?
/// Special name for negative `ast_walk`ing
pub fn negative_ret_val() -> Name { Name::global("⋄") }
impl fmt::Debug for Name {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "«{}»", self.sp()) }
}
impl fmt::Display for Name {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.print()) }
}
pub fn n(s: &str) -> Name { Name::global(s) }
#[test]
fn name_interning() {
// This test fails under tarpaulin; why? It must be related to `thread_local!` somehow...
let a = n("a");
assert_eq!(a, a);
assert_eq!(a, n("a"));
assert_ne!(a, a.freshen());
assert_eq!(a, a.freshen().unhygienic_orig());
assert_ne!(a, n("x🍅"));
assert_ne!(a.freshen(), a.freshen());
assert_ne!(n("a"), n("y"));
enable_fake_freshness(true);
let x = n("x");
assert_eq!(x, x);
assert_eq!(x, n("x"));
assert_ne!(x, x.freshen());
// ... but now the freshened version of `x` is accessible (and doesn't avoid existing names)
assert_eq!(x.freshen(), n("x🍅"));
assert_eq!(x.freshen(), x.freshen());
// Printable versions are first-come, first-served
assert_eq!(a.freshen().print(), "a");
assert_eq!(a.print(), "a🥕");
}
| aim_id() // ...don't put it in the table
} else {
| conditional_block |
name.rs | #![macro_use]
use std::{
cell::RefCell,
collections::{HashMap, HashSet},
fmt,
string::String,
};
/// An interned, freshenable identifier.
/// Generally, one creates names with `n()` (short for `Name::global()`);
/// two names created this way with the same spelling will be treated as the same name.
/// Hygiene comes from freshening (implemented in `alpha.rs`, invoked in `walk_mode.rs`).
/// If a name is created in an unusual way that might cause it to collide,
/// `Name::gensym()` ensures uniqueness.
/// Only names that were copied or cloned from the original will compare equal.
#[derive(PartialEq, Eq, Clone, Copy, Hash)]
pub struct Name {
id: usize,
}
pub struct Spelling {
// No two different variables have the same `unique` spelling. Tomatoes may have been added:
unique: String,
// The original spelling that the programmer chose.
orig: String,
}
thread_local! {
// From `Spelling.unique` to `id`s:
static id_map: RefCell<HashMap<String, usize>> = RefCell::new(HashMap::new());
// From `id`s to `Spelling`s
static spellings: RefCell<Vec<Spelling>> = RefCell::new(vec![]);
static printables: RefCell<HashMap<usize, String>> = RefCell::new(HashMap::new());
// The values of `printables`, for lookup purposes.
static printables_used: RefCell<HashSet<String>> = RefCell::new(HashSet::new());
// Should we do "naive" freshening for testing purposes?
static fake_freshness: RefCell<bool> = RefCell::new(false);
}
impl crate::runtime::reify::Reifiable for Name {
fn ty_name() -> Name { n("Name") }
fn reify(&self) -> crate::runtime::eval::Value { val!(ast(at * self)) }
fn reflect(v: &crate::runtime::eval::Value) -> Name {
extract!((v) crate::runtime::eval::Value::AbstractSyntax = (ref ast)
=> ast.to_name())
}
}
impl std::cmp::PartialOrd for Name {
fn partial_cmp(&self, other: &Name) -> Option<std::cmp::Ordering> {
Some(self.orig_sp().cmp(&other.orig_sp()))
}
}
impl std::cmp::Ord for Name {
fn cmp(&self, other: &Name) -> std::cmp::Ordering { self.orig_sp().cmp(&other.orig_sp()) }
}
// These are for isolating tests of alpha-equivalence from each other.
pub fn enable_fake_freshness(ff: bool) {
fake_freshness.with(|fake_freshness_| {
*fake_freshness_.borrow_mut() = ff;
})
}
// only available on nightly:
// impl !Send for Name {}
impl Name {
/// Two names that are unequal to each other will have different "spelling"s.
/// Tomatoes (🍅) may have been added to the end to ensure uniqueness.
pub fn sp(self) -> String { spellings.with(|us| us.borrow()[self.id].unique.clone()) }
/// The "original spelling" of a name; the string that was used to define it. These may collide.
pub fn orig_sp(self) -> String { spellings.with(|us| us.borrow()[self.id].orig.clone()) }
/// This extracts the "original" `Name`, prior to any freshening.
/// This is probably not ever the *right* thing to do, but may be needed as a workaround.
pub fn unhygienic_orig(self) -> Name {
spellings.with(|us| Name::new(&us.borrow()[self.id].orig, false))
}
/// Printable names are unique, like names from `sp()`, but generated lazily.
/// So, if the compiler freshens some name a bunch of times, producing a tomato-filled mess,
/// but only prints one version of the name, it gets to print an unadorned name.
/// If absolutely necessary to avoid collision, carrots (🥕) are added to the end.
pub fn print(self) -> String {
printables.with(|printables_| {
printables_used.with(|printables_used_| {
printables_
.borrow_mut()
.entry(self.id)
.or_insert_with(|| {
let mut print_version = self.orig_sp();
while printables_used_.borrow().contains(&print_version) {
// Graffiti seen at Berkeley: "EⒶT YOUR VEGETABLES 🥕"
print_version = format!("{}🥕", print_version);
}
printables_used_.borrow_mut().insert(print_version.clone());
print_version.clone()
})
.clone()
})
})
}
pub fn global(s: &str) -> Name { Name::new(s, false) }
pub fn gensym(s: &str | ame { Name::new(s, true) }
pub fn freshen(self) -> Name { Name::new(&self.orig_sp(), true) }
fn new(orig_spelling: &str, freshen: bool) -> Name {
let fake_freshness_ = fake_freshness.with(|ff| *ff.borrow());
id_map.with(|id_map_| {
let mut unique_spelling = orig_spelling.to_owned();
// Find a fresh version by adding tomatoes, if requested:
while freshen && id_map_.borrow().contains_key(&unique_spelling) {
unique_spelling = format!("{}🍅", unique_spelling);
}
if freshen && fake_freshness_ {
// Forget doing it right; only add exactly one tomato:
unique_spelling = format!("{}🍅", orig_spelling);
}
let claim_id = || {
spellings.with(|spellings_| {
let new_id = spellings_.borrow().len();
spellings_.borrow_mut().push(Spelling {
unique: unique_spelling.clone(),
orig: orig_spelling.to_owned(),
});
new_id
})
};
// If we're faking freshness, make the freshened name findable. Otherwise...
let id = if freshen && !fake_freshness_ {
claim_id() // ...don't put it in the table
} else {
*id_map_.borrow_mut().entry(unique_spelling.clone()).or_insert_with(claim_id)
};
Name { id: id }
})
}
pub fn is(self, s: &str) -> bool { self.sp() == s }
pub fn is_name(self, n: Name) -> bool { self.sp() == n.sp() }
}
impl From<&str> for Name {
fn from(s: &str) -> Name { Name::global(s) }
}
impl From<&String> for Name {
fn from(s: &String) -> Name { Name::global(&*s) }
}
// TODO: move to `ast_walk`
// TODO: using `lazy_static!` (with or without gensym) makes some tests fail. Why?
/// Special name for negative `ast_walk`ing
pub fn negative_ret_val() -> Name { Name::global("⋄") }
impl fmt::Debug for Name {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "«{}»", self.sp()) }
}
impl fmt::Display for Name {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.print()) }
}
pub fn n(s: &str) -> Name { Name::global(s) }
#[test]
fn name_interning() {
// This test fails under tarpaulin; why? It must be related to `thread_local!` somehow...
let a = n("a");
assert_eq!(a, a);
assert_eq!(a, n("a"));
assert_ne!(a, a.freshen());
assert_eq!(a, a.freshen().unhygienic_orig());
assert_ne!(a, n("x🍅"));
assert_ne!(a.freshen(), a.freshen());
assert_ne!(n("a"), n("y"));
enable_fake_freshness(true);
let x = n("x");
assert_eq!(x, x);
assert_eq!(x, n("x"));
assert_ne!(x, x.freshen());
// ... but now the freshened version of `x` is accessible (and doesn't avoid existing names)
assert_eq!(x.freshen(), n("x🍅"));
assert_eq!(x.freshen(), x.freshen());
// Printable versions are first-come, first-served
assert_eq!(a.freshen().print(), "a");
assert_eq!(a.print(), "a🥕");
}
| ) -> N | identifier_name |
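All three `name.rs` rows above revolve around the interning scheme in `Name::new`: a thread-local table maps each unique spelling to an id, and freshening appends 🍅 until the spelling is unused. A self-contained sketch of that core idea (a simplified stand-in, not the crate's real API — unlike `Name::new`, it registers every freshened spelling in the table, which the real code deliberately avoids for true gensyms):

```rust
use std::collections::HashMap;

struct Interner {
    ids: HashMap<String, usize>, // spelling -> id
    spellings: Vec<String>,      // id -> spelling
}

impl Interner {
    fn new() -> Self {
        Interner { ids: HashMap::new(), spellings: Vec::new() }
    }

    fn intern(&mut self, s: &str, freshen: bool) -> usize {
        let mut spelling = s.to_owned();
        // Same trick as `Name::new` above: add tomatoes until the spelling is fresh.
        while freshen && self.ids.contains_key(&spelling) {
            spelling.push('🍅');
        }
        if let Some(&id) = self.ids.get(&spelling) {
            return id; // already interned: same spelling, same id
        }
        let id = self.spellings.len();
        self.spellings.push(spelling.clone());
        self.ids.insert(spelling, id);
        id
    }
}

fn main() {
    let mut names = Interner::new();
    let a = names.intern("a", false);
    assert_eq!(a, names.intern("a", false)); // interning: equal spellings collapse
    assert_ne!(a, names.intern("a", true));  // freshening: forced-new id ("a🍅")
}
```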
sparc64_unknown_linux_gnu.rs | // Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your | pub fn target() -> TargetResult {
let mut base = super::linux_base::opts();
base.cpu = "v9".to_string();
base.max_atomic_width = Some(64);
Ok(Target {
llvm_target: "sparc64-unknown-linux-gnu".to_string(),
target_endian: "big".to_string(),
target_pointer_width: "64".to_string(),
target_c_int_width: "32".to_string(),
data_layout: "E-m:e-i64:64-n32:64-S128".to_string(),
arch: "sparc64".to_string(),
target_os: "linux".to_string(),
target_env: "gnu".to_string(),
target_vendor: "unknown".to_string(),
linker_flavor: LinkerFlavor::Gcc,
options: base,
})
} | // option. This file may not be copied, modified, or distributed
// except according to those terms.
use spec::{LinkerFlavor, Target, TargetResult};
| random_line_split |
sparc64_unknown_linux_gnu.rs | // Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use spec::{LinkerFlavor, Target, TargetResult};
pub fn | () -> TargetResult {
let mut base = super::linux_base::opts();
base.cpu = "v9".to_string();
base.max_atomic_width = Some(64);
Ok(Target {
llvm_target: "sparc64-unknown-linux-gnu".to_string(),
target_endian: "big".to_string(),
target_pointer_width: "64".to_string(),
target_c_int_width: "32".to_string(),
data_layout: "E-m:e-i64:64-n32:64-S128".to_string(),
arch: "sparc64".to_string(),
target_os: "linux".to_string(),
target_env: "gnu".to_string(),
target_vendor: "unknown".to_string(),
linker_flavor: LinkerFlavor::Gcc,
options: base,
})
}
| target | identifier_name |
sparc64_unknown_linux_gnu.rs | // Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use spec::{LinkerFlavor, Target, TargetResult};
pub fn target() -> TargetResult | {
let mut base = super::linux_base::opts();
base.cpu = "v9".to_string();
base.max_atomic_width = Some(64);
Ok(Target {
llvm_target: "sparc64-unknown-linux-gnu".to_string(),
target_endian: "big".to_string(),
target_pointer_width: "64".to_string(),
target_c_int_width: "32".to_string(),
data_layout: "E-m:e-i64:64-n32:64-S128".to_string(),
arch: "sparc64".to_string(),
target_os: "linux".to_string(),
target_env: "gnu".to_string(),
target_vendor: "unknown".to_string(),
linker_flavor: LinkerFlavor::Gcc,
options: base,
})
} | identifier_body |
|
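The three target-spec rows above all follow rustc's pattern of starting from shared base options and overriding only what differs per target. A self-contained sketch of that pattern with simplified stand-in types (not rustc's real `spec` module):

```rust
#[derive(Clone, Default)]
struct TargetOptions {
    cpu: String,
    max_atomic_width: Option<u64>,
}

fn linux_base_opts() -> TargetOptions {
    // Shared defaults that every Linux target starts from.
    TargetOptions { cpu: "generic".to_string(), max_atomic_width: None }
}

fn sparc64_linux_opts() -> TargetOptions {
    let mut base = linux_base_opts();
    base.cpu = "v9".to_string();      // per-target overrides, as in `target()` above
    base.max_atomic_width = Some(64);
    base
}

fn main() {
    let opts = sparc64_linux_opts();
    assert_eq!(opts.cpu, "v9");
    assert_eq!(opts.max_atomic_width, Some(64));
}
```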
grayscale.js | /*!
* Start Bootstrap - Grayscale Bootstrap Theme (http://startbootstrap.com)
* Code licensed under the Apache License v2.0.
* For details, see http://www.apache.org/licenses/LICENSE-2.0.
*/
// jQuery to collapse the navbar on scroll
$(window).scroll(function() {
if ($(".navbar").offset().top > 50) {
$(".navbar-fixed-top").addClass("top-nav-collapse");
} else {
$(".navbar-fixed-top").removeClass("top-nav-collapse");
}
});
// jQuery for page scrolling feature - requires jQuery Easing plugin
$(function() {
$('a.page-scroll').bind('click', function(event) {
var $anchor = $(this);
$('html, body').stop().animate({
scrollTop: $($anchor.attr('href')).offset().top
}, 1500, 'easeInOutExpo');
event.preventDefault();
});
});
// jQuery for page scrolling feature - requires jQuery Easing plugin for the submenu in drop downs
$(function() {
$('a.page-scroll-submenu').bind('click', function(event) {
var $anchor = $(this);
$('html, body').stop().animate({
scrollTop: $($anchor.attr('href')).offset().top
}, 1500, 'easeInOutExpo');
event.preventDefault();
});
});
// Closes the Responsive Menu on Menu Item Click
$('.navbar-collapse ul li a').click(function() {
$('.navbar-toggle:visible').click();
});
// Google Maps Scripts
// When the window has finished loading create our google map below
google.maps.event.addDomListener(window, 'load', init);
function | () {
// Basic options for a simple Google Map
// For more options see: https://developers.google.com/maps/documentation/javascript/reference#MapOptions
var mapOptions = {
// How zoomed in you want the map to start at (always required)
zoom: 16,
// The latitude and longitude to center the map (always required)
center: new google.maps.LatLng(46.233755, 6.055636), // CERN
// Disables the default Google Maps UI components
disableDefaultUI: true,
scrollwheel: false,
draggable: false,
// How you would like to style the map.
// This is where you would paste any style found on Snazzy Maps.
styles: [
{
"featureType": "all",
"elementType": "all",
"stylers": [
{
"invert_lightness": true
},
{
"saturation": 10
},
{
"lightness": 30
},
{
"gamma": 0.5
},
{
"hue": "#435158"
}
]
}
]
};
// Get the HTML DOM element that will contain your map
// We are using a div with id="map" seen below in the <body>
var mapElement = document.getElementById('map');
// Create the Google Map using our element and options defined above
var map = new google.maps.Map(mapElement, mapOptions);
// Custom Map Marker Icon - Customize the map-marker.png file to customize your icon
var image = 'img/map-marker.png';
var myLatLng = new google.maps.LatLng(46.233755, 6.055636);
/*var beachMarker = new google.maps.Marker({
position: myLatLng,
map: map,
icon: image
});*/
}
$(document).ready(function(d){
$(".dropdown .dropdown-menu li").hover(function(){
//Mouse IN
$(".dropdown .dropdown-menu li").removeClass("active");
$(this).addClass("active");
}, function(){
//Mouse Out
$(".dropdown .dropdown-menu li").removeClass("active");
}
);
});
| init | identifier_name |
grayscale.js | /*!
* Start Bootstrap - Grayscale Bootstrap Theme (http://startbootstrap.com)
* Code licensed under the Apache License v2.0.
* For details, see http://www.apache.org/licenses/LICENSE-2.0.
*/
// jQuery to collapse the navbar on scroll
$(window).scroll(function() {
if ($(".navbar").offset().top > 50) | else {
$(".navbar-fixed-top").removeClass("top-nav-collapse");
}
});
// jQuery for page scrolling feature - requires jQuery Easing plugin
$(function() {
$('a.page-scroll').bind('click', function(event) {
var $anchor = $(this);
$('html, body').stop().animate({
scrollTop: $($anchor.attr('href')).offset().top
}, 1500, 'easeInOutExpo');
event.preventDefault();
});
});
// jQuery for page scrolling feature - requires jQuery Easing plugin for the submenu in drop downs
$(function() {
$('a.page-scroll-submenu').bind('click', function(event) {
var $anchor = $(this);
$('html, body').stop().animate({
scrollTop: $($anchor.attr('href')).offset().top
}, 1500, 'easeInOutExpo');
event.preventDefault();
});
});
// Closes the Responsive Menu on Menu Item Click
$('.navbar-collapse ul li a').click(function() {
$('.navbar-toggle:visible').click();
});
// Google Maps Scripts
// When the window has finished loading create our google map below
google.maps.event.addDomListener(window, 'load', init);
function init() {
// Basic options for a simple Google Map
// For more options see: https://developers.google.com/maps/documentation/javascript/reference#MapOptions
var mapOptions = {
// How zoomed in you want the map to start at (always required)
zoom: 16,
// The latitude and longitude to center the map (always required)
center: new google.maps.LatLng(46.233755, 6.055636), // CERN
// Disables the default Google Maps UI components
disableDefaultUI: true,
scrollwheel: false,
draggable: false,
// How you would like to style the map.
// This is where you would paste any style found on Snazzy Maps.
styles: [
{
"featureType": "all",
"elementType": "all",
"stylers": [
{
"invert_lightness": true
},
{
"saturation": 10
},
{
"lightness": 30
},
{
"gamma": 0.5
},
{
"hue": "#435158"
}
]
}
]
};
// Get the HTML DOM element that will contain your map
// We are using a div with id="map" seen below in the <body>
var mapElement = document.getElementById('map');
// Create the Google Map using our element and options defined above
var map = new google.maps.Map(mapElement, mapOptions);
// Custom Map Marker Icon - Customize the map-marker.png file to customize your icon
var image = 'img/map-marker.png';
var myLatLng = new google.maps.LatLng(46.233755, 6.055636);
/*var beachMarker = new google.maps.Marker({
position: myLatLng,
map: map,
icon: image
});*/
}
$(document).ready(function(d){
$(".dropdown .dropdown-menu li").hover(function(){
//Mouse IN
$(".dropdown .dropdown-menu li").removeClass("active");
$(this).addClass("active");
}, function(){
//Mouse Out
$(".dropdown .dropdown-menu li").removeClass("active");
}
);
});
| {
$(".navbar-fixed-top").addClass("top-nav-collapse");
} | conditional_block |
grayscale.js | /*!
* Start Bootstrap - Grayscale Bootstrap Theme (http://startbootstrap.com)
* Code licensed under the Apache License v2.0.
* For details, see http://www.apache.org/licenses/LICENSE-2.0.
*/
// jQuery to collapse the navbar on scroll
$(window).scroll(function() {
if ($(".navbar").offset().top > 50) {
$(".navbar-fixed-top").addClass("top-nav-collapse");
} else {
$(".navbar-fixed-top").removeClass("top-nav-collapse");
}
});
// jQuery for page scrolling feature - requires jQuery Easing plugin
$(function() {
$('a.page-scroll').bind('click', function(event) {
var $anchor = $(this);
$('html, body').stop().animate({
scrollTop: $($anchor.attr('href')).offset().top
}, 1500, 'easeInOutExpo');
event.preventDefault();
});
});
// jQuery for page scrolling feature - requires jQuery Easing plugin for the submenu in drop downs
$(function() {
$('a.page-scroll-submenu').bind('click', function(event) {
var $anchor = $(this);
$('html, body').stop().animate({
scrollTop: $($anchor.attr('href')).offset().top
}, 1500, 'easeInOutExpo');
event.preventDefault();
});
});
// Closes the Responsive Menu on Menu Item Click
$('.navbar-collapse ul li a').click(function() {
$('.navbar-toggle:visible').click();
});
// Google Maps Scripts
// When the window has finished loading create our google map below
google.maps.event.addDomListener(window, 'load', init);
function init() {
// Basic options for a simple Google Map
// For more options see: https://developers.google.com/maps/documentation/javascript/reference#MapOptions
var mapOptions = {
// How zoomed in you want the map to start at (always required)
zoom: 16,
// The latitude and longitude to center the map (always required)
center: new google.maps.LatLng(46.233755, 6.055636), // CERN
// Disables the default Google Maps UI components
disableDefaultUI: true,
scrollwheel: false,
draggable: false,
// How you would like to style the map.
// This is where you would paste any style found on Snazzy Maps.
styles: [
{
"featureType": "all",
"elementType": "all",
"stylers": [
{
"invert_lightness": true
},
{
"saturation": 10
},
{
| "lightness": 30
},
{
"gamma": 0.5
},
{
"hue": "#435158"
}
]
}
]
};
// Get the HTML DOM element that will contain your map
// We are using a div with id="map" seen below in the <body>
var mapElement = document.getElementById('map');
// Create the Google Map using our element and options defined above
var map = new google.maps.Map(mapElement, mapOptions);
// Custom Map Marker Icon - Customize the map-marker.png file to customize your icon
var image = 'img/map-marker.png';
var myLatLng = new google.maps.LatLng(46.233755, 6.055636);
/*var beachMarker = new google.maps.Marker({
position: myLatLng,
map: map,
icon: image
});*/
}
$(document).ready(function(d){
$(".dropdown .dropdown-menu li").hover(function(){
//Mouse IN
$(".dropdown .dropdown-menu li").removeClass("active");
$(this).addClass("active");
}, function(){
//Mouse Out
$(".dropdown .dropdown-menu li").removeClass("active");
}
);
}); | random_line_split |
|
grayscale.js | /*!
* Start Bootstrap - Grayscale Bootstrap Theme (http://startbootstrap.com)
* Code licensed under the Apache License v2.0.
* For details, see http://www.apache.org/licenses/LICENSE-2.0.
*/
// jQuery to collapse the navbar on scroll
$(window).scroll(function() {
if ($(".navbar").offset().top > 50) {
$(".navbar-fixed-top").addClass("top-nav-collapse");
} else {
$(".navbar-fixed-top").removeClass("top-nav-collapse");
}
});
// jQuery for page scrolling feature - requires jQuery Easing plugin
$(function() {
$('a.page-scroll').bind('click', function(event) {
var $anchor = $(this);
$('html, body').stop().animate({
scrollTop: $($anchor.attr('href')).offset().top
}, 1500, 'easeInOutExpo');
event.preventDefault();
});
});
// jQuery for page scrolling feature - requires jQuery Easing plugin for the submenu in drop downs
$(function() {
$('a.page-scroll-submenu').bind('click', function(event) {
var $anchor = $(this);
$('html, body').stop().animate({
scrollTop: $($anchor.attr('href')).offset().top
}, 1500, 'easeInOutExpo');
event.preventDefault();
});
});
// Closes the Responsive Menu on Menu Item Click
$('.navbar-collapse ul li a').click(function() {
$('.navbar-toggle:visible').click();
});
// Google Maps Scripts
// When the window has finished loading create our google map below
google.maps.event.addDomListener(window, 'load', init);
function init() |
$(document).ready(function(d){
$(".dropdown .dropdown-menu li").hover(function(){
//Mouse IN
$(".dropdown .dropdown-menu li").removeClass("active");
$(this).addClass("active");
}, function(){
//Mouse Out
$(".dropdown .dropdown-menu li").removeClass("active");
}
);
});
| {
// Basic options for a simple Google Map
// For more options see: https://developers.google.com/maps/documentation/javascript/reference#MapOptions
var mapOptions = {
// How zoomed in you want the map to start at (always required)
zoom: 16,
// The latitude and longitude to center the map (always required)
center: new google.maps.LatLng(46.233755, 6.055636), // CERN
// Disables the default Google Maps UI components
disableDefaultUI: true,
scrollwheel: false,
draggable: false,
// How you would like to style the map.
// This is where you would paste any style found on Snazzy Maps.
styles: [
{
"featureType": "all",
"elementType": "all",
"stylers": [
{
"invert_lightness": true
},
{
"saturation": 10
},
{
"lightness": 30
},
{
"gamma": 0.5
},
{
"hue": "#435158"
}
]
}
]
};
// Get the HTML DOM element that will contain your map
// We are using a div with id="map" seen below in the <body>
var mapElement = document.getElementById('map');
// Create the Google Map using our element and options defined above
var map = new google.maps.Map(mapElement, mapOptions);
// Custom Map Marker Icon - Customize the map-marker.png file to customize your icon
var image = 'img/map-marker.png';
var myLatLng = new google.maps.LatLng(46.233755, 6.055636);
/*var beachMarker = new google.maps.Marker({
position: myLatLng,
map: map,
icon: image
});*/
} | identifier_body |
sw.js | /**
* Welcome to your Workbox-powered service worker!
*
* You'll need to register this file in your web app and you should
* disable HTTP caching for this file too.
* See https://goo.gl/nhQhGp
*
* The rest of the code is auto-generated. Please don't update this file
* directly; instead, make changes to your Workbox build configuration
* and re-run your build process.
* See https://goo.gl/2aRDsh
*/
importScripts("workbox-v3.6.3/workbox-sw.js");
workbox.setConfig({modulePathPrefix: "workbox-v3.6.3"});
workbox.core.setCacheNameDetails({prefix: "gatsby-plugin-offline"});
workbox.skipWaiting();
workbox.clientsClaim();
/**
* The workboxSW.precacheAndRoute() method efficiently caches and responds to
* requests for URLs in the manifest.
* See https://goo.gl/S9QRab
*/
self.__precacheManifest = [
{
"url": "webpack-runtime-3b0de88cc86236a9fd25.js"
},
{
"url": "styles.d5535f422ce021f2fbf4.css"
},
{
"url": "styles-36681512479a843831b0.js"
},
{
"url": "framework-52a3ea48de1a5dd03b5a.js"
},
{
"url": "app-8676dc5fee877e903cab.js"
},
{
"url": "component---node-modules-gatsby-plugin-offline-app-shell-js-af9e0f2c9acfb4fb259c.js"
},
{
"url": "offline-plugin-app-shell-fallback/index.html",
"revision": "2e2284128b12e011ace69ffe4b6e061e"
},
{
"url": "manifest.webmanifest",
"revision": "bd80d19c4b4dd12eca877feb0b5015cc"
}
].concat(self.__precacheManifest || []);
workbox.precaching.suppressWarnings();
workbox.precaching.precacheAndRoute(self.__precacheManifest, {});
workbox.routing.registerRoute(/(\.js$|\.css$|static\/)/, workbox.strategies.cacheFirst(), 'GET');
workbox.routing.registerRoute(/^https?:.*\/page-data\/.*\/page-data\.json/, workbox.strategies.networkFirst(), 'GET');
workbox.routing.registerRoute(/^https?:.*\.(png|jpg|jpeg|webp|svg|gif|tiff|js|woff|woff2|json|css)$/, workbox.strategies.staleWhileRevalidate(), 'GET');
workbox.routing.registerRoute(/^https?:\/\/fonts\.googleapis\.com\/css/, workbox.strategies.staleWhileRevalidate(), 'GET');
/* global importScripts, workbox, idbKeyval */
importScripts(`idb-keyval-iife.min.js`)
const { NavigationRoute } = workbox.routing
const navigationRoute = new NavigationRoute(async ({ event }) => {
let { pathname } = new URL(event.request.url)
pathname = pathname.replace(new RegExp(`^`), ``)
// Check for resources + the app bundle
// The latter may not exist if the SW is updating to a new version
const resources = await idbKeyval.get(`resources:${pathname}`)
if (!resources || !(await caches.match(`/app-8676dc5fee877e903cab.js`))) {
return await fetch(event.request)
}
for (const resource of resources) {
// As soon as we detect a failed resource, fetch the entire page from
// network - that way we won't risk being in an inconsistent state with
// some parts of the page failing.
if (!(await caches.match(resource))) {
return await fetch(event.request)
}
}
const offlineShell = `/offline-plugin-app-shell-fallback/index.html`
return await caches.match(offlineShell)
})
workbox.routing.registerRoute(navigationRoute)
const messageApi = {
setPathResources(event, { path, resources }) {
event.waitUntil(idbKeyval.set(`resources:${path}`, resources))
},
| (event) {
event.waitUntil(idbKeyval.clear())
},
}
self.addEventListener(`message`, event => {
const { gatsbyApi } = event.data
if (gatsbyApi) messageApi[gatsbyApi](event, event.data)
})
| clearPathResources | identifier_name |
sw.js | /**
* Welcome to your Workbox-powered service worker!
*
* You'll need to register this file in your web app and you should
* disable HTTP caching for this file too.
* See https://goo.gl/nhQhGp
*
* The rest of the code is auto-generated. Please don't update this file
* directly; instead, make changes to your Workbox build configuration
* and re-run your build process.
* See https://goo.gl/2aRDsh
*/
importScripts("workbox-v3.6.3/workbox-sw.js");
workbox.setConfig({modulePathPrefix: "workbox-v3.6.3"});
workbox.core.setCacheNameDetails({prefix: "gatsby-plugin-offline"});
workbox.skipWaiting();
workbox.clientsClaim();
/**
* The workboxSW.precacheAndRoute() method efficiently caches and responds to
* requests for URLs in the manifest.
* See https://goo.gl/S9QRab
*/
self.__precacheManifest = [
{
"url": "webpack-runtime-3b0de88cc86236a9fd25.js"
},
{
"url": "styles.d5535f422ce021f2fbf4.css"
},
{
"url": "styles-36681512479a843831b0.js"
},
{
"url": "framework-52a3ea48de1a5dd03b5a.js"
},
{
"url": "app-8676dc5fee877e903cab.js"
},
{
"url": "component---node-modules-gatsby-plugin-offline-app-shell-js-af9e0f2c9acfb4fb259c.js"
},
{
"url": "offline-plugin-app-shell-fallback/index.html",
"revision": "2e2284128b12e011ace69ffe4b6e061e"
}, | "revision": "bd80d19c4b4dd12eca877feb0b5015cc"
}
].concat(self.__precacheManifest || []);
workbox.precaching.suppressWarnings();
workbox.precaching.precacheAndRoute(self.__precacheManifest, {});
workbox.routing.registerRoute(/(\.js$|\.css$|static\/)/, workbox.strategies.cacheFirst(), 'GET');
workbox.routing.registerRoute(/^https?:.*\/page-data\/.*\/page-data\.json/, workbox.strategies.networkFirst(), 'GET');
workbox.routing.registerRoute(/^https?:.*\.(png|jpg|jpeg|webp|svg|gif|tiff|js|woff|woff2|json|css)$/, workbox.strategies.staleWhileRevalidate(), 'GET');
workbox.routing.registerRoute(/^https?:\/\/fonts\.googleapis\.com\/css/, workbox.strategies.staleWhileRevalidate(), 'GET');
/* global importScripts, workbox, idbKeyval */
importScripts(`idb-keyval-iife.min.js`)
const { NavigationRoute } = workbox.routing
const navigationRoute = new NavigationRoute(async ({ event }) => {
let { pathname } = new URL(event.request.url)
pathname = pathname.replace(new RegExp(`^`), ``)
// Check for resources + the app bundle
// The latter may not exist if the SW is updating to a new version
const resources = await idbKeyval.get(`resources:${pathname}`)
if (!resources || !(await caches.match(`/app-8676dc5fee877e903cab.js`))) {
return await fetch(event.request)
}
for (const resource of resources) {
// As soon as we detect a failed resource, fetch the entire page from
// network - that way we won't risk being in an inconsistent state with
// some parts of the page failing.
if (!(await caches.match(resource))) {
return await fetch(event.request)
}
}
const offlineShell = `/offline-plugin-app-shell-fallback/index.html`
return await caches.match(offlineShell)
})
workbox.routing.registerRoute(navigationRoute)
const messageApi = {
setPathResources(event, { path, resources }) {
event.waitUntil(idbKeyval.set(`resources:${path}`, resources))
},
clearPathResources(event) {
event.waitUntil(idbKeyval.clear())
},
}
self.addEventListener(`message`, event => {
const { gatsbyApi } = event.data
if (gatsbyApi) messageApi[gatsbyApi](event, event.data)
}) | {
"url": "manifest.webmanifest", | random_line_split |
sw.js | /**
* Welcome to your Workbox-powered service worker!
*
* You'll need to register this file in your web app and you should
* disable HTTP caching for this file too.
* See https://goo.gl/nhQhGp
*
* The rest of the code is auto-generated. Please don't update this file
* directly; instead, make changes to your Workbox build configuration
* and re-run your build process.
* See https://goo.gl/2aRDsh
*/
importScripts("workbox-v3.6.3/workbox-sw.js");
workbox.setConfig({modulePathPrefix: "workbox-v3.6.3"});
workbox.core.setCacheNameDetails({prefix: "gatsby-plugin-offline"});
workbox.skipWaiting();
workbox.clientsClaim();
/**
* The workboxSW.precacheAndRoute() method efficiently caches and responds to
* requests for URLs in the manifest.
* See https://goo.gl/S9QRab
*/
self.__precacheManifest = [
{
"url": "webpack-runtime-3b0de88cc86236a9fd25.js"
},
{
"url": "styles.d5535f422ce021f2fbf4.css"
},
{
"url": "styles-36681512479a843831b0.js"
},
{
"url": "framework-52a3ea48de1a5dd03b5a.js"
},
{
"url": "app-8676dc5fee877e903cab.js"
},
{
"url": "component---node-modules-gatsby-plugin-offline-app-shell-js-af9e0f2c9acfb4fb259c.js"
},
{
"url": "offline-plugin-app-shell-fallback/index.html",
"revision": "2e2284128b12e011ace69ffe4b6e061e"
},
{
"url": "manifest.webmanifest",
"revision": "bd80d19c4b4dd12eca877feb0b5015cc"
}
].concat(self.__precacheManifest || []);
workbox.precaching.suppressWarnings();
workbox.precaching.precacheAndRoute(self.__precacheManifest, {});
workbox.routing.registerRoute(/(\.js$|\.css$|static\/)/, workbox.strategies.cacheFirst(), 'GET');
workbox.routing.registerRoute(/^https?:.*\/page-data\/.*\/page-data\.json/, workbox.strategies.networkFirst(), 'GET');
workbox.routing.registerRoute(/^https?:.*\.(png|jpg|jpeg|webp|svg|gif|tiff|js|woff|woff2|json|css)$/, workbox.strategies.staleWhileRevalidate(), 'GET');
workbox.routing.registerRoute(/^https?:\/\/fonts\.googleapis\.com\/css/, workbox.strategies.staleWhileRevalidate(), 'GET');
/* global importScripts, workbox, idbKeyval */
importScripts(`idb-keyval-iife.min.js`)
const { NavigationRoute } = workbox.routing
const navigationRoute = new NavigationRoute(async ({ event }) => {
let { pathname } = new URL(event.request.url)
pathname = pathname.replace(new RegExp(`^`), ``)
// Check for resources + the app bundle
// The latter may not exist if the SW is updating to a new version
const resources = await idbKeyval.get(`resources:${pathname}`)
if (!resources || !(await caches.match(`/app-8676dc5fee877e903cab.js`))) {
return await fetch(event.request)
}
for (const resource of resources) {
// As soon as we detect a failed resource, fetch the entire page from
// network - that way we won't risk being in an inconsistent state with
// some parts of the page failing.
if (!(await caches.match(resource))) {
return await fetch(event.request)
}
}
const offlineShell = `/offline-plugin-app-shell-fallback/index.html`
return await caches.match(offlineShell)
})
workbox.routing.registerRoute(navigationRoute)
const messageApi = {
setPathResources(event, { path, resources }) {
event.waitUntil(idbKeyval.set(`resources:${path}`, resources))
},
clearPathResources(event) | ,
}
self.addEventListener(`message`, event => {
const { gatsbyApi } = event.data
if (gatsbyApi) messageApi[gatsbyApi](event, event.data)
})
| {
event.waitUntil(idbKeyval.clear())
} | identifier_body |
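The navigation route in the three `sw.js` rows above makes an all-or-nothing decision: if any precached resource for the path is missing, the whole page comes from the network, so the user never sees a partially-cached, inconsistent page. The same decision logic, transliterated into Rust for consistency with the other examples (the `cache_has` callback is a hypothetical stand-in for `caches.match`):

```rust
enum Response {
    Network,      // fetch(event.request)
    OfflineShell, // the precached app-shell fallback
}

fn choose_response<F>(resources: &[&str], cache_has: F) -> Response
where
    F: Fn(&str) -> bool,
{
    // As soon as one resource is missing, fall back to the network.
    if resources.iter().any(|r| !cache_has(r)) {
        Response::Network
    } else {
        Response::OfflineShell
    }
}

fn main() {
    let cached = ["app.js", "styles.css"];
    let hit = |r: &str| cached.contains(&r);
    assert!(matches!(choose_response(&["app.js"], hit), Response::OfflineShell));
    assert!(matches!(choose_response(&["app.js", "page.json"], hit), Response::Network));
}
```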
fit-util.js | // The MIT License (MIT)
// Copyright (c) 2015 RAFAEL FERNANDES
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
var ngModule = angular.module('fit-util', ['underscore']);
ngModule.factory('fitUtil', ['_', function(_) {
return {
average: average
};
/**
* Returns the average of the list
*/
function | (list) {
return _.reduce(list, function(memo, num) {
return Number(memo) + Number(num);
}, 0) / (list.length === 0 ? 1 : list.length);
}
}]);
/**
* Underscore as an angular module and service.
*/
angular.module('underscore', [])
.factory('_', function() {
return window._; // assumes underscore has already been loaded on the page
});
| average | identifier_name |
fit-util.js | // The MIT License (MIT)
// Copyright (c) 2015 RAFAEL FERNANDES
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
var ngModule = angular.module('fit-util', ['underscore']);
ngModule.factory('fitUtil', ['_', function(_) {
return {
average: average
};
/**
* Returns the average of the list
*/ | return Number(memo) + Number(num);
}, 0) / (list.length === 0 ? 1 : list.length);
}
}]);
/**
* Underscore as an angular module and service.
*/
angular.module('underscore', [])
.factory('_', function() {
return window._; // assumes underscore has already been loaded on the page
}); | function average(list) {
return _.reduce(list, function(memo, num) { | random_line_split |
fit-util.js | // The MIT License (MIT)
// Copyright (c) 2015 RAFAEL FERNANDES
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
var ngModule = angular.module('fit-util', ['underscore']);
ngModule.factory('fitUtil', ['_', function(_) {
return {
average: average
};
/**
* Returns the average of the list
*/
function average(list) |
}]);
/**
* Underscore as an angular module and service.
*/
angular.module('underscore', [])
.factory('_', function() {
return window._; // assumes underscore has already been loaded on the page
});
| {
return _.reduce(list, function(memo, num) {
return Number(memo) + Number(num);
}, 0) / (list.length === 0 ? 1 : list.length);
} | identifier_body |
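`fitUtil.average` above guards the division with `(list.length === 0 ? 1 : list.length)` so an empty list yields 0 instead of NaN. The same guard in Rust, for consistency with the other examples (a sketch, not part of the original module):

```rust
fn average(list: &[f64]) -> f64 {
    // Divide by 1 for an empty list so the result is 0.0 rather than NaN.
    let n = if list.is_empty() { 1 } else { list.len() };
    list.iter().sum::<f64>() / n as f64
}

fn main() {
    assert_eq!(average(&[1.0, 2.0, 3.0]), 2.0);
    assert_eq!(average(&[]), 0.0);
}
```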
mod.rs | // Generated with ./mk_vsl_tag from Varnish headers: include/tbl/vsl_tags.h include/tbl/vsl_tags_http.h include/vsl_int.h
// https://github.com/varnishcache/varnish-cache/blob/master/include/vapi/vsl_int.h
// https://github.com/varnishcache/varnish-cache/blob/master/include/tbl/vsl_tags.h
// https://github.com/varnishcache/varnish-cache/blob/master/include/tbl/vsl_tags_http.h
mod tag_e;
pub mod message;
pub mod parser;
use std::fmt::{self, Debug, Display};
use quick_error::ResultExt;
use nom;
use quick_error::quick_error;
use bitflags::bitflags;
use crate::maybe_string::MaybeStr;
pub use self::tag_e::VSL_tag_e as VslRecordTag;
bitflags! {
pub struct Marker: u8 {
const VSL_CLIENTMARKER = 0b0000_0001;
const VSL_BACKENDMARKER = 0b0000_0010;
}
}
impl Display for Marker {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
write!(f, "[{}{}]",
if self.contains(Marker::VSL_CLIENTMARKER) { "C" } else { " " },
if self.contains(Marker::VSL_BACKENDMARKER) { "B" } else { " " })
}
}
pub type VslIdent = u32;
#[derive(Debug)]
struct VslRecordHeader {
tag: u8,
len: u16,
marker: Marker,
ident: VslIdent,
}
pub struct VslRecord<'b> {
pub tag: VslRecordTag,
pub marker: Marker,
pub ident: VslIdent,
pub data: &'b[u8],
}
quick_error! {
#[derive(Debug)]
pub enum VslRecordParseError {
Nom(nom_err: String, tag: VslRecordTag, record: String) {
context(record: &'a VslRecord<'a>, err: nom::Err<&'a [u8]>) -> (format!("{}", err), record.tag, format!("{}", record))
display("Nom parser failed on {}: {}", record, nom_err)
}
}
}
impl<'b> VslRecord<'b> {
pub fn parse_data<T, P>(&'b self, parser: P) -> Result<T, VslRecordParseError> where
P: Fn(&'b [u8]) -> nom::IResult<&'b [u8], T> {
// Note: need type annotation for the u32 error type as the output IResult has no Error
// variant that would help to infer it
let result: nom::IResult<_, Result<T, _>, u32> = opt_res!(self.data, complete!(parser));
// unwrap here is safe as complete! eliminates Incomplete variant and opt_res! remaining Error variant
result.unwrap().1.context(self).map_err(From::from)
}
pub fn is_client(&self) -> bool {
self.marker.contains(Marker::VSL_CLIENTMARKER)
}
pub fn is_backend(&self) -> bool {
self.marker.contains(Marker::VSL_BACKENDMARKER)
}
}
impl<'b> Debug for VslRecord<'b> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
f.debug_struct("VSL Record")
.field("tag", &self.tag)
.field("marker", &self.marker)
.field("ident", &self.ident)
.field("data", &MaybeStr::from_bytes(self.data))
.finish()
}
}
impl<'b> Display for VslRecord<'b> {
fn | (&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
let tag = format!("{:?}", self.tag);
if f.alternate() {
write!(f, "{} {:5} {:18} {}", self.marker, self.ident, tag, MaybeStr::from_bytes(self.data))
} else {
write!(f, "VSL record (marker: {} ident: {} tag: {} data: {:?})", self.marker, self.ident, tag, MaybeStr::from_bytes(self.data))
}
}
}
| fmt | identifier_name |
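The `Display` impl for `Marker` above renders the two bitflags as a fixed-width `[CB]` tag. A self-contained sketch of the same logic, with plain bit masks standing in for the `bitflags!`-generated type:

```rust
const VSL_CLIENTMARKER: u8 = 0b0000_0001;
const VSL_BACKENDMARKER: u8 = 0b0000_0010;

fn marker_label(bits: u8) -> String {
    format!(
        "[{}{}]",
        if bits & VSL_CLIENTMARKER != 0 { "C" } else { " " },
        if bits & VSL_BACKENDMARKER != 0 { "B" } else { " " },
    )
}

fn main() {
    assert_eq!(marker_label(VSL_CLIENTMARKER), "[C ]");
    assert_eq!(marker_label(VSL_BACKENDMARKER), "[ B]");
    assert_eq!(marker_label(VSL_CLIENTMARKER | VSL_BACKENDMARKER), "[CB]");
}
```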
mod.rs | // Generated with ./mk_vsl_tag from Varnish headers: include/tbl/vsl_tags.h include/tbl/vsl_tags_http.h include/vsl_int.h
// https://github.com/varnishcache/varnish-cache/blob/master/include/vapi/vsl_int.h
// https://github.com/varnishcache/varnish-cache/blob/master/include/tbl/vsl_tags.h
// https://github.com/varnishcache/varnish-cache/blob/master/include/tbl/vsl_tags_http.h
mod tag_e;
pub mod message;
pub mod parser;
use std::fmt::{self, Debug, Display};
use quick_error::ResultExt;
use nom;
use quick_error::quick_error;
use bitflags::bitflags;
use crate::maybe_string::MaybeStr;
pub use self::tag_e::VSL_tag_e as VslRecordTag;
bitflags! {
pub struct Marker: u8 {
const VSL_CLIENTMARKER = 0b0000_0001;
const VSL_BACKENDMARKER = 0b0000_0010;
}
}
impl Display for Marker {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
write!(f, "[{}{}]",
if self.contains(Marker::VSL_CLIENTMARKER) { "C" } else { " " },
if self.contains(Marker::VSL_BACKENDMARKER) { "B" } else { " " })
}
}
pub type VslIdent = u32;
#[derive(Debug)]
struct VslRecordHeader {
tag: u8,
len: u16,
marker: Marker,
ident: VslIdent,
}
pub struct VslRecord<'b> {
pub tag: VslRecordTag,
pub marker: Marker,
pub ident: VslIdent,
pub data: &'b[u8],
}
quick_error! {
#[derive(Debug)]
pub enum VslRecordParseError {
Nom(nom_err: String, tag: VslRecordTag, record: String) {
context(record: &'a VslRecord<'a>, err: nom::Err<&'a [u8]>) -> (format!("{}", err), record.tag, format!("{}", record))
display("Nom parser failed on {}: {}", record, nom_err)
}
}
}
impl<'b> VslRecord<'b> {
pub fn parse_data<T, P>(&'b self, parser: P) -> Result<T, VslRecordParseError> where
P: Fn(&'b [u8]) -> nom::IResult<&'b [u8], T> {
// Note: need type annotation for the u32 error type as the output IResult has no Error
// variant that would help to infer it
let result: nom::IResult<_, Result<T, _>, u32> = opt_res!(self.data, complete!(parser));
// unwrap here is safe as complete! eliminates Incomplete variant and opt_res! remaining Error variant
result.unwrap().1.context(self).map_err(From::from)
}
pub fn is_client(&self) -> bool {
self.marker.contains(Marker::VSL_CLIENTMARKER)
}
pub fn is_backend(&self) -> bool {
self.marker.contains(Marker::VSL_BACKENDMARKER)
}
}
impl<'b> Debug for VslRecord<'b> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
f.debug_struct("VSL Record")
.field("tag", &self.tag)
.field("marker", &self.marker)
.field("ident", &self.ident)
.field("data", &MaybeStr::from_bytes(self.data))
.finish()
}
}
impl<'b> Display for VslRecord<'b> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
let tag = format!("{:?}", self.tag);
if f.alternate() | else {
write!(f, "VSL record (marker: {} ident: {} tag: {} data: {:?})", self.marker, self.ident, tag, MaybeStr::from_bytes(self.data))
}
}
}
| {
write!(f, "{} {:5} {:18} {}", self.marker, self.ident, tag, MaybeStr::from_bytes(self.data))
} | conditional_block |
mod.rs | // Generated with ./mk_vsl_tag from Varnish headers: include/tbl/vsl_tags.h include/tbl/vsl_tags_http.h include/vsl_int.h
// https://github.com/varnishcache/varnish-cache/blob/master/include/vapi/vsl_int.h
// https://github.com/varnishcache/varnish-cache/blob/master/include/tbl/vsl_tags.h
// https://github.com/varnishcache/varnish-cache/blob/master/include/tbl/vsl_tags_http.h
mod tag_e;
pub mod message;
pub mod parser;
use std::fmt::{self, Debug, Display};
use quick_error::ResultExt;
use nom;
use quick_error::quick_error;
use bitflags::bitflags;
use crate::maybe_string::MaybeStr;
pub use self::tag_e::VSL_tag_e as VslRecordTag;
bitflags! {
pub struct Marker: u8 {
const VSL_CLIENTMARKER = 0b0000_0001;
const VSL_BACKENDMARKER = 0b0000_0010;
}
}
impl Display for Marker {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
write!(f, "[{}{}]",
if self.contains(Marker::VSL_CLIENTMARKER) { "C" } else { " " },
if self.contains(Marker::VSL_BACKENDMARKER) { "B" } else { " " })
}
}
pub type VslIdent = u32;
#[derive(Debug)]
struct VslRecordHeader {
tag: u8,
len: u16,
marker: Marker,
ident: VslIdent,
}
pub struct VslRecord<'b> {
pub tag: VslRecordTag,
pub marker: Marker,
pub ident: VslIdent,
pub data: &'b [u8],
}
quick_error! {
#[derive(Debug)]
pub enum VslRecordParseError {
Nom(nom_err: String, tag: VslRecordTag, record: String) {
context(record: &'a VslRecord<'a>, err: nom::Err<&'a [u8]>) -> (format!("{}", err), record.tag, format!("{}", record))
display("Nom parser failed on {}: {}", record, nom_err)
}
}
}
impl<'b> VslRecord<'b> {
pub fn parse_data<T, P>(&'b self, parser: P) -> Result<T, VslRecordParseError> where
P: Fn(&'b [u8]) -> nom::IResult<&'b [u8], T> {
// Note: we need a type annotation for the `u32` error type, since the output
// `IResult` has no `Error` variant that would help infer it
let result: nom::IResult<_, Result<T, _>, u32> = opt_res!(self.data, complete!(parser));
// unwrap here is safe: `complete!` eliminates the `Incomplete` variant and `opt_res!` the remaining `Error` variant
result.unwrap().1.context(self).map_err(From::from)
}
pub fn is_client(&self) -> bool {
self.marker.contains(Marker::VSL_CLIENTMARKER) |
pub fn is_backend(&self) -> bool {
self.marker.contains(Marker::VSL_BACKENDMARKER)
}
}
impl<'b> Debug for VslRecord<'b> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
f.debug_struct("VSL Record")
.field("tag", &self.tag)
.field("marker", &self.marker)
.field("ident", &self.ident)
.field("data", &MaybeStr::from_bytes(self.data))
.finish()
}
}
impl<'b> Display for VslRecord<'b> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
let tag = format!("{:?}", self.tag);
if f.alternate() {
write!(f, "{} {:5} {:18} {}", self.marker, self.ident, tag, MaybeStr::from_bytes(self.data))
} else {
write!(f, "VSL record (marker: {} ident: {} tag: {} data: {:?})", self.marker, self.ident, tag, MaybeStr::from_bytes(self.data))
}
}
} | } | random_line_split |
mod.rs | // Generated with ./mk_vsl_tag from Varnish headers: include/tbl/vsl_tags.h include/tbl/vsl_tags_http.h include/vsl_int.h
// https://github.com/varnishcache/varnish-cache/blob/master/include/vapi/vsl_int.h
// https://github.com/varnishcache/varnish-cache/blob/master/include/tbl/vsl_tags.h
// https://github.com/varnishcache/varnish-cache/blob/master/include/tbl/vsl_tags_http.h
mod tag_e;
pub mod message;
pub mod parser;
use std::fmt::{self, Debug, Display};
use quick_error::ResultExt;
use nom;
use quick_error::quick_error;
use bitflags::bitflags;
use crate::maybe_string::MaybeStr;
pub use self::tag_e::VSL_tag_e as VslRecordTag;
bitflags! {
pub struct Marker: u8 {
const VSL_CLIENTMARKER = 0b0000_0001;
const VSL_BACKENDMARKER = 0b0000_0010;
}
}
impl Display for Marker {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
write!(f, "[{}{}]",
if self.contains(Marker::VSL_CLIENTMARKER) { "C" } else { " " },
if self.contains(Marker::VSL_BACKENDMARKER) { "B" } else { " " })
}
}
pub type VslIdent = u32;
#[derive(Debug)]
struct VslRecordHeader {
tag: u8,
len: u16,
marker: Marker,
ident: VslIdent,
}
pub struct VslRecord<'b> {
pub tag: VslRecordTag,
pub marker: Marker,
pub ident: VslIdent,
pub data: &'b [u8],
}
quick_error! {
#[derive(Debug)]
pub enum VslRecordParseError {
Nom(nom_err: String, tag: VslRecordTag, record: String) {
context(record: &'a VslRecord<'a>, err: nom::Err<&'a [u8]>) -> (format!("{}", err), record.tag, format!("{}", record))
display("Nom parser failed on {}: {}", record, nom_err)
}
}
}
impl<'b> VslRecord<'b> {
pub fn parse_data<T, P>(&'b self, parser: P) -> Result<T, VslRecordParseError> where
P: Fn(&'b [u8]) -> nom::IResult<&'b [u8], T> {
// Note: we need a type annotation for the `u32` error type, since the output
// `IResult` has no `Error` variant that would help infer it
let result: nom::IResult<_, Result<T, _>, u32> = opt_res!(self.data, complete!(parser));
// unwrap here is safe: `complete!` eliminates the `Incomplete` variant and `opt_res!` the remaining `Error` variant
result.unwrap().1.context(self).map_err(From::from)
}
pub fn is_client(&self) -> bool {
self.marker.contains(Marker::VSL_CLIENTMARKER)
}
pub fn is_backend(&self) -> bool {
self.marker.contains(Marker::VSL_BACKENDMARKER)
}
}
impl<'b> Debug for VslRecord<'b> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> |
}
impl<'b> Display for VslRecord<'b> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
let tag = format!("{:?}", self.tag);
if f.alternate() {
write!(f, "{} {:5} {:18} {}", self.marker, self.ident, tag, MaybeStr::from_bytes(self.data))
} else {
write!(f, "VSL record (marker: {} ident: {} tag: {} data: {:?})", self.marker, self.ident, tag, MaybeStr::from_bytes(self.data))
}
}
}
| {
f.debug_struct("VSL Record")
.field("tag", &self.tag)
.field("marker", &self.marker)
.field("ident", &self.ident)
.field("data", &MaybeStr::from_bytes(self.data))
.finish()
} | identifier_body |
__init__.py | # =============================================================================
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# | # See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
"""Custom op used by periodic_resample."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.periodic_resample.python.ops.periodic_resample_op import periodic_resample
from tensorflow.python.util.all_util import remove_undocumented
_allowed_symbols = ["periodic_resample"]
remove_undocumented(__name__, _allowed_symbols) | # http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | random_line_split |
network.rs | // Copyright 2020 The Exonum Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use anyhow::{bail, ensure, format_err};
use exonum::{
crypto::{
x25519::{self, into_x25519_public_key},
PublicKey,
},
messages::{SignedMessage, Verified},
};
use futures::{channel::mpsc, future, prelude::*};
use futures_retry::{ErrorHandler, FutureRetry, RetryPolicy};
use rand::{thread_rng, Rng};
use tokio::net::{TcpListener, TcpStream};
use tokio_util::codec::Framed;
use std::{
collections::HashMap,
io,
net::SocketAddr,
ops,
sync::{Arc, RwLock},
time::Duration,
};
use crate::{
events::{
codec::MessagesCodec,
noise::{Handshake, HandshakeData, HandshakeParams, NoiseHandshake},
},
messages::{Connect, Message, Service},
state::SharedConnectList,
NetworkConfiguration,
};
const OUTGOING_CHANNEL_SIZE: usize = 10;
#[derive(Debug)]
struct ErrorAction {
retry_timeout: Duration,
max_retries: usize,
description: String,
}
impl ErrorAction {
fn new(config: &NetworkConfiguration, description: String) -> Self {
Self {
retry_timeout: Duration::from_millis(config.tcp_connect_retry_timeout),
max_retries: config.tcp_connect_max_retries as usize,
description,
}
}
}
impl ErrorHandler<io::Error> for ErrorAction {
type OutError = io::Error;
fn handle(&mut self, attempt: usize, e: io::Error) -> RetryPolicy<io::Error> {
log::trace!(
"{} failed [Attempt: {}/{}]: {}",
self.description,
attempt,
self.max_retries,
e
);
if attempt >= self.max_retries {
RetryPolicy::ForwardError(e)
} else {
let jitter = thread_rng().gen_range(0.5, 1.0);
let timeout = self.retry_timeout.mul_f64(jitter);
RetryPolicy::WaitRetry(timeout)
}
}
}
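// Worked example (illustrative): with `tcp_connect_retry_timeout = 1_000` ms, the
// wait before each retry is drawn uniformly from [500, 1000) ms, e.g.
// `Duration::from_millis(1_000).mul_f64(0.73)` ≈ 730 ms. The jitter spreads out
// reconnection storms when many peers restart at the same time.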
#[derive(Debug, Clone)]
pub enum ConnectedPeerAddr {
In(SocketAddr),
Out(String, SocketAddr),
}
impl ConnectedPeerAddr {
pub fn is_incoming(&self) -> bool {
match self {
Self::In(_) => true,
Self::Out(_, _) => false,
}
}
}
/// Network events.
#[derive(Debug)]
pub enum NetworkEvent {
/// A message was received from the network.
MessageReceived(Vec<u8>),
/// The node has connected to a peer.
PeerConnected {
/// Peer address.
addr: ConnectedPeerAddr,
/// Connect message.
connect: Box<Verified<Connect>>,
},
/// The node has disconnected from a peer.
PeerDisconnected(PublicKey),
/// Connection to a peer failed.
UnableConnectToPeer(PublicKey),
}
#[derive(Debug, Clone)]
pub enum NetworkRequest {
SendMessage(PublicKey, SignedMessage),
#[cfg(test)]
DisconnectWithPeer(PublicKey),
}
#[derive(Debug)]
pub struct NetworkPart {
pub our_connect_message: Verified<Connect>,
pub listen_address: SocketAddr,
pub network_config: NetworkConfiguration,
pub max_message_len: u32,
pub network_requests: mpsc::Receiver<NetworkRequest>,
pub network_tx: mpsc::Sender<NetworkEvent>,
pub(crate) connect_list: SharedConnectList,
}
#[derive(Clone, Debug)]
struct ConnectionPoolEntry {
sender: mpsc::Sender<SignedMessage>,
address: ConnectedPeerAddr,
// Connection ID assigned to the connection during instantiation. This ID is unique among
// all connections and is used in `ConnectionPool::remove()` to figure out whether
// it still makes sense to remove a connection, or the request has been obsoleted.
id: u64,
}
#[derive(Clone, Debug)]
struct SharedConnectionPool {
inner: Arc<RwLock<ConnectionPool>>,
}
impl SharedConnectionPool {
fn new(our_key: PublicKey) -> Self {
Self {
inner: Arc::new(RwLock::new(ConnectionPool::new(our_key))),
}
}
fn read(&self) -> impl ops::Deref<Target = ConnectionPool> + '_ {
self.inner.read().unwrap()
}
fn write(&self) -> impl ops::DerefMut<Target = ConnectionPool> + '_ {
self.inner.write().unwrap()
}
async fn send_message(&self, peer_key: &PublicKey, message: SignedMessage) {
let maybe_peer_info = {
// Ensure that we don't hold the lock across the `await` point.
let peers = &self.inner.read().unwrap().peers;
peers
.get(peer_key)
.map(|peer| (peer.sender.clone(), peer.id))
};
if let Some((mut sender, connection_id)) = maybe_peer_info {
if sender.send(message).await.is_err() {
log::warn!("Cannot send message to peer {}", peer_key);
self.write().remove(peer_key, Some(connection_id));
}
}
}
fn create_connection(
&self,
peer_key: PublicKey,
address: ConnectedPeerAddr,
socket: Framed<TcpStream, MessagesCodec>,
) -> Option<Connection> {
let mut guard = self.write();
if guard.contains(&peer_key) && Self::ignore_connection(guard.our_key, peer_key) {
log::info!("Ignoring connection to {:?} per priority rules", peer_key);
return None;
}
let (receiver_rx, connection_id) = guard.add(peer_key, address.clone());
Some(Connection {
socket,
receiver_rx,
address,
key: peer_key,
id: connection_id,
})
}
/// Provides a complete, anti-symmetric relation between two peers bound in a connection.
/// The peers use it to decide which of the two connections is kept alive
/// when they connect to each other simultaneously.
fn ignore_connection(our_key: PublicKey, their_key: PublicKey) -> bool {
our_key[..] < their_key[..]
}
}
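// Property sketch (added for illustration): the relation is anti-symmetric, so in a
// simultaneous connect exactly one side drops its duplicate connection:
//
// if key_a != key_b {
//     assert_ne!(
//         SharedConnectionPool::ignore_connection(key_a, key_b),
//         SharedConnectionPool::ignore_connection(key_b, key_a),
//     );
// }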
#[derive(Debug)]
struct ConnectionPool {
peers: HashMap<PublicKey, ConnectionPoolEntry>,
our_key: PublicKey,
next_connection_id: u64,
}
impl ConnectionPool {
fn new(our_key: PublicKey) -> Self {
Self {
peers: HashMap::new(),
our_key,
next_connection_id: 0,
}
}
fn count_incoming(&self) -> usize {
self.peers
.values()
.filter(|entry| entry.address.is_incoming())
.count()
}
fn count_outgoing(&self) -> usize {
self.peers
.values()
.filter(|entry| !entry.address.is_incoming())
.count()
}
/// Adds a peer to the connection list.
///
/// # Return value
///
/// Returns the receiver for outgoing messages to the peer and the connection ID.
fn add(
&mut self,
key: PublicKey,
address: ConnectedPeerAddr,
) -> (mpsc::Receiver<SignedMessage>, u64) {
let id = self.next_connection_id;
let (sender, receiver_rx) = mpsc::channel(OUTGOING_CHANNEL_SIZE);
let entry = ConnectionPoolEntry {
sender,
address,
id,
};
self.next_connection_id += 1;
self.peers.insert(key, entry);
(receiver_rx, id)
}
fn contains(&self, address: &PublicKey) -> bool {
self.peers.get(address).is_some()
}
/// Drops the connection to a peer. The request can be optionally filtered by the connection ID
/// in order to avoid issuing obsolete requests.
///
/// # Return value
///
/// Returns `true` if the connection with the peer was dropped. If the connection with the
/// peer was not dropped (either because it did not exist, or because
/// the provided `connection_id` is outdated), returns `false`.
fn remove(&mut self, address: &PublicKey, connection_id: Option<u64>) -> bool {
if let Some(entry) = self.peers.get(address) {
if connection_id.map_or(true, |id| id == entry.id) {
self.peers.remove(address);
return true;
}
}
false
}
}
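// Minimal sketch (added; not in the original source) of the `remove` semantics:
// a request carrying an outdated connection ID is ignored, while a matching ID
// (or `None`) drops the peer. Assumes `exonum::crypto::gen_keypair` is available,
// as elsewhere in the crate.
#[cfg(test)]
mod connection_pool_remove_example {
use super::*;
use exonum::crypto::gen_keypair;
#[test]
fn stale_connection_ids_are_ignored() {
let (our_key, _) = gen_keypair();
let (peer_key, _) = gen_keypair();
let mut pool = ConnectionPool::new(our_key);
let addr = ConnectedPeerAddr::In("127.0.0.1:0".parse().unwrap());
let (_rx, id) = pool.add(peer_key, addr);
assert!(!pool.remove(&peer_key, Some(id + 1))); // obsolete request: no-op
assert!(pool.remove(&peer_key, Some(id))); // current ID: connection dropped
}
}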
struct Connection {
socket: Framed<TcpStream, MessagesCodec>,
receiver_rx: mpsc::Receiver<SignedMessage>,
address: ConnectedPeerAddr,
key: PublicKey,
id: u64,
}
#[derive(Clone)]
struct NetworkHandler {
listen_address: SocketAddr,
pool: SharedConnectionPool,
network_config: NetworkConfiguration,
network_tx: mpsc::Sender<NetworkEvent>,
handshake_params: HandshakeParams,
connect_list: SharedConnectList,
}
impl NetworkHandler {
fn new(
address: SocketAddr,
connection_pool: SharedConnectionPool,
network_config: NetworkConfiguration,
network_tx: mpsc::Sender<NetworkEvent>,
handshake_params: HandshakeParams,
connect_list: SharedConnectList,
) -> Self {
Self {
listen_address: address,
pool: connection_pool,
network_config,
network_tx,
handshake_params,
connect_list,
}
}
async fn listener(self) -> anyhow::Result<()> {
let mut listener = TcpListener::bind(&self.listen_address).await?;
let mut incoming_connections = listener.incoming();
// Incoming connections limiter
let incoming_connections_limit = self.network_config.max_incoming_connections;
while let Some(mut socket) = incoming_connections.try_next().await? {
let peer_address = match socket.peer_addr() {
Ok(address) => address,
Err(err) => {
log::warn!("Peer address resolution failed: {}", err);
continue;
}
};
// Check incoming connections count.
let connections_count = self.pool.read().count_incoming();
if connections_count >= incoming_connections_limit {
log::warn!(
"Rejected incoming connection with peer={}, connections limit reached.",
peer_address
);
continue;
}
let pool = self.pool.clone();
let connect_list = self.connect_list.clone();
let network_tx = self.network_tx.clone();
let handshake = NoiseHandshake::responder(&self.handshake_params);
let task = async move {
let HandshakeData {
codec,
raw_message,
peer_key,
} = handshake.listen(&mut socket).await?;
let connect = Self::parse_connect_msg(raw_message, &peer_key)?;
let peer_key = connect.author();
if !connect_list.is_peer_allowed(&peer_key) {
bail!(
"Rejecting incoming connection with peer={} public_key={}, \
the peer is not in the connect list",
peer_address,
peer_key
);
}
let conn_addr = ConnectedPeerAddr::In(peer_address);
let socket = Framed::new(socket, codec);
let maybe_connection = pool.create_connection(peer_key, conn_addr, socket);
if let Some(connection) = maybe_connection {
Self::handle_connection(connection, connect, pool, network_tx).await
} else {
Ok(())
}
};
tokio::spawn(task.unwrap_or_else(|err| log::warn!("{}", err)));
}
Ok(())
}
/// # Return value
///
/// The returned future resolves when the connection is established. The connection processing
/// is spawned onto the `tokio` runtime.
fn connect(
&self,
key: PublicKey,
handshake_params: &HandshakeParams,
) -> impl Future<Output = anyhow::Result<()>> {
// Resolve peer key to an address.
let maybe_address = self.connect_list.find_address_by_key(&key);
let unresolved_address = if let Some(address) = maybe_address {
address
} else {
let err = format_err!("Trying to connect to peer {} not from connect list", key);
return future::err(err).left_future();
};
let max_connections = self.network_config.max_outgoing_connections;
let mut handshake_params = handshake_params.clone();
handshake_params.set_remote_key(key);
let pool = self.pool.clone();
let network_tx = self.network_tx.clone();
let network_config = self.network_config;
let description = format!(
"Connecting to {} (remote address = {})",
key, unresolved_address
);
let on_error = ErrorAction::new(&network_config, description);
async move {
let connect = || TcpStream::connect(&unresolved_address);
// The second component of the returned value / error is the number of retries,
// which we ignore.
let (mut socket, _) = FutureRetry::new(connect, on_error)
.await
.map_err(|(err, _)| err)?;
let peer_address = match socket.peer_addr() {
Ok(addr) => addr,
Err(err) => {
let err = format_err!("Couldn't take peer addr from socket: {}", err);
return Err(err);
}
};
Self::configure_socket(&mut socket, network_config)?;
let HandshakeData {
codec,
raw_message,
peer_key,
} = NoiseHandshake::initiator(&handshake_params)
.send(&mut socket)
.await?;
if pool.read().count_outgoing() >= max_connections {
log::info!(
"Ignoring outgoing connection to {:?} because the connection limit ({}) \
is reached",
key,
max_connections
);
return Ok(());
}
let conn_addr = ConnectedPeerAddr::Out(unresolved_address, peer_address);
let connect = Self::parse_connect_msg(raw_message, &peer_key)?;
let socket = Framed::new(socket, codec);
if let Some(connection) = pool.create_connection(key, conn_addr, socket) |
Ok(())
}
.right_future()
}
async fn process_messages(
pool: SharedConnectionPool,
connection: Connection,
mut network_tx: mpsc::Sender<NetworkEvent>,
) {
let (sink, stream) = connection.socket.split();
let key = connection.key;
let connection_id = connection.id;
// Processing of incoming messages.
let incoming = async move {
let res = (&mut network_tx)
.sink_map_err(anyhow::Error::from)
.send_all(&mut stream.map_ok(NetworkEvent::MessageReceived))
.await;
if pool.write().remove(&key, Some(connection_id)) {
network_tx
.send(NetworkEvent::PeerDisconnected(key))
.await
.ok();
}
res
};
futures::pin_mut!(incoming);
// Processing of outgoing messages.
let outgoing = connection.receiver_rx.map(Ok).forward(sink);
// Select the first future to terminate and drop the remaining one.
let task = future::select(incoming, outgoing).map(|res| {
if let (Err(err), _) = res.factor_first() {
log::info!(
"Connection with peer {} terminated: {} (root cause: {})",
key,
err,
err.root_cause()
);
}
});
task.await
}
fn configure_socket(
socket: &mut TcpStream,
network_config: NetworkConfiguration,
) -> anyhow::Result<()> {
socket.set_nodelay(network_config.tcp_nodelay)?;
let duration = network_config.tcp_keep_alive.map(Duration::from_millis);
socket.set_keepalive(duration)?;
Ok(())
}
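// Configuration sketch (illustrative values): a hypothetical
// `NetworkConfiguration { tcp_nodelay: true, tcp_keep_alive: Some(5_000), .. }`
// disables Nagle's algorithm and sends TCP keep-alive probes every 5 seconds;
// `tcp_keep_alive: None` leaves keep-alive disabled.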
async fn handle_connection(
connection: Connection,
connect: Verified<Connect>,
pool: SharedConnectionPool,
mut network_tx: mpsc::Sender<NetworkEvent>,
) -> anyhow::Result<()> {
let address = connection.address.clone();
log::trace!("Established connection with peer {:?}", address);
Self::send_peer_connected_event(address, connect, &mut network_tx).await?;
Self::process_messages(pool, connection, network_tx).await;
Ok(())
}
fn parse_connect_msg(
raw: Vec<u8>,
key: &x25519::PublicKey,
) -> anyhow::Result<Verified<Connect>> {
let message = Message::from_raw_buffer(raw)?;
let connect: Verified<Connect> = match message {
Message::Service(Service::Connect(connect)) => connect,
other => bail!(
"First message from a remote peer is not `Connect`, got={:?}",
other
),
};
let author = into_x25519_public_key(connect.author());
ensure!(
author == *key,
"Connect message public key doesn't match with the received peer key"
);
Ok(connect)
}
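// Note on the guard above (added commentary): the Ed25519 author key of the
// `Connect` message is converted to its X25519 form and compared against the
// static key that authenticated the Noise handshake, so a peer cannot present a
// `Connect` signed by an identity other than the one it handshook with.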
pub async fn handle_requests(self, mut receiver: mpsc::Receiver<NetworkRequest>) {
while let Some(request) = receiver.next().await {
match request {
NetworkRequest::SendMessage(key, message) => {
let mut this = self.clone();
tokio::spawn(async move {
if let Err(e) = this.handle_send_message(key, message).await {
log::error!("Cannot send message to peer {:?}: {}", key, e);
}
});
}
#[cfg(test)]
NetworkRequest::DisconnectWithPeer(peer) => {
let disconnected = self.pool.write().remove(&peer, None);
if disconnected {
let mut network_tx = self.network_tx.clone();
tokio::spawn(async move {
network_tx
.send(NetworkEvent::PeerDisconnected(peer))
.await
.ok();
});
}
}
}
}
}
async fn handle_send_message(
&mut self,
address: PublicKey,
message: SignedMessage,
) -> anyhow::Result<()> {
if self.pool.read().contains(&address) {
self.pool.send_message(&address, message).await;
Ok(())
} else if self.can_create_connections() {
self.create_new_connection(address, message).await
} else {
self.send_unable_connect_event(address).await
}
}
async fn create_new_connection(
&self,
key: PublicKey,
message: SignedMessage,
) -> anyhow::Result<()> {
self.connect(key, &self.handshake_params).await?;
let connect = &self.handshake_params.connect;
if message != *connect.as_raw() {
self.pool.send_message(&key, message).await;
}
Ok(())
}
async fn send_peer_connected_event(
addr: ConnectedPeerAddr,
connect: Verified<Connect>,
network_tx: &mut mpsc::Sender<NetworkEvent>,
) -> anyhow::Result<()> {
let peer_connected = NetworkEvent::PeerConnected {
addr,
connect: Box::new(connect),
};
network_tx
.send(peer_connected)
.await
.map_err(|_| format_err!("Cannot send `PeerConnected` notification"))
}
fn can_create_connections(&self) -> bool {
self.pool.read().count_outgoing() < self.network_config.max_outgoing_connections
}
async fn send_unable_connect_event(&mut self, peer: PublicKey) -> anyhow::Result<()> {
let event = NetworkEvent::UnableConnectToPeer(peer);
self.network_tx
.send(event)
.await
.map_err(|_| format_err!("can't send network event"))
}
}
impl NetworkPart {
pub async fn run(self, handshake_params: HandshakeParams) {
let our_key = handshake_params.connect.author();
let handler = NetworkHandler::new(
self.listen_address,
SharedConnectionPool::new(our_key),
self.network_config,
self.network_tx,
handshake_params,
self.connect_list,
);
let listener = handler.clone().listener().unwrap_or_else(|e| {
log::error!("Listening to incoming peer connections failed: {}", e);
});
futures::pin_mut!(listener);
let request_handler = handler.handle_requests(self.network_requests);
futures::pin_mut!(request_handler);
// FIXME: is `select` appropriate here?
future::select(listener, request_handler).await;
}
}
| {
let handler = Self::handle_connection(connection, connect, pool, network_tx);
tokio::spawn(handler);
} | conditional_block |
network.rs | // Copyright 2020 The Exonum Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use anyhow::{bail, ensure, format_err};
use exonum::{
crypto::{
x25519::{self, into_x25519_public_key},
PublicKey,
},
messages::{SignedMessage, Verified},
};
use futures::{channel::mpsc, future, prelude::*};
use futures_retry::{ErrorHandler, FutureRetry, RetryPolicy};
use rand::{thread_rng, Rng};
use tokio::net::{TcpListener, TcpStream};
use tokio_util::codec::Framed;
use std::{
collections::HashMap,
io,
net::SocketAddr,
ops,
sync::{Arc, RwLock},
time::Duration,
};
use crate::{
events::{
codec::MessagesCodec,
noise::{Handshake, HandshakeData, HandshakeParams, NoiseHandshake},
},
messages::{Connect, Message, Service},
state::SharedConnectList,
NetworkConfiguration,
};
const OUTGOING_CHANNEL_SIZE: usize = 10;
#[derive(Debug)]
struct ErrorAction {
retry_timeout: Duration,
max_retries: usize,
description: String,
}
impl ErrorAction {
fn new(config: &NetworkConfiguration, description: String) -> Self {
Self {
retry_timeout: Duration::from_millis(config.tcp_connect_retry_timeout),
max_retries: config.tcp_connect_max_retries as usize,
description,
}
}
}
impl ErrorHandler<io::Error> for ErrorAction {
type OutError = io::Error;
fn handle(&mut self, attempt: usize, e: io::Error) -> RetryPolicy<io::Error> {
log::trace!(
"{} failed [Attempt: {}/{}]: {}",
self.description,
attempt,
self.max_retries,
e
);
if attempt >= self.max_retries {
RetryPolicy::ForwardError(e)
} else {
let jitter = thread_rng().gen_range(0.5, 1.0);
let timeout = self.retry_timeout.mul_f64(jitter);
RetryPolicy::WaitRetry(timeout)
}
}
}
#[derive(Debug, Clone)]
pub enum ConnectedPeerAddr {
In(SocketAddr),
Out(String, SocketAddr),
}
impl ConnectedPeerAddr {
pub fn is_incoming(&self) -> bool {
match self {
Self::In(_) => true,
Self::Out(_, _) => false,
}
}
}
/// Network events.
#[derive(Debug)]
pub enum NetworkEvent {
/// A message was received from the network.
MessageReceived(Vec<u8>),
/// The node has connected to a peer.
PeerConnected {
/// Peer address.
addr: ConnectedPeerAddr,
/// Connect message.
connect: Box<Verified<Connect>>,
},
/// The node has disconnected from a peer.
PeerDisconnected(PublicKey),
/// Connection to a peer failed.
UnableConnectToPeer(PublicKey),
}
#[derive(Debug, Clone)]
pub enum NetworkRequest {
SendMessage(PublicKey, SignedMessage),
#[cfg(test)]
DisconnectWithPeer(PublicKey),
}
#[derive(Debug)]
pub struct NetworkPart {
pub our_connect_message: Verified<Connect>,
pub listen_address: SocketAddr,
pub network_config: NetworkConfiguration,
pub max_message_len: u32,
pub network_requests: mpsc::Receiver<NetworkRequest>,
pub network_tx: mpsc::Sender<NetworkEvent>,
pub(crate) connect_list: SharedConnectList,
}
#[derive(Clone, Debug)]
struct ConnectionPoolEntry {
sender: mpsc::Sender<SignedMessage>,
address: ConnectedPeerAddr,
// Connection ID assigned to the connection during instantiation. This ID is unique among
// all connections and is used in `ConnectionPool::remove()` to figure out whether
// it still makes sense to remove a connection, or the request has been obsoleted.
id: u64,
}
#[derive(Clone, Debug)]
struct SharedConnectionPool {
inner: Arc<RwLock<ConnectionPool>>,
}
impl SharedConnectionPool {
fn new(our_key: PublicKey) -> Self {
Self {
inner: Arc::new(RwLock::new(ConnectionPool::new(our_key))),
}
}
fn read(&self) -> impl ops::Deref<Target = ConnectionPool> + '_ {
self.inner.read().unwrap()
}
fn write(&self) -> impl ops::DerefMut<Target = ConnectionPool> + '_ {
self.inner.write().unwrap()
}
async fn send_message(&self, peer_key: &PublicKey, message: SignedMessage) {
let maybe_peer_info = {
// Ensure that we don't hold the lock across the `await` point.
let peers = &self.inner.read().unwrap().peers;
peers
.get(peer_key)
.map(|peer| (peer.sender.clone(), peer.id))
};
if let Some((mut sender, connection_id)) = maybe_peer_info {
if sender.send(message).await.is_err() {
log::warn!("Cannot send message to peer {}", peer_key);
self.write().remove(peer_key, Some(connection_id));
}
}
}
fn create_connection(
&self,
peer_key: PublicKey,
address: ConnectedPeerAddr,
socket: Framed<TcpStream, MessagesCodec>,
) -> Option<Connection> {
let mut guard = self.write();
if guard.contains(&peer_key) && Self::ignore_connection(guard.our_key, peer_key) {
log::info!("Ignoring connection to {:?} per priority rules", peer_key);
return None;
}
let (receiver_rx, connection_id) = guard.add(peer_key, address.clone());
Some(Connection {
socket,
receiver_rx,
address,
key: peer_key,
id: connection_id,
})
}
/// Provides a complete, anti-symmetric relation between two peers bound in a connection.
/// The peers use it to decide which of the two connections is kept alive
/// when they connect to each other simultaneously.
fn ignore_connection(our_key: PublicKey, their_key: PublicKey) -> bool {
our_key[..] < their_key[..]
}
}
#[derive(Debug)]
struct ConnectionPool {
peers: HashMap<PublicKey, ConnectionPoolEntry>,
our_key: PublicKey,
next_connection_id: u64,
}
impl ConnectionPool {
fn new(our_key: PublicKey) -> Self {
Self {
peers: HashMap::new(),
our_key,
next_connection_id: 0,
}
}
fn count_incoming(&self) -> usize {
self.peers
.values()
.filter(|entry| entry.address.is_incoming())
.count()
}
fn count_outgoing(&self) -> usize {
self.peers
.values()
.filter(|entry| !entry.address.is_incoming())
.count()
}
/// Adds a peer to the connection list.
///
/// # Return value
///
/// Returns the receiver for outgoing messages to the peer and the connection ID.
fn add(
&mut self,
key: PublicKey,
address: ConnectedPeerAddr,
) -> (mpsc::Receiver<SignedMessage>, u64) {
let id = self.next_connection_id;
let (sender, receiver_rx) = mpsc::channel(OUTGOING_CHANNEL_SIZE);
let entry = ConnectionPoolEntry {
sender,
address,
id,
};
self.next_connection_id += 1;
self.peers.insert(key, entry);
(receiver_rx, id)
}
fn contains(&self, address: &PublicKey) -> bool {
self.peers.get(address).is_some()
}
/// Drops the connection to a peer. The request can be optionally filtered by the connection ID
/// in order to avoid issuing obsolete requests.
///
/// # Return value
///
/// Returns `true` if the connection with the peer was dropped. If the connection with the
/// peer was not dropped (either because it did not exist, or because
/// the provided `connection_id` is outdated), returns `false`.
fn remove(&mut self, address: &PublicKey, connection_id: Option<u64>) -> bool {
if let Some(entry) = self.peers.get(address) {
if connection_id.map_or(true, |id| id == entry.id) {
self.peers.remove(address);
return true;
}
}
false
}
}
struct Connection {
socket: Framed<TcpStream, MessagesCodec>,
receiver_rx: mpsc::Receiver<SignedMessage>,
address: ConnectedPeerAddr,
key: PublicKey,
id: u64,
}
#[derive(Clone)]
struct NetworkHandler {
listen_address: SocketAddr,
pool: SharedConnectionPool,
network_config: NetworkConfiguration,
network_tx: mpsc::Sender<NetworkEvent>,
handshake_params: HandshakeParams,
connect_list: SharedConnectList,
}
impl NetworkHandler {
fn new(
address: SocketAddr,
connection_pool: SharedConnectionPool,
network_config: NetworkConfiguration,
network_tx: mpsc::Sender<NetworkEvent>,
handshake_params: HandshakeParams,
connect_list: SharedConnectList,
) -> Self {
Self {
listen_address: address,
pool: connection_pool,
network_config,
network_tx,
handshake_params,
connect_list,
}
}
async fn listener(self) -> anyhow::Result<()> {
let mut listener = TcpListener::bind(&self.listen_address).await?;
let mut incoming_connections = listener.incoming();
// Incoming connections limiter
let incoming_connections_limit = self.network_config.max_incoming_connections;
while let Some(mut socket) = incoming_connections.try_next().await? {
let peer_address = match socket.peer_addr() {
Ok(address) => address,
Err(err) => {
log::warn!("Peer address resolution failed: {}", err);
continue;
}
};
// Check incoming connections count.
let connections_count = self.pool.read().count_incoming();
if connections_count >= incoming_connections_limit {
log::warn!(
"Rejected incoming connection with peer={}, connections limit reached.",
peer_address
);
continue;
}
let pool = self.pool.clone();
let connect_list = self.connect_list.clone();
let network_tx = self.network_tx.clone();
let handshake = NoiseHandshake::responder(&self.handshake_params);
let task = async move {
let HandshakeData {
codec,
raw_message,
peer_key,
} = handshake.listen(&mut socket).await?;
let connect = Self::parse_connect_msg(raw_message, &peer_key)?;
let peer_key = connect.author();
if !connect_list.is_peer_allowed(&peer_key) {
bail!(
"Rejecting incoming connection with peer={} public_key={}, \
the peer is not in the connect list",
peer_address,
peer_key
);
}
let conn_addr = ConnectedPeerAddr::In(peer_address);
let socket = Framed::new(socket, codec);
let maybe_connection = pool.create_connection(peer_key, conn_addr, socket);
if let Some(connection) = maybe_connection {
Self::handle_connection(connection, connect, pool, network_tx).await
} else {
Ok(())
}
};
tokio::spawn(task.unwrap_or_else(|err| log::warn!("{}", err)));
}
Ok(())
}
/// # Return value
///
/// The returned future resolves when the connection is established. The connection processing
/// is spawned onto the `tokio` runtime.
fn | (
&self,
key: PublicKey,
handshake_params: &HandshakeParams,
) -> impl Future<Output = anyhow::Result<()>> {
// Resolve peer key to an address.
let maybe_address = self.connect_list.find_address_by_key(&key);
let unresolved_address = if let Some(address) = maybe_address {
address
} else {
let err = format_err!("Trying to connect to peer {} not from connect list", key);
return future::err(err).left_future();
};
let max_connections = self.network_config.max_outgoing_connections;
let mut handshake_params = handshake_params.clone();
handshake_params.set_remote_key(key);
let pool = self.pool.clone();
let network_tx = self.network_tx.clone();
let network_config = self.network_config;
let description = format!(
"Connecting to {} (remote address = {})",
key, unresolved_address
);
let on_error = ErrorAction::new(&network_config, description);
async move {
let connect = || TcpStream::connect(&unresolved_address);
// The second component of the returned value / error is the number of retries,
// which we ignore.
let (mut socket, _) = FutureRetry::new(connect, on_error)
.await
.map_err(|(err, _)| err)?;
let peer_address = match socket.peer_addr() {
Ok(addr) => addr,
Err(err) => {
let err = format_err!("Couldn't take peer addr from socket: {}", err);
return Err(err);
}
};
Self::configure_socket(&mut socket, network_config)?;
let HandshakeData {
codec,
raw_message,
peer_key,
} = NoiseHandshake::initiator(&handshake_params)
.send(&mut socket)
.await?;
if pool.read().count_outgoing() >= max_connections {
log::info!(
"Ignoring outgoing connection to {:?} because the connection limit ({}) \
is reached",
key,
max_connections
);
return Ok(());
}
let conn_addr = ConnectedPeerAddr::Out(unresolved_address, peer_address);
let connect = Self::parse_connect_msg(raw_message, &peer_key)?;
let socket = Framed::new(socket, codec);
if let Some(connection) = pool.create_connection(key, conn_addr, socket) {
let handler = Self::handle_connection(connection, connect, pool, network_tx);
tokio::spawn(handler);
}
Ok(())
}
.right_future()
}
async fn process_messages(
pool: SharedConnectionPool,
connection: Connection,
mut network_tx: mpsc::Sender<NetworkEvent>,
) {
let (sink, stream) = connection.socket.split();
let key = connection.key;
let connection_id = connection.id;
// Processing of incoming messages.
let incoming = async move {
let res = (&mut network_tx)
.sink_map_err(anyhow::Error::from)
.send_all(&mut stream.map_ok(NetworkEvent::MessageReceived))
.await;
if pool.write().remove(&key, Some(connection_id)) {
network_tx
.send(NetworkEvent::PeerDisconnected(key))
.await
.ok();
}
res
};
futures::pin_mut!(incoming);
// Processing of outgoing messages.
let outgoing = connection.receiver_rx.map(Ok).forward(sink);
// Select the first future to terminate and drop the remaining one.
let task = future::select(incoming, outgoing).map(|res| {
if let (Err(err), _) = res.factor_first() {
log::info!(
"Connection with peer {} terminated: {} (root cause: {})",
key,
err,
err.root_cause()
);
}
});
task.await
}
fn configure_socket(
socket: &mut TcpStream,
network_config: NetworkConfiguration,
) -> anyhow::Result<()> {
socket.set_nodelay(network_config.tcp_nodelay)?;
let duration = network_config.tcp_keep_alive.map(Duration::from_millis);
socket.set_keepalive(duration)?;
Ok(())
}
async fn handle_connection(
connection: Connection,
connect: Verified<Connect>,
pool: SharedConnectionPool,
mut network_tx: mpsc::Sender<NetworkEvent>,
) -> anyhow::Result<()> {
let address = connection.address.clone();
log::trace!("Established connection with peer {:?}", address);
Self::send_peer_connected_event(address, connect, &mut network_tx).await?;
Self::process_messages(pool, connection, network_tx).await;
Ok(())
}
fn parse_connect_msg(
raw: Vec<u8>,
key: &x25519::PublicKey,
) -> anyhow::Result<Verified<Connect>> {
let message = Message::from_raw_buffer(raw)?;
let connect: Verified<Connect> = match message {
Message::Service(Service::Connect(connect)) => connect,
other => bail!(
"First message from a remote peer is not `Connect`, got={:?}",
other
),
};
let author = into_x25519_public_key(connect.author());
ensure!(
author == *key,
"Connect message public key doesn't match with the received peer key"
);
Ok(connect)
}
pub async fn handle_requests(self, mut receiver: mpsc::Receiver<NetworkRequest>) {
while let Some(request) = receiver.next().await {
match request {
NetworkRequest::SendMessage(key, message) => {
let mut this = self.clone();
tokio::spawn(async move {
if let Err(e) = this.handle_send_message(key, message).await {
log::error!("Cannot send message to peer {:?}: {}", key, e);
}
});
}
#[cfg(test)]
NetworkRequest::DisconnectWithPeer(peer) => {
let disconnected = self.pool.write().remove(&peer, None);
if disconnected {
let mut network_tx = self.network_tx.clone();
tokio::spawn(async move {
network_tx
.send(NetworkEvent::PeerDisconnected(peer))
.await
.ok();
});
}
}
}
}
}
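// Usage sketch (illustrative; `requests_tx` is the hypothetical sender half of the
// channel drained by `handle_requests`):
//
// requests_tx
//     .send(NetworkRequest::SendMessage(peer_key, signed_message))
//     .await?;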
async fn handle_send_message(
&mut self,
address: PublicKey,
message: SignedMessage,
) -> anyhow::Result<()> {
if self.pool.read().contains(&address) {
self.pool.send_message(&address, message).await;
Ok(())
} else if self.can_create_connections() {
self.create_new_connection(address, message).await
} else {
self.send_unable_connect_event(address).await
}
}
async fn create_new_connection(
&self,
key: PublicKey,
message: SignedMessage,
) -> anyhow::Result<()> {
self.connect(key, &self.handshake_params).await?;
let connect = &self.handshake_params.connect;
if message != *connect.as_raw() {
self.pool.send_message(&key, message).await;
}
Ok(())
}
async fn send_peer_connected_event(
addr: ConnectedPeerAddr,
connect: Verified<Connect>,
network_tx: &mut mpsc::Sender<NetworkEvent>,
) -> anyhow::Result<()> {
let peer_connected = NetworkEvent::PeerConnected {
addr,
connect: Box::new(connect),
};
network_tx
.send(peer_connected)
.await
.map_err(|_| format_err!("Cannot send `PeerConnected` notification"))
}
fn can_create_connections(&self) -> bool {
self.pool.read().count_outgoing() < self.network_config.max_outgoing_connections
}
async fn send_unable_connect_event(&mut self, peer: PublicKey) -> anyhow::Result<()> {
let event = NetworkEvent::UnableConnectToPeer(peer);
self.network_tx
.send(event)
.await
.map_err(|_| format_err!("can't send network event"))
}
}
impl NetworkPart {
pub async fn run(self, handshake_params: HandshakeParams) {
let our_key = handshake_params.connect.author();
let handler = NetworkHandler::new(
self.listen_address,
SharedConnectionPool::new(our_key),
self.network_config,
self.network_tx,
handshake_params,
self.connect_list,
);
let listener = handler.clone().listener().unwrap_or_else(|e| {
log::error!("Listening to incoming peer connections failed: {}", e);
});
futures::pin_mut!(listener);
let request_handler = handler.handle_requests(self.network_requests);
futures::pin_mut!(request_handler);
// FIXME: is `select` appropriate here?
future::select(listener, request_handler).await;
}
}
| connect | identifier_name |
network.rs | // Copyright 2020 The Exonum Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use anyhow::{bail, ensure, format_err};
use exonum::{
crypto::{
x25519::{self, into_x25519_public_key},
PublicKey,
},
messages::{SignedMessage, Verified},
};
use futures::{channel::mpsc, future, prelude::*};
use futures_retry::{ErrorHandler, FutureRetry, RetryPolicy};
use rand::{thread_rng, Rng};
use tokio::net::{TcpListener, TcpStream};
use tokio_util::codec::Framed;
use std::{
collections::HashMap,
io,
net::SocketAddr,
ops,
sync::{Arc, RwLock},
time::Duration,
};
use crate::{
events::{
codec::MessagesCodec,
noise::{Handshake, HandshakeData, HandshakeParams, NoiseHandshake},
},
messages::{Connect, Message, Service},
state::SharedConnectList,
NetworkConfiguration,
};
const OUTGOING_CHANNEL_SIZE: usize = 10;
#[derive(Debug)]
struct ErrorAction {
retry_timeout: Duration,
max_retries: usize,
description: String,
}
impl ErrorAction {
fn new(config: &NetworkConfiguration, description: String) -> Self {
Self {
retry_timeout: Duration::from_millis(config.tcp_connect_retry_timeout),
max_retries: config.tcp_connect_max_retries as usize,
description,
}
}
}
impl ErrorHandler<io::Error> for ErrorAction {
type OutError = io::Error;
fn handle(&mut self, attempt: usize, e: io::Error) -> RetryPolicy<io::Error> {
log::trace!(
"{} failed [Attempt: {}/{}]: {}",
self.description,
attempt,
self.max_retries,
e
);
if attempt >= self.max_retries {
RetryPolicy::ForwardError(e)
} else {
let jitter = thread_rng().gen_range(0.5, 1.0);
let timeout = self.retry_timeout.mul_f64(jitter);
RetryPolicy::WaitRetry(timeout)
}
}
}
#[derive(Debug, Clone)]
pub enum ConnectedPeerAddr {
In(SocketAddr),
Out(String, SocketAddr),
}
impl ConnectedPeerAddr {
pub fn is_incoming(&self) -> bool {
match self {
Self::In(_) => true,
Self::Out(_, _) => false,
}
}
}
/// Network events.
#[derive(Debug)]
pub enum NetworkEvent {
/// A message was received from the network.
MessageReceived(Vec<u8>),
/// The node has connected to a peer.
PeerConnected {
/// Peer address.
addr: ConnectedPeerAddr,
/// Connect message.
connect: Box<Verified<Connect>>,
},
/// The node has disconnected from a peer.
PeerDisconnected(PublicKey),
/// Connection to a peer failed.
UnableConnectToPeer(PublicKey),
}
#[derive(Debug, Clone)]
pub enum NetworkRequest {
SendMessage(PublicKey, SignedMessage),
#[cfg(test)]
DisconnectWithPeer(PublicKey),
}
#[derive(Debug)]
pub struct NetworkPart {
pub our_connect_message: Verified<Connect>,
pub listen_address: SocketAddr,
pub network_config: NetworkConfiguration,
pub max_message_len: u32,
pub network_requests: mpsc::Receiver<NetworkRequest>,
pub network_tx: mpsc::Sender<NetworkEvent>,
pub(crate) connect_list: SharedConnectList,
}
#[derive(Clone, Debug)]
struct ConnectionPoolEntry {
sender: mpsc::Sender<SignedMessage>,
address: ConnectedPeerAddr,
// Connection ID assigned to the connection during instantiation. This ID is unique among
// all connections and is used in `ConnectionPool::remove()` to figure out whether
// it still makes sense to remove a connection, or the request has been obsoleted.
id: u64,
}
#[derive(Clone, Debug)]
struct SharedConnectionPool {
inner: Arc<RwLock<ConnectionPool>>,
}
impl SharedConnectionPool {
fn new(our_key: PublicKey) -> Self {
Self {
inner: Arc::new(RwLock::new(ConnectionPool::new(our_key))),
}
}
fn read(&self) -> impl ops::Deref<Target = ConnectionPool> + '_ {
self.inner.read().unwrap()
}
fn write(&self) -> impl ops::DerefMut<Target = ConnectionPool> + '_ {
self.inner.write().unwrap()
}
async fn send_message(&self, peer_key: &PublicKey, message: SignedMessage) {
let maybe_peer_info = {
// Ensure that we don't hold the lock across the `await` point.
let peers = &self.inner.read().unwrap().peers;
peers
.get(peer_key)
.map(|peer| (peer.sender.clone(), peer.id))
};
if let Some((mut sender, connection_id)) = maybe_peer_info {
if sender.send(message).await.is_err() {
log::warn!("Cannot send message to peer {}", peer_key);
self.write().remove(peer_key, Some(connection_id));
}
}
}
fn create_connection(
&self,
peer_key: PublicKey,
address: ConnectedPeerAddr,
socket: Framed<TcpStream, MessagesCodec>,
) -> Option<Connection> {
let mut guard = self.write();
if guard.contains(&peer_key) && Self::ignore_connection(guard.our_key, peer_key) {
log::info!("Ignoring connection to {:?} per priority rules", peer_key);
return None;
}
let (receiver_rx, connection_id) = guard.add(peer_key, address.clone());
Some(Connection {
socket,
receiver_rx,
address,
key: peer_key,
id: connection_id,
})
}
/// Provides a complete, anti-symmetric relation between two peers bound in a connection.
/// The peers use it to decide which of the two connections is kept alive
/// when they connect to each other simultaneously.
fn ignore_connection(our_key: PublicKey, their_key: PublicKey) -> bool {
our_key[..] < their_key[..]
}
}
#[derive(Debug)]
struct ConnectionPool {
peers: HashMap<PublicKey, ConnectionPoolEntry>,
our_key: PublicKey,
next_connection_id: u64,
}
impl ConnectionPool {
fn new(our_key: PublicKey) -> Self {
Self {
peers: HashMap::new(),
our_key,
next_connection_id: 0,
}
}
fn count_incoming(&self) -> usize {
self.peers
.values()
.filter(|entry| entry.address.is_incoming())
.count()
}
fn count_outgoing(&self) -> usize {
self.peers
.values()
.filter(|entry| !entry.address.is_incoming())
.count()
}
/// Adds a peer to the connection list.
///
/// # Return value
///
/// Returns the receiver for outgoing messages to the peer and the connection ID.
fn add(
&mut self,
key: PublicKey,
address: ConnectedPeerAddr,
) -> (mpsc::Receiver<SignedMessage>, u64) {
let id = self.next_connection_id;
let (sender, receiver_rx) = mpsc::channel(OUTGOING_CHANNEL_SIZE);
let entry = ConnectionPoolEntry {
sender,
address,
id,
};
self.next_connection_id += 1;
self.peers.insert(key, entry);
(receiver_rx, id)
}
fn contains(&self, address: &PublicKey) -> bool {
self.peers.get(address).is_some()
}
/// Drops the connection to a peer. The request can be optionally filtered by the connection ID
/// in order to avoid issuing obsolete requests.
///
/// # Return value
///
/// Returns `true` if the connection with the peer was dropped. If the connection with the
/// peer was not dropped (either because it did not exist, or because
/// the provided `connection_id` is outdated), returns `false`.
fn remove(&mut self, address: &PublicKey, connection_id: Option<u64>) -> bool {
if let Some(entry) = self.peers.get(address) {
if connection_id.map_or(true, |id| id == entry.id) {
self.peers.remove(address);
return true;
}
}
false
}
}
struct Connection {
socket: Framed<TcpStream, MessagesCodec>,
receiver_rx: mpsc::Receiver<SignedMessage>,
address: ConnectedPeerAddr,
key: PublicKey,
id: u64,
}
#[derive(Clone)]
struct NetworkHandler {
listen_address: SocketAddr,
pool: SharedConnectionPool,
network_config: NetworkConfiguration,
network_tx: mpsc::Sender<NetworkEvent>,
handshake_params: HandshakeParams,
connect_list: SharedConnectList,
}
impl NetworkHandler {
fn new(
address: SocketAddr,
connection_pool: SharedConnectionPool,
network_config: NetworkConfiguration,
network_tx: mpsc::Sender<NetworkEvent>,
handshake_params: HandshakeParams,
connect_list: SharedConnectList,
) -> Self {
Self {
listen_address: address,
pool: connection_pool,
network_config,
network_tx,
handshake_params,
connect_list,
}
}
async fn listener(self) -> anyhow::Result<()> {
let mut listener = TcpListener::bind(&self.listen_address).await?;
let mut incoming_connections = listener.incoming();
// Incoming connections limiter
let incoming_connections_limit = self.network_config.max_incoming_connections;
while let Some(mut socket) = incoming_connections.try_next().await? {
let peer_address = match socket.peer_addr() {
Ok(address) => address,
Err(err) => {
log::warn!("Peer address resolution failed: {}", err);
continue;
}
};
// Check incoming connections count.
let connections_count = self.pool.read().count_incoming();
if connections_count >= incoming_connections_limit {
log::warn!(
"Rejected incoming connection with peer={}, connections limit reached.",
peer_address
);
continue;
}
let pool = self.pool.clone();
let connect_list = self.connect_list.clone();
let network_tx = self.network_tx.clone();
let handshake = NoiseHandshake::responder(&self.handshake_params);
let task = async move {
let HandshakeData {
codec,
raw_message,
peer_key,
} = handshake.listen(&mut socket).await?;
let connect = Self::parse_connect_msg(raw_message, &peer_key)?;
let peer_key = connect.author();
if !connect_list.is_peer_allowed(&peer_key) {
bail!(
"Rejecting incoming connection with peer={} public_key={}, \
the peer is not in the connect list",
peer_address,
peer_key
);
}
let conn_addr = ConnectedPeerAddr::In(peer_address);
let socket = Framed::new(socket, codec);
let maybe_connection = pool.create_connection(peer_key, conn_addr, socket);
if let Some(connection) = maybe_connection {
Self::handle_connection(connection, connect, pool, network_tx).await
} else {
Ok(())
}
};
tokio::spawn(task.unwrap_or_else(|err| log::warn!("{}", err)));
}
Ok(())
}
/// # Return value
///
/// The returned future resolves when the connection is established. The connection processing
/// is spawned onto the `tokio` runtime.
fn connect(
&self,
key: PublicKey,
handshake_params: &HandshakeParams,
) -> impl Future<Output = anyhow::Result<()>> {
// Resolve peer key to an address.
let maybe_address = self.connect_list.find_address_by_key(&key);
let unresolved_address = if let Some(address) = maybe_address {
address
} else {
let err = format_err!("Trying to connect to peer {} not from connect list", key);
return future::err(err).left_future();
};
let max_connections = self.network_config.max_outgoing_connections;
let mut handshake_params = handshake_params.clone();
handshake_params.set_remote_key(key);
let pool = self.pool.clone();
let network_tx = self.network_tx.clone();
let network_config = self.network_config;
let description = format!(
"Connecting to {} (remote address = {})",
key, unresolved_address
);
let on_error = ErrorAction::new(&network_config, description);
async move {
let connect = || TcpStream::connect(&unresolved_address);
// The second component of the returned value / error is the number of retries,
// which we ignore.
let (mut socket, _) = FutureRetry::new(connect, on_error)
.await
.map_err(|(err, _)| err)?;
let peer_address = match socket.peer_addr() {
Ok(addr) => addr,
Err(err) => {
let err = format_err!("Couldn't take peer addr from socket: {}", err);
return Err(err);
}
};
Self::configure_socket(&mut socket, network_config)?;
let HandshakeData {
codec,
raw_message,
peer_key,
} = NoiseHandshake::initiator(&handshake_params) | "Ignoring outgoing connection to {:?} because the connection limit ({}) \
is reached",
key,
max_connections
);
return Ok(());
}
let conn_addr = ConnectedPeerAddr::Out(unresolved_address, peer_address);
let connect = Self::parse_connect_msg(raw_message, &peer_key)?;
let socket = Framed::new(socket, codec);
if let Some(connection) = pool.create_connection(key, conn_addr, socket) {
let handler = Self::handle_connection(connection, connect, pool, network_tx);
tokio::spawn(handler);
}
Ok(())
}
.right_future()
}
async fn process_messages(
pool: SharedConnectionPool,
connection: Connection,
mut network_tx: mpsc::Sender<NetworkEvent>,
) {
let (sink, stream) = connection.socket.split();
let key = connection.key;
let connection_id = connection.id;
// Processing of incoming messages.
let incoming = async move {
let res = (&mut network_tx)
.sink_map_err(anyhow::Error::from)
.send_all(&mut stream.map_ok(NetworkEvent::MessageReceived))
.await;
if pool.write().remove(&key, Some(connection_id)) {
network_tx
.send(NetworkEvent::PeerDisconnected(key))
.await
.ok();
}
res
};
futures::pin_mut!(incoming);
// Processing of outgoing messages.
let outgoing = connection.receiver_rx.map(Ok).forward(sink);
// Select the first future to terminate and drop the remaining one.
let task = future::select(incoming, outgoing).map(|res| {
if let (Err(err), _) = res.factor_first() {
log::info!(
"Connection with peer {} terminated: {} (root cause: {})",
key,
err,
err.root_cause()
);
}
});
task.await
}
fn configure_socket(
socket: &mut TcpStream,
network_config: NetworkConfiguration,
) -> anyhow::Result<()> {
socket.set_nodelay(network_config.tcp_nodelay)?;
let duration = network_config.tcp_keep_alive.map(Duration::from_millis);
socket.set_keepalive(duration)?;
Ok(())
}
async fn handle_connection(
connection: Connection,
connect: Verified<Connect>,
pool: SharedConnectionPool,
mut network_tx: mpsc::Sender<NetworkEvent>,
) -> anyhow::Result<()> {
let address = connection.address.clone();
log::trace!("Established connection with peer {:?}", address);
Self::send_peer_connected_event(address, connect, &mut network_tx).await?;
Self::process_messages(pool, connection, network_tx).await;
Ok(())
}
fn parse_connect_msg(
raw: Vec<u8>,
key: &x25519::PublicKey,
) -> anyhow::Result<Verified<Connect>> {
let message = Message::from_raw_buffer(raw)?;
let connect: Verified<Connect> = match message {
Message::Service(Service::Connect(connect)) => connect,
other => bail!(
"First message from a remote peer is not `Connect`, got={:?}",
other
),
};
let author = into_x25519_public_key(connect.author());
ensure!(
author == *key,
"Connect message public key doesn't match with the received peer key"
);
Ok(connect)
}
pub async fn handle_requests(self, mut receiver: mpsc::Receiver<NetworkRequest>) {
while let Some(request) = receiver.next().await {
match request {
NetworkRequest::SendMessage(key, message) => {
let mut this = self.clone();
tokio::spawn(async move {
if let Err(e) = this.handle_send_message(key, message).await {
log::error!("Cannot send message to peer {:?}: {}", key, e);
}
});
}
#[cfg(test)]
NetworkRequest::DisconnectWithPeer(peer) => {
let disconnected = self.pool.write().remove(&peer, None);
if disconnected {
let mut network_tx = self.network_tx.clone();
tokio::spawn(async move {
network_tx
.send(NetworkEvent::PeerDisconnected(peer))
.await
.ok();
});
}
}
}
}
}
async fn handle_send_message(
&mut self,
address: PublicKey,
message: SignedMessage,
) -> anyhow::Result<()> {
if self.pool.read().contains(&address) {
self.pool.send_message(&address, message).await;
Ok(())
} else if self.can_create_connections() {
self.create_new_connection(address, message).await
} else {
self.send_unable_connect_event(address).await
}
}
async fn create_new_connection(
&self,
key: PublicKey,
message: SignedMessage,
) -> anyhow::Result<()> {
self.connect(key, &self.handshake_params).await?;
let connect = &self.handshake_params.connect;
if message != *connect.as_raw() {
self.pool.send_message(&key, message).await;
}
Ok(())
}
async fn send_peer_connected_event(
addr: ConnectedPeerAddr,
connect: Verified<Connect>,
network_tx: &mut mpsc::Sender<NetworkEvent>,
) -> anyhow::Result<()> {
let peer_connected = NetworkEvent::PeerConnected {
addr,
connect: Box::new(connect),
};
network_tx
.send(peer_connected)
.await
.map_err(|_| format_err!("Cannot send `PeerConnected` notification"))
}
fn can_create_connections(&self) -> bool {
self.pool.read().count_outgoing() < self.network_config.max_outgoing_connections
}
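// Example (illustrative): with `max_outgoing_connections = 128`, once 128 outgoing
// peers are live, `handle_send_message` stops dialing unknown peers and emits
// `NetworkEvent::UnableConnectToPeer` instead.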
async fn send_unable_connect_event(&mut self, peer: PublicKey) -> anyhow::Result<()> {
let event = NetworkEvent::UnableConnectToPeer(peer);
self.network_tx
.send(event)
.await
.map_err(|_| format_err!("can't send network event"))
}
}
impl NetworkPart {
pub async fn run(self, handshake_params: HandshakeParams) {
let our_key = handshake_params.connect.author();
let handler = NetworkHandler::new(
self.listen_address,
SharedConnectionPool::new(our_key),
self.network_config,
self.network_tx,
handshake_params,
self.connect_list,
);
let listener = handler.clone().listener().unwrap_or_else(|e| {
log::error!("Listening to incoming peer connections failed: {}", e);
});
futures::pin_mut!(listener);
let request_handler = handler.handle_requests(self.network_requests);
futures::pin_mut!(request_handler);
// FIXME: is `select` appropriate here?
future::select(listener, request_handler).await;
}
} | .send(&mut socket)
.await?;
if pool.read().count_outgoing() >= max_connections {
log::info!( | random_line_split |
network.rs | // Copyright 2020 The Exonum Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use anyhow::{bail, ensure, format_err};
use exonum::{
crypto::{
x25519::{self, into_x25519_public_key},
PublicKey,
},
messages::{SignedMessage, Verified},
};
use futures::{channel::mpsc, future, prelude::*};
use futures_retry::{ErrorHandler, FutureRetry, RetryPolicy};
use rand::{thread_rng, Rng};
use tokio::net::{TcpListener, TcpStream};
use tokio_util::codec::Framed;
use std::{
collections::HashMap,
io,
net::SocketAddr,
ops,
sync::{Arc, RwLock},
time::Duration,
};
use crate::{
events::{
codec::MessagesCodec,
noise::{Handshake, HandshakeData, HandshakeParams, NoiseHandshake},
},
messages::{Connect, Message, Service},
state::SharedConnectList,
NetworkConfiguration,
};
const OUTGOING_CHANNEL_SIZE: usize = 10;
#[derive(Debug)]
struct ErrorAction {
retry_timeout: Duration,
max_retries: usize,
description: String,
}
impl ErrorAction {
fn new(config: &NetworkConfiguration, description: String) -> Self {
Self {
retry_timeout: Duration::from_millis(config.tcp_connect_retry_timeout),
max_retries: config.tcp_connect_max_retries as usize,
description,
}
}
}
impl ErrorHandler<io::Error> for ErrorAction {
type OutError = io::Error;
fn handle(&mut self, attempt: usize, e: io::Error) -> RetryPolicy<io::Error> {
log::trace!(
"{} failed [Attempt: {}/{}]: {}",
self.description,
attempt,
self.max_retries,
e
);
if attempt >= self.max_retries {
RetryPolicy::ForwardError(e)
} else {
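// Scale the retry delay by a random factor in [0.5, 1.0) so peers that
// failed at the same moment do not all retry in lockstep.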
let jitter = thread_rng().gen_range(0.5, 1.0);
let timeout = self.retry_timeout.mul_f64(jitter);
RetryPolicy::WaitRetry(timeout)
}
}
}
#[derive(Debug, Clone)]
pub enum ConnectedPeerAddr {
In(SocketAddr),
Out(String, SocketAddr),
}
impl ConnectedPeerAddr {
pub fn is_incoming(&self) -> bool {
match self {
Self::In(_) => true,
Self::Out(_, _) => false,
}
}
}
/// Network events.
#[derive(Debug)]
pub enum NetworkEvent {
/// A message was received from the network.
MessageReceived(Vec<u8>),
/// The node has connected to a peer.
PeerConnected {
/// Peer address.
addr: ConnectedPeerAddr,
/// Connect message.
connect: Box<Verified<Connect>>,
},
/// The node has disconnected from a peer.
PeerDisconnected(PublicKey),
/// Connection to a peer failed.
UnableConnectToPeer(PublicKey),
}
#[derive(Debug, Clone)]
pub enum NetworkRequest {
SendMessage(PublicKey, SignedMessage),
#[cfg(test)]
DisconnectWithPeer(PublicKey),
}
#[derive(Debug)]
pub struct NetworkPart {
pub our_connect_message: Verified<Connect>,
pub listen_address: SocketAddr,
pub network_config: NetworkConfiguration,
pub max_message_len: u32,
pub network_requests: mpsc::Receiver<NetworkRequest>,
pub network_tx: mpsc::Sender<NetworkEvent>,
pub(crate) connect_list: SharedConnectList,
}
#[derive(Clone, Debug)]
struct ConnectionPoolEntry {
sender: mpsc::Sender<SignedMessage>,
address: ConnectedPeerAddr,
// Connection ID assigned to the connection during instantiation. This ID is unique among
// all connections and is used in `ConnectList::remove()` to figure out whether
// it would make sense to remove a connection, or the request has been obsoleted.
id: u64,
}
#[derive(Clone, Debug)]
struct SharedConnectionPool {
inner: Arc<RwLock<ConnectionPool>>,
}
impl SharedConnectionPool {
fn new(our_key: PublicKey) -> Self {
Self {
inner: Arc::new(RwLock::new(ConnectionPool::new(our_key))),
}
}
fn read(&self) -> impl ops::Deref<Target = ConnectionPool> + '_ {
self.inner.read().unwrap()
}
fn write(&self) -> impl ops::DerefMut<Target = ConnectionPool> + '_ {
self.inner.write().unwrap()
}
async fn send_message(&self, peer_key: &PublicKey, message: SignedMessage) {
let maybe_peer_info = {
// Ensure that we don't hold the lock across the `await` point.
let peers = &self.inner.read().unwrap().peers;
peers
.get(peer_key)
.map(|peer| (peer.sender.clone(), peer.id))
};
if let Some((mut sender, connection_id)) = maybe_peer_info {
if sender.send(message).await.is_err() {
log::warn!("Cannot send message to peer {}", peer_key);
self.write().remove(peer_key, Some(connection_id));
}
}
}
fn create_connection(
&self,
peer_key: PublicKey,
address: ConnectedPeerAddr,
socket: Framed<TcpStream, MessagesCodec>,
) -> Option<Connection> {
let mut guard = self.write();
if guard.contains(&peer_key) && Self::ignore_connection(guard.our_key, peer_key) {
log::info!("Ignoring connection to {:?} per priority rules", peer_key);
return None;
}
let (receiver_rx, connection_id) = guard.add(peer_key, address.clone());
Some(Connection {
socket,
receiver_rx,
address,
key: peer_key,
id: connection_id,
})
}
/// Provides a complete, anti-symmetric relation between two peers bound in a connection.
/// This is used by the peers to decide which one of two connections are left alive
/// if the peers connect to each other simultaneously.
fn ignore_connection(our_key: PublicKey, their_key: PublicKey) -> bool {
our_key[..] < their_key[..]
}
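// Illustration (hypothetical keys): for any two distinct keys exactly one of
// `ignore_connection(a, b)` and `ignore_connection(b, a)` is true, so when two
// peers dial each other simultaneously, exactly one connection survives.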
}
#[derive(Debug)]
struct ConnectionPool {
peers: HashMap<PublicKey, ConnectionPoolEntry>,
our_key: PublicKey,
next_connection_id: u64,
}
impl ConnectionPool {
fn new(our_key: PublicKey) -> Self {
Self {
peers: HashMap::new(),
our_key,
next_connection_id: 0,
}
}
fn count_incoming(&self) -> usize {
self.peers
.values()
.filter(|entry| entry.address.is_incoming())
.count()
}
fn count_outgoing(&self) -> usize {
self.peers
.values()
.filter(|entry| !entry.address.is_incoming())
.count()
}
/// Adds a peer to the connection list.
///
/// # Return value
///
/// Returns the receiver for outgoing messages to the peer and the connection ID.
fn add(
&mut self,
key: PublicKey,
address: ConnectedPeerAddr,
) -> (mpsc::Receiver<SignedMessage>, u64) {
let id = self.next_connection_id;
let (sender, receiver_rx) = mpsc::channel(OUTGOING_CHANNEL_SIZE);
let entry = ConnectionPoolEntry {
sender,
address,
id,
};
self.next_connection_id += 1;
self.peers.insert(key, entry);
(receiver_rx, id)
}
fn contains(&self, address: &PublicKey) -> bool |
/// Drops the connection to a peer. The request can be optionally filtered by the connection ID
/// in order to avoid issuing obsolete requests.
///
/// # Return value
///
/// Returns `true` if the connection with the peer was dropped. If the connection with the
/// peer was not dropped (either because it did not exist, or because
/// the provided `connection_id` is outdated), returns `false`.
fn remove(&mut self, address: &PublicKey, connection_id: Option<u64>) -> bool {
if let Some(entry) = self.peers.get(address) {
if connection_id.map_or(true, |id| id == entry.id) {
self.peers.remove(address);
return true;
}
}
false
}
}
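// A minimal sketch (not part of the original sources) of the connection-ID
// contract documented on `remove` above: re-adding a peer assigns a fresh ID,
// so a removal request that still carries the old ID must be ignored. The
// harness assumes `exonum::crypto::gen_keypair` is available for key creation.
#[cfg(test)]
mod connection_id_sketch {
    use super::*;
    use exonum::crypto::gen_keypair;

    #[test]
    fn outdated_id_does_not_evict_newer_connection() {
        let (our_key, _) = gen_keypair();
        let (peer_key, _) = gen_keypair();
        let mut pool = ConnectionPool::new(our_key);
        let addr = ConnectedPeerAddr::In("127.0.0.1:0".parse().unwrap());

        // First connection, then a reconnect that replaces it with a new ID.
        let (_rx_old, old_id) = pool.add(peer_key, addr.clone());
        let (_rx_new, new_id) = pool.add(peer_key, addr);

        assert!(!pool.remove(&peer_key, Some(old_id))); // obsolete request: ignored
        assert!(pool.remove(&peer_key, Some(new_id))); // current request: dropped
    }
}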
struct Connection {
socket: Framed<TcpStream, MessagesCodec>,
receiver_rx: mpsc::Receiver<SignedMessage>,
address: ConnectedPeerAddr,
key: PublicKey,
id: u64,
}
#[derive(Clone)]
struct NetworkHandler {
listen_address: SocketAddr,
pool: SharedConnectionPool,
network_config: NetworkConfiguration,
network_tx: mpsc::Sender<NetworkEvent>,
handshake_params: HandshakeParams,
connect_list: SharedConnectList,
}
impl NetworkHandler {
fn new(
address: SocketAddr,
connection_pool: SharedConnectionPool,
network_config: NetworkConfiguration,
network_tx: mpsc::Sender<NetworkEvent>,
handshake_params: HandshakeParams,
connect_list: SharedConnectList,
) -> Self {
Self {
listen_address: address,
pool: connection_pool,
network_config,
network_tx,
handshake_params,
connect_list,
}
}
async fn listener(self) -> anyhow::Result<()> {
let mut listener = TcpListener::bind(&self.listen_address).await?;
let mut incoming_connections = listener.incoming();
// Incoming connections limiter
let incoming_connections_limit = self.network_config.max_incoming_connections;
while let Some(mut socket) = incoming_connections.try_next().await? {
let peer_address = match socket.peer_addr() {
Ok(address) => address,
Err(err) => {
log::warn!("Peer address resolution failed: {}", err);
continue;
}
};
// Check incoming connections count.
let connections_count = self.pool.read().count_incoming();
if connections_count >= incoming_connections_limit {
log::warn!(
"Rejected incoming connection with peer={}, connections limit reached.",
peer_address
);
continue;
}
let pool = self.pool.clone();
let connect_list = self.connect_list.clone();
let network_tx = self.network_tx.clone();
let handshake = NoiseHandshake::responder(&self.handshake_params);
let task = async move {
let HandshakeData {
codec,
raw_message,
peer_key,
} = handshake.listen(&mut socket).await?;
let connect = Self::parse_connect_msg(raw_message, &peer_key)?;
let peer_key = connect.author();
if !connect_list.is_peer_allowed(&peer_key) {
bail!(
"Rejecting incoming connection with peer={} public_key={}, \
the peer is not in the connect list",
peer_address,
peer_key
);
}
let conn_addr = ConnectedPeerAddr::In(peer_address);
let socket = Framed::new(socket, codec);
let maybe_connection = pool.create_connection(peer_key, conn_addr, socket);
if let Some(connection) = maybe_connection {
Self::handle_connection(connection, connect, pool, network_tx).await
} else {
Ok(())
}
};
tokio::spawn(task.unwrap_or_else(|err| log::warn!("{}", err)));
}
Ok(())
}
/// # Return value
///
/// The returned future resolves when the connection is established. The connection processing
/// is spawned onto `tokio` runtime.
fn connect(
&self,
key: PublicKey,
handshake_params: &HandshakeParams,
) -> impl Future<Output = anyhow::Result<()>> {
// Resolve peer key to an address.
let maybe_address = self.connect_list.find_address_by_key(&key);
let unresolved_address = if let Some(address) = maybe_address {
address
} else {
let err = format_err!("Trying to connect to peer {} not from connect list", key);
return future::err(err).left_future();
};
let max_connections = self.network_config.max_outgoing_connections;
let mut handshake_params = handshake_params.clone();
handshake_params.set_remote_key(key);
let pool = self.pool.clone();
let network_tx = self.network_tx.clone();
let network_config = self.network_config;
let description = format!(
"Connecting to {} (remote address = {})",
key, unresolved_address
);
let on_error = ErrorAction::new(&network_config, description);
async move {
let connect = || TcpStream::connect(&unresolved_address);
// The second component in returned value / error is the number of retries,
// which we ignore.
let (mut socket, _) = FutureRetry::new(connect, on_error)
.await
.map_err(|(err, _)| err)?;
let peer_address = match socket.peer_addr() {
Ok(addr) => addr,
Err(err) => {
let err = format_err!("Couldn't take peer addr from socket: {}", err);
return Err(err);
}
};
Self::configure_socket(&mut socket, network_config)?;
let HandshakeData {
codec,
raw_message,
peer_key,
} = NoiseHandshake::initiator(&handshake_params)
.send(&mut socket)
.await?;
if pool.read().count_outgoing() >= max_connections {
log::info!(
"Ignoring outgoing connection to {:?} because the connection limit ({}) \
is reached",
key,
max_connections
);
return Ok(());
}
let conn_addr = ConnectedPeerAddr::Out(unresolved_address, peer_address);
let connect = Self::parse_connect_msg(raw_message, &peer_key)?;
let socket = Framed::new(socket, codec);
if let Some(connection) = pool.create_connection(key, conn_addr, socket) {
let handler = Self::handle_connection(connection, connect, pool, network_tx);
tokio::spawn(handler);
}
Ok(())
}
.right_future()
}
async fn process_messages(
pool: SharedConnectionPool,
connection: Connection,
mut network_tx: mpsc::Sender<NetworkEvent>,
) {
let (sink, stream) = connection.socket.split();
let key = connection.key;
let connection_id = connection.id;
// Processing of incoming messages.
let incoming = async move {
let res = (&mut network_tx)
.sink_map_err(anyhow::Error::from)
.send_all(&mut stream.map_ok(NetworkEvent::MessageReceived))
.await;
if pool.write().remove(&key, Some(connection_id)) {
network_tx
.send(NetworkEvent::PeerDisconnected(key))
.await
.ok();
}
res
};
futures::pin_mut!(incoming);
// Processing of outgoing messages.
let outgoing = connection.receiver_rx.map(Ok).forward(sink);
// Select the first future to terminate and drop the remaining one.
let task = future::select(incoming, outgoing).map(|res| {
if let (Err(err), _) = res.factor_first() {
log::info!(
"Connection with peer {} terminated: {} (root cause: {})",
key,
err,
err.root_cause()
);
}
});
task.await
}
fn configure_socket(
socket: &mut TcpStream,
network_config: NetworkConfiguration,
) -> anyhow::Result<()> {
socket.set_nodelay(network_config.tcp_nodelay)?;
let duration = network_config.tcp_keep_alive.map(Duration::from_millis);
socket.set_keepalive(duration)?;
Ok(())
}
async fn handle_connection(
connection: Connection,
connect: Verified<Connect>,
pool: SharedConnectionPool,
mut network_tx: mpsc::Sender<NetworkEvent>,
) -> anyhow::Result<()> {
let address = connection.address.clone();
log::trace!("Established connection with peer {:?}", address);
Self::send_peer_connected_event(address, connect, &mut network_tx).await?;
Self::process_messages(pool, connection, network_tx).await;
Ok(())
}
fn parse_connect_msg(
raw: Vec<u8>,
key: &x25519::PublicKey,
) -> anyhow::Result<Verified<Connect>> {
let message = Message::from_raw_buffer(raw)?;
let connect: Verified<Connect> = match message {
Message::Service(Service::Connect(connect)) => connect,
other => bail!(
"First message from a remote peer is not `Connect`, got={:?}",
other
),
};
let author = into_x25519_public_key(connect.author());
ensure!(
author == *key,
"Connect message public key doesn't match the received peer key"
);
Ok(connect)
}
pub async fn handle_requests(self, mut receiver: mpsc::Receiver<NetworkRequest>) {
while let Some(request) = receiver.next().await {
match request {
NetworkRequest::SendMessage(key, message) => {
let mut this = self.clone();
tokio::spawn(async move {
if let Err(e) = this.handle_send_message(key, message).await {
log::error!("Cannot send message to peer {:?}: {}", key, e);
}
});
}
#[cfg(test)]
NetworkRequest::DisconnectWithPeer(peer) => {
let disconnected = self.pool.write().remove(&peer, None);
if disconnected {
let mut network_tx = self.network_tx.clone();
tokio::spawn(async move {
network_tx
.send(NetworkEvent::PeerDisconnected(peer))
.await
.ok();
});
}
}
}
}
}
async fn handle_send_message(
&mut self,
address: PublicKey,
message: SignedMessage,
) -> anyhow::Result<()> {
if self.pool.read().contains(&address) {
self.pool.send_message(&address, message).await;
Ok(())
} else if self.can_create_connections() {
self.create_new_connection(address, message).await
} else {
self.send_unable_connect_event(address).await
}
}
async fn create_new_connection(
&self,
key: PublicKey,
message: SignedMessage,
) -> anyhow::Result<()> {
self.connect(key, &self.handshake_params).await?;
let connect = &self.handshake_params.connect;
if message != *connect.as_raw() {
self.pool.send_message(&key, message).await;
}
Ok(())
}
async fn send_peer_connected_event(
addr: ConnectedPeerAddr,
connect: Verified<Connect>,
network_tx: &mut mpsc::Sender<NetworkEvent>,
) -> anyhow::Result<()> {
let peer_connected = NetworkEvent::PeerConnected {
addr,
connect: Box::new(connect),
};
network_tx
.send(peer_connected)
.await
.map_err(|_| format_err!("Cannot send `PeerConnected` notification"))
}
fn can_create_connections(&self) -> bool {
self.pool.read().count_outgoing() < self.network_config.max_outgoing_connections
}
async fn send_unable_connect_event(&mut self, peer: PublicKey) -> anyhow::Result<()> {
let event = NetworkEvent::UnableConnectToPeer(peer);
self.network_tx
.send(event)
.await
.map_err(|_| format_err!("can't send network event"))
}
}
impl NetworkPart {
pub async fn run(self, handshake_params: HandshakeParams) {
let our_key = handshake_params.connect.author();
let handler = NetworkHandler::new(
self.listen_address,
SharedConnectionPool::new(our_key),
self.network_config,
self.network_tx,
handshake_params,
self.connect_list,
);
let listener = handler.clone().listener().unwrap_or_else(|e| {
log::error!("Listening to incoming peer connections failed: {}", e);
});
futures::pin_mut!(listener);
let request_handler = handler.handle_requests(self.network_requests);
futures::pin_mut!(request_handler);
// FIXME: is `select` appropriate here?
future::select(listener, request_handler).await;
}
}
| {
self.peers.get(address).is_some()
} | identifier_body |
shortlex_strings_using_chars.rs | use itertools::Itertools;
use malachite_base::chars::exhaustive::exhaustive_ascii_chars;
use malachite_base::strings::exhaustive::shortlex_strings_using_chars; | let ss = shortlex_strings_using_chars(cs).take(20).collect_vec();
assert_eq!(ss.iter().map(String::as_str).collect_vec().as_slice(), out);
}
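// Shortlex order compares by length first and lexicographically within each
// length, so every alphabet enumerates "", then all 1-character strings, then
// all 2-character strings, and so on — visible in the expected outputs below.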
#[test]
fn test_shortlex_strings_using_chars() {
shortlex_strings_using_chars_helper(empty(), &[""]);
shortlex_strings_using_chars_helper(
once('a'),
&[
"",
"a",
"aa",
"aaa",
"aaaa",
"aaaaa",
"aaaaaa",
"aaaaaaa",
"aaaaaaaa",
"aaaaaaaaa",
"aaaaaaaaaa",
"aaaaaaaaaaa",
"aaaaaaaaaaaa",
"aaaaaaaaaaaaa",
"aaaaaaaaaaaaaa",
"aaaaaaaaaaaaaaa",
"aaaaaaaaaaaaaaaa",
"aaaaaaaaaaaaaaaaa",
"aaaaaaaaaaaaaaaaaa",
"aaaaaaaaaaaaaaaaaaa",
],
);
shortlex_strings_using_chars_helper(
"ab".chars(),
&[
"", "a", "b", "aa", "ab", "ba", "bb", "aaa", "aab", "aba", "abb", "baa", "bab", "bba",
"bbb", "aaaa", "aaab", "aaba", "aabb", "abaa",
],
);
shortlex_strings_using_chars_helper(
"xyz".chars(),
&[
"", "x", "y", "z", "xx", "xy", "xz", "yx", "yy", "yz", "zx", "zy", "zz", "xxx", "xxy",
"xxz", "xyx", "xyy", "xyz", "xzx",
],
);
shortlex_strings_using_chars_helper(
exhaustive_ascii_chars(),
&[
"", "a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l", "m", "n", "o", "p",
"q", "r", "s",
],
);
} | use std::iter::{empty, once};
fn shortlex_strings_using_chars_helper<I: Clone + Iterator<Item = char>>(cs: I, out: &[&str]) { | random_line_split |
shortlex_strings_using_chars.rs | use itertools::Itertools;
use malachite_base::chars::exhaustive::exhaustive_ascii_chars;
use malachite_base::strings::exhaustive::shortlex_strings_using_chars;
use std::iter::{empty, once};
fn shortlex_strings_using_chars_helper<I: Clone + Iterator<Item = char>>(cs: I, out: &[&str]) |
#[test]
fn test_shortlex_strings_using_chars() {
shortlex_strings_using_chars_helper(empty(), &[""]);
shortlex_strings_using_chars_helper(
once('a'),
&[
"",
"a",
"aa",
"aaa",
"aaaa",
"aaaaa",
"aaaaaa",
"aaaaaaa",
"aaaaaaaa",
"aaaaaaaaa",
"aaaaaaaaaa",
"aaaaaaaaaaa",
"aaaaaaaaaaaa",
"aaaaaaaaaaaaa",
"aaaaaaaaaaaaaa",
"aaaaaaaaaaaaaaa",
"aaaaaaaaaaaaaaaa",
"aaaaaaaaaaaaaaaaa",
"aaaaaaaaaaaaaaaaaa",
"aaaaaaaaaaaaaaaaaaa",
],
);
shortlex_strings_using_chars_helper(
"ab".chars(),
&[
"", "a", "b", "aa", "ab", "ba", "bb", "aaa", "aab", "aba", "abb", "baa", "bab", "bba",
"bbb", "aaaa", "aaab", "aaba", "aabb", "abaa",
],
);
shortlex_strings_using_chars_helper(
"xyz".chars(),
&[
"", "x", "y", "z", "xx", "xy", "xz", "yx", "yy", "yz", "zx", "zy", "zz", "xxx", "xxy",
"xxz", "xyx", "xyy", "xyz", "xzx",
],
);
shortlex_strings_using_chars_helper(
exhaustive_ascii_chars(),
&[
"", "a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l", "m", "n", "o", "p",
"q", "r", "s",
],
);
}
| {
let ss = shortlex_strings_using_chars(cs).take(20).collect_vec();
assert_eq!(ss.iter().map(String::as_str).collect_vec().as_slice(), out);
} | identifier_body |
shortlex_strings_using_chars.rs | use itertools::Itertools;
use malachite_base::chars::exhaustive::exhaustive_ascii_chars;
use malachite_base::strings::exhaustive::shortlex_strings_using_chars;
use std::iter::{empty, once};
fn | <I: Clone + Iterator<Item = char>>(cs: I, out: &[&str]) {
let ss = shortlex_strings_using_chars(cs).take(20).collect_vec();
assert_eq!(ss.iter().map(String::as_str).collect_vec().as_slice(), out);
}
#[test]
fn test_shortlex_strings_using_chars() {
shortlex_strings_using_chars_helper(empty(), &[""]);
shortlex_strings_using_chars_helper(
once('a'),
&[
"",
"a",
"aa",
"aaa",
"aaaa",
"aaaaa",
"aaaaaa",
"aaaaaaa",
"aaaaaaaa",
"aaaaaaaaa",
"aaaaaaaaaa",
"aaaaaaaaaaa",
"aaaaaaaaaaaa",
"aaaaaaaaaaaaa",
"aaaaaaaaaaaaaa",
"aaaaaaaaaaaaaaa",
"aaaaaaaaaaaaaaaa",
"aaaaaaaaaaaaaaaaa",
"aaaaaaaaaaaaaaaaaa",
"aaaaaaaaaaaaaaaaaaa",
],
);
shortlex_strings_using_chars_helper(
"ab".chars(),
&[
"", "a", "b", "aa", "ab", "ba", "bb", "aaa", "aab", "aba", "abb", "baa", "bab", "bba",
"bbb", "aaaa", "aaab", "aaba", "aabb", "abaa",
],
);
shortlex_strings_using_chars_helper(
"xyz".chars(),
&[
"", "x", "y", "z", "xx", "xy", "xz", "yx", "yy", "yz", "zx", "zy", "zz", "xxx", "xxy",
"xxz", "xyx", "xyy", "xyz", "xzx",
],
);
shortlex_strings_using_chars_helper(
exhaustive_ascii_chars(),
&[
"", "a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l", "m", "n", "o", "p",
"q", "r", "s",
],
);
}
| shortlex_strings_using_chars_helper | identifier_name |
data.py | # Copyright (c) 2016, NVIDIA CORPORATION. All rights reserved.
from __future__ import absolute_import
from digits.utils import subclass, override, constants
from digits.extensions.data.interface import DataIngestionInterface
from .forms import DatasetForm, InferenceForm
import numpy as np
import os
TEMPLATE = "templates/template.html"
INFERENCE_TEMPLATE = "templates/inference_template.html"
@subclass
class DataIngestion(DataIngestionInterface):
"""
A data ingestion extension for an image gradient dataset
"""
def __init__(self, is_inference_db=False, **kwargs):
super(DataIngestion, self).__init__(**kwargs)
self.userdata['is_inference_db'] = is_inference_db
# Used to calculate the gradients later
self.yy, self.xx = np.mgrid[:self.image_height,
:self.image_width].astype('float')
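# encode_entry (below) renders a slope pair as a grayscale ramp: intensity
# varies linearly along x and y around a mid-gray of 127.5, so e.g. a pair of
# (0.5, 0.0) yields a pure left-to-right brightness gradient.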
@override
def encode_entry(self, entry):
xslope, yslope = entry
label = np.array([xslope, yslope])
a = xslope * 255 / self.image_width
b = yslope * 255 / self.image_height
image = a * (self.xx - self.image_width/2) + b * (self.yy - self.image_height/2) + 127.5
image = image.astype('uint8')
# convert to 3D tensors
image = image[np.newaxis, ...]
label = label[np.newaxis, np.newaxis, ...]
return image, label
@staticmethod
@override
def get_category():
return "Images"
@staticmethod
@override
def get_id():
return "image-gradients"
@staticmethod
@override
def get_dataset_form():
return DatasetForm()
@staticmethod
@override
def get_dataset_template(form):
"""
parameters:
- form: form returned by get_dataset_form(). This may be populated
with values if the job was cloned
return:
- (template, context) tuple
- template is a Jinja template to use for rendering dataset creation
options
- context is a dictionary of context variables to use for rendering
the form
"""
extension_dir = os.path.dirname(os.path.abspath(__file__))
template = open(os.path.join(extension_dir, TEMPLATE), "r").read()
context = {'form': form}
return (template, context)
@override
def get_inference_form(self):
return InferenceForm()
@staticmethod
@override
def get_inference_template(form):
extension_dir = os.path.dirname(os.path.abspath(__file__))
template = open(os.path.join(extension_dir, INFERENCE_TEMPLATE), "r").read()
context = {'form': form}
return (template, context)
@staticmethod
@override
def get_title():
return "Gradients"
@override
def | (self, stage):
count = 0
if self.userdata['is_inference_db']:
if stage == constants.TEST_DB:
if self.test_image_count:
count = self.test_image_count
else:
return [(self.gradient_x, self.gradient_y)]
else:
if stage == constants.TRAIN_DB:
count = self.train_image_count
elif stage == constants.VAL_DB:
count = self.val_image_count
elif stage == constants.TEST_DB:
count = self.test_image_count
return [np.random.random_sample(2) - 0.5 for i in xrange(count)] if count > 0 else []
| itemize_entries | identifier_name |
data.py | # Copyright (c) 2016, NVIDIA CORPORATION. All rights reserved.
from __future__ import absolute_import
from digits.utils import subclass, override, constants
from digits.extensions.data.interface import DataIngestionInterface
from .forms import DatasetForm, InferenceForm
import numpy as np
import os
TEMPLATE = "templates/template.html"
INFERENCE_TEMPLATE = "templates/inference_template.html"
@subclass
class DataIngestion(DataIngestionInterface):
"""
A data ingestion extension for an image gradient dataset
"""
def __init__(self, is_inference_db=False, **kwargs):
super(DataIngestion, self).__init__(**kwargs)
self.userdata['is_inference_db'] = is_inference_db
# Used to calculate the gradients later
self.yy, self.xx = np.mgrid[:self.image_height,
:self.image_width].astype('float')
@override
def encode_entry(self, entry):
xslope, yslope = entry
label = np.array([xslope, yslope])
a = xslope * 255 / self.image_width
b = yslope * 255 / self.image_height
image = a * (self.xx - self.image_width/2) + b * (self.yy - self.image_height/2) + 127.5
image = image.astype('uint8')
# convert to 3D tensors
image = image[np.newaxis, ...]
label = label[np.newaxis, np.newaxis, ...]
return image, label
@staticmethod
@override
def get_category():
return "Images"
@staticmethod
@override
def get_id():
return "image-gradients"
@staticmethod
@override
def get_dataset_form():
return DatasetForm()
@staticmethod
@override
def get_dataset_template(form):
"""
parameters:
- form: form returned by get_dataset_form(). This may be populated
with values if the job was cloned
return:
- (template, context) tuple
- template is a Jinja template to use for rendering dataset creation
options
- context is a dictionary of context variables to use for rendering
the form
"""
extension_dir = os.path.dirname(os.path.abspath(__file__))
template = open(os.path.join(extension_dir, TEMPLATE), "r").read()
context = {'form': form}
return (template, context)
@override
def get_inference_form(self):
return InferenceForm()
@staticmethod
@override
def get_inference_template(form):
|
@staticmethod
@override
def get_title():
return "Gradients"
@override
def itemize_entries(self, stage):
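# For an inference DB without an explicit test-image count, the single
# (gradient_x, gradient_y) pair supplied by the form is used; otherwise
# `count` random slope pairs drawn uniformly from [-0.5, 0.5) are produced.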
count = 0
if self.userdata['is_inference_db']:
if stage == constants.TEST_DB:
if self.test_image_count:
count = self.test_image_count
else:
return [(self.gradient_x, self.gradient_y)]
else:
if stage == constants.TRAIN_DB:
count = self.train_image_count
elif stage == constants.VAL_DB:
count = self.val_image_count
elif stage == constants.TEST_DB:
count = self.test_image_count
return [np.random.random_sample(2) - 0.5 for i in xrange(count)] if count > 0 else []
| extension_dir = os.path.dirname(os.path.abspath(__file__))
template = open(os.path.join(extension_dir, INFERENCE_TEMPLATE), "r").read()
context = {'form': form}
return (template, context) | identifier_body |
data.py | # Copyright (c) 2016, NVIDIA CORPORATION. All rights reserved.
from __future__ import absolute_import
from digits.utils import subclass, override, constants
from digits.extensions.data.interface import DataIngestionInterface
from .forms import DatasetForm, InferenceForm
| import numpy as np
import os
TEMPLATE = "templates/template.html"
INFERENCE_TEMPLATE = "templates/inference_template.html"
@subclass
class DataIngestion(DataIngestionInterface):
"""
A data ingestion extension for an image gradient dataset
"""
def __init__(self, is_inference_db=False, **kwargs):
super(DataIngestion, self).__init__(**kwargs)
self.userdata['is_inference_db'] = is_inference_db
# Used to calculate the gradients later
self.yy, self.xx = np.mgrid[:self.image_height,
:self.image_width].astype('float')
@override
def encode_entry(self, entry):
xslope, yslope = entry
label = np.array([xslope, yslope])
a = xslope * 255 / self.image_width
b = yslope * 255 / self.image_height
image = a * (self.xx - self.image_width/2) + b * (self.yy - self.image_height/2) + 127.5
image = image.astype('uint8')
# convert to 3D tensors
image = image[np.newaxis, ...]
label = label[np.newaxis, np.newaxis, ...]
return image, label
@staticmethod
@override
def get_category():
return "Images"
@staticmethod
@override
def get_id():
return "image-gradients"
@staticmethod
@override
def get_dataset_form():
return DatasetForm()
@staticmethod
@override
def get_dataset_template(form):
"""
parameters:
- form: form returned by get_dataset_form(). This may be populated
with values if the job was cloned
return:
- (template, context) tuple
- template is a Jinja template to use for rendering dataset creation
options
- context is a dictionary of context variables to use for rendering
the form
"""
extension_dir = os.path.dirname(os.path.abspath(__file__))
template = open(os.path.join(extension_dir, TEMPLATE), "r").read()
context = {'form': form}
return (template, context)
@override
def get_inference_form(self):
return InferenceForm()
@staticmethod
@override
def get_inference_template(form):
extension_dir = os.path.dirname(os.path.abspath(__file__))
template = open(os.path.join(extension_dir, INFERENCE_TEMPLATE), "r").read()
context = {'form': form}
return (template, context)
@staticmethod
@override
def get_title():
return "Gradients"
@override
def itemize_entries(self, stage):
count = 0
if self.userdata['is_inference_db']:
if stage == constants.TEST_DB:
if self.test_image_count:
count = self.test_image_count
else:
return [(self.gradient_x, self.gradient_y)]
else:
if stage == constants.TRAIN_DB:
count = self.train_image_count
elif stage == constants.VAL_DB:
count = self.val_image_count
elif stage == constants.TEST_DB:
count = self.test_image_count
return [np.random.random_sample(2) - 0.5 for i in xrange(count)] if count > 0 else [] | random_line_split |
|
data.py | # Copyright (c) 2016, NVIDIA CORPORATION. All rights reserved.
from __future__ import absolute_import
from digits.utils import subclass, override, constants
from digits.extensions.data.interface import DataIngestionInterface
from .forms import DatasetForm, InferenceForm
import numpy as np
import os
TEMPLATE = "templates/template.html"
INFERENCE_TEMPLATE = "templates/inference_template.html"
@subclass
class DataIngestion(DataIngestionInterface):
"""
A data ingestion extension for an image gradient dataset
"""
def __init__(self, is_inference_db=False, **kwargs):
super(DataIngestion, self).__init__(**kwargs)
self.userdata['is_inference_db'] = is_inference_db
# Used to calculate the gradients later
self.yy, self.xx = np.mgrid[:self.image_height,
:self.image_width].astype('float')
@override
def encode_entry(self, entry):
xslope, yslope = entry
label = np.array([xslope, yslope])
a = xslope * 255 / self.image_width
b = yslope * 255 / self.image_height
image = a * (self.xx - self.image_width/2) + b * (self.yy - self.image_height/2) + 127.5
image = image.astype('uint8')
# convert to 3D tensors
image = image[np.newaxis, ...]
label = label[np.newaxis, np.newaxis, ...]
return image, label
@staticmethod
@override
def get_category():
return "Images"
@staticmethod
@override
def get_id():
return "image-gradients"
@staticmethod
@override
def get_dataset_form():
return DatasetForm()
@staticmethod
@override
def get_dataset_template(form):
"""
parameters:
- form: form returned by get_dataset_form(). This may be populated
with values if the job was cloned
return:
- (template, context) tuple
- template is a Jinja template to use for rendering dataset creation
options
- context is a dictionary of context variables to use for rendering
the form
"""
extension_dir = os.path.dirname(os.path.abspath(__file__))
template = open(os.path.join(extension_dir, TEMPLATE), "r").read()
context = {'form': form}
return (template, context)
@override
def get_inference_form(self):
return InferenceForm()
@staticmethod
@override
def get_inference_template(form):
extension_dir = os.path.dirname(os.path.abspath(__file__))
template = open(os.path.join(extension_dir, INFERENCE_TEMPLATE), "r").read()
context = {'form': form}
return (template, context)
@staticmethod
@override
def get_title():
return "Gradients"
@override
def itemize_entries(self, stage):
count = 0
if self.userdata['is_inference_db']:
if stage == constants.TEST_DB:
if self.test_image_count:
count = self.test_image_count
else:
return [(self.gradient_x, self.gradient_y)]
else:
if stage == constants.TRAIN_DB:
count = self.train_image_count
elif stage == constants.VAL_DB:
count = self.val_image_count
elif stage == constants.TEST_DB:
|
return [np.random.random_sample(2) - 0.5 for i in xrange(count)] if count > 0 else []
| count = self.test_image_count | conditional_block |
CommandGroupManager.js | /* ************************************************************************
qooxdoo - the new era of web development
http://qooxdoo.org
Copyright:
2004-2012 1&1 Internet AG, Germany, http://www.1und1.de
License:
LGPL: http://www.gnu.org/licenses/lgpl.html
EPL: http://www.eclipse.org/org/documents/epl-v10.php
See the LICENSE file in the project's top-level directory for details.
Authors:
* Mustafa Sak (msak)
************************************************************************ */
/**
* @ignore(ColorSwitch)
*/
qx.Class.define("demobrowser.demo.ui.CommandGroupManager",
{
extend : qx.application.Standalone,
/*
*****************************************************************************
MEMBERS
*****************************************************************************
*/
members :
{
main : function()
{
this.base(arguments);
this._manager = new qx.ui.command.GroupManager();
this._createWidgets();
},
_createWidgets : function()
{
var tabview = new qx.ui.tabview.TabView();
var page1 = new qx.ui.tabview.Page("Page1 - press 5 to change color");
page1.setLayout(new qx.ui.layout.Canvas());
var page2 = new qx.ui.tabview.Page("Page2 - press 5 to change color");
page2.setLayout(new qx.ui.layout.Canvas());
var page3 = new qx.ui.tabview.Page("Page3 - press 5 to change color");
page3.setLayout(new qx.ui.layout.Canvas());
page1.add(new ColorSwitch(this), {edge:0});
page2.add(new ColorSwitch(this), {edge:0});
page3.add(new ColorSwitch(this), {edge:0});
tabview.add(page1);
tabview.add(page2);
tabview.add(page3);
this.getRoot().add(tabview, {edge: 10});
},
getGroupManager : function()
{
return this._manager;
}
}
});
/**
* View
*/
qx.Class.define("ColorSwitch",
{
extend : qx.ui.container.Composite,
construct : function(controller)
{
this.base(arguments);
this.setLayout(new qx.ui.layout.VBox(15));
this.setPadding(25);
this._controller = controller;
// create command
var cmd = new qx.ui.command.Command("5");
cmd.addListener("execute", this.toggleColor, this);
// create command group
var group = new qx.ui.command.Group();
this._group = group;
// add command into group
group.add("toggleColor", cmd);
// Register command group at command group manager
controller.getGroupManager().add(group);
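// Only the group of the currently visible page is made active (see
// _onAppear below), which is why the identical "5" shortcut defined on
// every page fires in at most one view at a time.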
this.addListener("appear", this._onAppear, this);
this._createWidgets();
},
members :
{
_group : null,
_createWidgets : function()
{
var btn = new qx.ui.form.TextField();
btn.setPlaceholder("If focused here, all commands will be disabled! Please press key \"5\"!");
btn.addListener("focusin", this._blockCommands, this);
btn.addListener("focusout", this._unblockCommands, this);
this.add(btn);
var label = new qx.ui.basic.Label("All tabview pages are holding a view class with same command shortcut! Press key \"5\" on any page to change the color of the view. You will see that only the appeared page will change his color.");
label.set({
rich : true,
wrap : true
});
this.add(label);
},
toggleColor : function(target, command)
{
this.setBackgroundColor(this.getBackgroundColor() == "#ABEFEF" ? "#ABEFAB" : "#ABEFEF");
},
_onAppear : function(e)
{
this._controller.getGroupManager().setActive(this._group);
},
_blockCommands : function(e)
{
this._controller.getGroupManager().block();
},
_unblockCommands : function(e)
{
this._controller.getGroupManager().unblock(); | }
}); | } | random_line_split |
render.rs | use glfw_ffi::*;
use nanovg;
use std::os::raw::c_int;
use std::ptr;
#[repr(usize)]
#[derive(PartialEq, Eq)]
pub enum Fonts {
Inter = 0,
Vga8,
Moderno,
NumFonts,
}
pub struct RenderContext<'a> {
window: *mut GLFWwindow,
nvg: &'a nanovg::Context,
fonts: [nanovg::Font<'a>; Fonts::NumFonts as usize],
}
impl<'a> RenderContext<'a> {
pub fn new(
window: *mut GLFWwindow,
nvg: &'a nanovg::Context,
fonts: [nanovg::Font<'a>; Fonts::NumFonts as usize],
) -> Self |
pub fn size(&self) -> (f32, f32) {
let (mut w, mut h) = (0i32, 0i32);
unsafe {
glfwGetWindowSize(self.window, &mut w as *mut _, &mut h as *mut _);
}
(w as f32, h as f32)
}
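// On HiDPI displays GLFW reports the framebuffer in pixels but the window in
// screen coordinates; their ratio (e.g. 2.0 on a typical Retina screen) is
// the device pixel ratio NanoVG needs to rasterize at full resolution.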
pub fn pixel_ratio(&self) -> f32 {
unsafe {
let mut fb_width: c_int = 0;
let mut win_width: c_int = 0;
glfwGetFramebufferSize(self.window, &mut fb_width as *mut _, ptr::null_mut());
glfwGetWindowSize(self.window, &mut win_width as *mut _, ptr::null_mut());
fb_width as f32 / win_width as f32
}
}
pub fn frame<F: FnOnce(nanovg::Frame)>(&self, f: F) {
self.nvg.frame(self.size(), self.pixel_ratio(), f);
}
pub fn font(&self, id: Fonts) -> nanovg::Font<'a> {
if id == Fonts::NumFonts {
panic!("Tried to access font `Fonts::NumFonts`");
}
self.fonts[id as usize]
}
}
| {
Self { window, nvg, fonts }
} | identifier_body |
render.rs | use glfw_ffi::*;
use nanovg;
use std::os::raw::c_int;
use std::ptr;
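// `NumFonts` in the enum below is a count sentinel rather than a real font:
// casting it to usize sizes the `fonts` array in `RenderContext`, and
// `font()` panics if it is ever passed directly.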
#[repr(usize)]
#[derive(PartialEq, Eq)]
pub enum Fonts {
Inter = 0,
Vga8,
Moderno,
NumFonts,
}
pub struct RenderContext<'a> {
window: *mut GLFWwindow,
nvg: &'a nanovg::Context,
fonts: [nanovg::Font<'a>; Fonts::NumFonts as usize],
}
impl<'a> RenderContext<'a> {
pub fn new(
window: *mut GLFWwindow,
nvg: &'a nanovg::Context,
fonts: [nanovg::Font<'a>; Fonts::NumFonts as usize],
) -> Self {
Self { window, nvg, fonts }
}
pub fn size(&self) -> (f32, f32) {
let (mut w, mut h) = (0i32, 0i32);
unsafe {
glfwGetWindowSize(self.window, &mut w as *mut _, &mut h as *mut _);
}
(w as f32, h as f32)
}
pub fn pixel_ratio(&self) -> f32 {
unsafe {
let mut fb_width: c_int = 0;
let mut win_width: c_int = 0;
glfwGetFramebufferSize(self.window, &mut fb_width as *mut _, ptr::null_mut());
glfwGetWindowSize(self.window, &mut win_width as *mut _, ptr::null_mut());
fb_width as f32 / win_width as f32
}
}
pub fn | <F: FnOnce(nanovg::Frame)>(&self, f: F) {
self.nvg.frame(self.size(), self.pixel_ratio(), f);
}
pub fn font(&self, id: Fonts) -> nanovg::Font<'a> {
if id == Fonts::NumFonts {
panic!("Tried to access font `Fonts::NumFonts`");
}
self.fonts[id as usize]
}
}
| frame | identifier_name |
render.rs | use glfw_ffi::*;
use nanovg;
use std::os::raw::c_int;
use std::ptr;
#[repr(usize)]
#[derive(PartialEq, Eq)]
pub enum Fonts {
Inter = 0, | NumFonts,
}
pub struct RenderContext<'a> {
window: *mut GLFWwindow,
nvg: &'a nanovg::Context,
fonts: [nanovg::Font<'a>; Fonts::NumFonts as usize],
}
impl<'a> RenderContext<'a> {
pub fn new(
window: *mut GLFWwindow,
nvg: &'a nanovg::Context,
fonts: [nanovg::Font<'a>; Fonts::NumFonts as usize],
) -> Self {
Self { window, nvg, fonts }
}
pub fn size(&self) -> (f32, f32) {
let (mut w, mut h) = (0i32, 0i32);
unsafe {
glfwGetWindowSize(self.window, &mut w as *mut _, &mut h as *mut _);
}
(w as f32, h as f32)
}
pub fn pixel_ratio(&self) -> f32 {
unsafe {
let mut fb_width: c_int = 0;
let mut win_width: c_int = 0;
glfwGetFramebufferSize(self.window, &mut fb_width as *mut _, ptr::null_mut());
glfwGetWindowSize(self.window, &mut win_width as *mut _, ptr::null_mut());
fb_width as f32 / win_width as f32
}
}
pub fn frame<F: FnOnce(nanovg::Frame)>(&self, f: F) {
self.nvg.frame(self.size(), self.pixel_ratio(), f);
}
pub fn font(&self, id: Fonts) -> nanovg::Font<'a> {
if id == Fonts::NumFonts {
panic!("Tried to access font `Fonts::NumFonts`");
}
self.fonts[id as usize]
}
} | Vga8,
Moderno, | random_line_split |
render.rs | use glfw_ffi::*;
use nanovg;
use std::os::raw::c_int;
use std::ptr;
#[repr(usize)]
#[derive(PartialEq, Eq)]
pub enum Fonts {
Inter = 0,
Vga8,
Moderno,
NumFonts,
}
pub struct RenderContext<'a> {
window: *mut GLFWwindow,
nvg: &'a nanovg::Context,
fonts: [nanovg::Font<'a>; Fonts::NumFonts as usize],
}
impl<'a> RenderContext<'a> {
pub fn new(
window: *mut GLFWwindow,
nvg: &'a nanovg::Context,
fonts: [nanovg::Font<'a>; Fonts::NumFonts as usize],
) -> Self {
Self { window, nvg, fonts }
}
pub fn size(&self) -> (f32, f32) {
let (mut w, mut h) = (0i32, 0i32);
unsafe {
glfwGetWindowSize(self.window, &mut w as *mut _, &mut h as *mut _);
}
(w as f32, h as f32)
}
pub fn pixel_ratio(&self) -> f32 {
unsafe {
let mut fb_width: c_int = 0;
let mut win_width: c_int = 0;
glfwGetFramebufferSize(self.window, &mut fb_width as *mut _, ptr::null_mut());
glfwGetWindowSize(self.window, &mut win_width as *mut _, ptr::null_mut());
fb_width as f32 / win_width as f32
}
}
pub fn frame<F: FnOnce(nanovg::Frame)>(&self, f: F) {
self.nvg.frame(self.size(), self.pixel_ratio(), f);
}
pub fn font(&self, id: Fonts) -> nanovg::Font<'a> {
if id == Fonts::NumFonts |
self.fonts[id as usize]
}
}
| {
panic!("Tried to access font `Fonts::NumFonts`");
} | conditional_block |
Block.js | 'use strict';
/**
* Block model
*
* @class Block
*
* @exception {Error} Messages.CONSTRUCT_ERROR
* See section 1.3 (Models Folder) of the specification linked below
* @link https://github.com/Gaia3D/F4DConverter/blob/master/doc/F4D_SpecificationV1.pdf
*/
var Block = function()
{
if (!(this instanceof Block))
{
throw new Error(Messages | container which holds the VBO Cache Keys.
* @type {VBOVertexIdxCacheKeysContainer}
*/
this.vBOVertexIdxCacheKeysContainer = new VBOVertexIdxCacheKeysContainer();
/**
* @deprecated
* @type {number}
* @default -1
*/
this.mIFCEntityType = -1;
/**
* small object flag.
* if bbox.getMaxLength() < 0.5, isSmallObj = true
*
* @type {Boolean}
* @default false
*/
this.isSmallObj = false;
/**
* block radius
* Usually set to bbox.getMaxLength() / 2.0.
*
* @type {Number}
* @default 10
*/
this.radius = 10;
/**
* only for test. delete this.
* @deprecated
*/
this.vertexCount = 0;
/**
* When an individual feature has a complex model, it is handled as a Lego.
* Not used at the moment, but kept because it may be needed later.
* legoBlock.
* @type {Lego}
*/
this.lego;
};
/**
* Resets the block. Deletes this block and its lego from the gl context.
*
* @param {WebGLRenderingContext} gl
* @param {VboManager} vboMemManager
*/
Block.prototype.deleteObjects = function(gl, vboMemManager)
{
this.vBOVertexIdxCacheKeysContainer.deleteGlObjects(gl, vboMemManager);
this.vBOVertexIdxCacheKeysContainer = undefined;
this.mIFCEntityType = undefined;
this.isSmallObj = undefined;
this.radius = undefined;
// only for test. delete this.
this.vertexCount = undefined;
if (this.lego) { this.lego.deleteGlObjects(gl); }
this.lego = undefined;
};
/**
* Checks whether the block is ready to render
*
* @param {NeoReference} neoReference neoReference object to compare against magoManager's objectSelected
* @param {MagoManager} magoManager
* @param {Number} maxSizeToRender reference size to compare against the block's radius.
* @returns {Boolean} true if the block's radius is at least maxSizeToRender and, while the camera is moving, the block is either larger than magoManager.smallObjectSize or is the currently selected neoReference
*/
Block.prototype.isReadyToRender = function(neoReference, magoManager, maxSizeToRender)
{
if (maxSizeToRender && (this.radius < maxSizeToRender))
{ return false; }
if (magoManager.isCameraMoving && this.radius < magoManager.smallObjectSize && magoManager.objectSelected !== neoReference)
{ return false; }
return true;
};
| .CONSTRUCT_ERROR);
}
/**
* This class is the | conditional_block |
Block.js | 'use strict';
/**
* Block model
*
* @class Block
*
* @exception {Error} Messages.CONSTRUCT_ERROR
* See section 1.3 (Models Folder) of the specification linked below
* @link https://github.com/Gaia3D/F4DConverter/blob/master/doc/F4D_SpecificationV1.pdf
*/
var Block = function()
{
if (!(this instanceof Block))
{
throw new Error(Messages.CONSTRUCT_ERROR);
}
/**
* This class is the container which holds the VBO Cache Keys.
* @type {VBOVertexIdxCacheKeysContainer}
*/
this.vBOVertexIdxCacheKeysContainer = new VBOVertexIdxCacheKeysContainer();
/**
* @deprecated
* @type {number}
* @default -1
*/
this.mIFCEntityType = -1;
/**
* small object flag.
* if bbox.getMaxLength() < 0.5, isSmallObj = true
*
* @type {Boolean}
* @default false
*/
this.isSmallObj = false;
/**
* block radius
* Usually set to bbox.getMaxLength() / 2.0.
*
* @type {Number}
* @default 10
*/
this.radius = 10;
/**
* only for test. delete this.
* @deprecated |
/**
* When an individual feature has a complex model, it is handled as a Lego.
* Not used at the moment, but kept because it may be needed later.
* legoBlock.
* @type {Lego}
*/
this.lego;
};
/**
* Resets the block. Deletes this block and its lego from the gl context.
*
* @param {WebGLRenderingContext} gl
* @param {VboManager} vboMemManager
*/
Block.prototype.deleteObjects = function(gl, vboMemManager)
{
this.vBOVertexIdxCacheKeysContainer.deleteGlObjects(gl, vboMemManager);
this.vBOVertexIdxCacheKeysContainer = undefined;
this.mIFCEntityType = undefined;
this.isSmallObj = undefined;
this.radius = undefined;
// only for test. delete this.
this.vertexCount = undefined;
if (this.lego) { this.lego.deleteGlObjects(gl); }
this.lego = undefined;
};
/**
* Checks whether the block is ready to render
*
* @param {NeoReference} neoReference neoReference object to compare against magoManager's objectSelected
* @param {MagoManager} magoManager
* @param {Number} maxSizeToRender reference size to compare against the block's radius.
* @returns {Boolean} true if the block's radius is at least maxSizeToRender and, while the camera is moving, the block is either larger than magoManager.smallObjectSize or is the currently selected neoReference
*/
Block.prototype.isReadyToRender = function(neoReference, magoManager, maxSizeToRender)
{
if (maxSizeToRender && (this.radius < maxSizeToRender))
{ return false; }
if (magoManager.isCameraMoving && this.radius < magoManager.smallObjectSize && magoManager.objectSelected !== neoReference)
{ return false; }
return true;
}; | */
this.vertexCount = 0; | random_line_split |
mvn.py | #!/usr/bin/env python3
# Copyright (C) 2013 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software |
from __future__ import print_function
import argparse
from os import path, environ
from subprocess import check_output, CalledProcessError
from sys import stderr
parser = argparse.ArgumentParser()
parser.add_argument('--repository', help='maven repository id')
parser.add_argument('--url', help='maven repository url')
parser.add_argument('-o')
parser.add_argument('-a', help='action (valid actions are: install,deploy)')
parser.add_argument('-v', help='gerrit version')
parser.add_argument('-s', action='append', help='triplet of artifactId:type:path')
args = parser.parse_args()
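# Typical invocations (the artifact triplets are illustrative, not taken from
# a real build):
#   mvn.py -a install -v 3.6.0 -s gerrit-api:jar:bazel-bin/api.jar
#   mvn.py -a deploy --repository sonatype --url https://example.org/repo \
#       -v 3.6.0 -s gerrit-api:jar:bazel-bin/api.jar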
if not args.v:
print('version is empty', file=stderr)
exit(1)
root = path.abspath(__file__)
while not path.exists(path.join(root, 'WORKSPACE')):
root = path.dirname(root)
if 'install' == args.a:
cmd = [
'mvn',
'install:install-file',
'-Dversion=%s' % args.v,
]
elif 'deploy' == args.a:
cmd = [
'mvn',
'gpg:sign-and-deploy-file',
'-Dversion=%s' % args.v,
'-DrepositoryId=%s' % args.repository,
'-Durl=%s' % args.url,
]
else:
print("unknown action -a %s" % args.a, file=stderr)
exit(1)
for spec in args.s:
artifact, packaging_type, src = spec.split(':')
exe = cmd + [
'-DpomFile=%s' % path.join(root, 'tools', 'maven',
'%s_pom.xml' % artifact),
'-Dpackaging=%s' % packaging_type,
'-Dfile=%s' % src,
]
try:
if environ.get('VERBOSE'):
print(' '.join(exe), file=stderr)
check_output(exe)
except Exception as e:
print('%s command failed: %s\n%s' % (args.a, ' '.join(exe), e),
file=stderr)
if environ.get('VERBOSE') and isinstance(e, CalledProcessError):
print('Command output\n%s' % e.output, file=stderr)
exit(1)
out = stderr
if args.o:
out = open(args.o, 'w')
with out as fd:
if args.repository:
print('Repository: %s' % args.repository, file=fd)
if args.url:
print('URL: %s' % args.url, file=fd)
print('Version: %s' % args.v, file=fd) | # distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License. | random_line_split |
mvn.py | #!/usr/bin/env python3
# Copyright (C) 2013 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import argparse
from os import path, environ
from subprocess import check_output, CalledProcessError
from sys import stderr
parser = argparse.ArgumentParser()
parser.add_argument('--repository', help='maven repository id')
parser.add_argument('--url', help='maven repository url')
parser.add_argument('-o')
parser.add_argument('-a', help='action (valid actions are: install,deploy)')
parser.add_argument('-v', help='gerrit version')
parser.add_argument('-s', action='append', help='triplet of artifactId:type:path')
args = parser.parse_args()
if not args.v:
print('version is empty', file=stderr)
exit(1)
root = path.abspath(__file__)
while not path.exists(path.join(root, 'WORKSPACE')):
root = path.dirname(root)
if 'install' == args.a:
cmd = [
'mvn',
'install:install-file',
'-Dversion=%s' % args.v,
]
elif 'deploy' == args.a:
cmd = [
'mvn',
'gpg:sign-and-deploy-file',
'-Dversion=%s' % args.v,
'-DrepositoryId=%s' % args.repository,
'-Durl=%s' % args.url,
]
else:
print("unknown action -a %s" % args.a, file=stderr)
exit(1)
for spec in args.s:
artifact, packaging_type, src = spec.split(':')
exe = cmd + [
'-DpomFile=%s' % path.join(root, 'tools', 'maven',
'%s_pom.xml' % artifact),
'-Dpackaging=%s' % packaging_type,
'-Dfile=%s' % src,
]
try:
if environ.get('VERBOSE'):
|
check_output(exe)
except Exception as e:
print('%s command failed: %s\n%s' % (args.a, ' '.join(exe), e),
file=stderr)
if environ.get('VERBOSE') and isinstance(e, CalledProcessError):
print('Command output\n%s' % e.output, file=stderr)
exit(1)
out = stderr
if args.o:
out = open(args.o, 'w')
with out as fd:
if args.repository:
print('Repository: %s' % args.repository, file=fd)
if args.url:
print('URL: %s' % args.url, file=fd)
print('Version: %s' % args.v, file=fd)
| print(' '.join(exe), file=stderr) | conditional_block |
search.ts | import { ContextKeys, GlyphChars } from '../../constants';
import { Container } from '../../container';
import { getContext } from '../../context';
import { GitCommit, GitLog, Repository } from '../../git/models';
import { searchOperators, SearchOperators, SearchPattern } from '../../git/search';
import { ActionQuickPickItem, QuickPickItemOfT } from '../../quickpicks/items/common';
import { pluralize } from '../../system/string';
import { SearchResultsNode } from '../../views/nodes';
import { ViewsWithRepositoryFolders } from '../../views/viewBase';
import { getSteps } from '../gitCommands.utils';
import {
appendReposToTitle,
PartialStepState,
pickCommitStep,
pickRepositoryStep,
QuickCommand,
QuickCommandButtons,
StepGenerator,
StepResult,
StepResultGenerator,
StepSelection,
StepState,
} from '../quickCommand';
interface Context {
repos: Repository[];
associatedView: ViewsWithRepositoryFolders;
commit: GitCommit | undefined;
hasVirtualFolders: boolean;
resultsKey: string | undefined;
resultsPromise: Promise<GitLog | undefined> | undefined;
title: string;
}
interface State extends Required<SearchPattern> {
repo: string | Repository;
showResultsInSideBar: boolean | SearchResultsNode;
}
export interface SearchGitCommandArgs {
readonly command: 'search' | 'grep';
prefillOnly?: boolean;
state?: Partial<State>;
}
const searchOperatorToTitleMap = new Map<SearchOperators, string>([
['', 'Search by Message'],
['=:', 'Search by Message'],
['message:', 'Search by Message'],
['@:', 'Search by Author'],
['author:', 'Search by Author'],
['#:', 'Search by Commit SHA'],
['commit:', 'Search by Commit SHA'],
['?:', 'Search by File'],
['file:', 'Search by File'],
['~:', 'Search by Changes'],
['change:', 'Search by Changes'],
]);
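// Illustrative inputs these operators map onto (values are made up):
//   "fix crash" author:eamodio    → message + author filters combined
//   commit:4ce7a8b                → look up a single commit SHA
//   file:*.ts change:renderRef    → commits touching matching files/changes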
type SearchStepState<T extends State = State> = ExcludeSome<StepState<T>, 'repo', string>;
export class SearchGitCommand extends QuickCommand<State> {
constructor(container: Container, args?: SearchGitCommandArgs) {
super(container, 'search', 'search', 'Commit Search', {
description: 'aka grep, searches for commits',
});
let counter = 0;
if (args?.state?.repo != null) {
counter++;
}
if (args?.state?.pattern != null && !args.prefillOnly) {
counter++;
}
this.initialState = {
counter: counter,
confirm: false,
...args?.state,
};
}
override get canConfirm(): boolean {
return false;
}
override isMatch(key: string) {
return super.isMatch(key) || key === 'grep';
}
override isFuzzyMatch(name: string) {
return super.isFuzzyMatch(name) || name === 'grep';
}
protected async *steps(state: PartialStepState<State>): StepGenerator {
const context: Context = {
repos: this.container.git.openRepositories,
associatedView: this.container.searchAndCompareView,
commit: undefined,
hasVirtualFolders: getContext<boolean>(ContextKeys.HasVirtualFolders, false),
resultsKey: undefined,
resultsPromise: undefined,
title: this.title,
};
const cfg = this.container.config.gitCommands.search;
if (state.matchAll == null) {
state.matchAll = cfg.matchAll;
}
if (state.matchCase == null) {
state.matchCase = cfg.matchCase;
}
if (state.matchRegex == null) |
if (state.showResultsInSideBar == null) {
state.showResultsInSideBar = cfg.showResultsInSideBar ?? undefined;
}
let skippedStepOne = false;
while (this.canStepsContinue(state)) {
context.title = this.title;
if (state.counter < 1 || state.repo == null || typeof state.repo === 'string') {
skippedStepOne = false;
if (context.repos.length === 1) {
skippedStepOne = true;
if (state.repo == null) {
state.counter++;
}
state.repo = context.repos[0];
} else {
const result = yield* pickRepositoryStep(state, context);
// Always break on the first step (so we will go back)
if (result === StepResult.Break) break;
state.repo = result;
}
}
if (state.counter < 2 || state.pattern == null) {
const result = yield* this.pickSearchOperatorStep(state as SearchStepState, context);
if (result === StepResult.Break) {
// If we skipped the previous step, make sure we back up past it
if (skippedStepOne) {
state.counter--;
}
state.pattern = undefined;
continue;
}
state.pattern = result;
}
const search: SearchPattern = {
pattern: state.pattern,
matchAll: state.matchAll,
matchCase: state.matchCase,
matchRegex: state.matchRegex,
};
const searchKey = SearchPattern.toKey(search);
if (context.resultsPromise == null || context.resultsKey !== searchKey) {
context.resultsPromise = state.repo.searchForCommits(search);
context.resultsKey = searchKey;
}
// eslint-disable-next-line @typescript-eslint/strict-boolean-expressions
if (state.showResultsInSideBar) {
void this.container.searchAndCompareView.search(
state.repo.path,
search,
{
label: { label: `for ${state.pattern}` },
},
context.resultsPromise,
state.showResultsInSideBar instanceof SearchResultsNode ? state.showResultsInSideBar : undefined,
);
break;
}
if (state.counter < 3 || context.commit == null) {
const repoPath = state.repo.path;
const result = yield* pickCommitStep(state as SearchStepState, context, {
ignoreFocusOut: true,
log: await context.resultsPromise,
onDidLoadMore: log => (context.resultsPromise = Promise.resolve(log)),
placeholder: (context, log) =>
log == null
? `No results for ${state.pattern}`
: `${pluralize('result', log.count, {
format: c => (log.hasMore ? `${c}+` : undefined),
})} for ${state.pattern}`,
picked: context.commit?.ref,
showInSideBarCommand: new ActionQuickPickItem(
'$(link-external) Show Results in Side Bar',
() =>
void this.container.searchAndCompareView.search(
repoPath,
search,
{
label: { label: `for ${state.pattern}` },
reveal: {
select: true,
focus: false,
expand: true,
},
},
context.resultsPromise,
),
),
showInSideBarButton: {
button: QuickCommandButtons.ShowResultsInSideBar,
onDidClick: () =>
void this.container.searchAndCompareView.search(
repoPath,
search,
{
label: { label: `for ${state.pattern}` },
reveal: {
select: true,
focus: false,
expand: true,
},
},
context.resultsPromise,
),
},
});
if (result === StepResult.Break) {
state.counter--;
continue;
}
context.commit = result;
}
const result = yield* getSteps(
this.container,
{
command: 'show',
state: {
repo: state.repo,
reference: context.commit,
},
},
this.pickedVia,
);
state.counter--;
if (result === StepResult.Break) {
QuickCommand.endSteps(state);
}
}
return state.counter < 0 ? StepResult.Break : undefined;
}
private *pickSearchOperatorStep(state: SearchStepState, context: Context): StepResultGenerator<string> {
const items: QuickPickItemOfT<SearchOperators>[] = [
{
label: searchOperatorToTitleMap.get('')!,
description: `pattern or message: pattern or =: pattern ${GlyphChars.Dash} use quotes to search for phrases`,
item: 'message:' as const,
},
{
label: searchOperatorToTitleMap.get('author:')!,
description: 'author: pattern or @: pattern',
item: 'author:' as const,
},
{
label: searchOperatorToTitleMap.get('commit:')!,
description: 'commit: sha or #: sha',
item: 'commit:' as const,
},
context.hasVirtualFolders
? undefined
: {
label: searchOperatorToTitleMap.get('file:')!,
description: 'file: glob or ?: glob',
item: 'file:' as const,
},
context.hasVirtualFolders
? undefined
: {
label: searchOperatorToTitleMap.get('change:')!,
description: 'change: pattern or ~: pattern',
item: 'change:' as const,
},
].filter(<T>(i?: T): i is T => i != null);
const matchCaseButton = new QuickCommandButtons.MatchCaseToggle(state.matchCase);
const matchAllButton = new QuickCommandButtons.MatchAllToggle(state.matchAll);
const matchRegexButton = new QuickCommandButtons.MatchRegexToggle(state.matchRegex);
const step = QuickCommand.createPickStep<QuickPickItemOfT<SearchOperators>>({
title: appendReposToTitle(context.title, state, context),
placeholder: 'e.g. "Updates dependencies" author:eamodio',
matchOnDescription: true,
matchOnDetail: true,
additionalButtons: [matchCaseButton, matchAllButton, matchRegexButton],
items: items,
value: state.pattern,
onDidAccept: (quickpick): boolean => {
const pick = quickpick.selectedItems[0];
if (!searchOperators.has(pick.item)) return true;
const value = quickpick.value.trim();
if (value.length === 0 || searchOperators.has(value)) {
quickpick.value = pick.item;
} else {
quickpick.value = `${value} ${pick.item}`;
}
void step.onDidChangeValue!(quickpick);
return false;
},
onDidClickButton: (quickpick, button) => {
if (button === matchCaseButton) {
state.matchCase = !state.matchCase;
matchCaseButton.on = state.matchCase;
} else if (button === matchAllButton) {
state.matchAll = !state.matchAll;
matchAllButton.on = state.matchAll;
} else if (button === matchRegexButton) {
state.matchRegex = !state.matchRegex;
matchRegexButton.on = state.matchRegex;
}
},
onDidChangeValue: (quickpick): boolean => {
const value = quickpick.value.trim();
// Simulate an extra step if we have a value
state.counter = value ? 3 : 2;
const operations = SearchPattern.parseSearchOperations(value);
quickpick.title = appendReposToTitle(
operations.size === 0 || operations.size > 1
? context.title
: `Commit ${searchOperatorToTitleMap.get(operations.keys().next().value)!}`,
state,
context,
);
if (quickpick.value.length === 0) {
quickpick.items = items;
} else {
// If something was typed/selected, keep the quick pick open on focus loss
quickpick.ignoreFocusOut = true;
step.ignoreFocusOut = true;
quickpick.items = [
{
label: 'Search for',
description: quickpick.value,
item: quickpick.value as SearchOperators,
},
];
}
return true;
},
});
const selection: StepSelection<typeof step> = yield step;
if (!QuickCommand.canPickStepContinue(step, state, selection)) {
// Since we simulated a step above, we need to remove it here
state.counter--;
return StepResult.Break;
}
// Since we simulated a step above, we need to remove it here
state.counter--;
return selection[0].item.trim();
}
}
| {
state.matchRegex = cfg.matchRegex;
} | conditional_block |
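The state.counter bookkeeping in the generator above is what gives the quick command its back-navigation: every completed step advances the counter, a Break result steps it back, and auto-skipped steps (the single-repository case) are compensated so backing up never strands the user. A minimal Python sketch of the same loop, using hypothetical names (BREAK, run_wizard) rather than the GitLens API:

BREAK = object()  # sentinel a step returns when the user backs out

def run_wizard(steps, counter=0):
    # steps: list of callables taking the accumulated results dict
    results = {}
    while 0 <= counter < len(steps):
        result = steps[counter](results)
        if result is BREAK:
            counter -= 1          # go back one step
            continue
        results[counter] = result
        counter += 1              # advance on success
    return None if counter < 0 else results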
search.ts | import { ContextKeys, GlyphChars } from '../../constants';
import { Container } from '../../container';
import { getContext } from '../../context';
import { GitCommit, GitLog, Repository } from '../../git/models';
import { searchOperators, SearchOperators, SearchPattern } from '../../git/search';
import { ActionQuickPickItem, QuickPickItemOfT } from '../../quickpicks/items/common';
import { pluralize } from '../../system/string';
import { SearchResultsNode } from '../../views/nodes';
import { ViewsWithRepositoryFolders } from '../../views/viewBase';
import { getSteps } from '../gitCommands.utils';
import {
appendReposToTitle,
PartialStepState,
pickCommitStep,
pickRepositoryStep,
QuickCommand,
QuickCommandButtons,
StepGenerator,
StepResult,
StepResultGenerator,
StepSelection,
StepState,
} from '../quickCommand';
interface Context {
repos: Repository[];
associatedView: ViewsWithRepositoryFolders;
commit: GitCommit | undefined;
hasVirtualFolders: boolean;
resultsKey: string | undefined;
resultsPromise: Promise<GitLog | undefined> | undefined;
title: string;
}
interface State extends Required<SearchPattern> {
repo: string | Repository;
showResultsInSideBar: boolean | SearchResultsNode;
}
export interface SearchGitCommandArgs {
readonly command: 'search' | 'grep';
prefillOnly?: boolean;
state?: Partial<State>;
}
const searchOperatorToTitleMap = new Map<SearchOperators, string>([
['', 'Search by Message'],
['=:', 'Search by Message'],
['message:', 'Search by Message'],
['@:', 'Search by Author'],
['author:', 'Search by Author'],
['#:', 'Search by Commit SHA'],
['commit:', 'Search by Commit SHA'],
['?:', 'Search by File'],
['file:', 'Search by File'],
['~:', 'Search by Changes'],
['change:', 'Search by Changes'],
]);
type SearchStepState<T extends State = State> = ExcludeSome<StepState<T>, 'repo', string>;
export class SearchGitCommand extends QuickCommand<State> {
constructor(container: Container, args?: SearchGitCommandArgs) {
super(container, 'search', 'search', 'Commit Search', {
description: 'aka grep, searches for commits',
});
let counter = 0;
if (args?.state?.repo != null) {
counter++;
}
if (args?.state?.pattern != null && !args.prefillOnly) {
counter++;
}
this.initialState = {
counter: counter,
confirm: false,
...args?.state,
};
}
override get canConfirm(): boolean {
return false;
}
override isMatch(key: string) {
return super.isMatch(key) || key === 'grep';
}
override isFuzzyMatch(name: string) {
return super.isFuzzyMatch(name) || name === 'grep';
}
protected async *steps(state: PartialStepState<State>): StepGenerator {
const context: Context = {
repos: this.container.git.openRepositories,
associatedView: this.container.searchAndCompareView,
commit: undefined,
hasVirtualFolders: getContext<boolean>(ContextKeys.HasVirtualFolders, false),
resultsKey: undefined,
resultsPromise: undefined,
title: this.title,
};
const cfg = this.container.config.gitCommands.search;
if (state.matchAll == null) {
state.matchAll = cfg.matchAll;
}
if (state.matchCase == null) {
state.matchCase = cfg.matchCase;
}
if (state.matchRegex == null) {
state.matchRegex = cfg.matchRegex;
}
if (state.showResultsInSideBar == null) {
state.showResultsInSideBar = cfg.showResultsInSideBar ?? undefined;
}
let skippedStepOne = false;
while (this.canStepsContinue(state)) {
context.title = this.title;
if (state.counter < 1 || state.repo == null || typeof state.repo === 'string') {
skippedStepOne = false;
if (context.repos.length === 1) {
skippedStepOne = true;
if (state.repo == null) {
state.counter++;
}
state.repo = context.repos[0];
} else {
const result = yield* pickRepositoryStep(state, context);
// Always break on the first step (so we will go back)
if (result === StepResult.Break) break;
state.repo = result;
}
}
if (state.counter < 2 || state.pattern == null) {
const result = yield* this.pickSearchOperatorStep(state as SearchStepState, context);
if (result === StepResult.Break) {
// If we skipped the previous step, make sure we back up past it
if (skippedStepOne) {
state.counter--;
}
state.pattern = undefined; |
continue;
}
state.pattern = result;
}
const search: SearchPattern = {
pattern: state.pattern,
matchAll: state.matchAll,
matchCase: state.matchCase,
matchRegex: state.matchRegex,
};
const searchKey = SearchPattern.toKey(search);
if (context.resultsPromise == null || context.resultsKey !== searchKey) {
context.resultsPromise = state.repo.searchForCommits(search);
context.resultsKey = searchKey;
}
// eslint-disable-next-line @typescript-eslint/strict-boolean-expressions
if (state.showResultsInSideBar) {
void this.container.searchAndCompareView.search(
state.repo.path,
search,
{
label: { label: `for ${state.pattern}` },
},
context.resultsPromise,
state.showResultsInSideBar instanceof SearchResultsNode ? state.showResultsInSideBar : undefined,
);
break;
}
if (state.counter < 3 || context.commit == null) {
const repoPath = state.repo.path;
const result = yield* pickCommitStep(state as SearchStepState, context, {
ignoreFocusOut: true,
log: await context.resultsPromise,
onDidLoadMore: log => (context.resultsPromise = Promise.resolve(log)),
placeholder: (context, log) =>
log == null
? `No results for ${state.pattern}`
: `${pluralize('result', log.count, {
format: c => (log.hasMore ? `${c}+` : undefined),
})} for ${state.pattern}`,
picked: context.commit?.ref,
showInSideBarCommand: new ActionQuickPickItem(
'$(link-external) Show Results in Side Bar',
() =>
void this.container.searchAndCompareView.search(
repoPath,
search,
{
label: { label: `for ${state.pattern}` },
reveal: {
select: true,
focus: false,
expand: true,
},
},
context.resultsPromise,
),
),
showInSideBarButton: {
button: QuickCommandButtons.ShowResultsInSideBar,
onDidClick: () =>
void this.container.searchAndCompareView.search(
repoPath,
search,
{
label: { label: `for ${state.pattern}` },
reveal: {
select: true,
focus: false,
expand: true,
},
},
context.resultsPromise,
),
},
});
if (result === StepResult.Break) {
state.counter--;
continue;
}
context.commit = result;
}
const result = yield* getSteps(
this.container,
{
command: 'show',
state: {
repo: state.repo,
reference: context.commit,
},
},
this.pickedVia,
);
state.counter--;
if (result === StepResult.Break) {
QuickCommand.endSteps(state);
}
}
return state.counter < 0 ? StepResult.Break : undefined;
}
private *pickSearchOperatorStep(state: SearchStepState, context: Context): StepResultGenerator<string> {
const items: QuickPickItemOfT<SearchOperators>[] = [
{
label: searchOperatorToTitleMap.get('')!,
description: `pattern or message: pattern or =: pattern ${GlyphChars.Dash} use quotes to search for phrases`,
item: 'message:' as const,
},
{
label: searchOperatorToTitleMap.get('author:')!,
description: 'author: pattern or @: pattern',
item: 'author:' as const,
},
{
label: searchOperatorToTitleMap.get('commit:')!,
description: 'commit: sha or #: sha',
item: 'commit:' as const,
},
context.hasVirtualFolders
? undefined
: {
label: searchOperatorToTitleMap.get('file:')!,
description: 'file: glob or ?: glob',
item: 'file:' as const,
},
context.hasVirtualFolders
? undefined
: {
label: searchOperatorToTitleMap.get('change:')!,
description: 'change: pattern or ~: pattern',
item: 'change:' as const,
},
].filter(<T>(i?: T): i is T => i != null);
const matchCaseButton = new QuickCommandButtons.MatchCaseToggle(state.matchCase);
const matchAllButton = new QuickCommandButtons.MatchAllToggle(state.matchAll);
const matchRegexButton = new QuickCommandButtons.MatchRegexToggle(state.matchRegex);
const step = QuickCommand.createPickStep<QuickPickItemOfT<SearchOperators>>({
title: appendReposToTitle(context.title, state, context),
placeholder: 'e.g. "Updates dependencies" author:eamodio',
matchOnDescription: true,
matchOnDetail: true,
additionalButtons: [matchCaseButton, matchAllButton, matchRegexButton],
items: items,
value: state.pattern,
onDidAccept: (quickpick): boolean => {
const pick = quickpick.selectedItems[0];
if (!searchOperators.has(pick.item)) return true;
const value = quickpick.value.trim();
if (value.length === 0 || searchOperators.has(value)) {
quickpick.value = pick.item;
} else {
quickpick.value = `${value} ${pick.item}`;
}
void step.onDidChangeValue!(quickpick);
return false;
},
onDidClickButton: (quickpick, button) => {
if (button === matchCaseButton) {
state.matchCase = !state.matchCase;
matchCaseButton.on = state.matchCase;
} else if (button === matchAllButton) {
state.matchAll = !state.matchAll;
matchAllButton.on = state.matchAll;
} else if (button === matchRegexButton) {
state.matchRegex = !state.matchRegex;
matchRegexButton.on = state.matchRegex;
}
},
onDidChangeValue: (quickpick): boolean => {
const value = quickpick.value.trim();
// Simulate an extra step if we have a value
state.counter = value ? 3 : 2;
const operations = SearchPattern.parseSearchOperations(value);
quickpick.title = appendReposToTitle(
operations.size === 0 || operations.size > 1
? context.title
: `Commit ${searchOperatorToTitleMap.get(operations.keys().next().value)!}`,
state,
context,
);
if (quickpick.value.length === 0) {
quickpick.items = items;
} else {
// If something was typed/selected, keep the quick pick open on focus loss
quickpick.ignoreFocusOut = true;
step.ignoreFocusOut = true;
quickpick.items = [
{
label: 'Search for',
description: quickpick.value,
item: quickpick.value as SearchOperators,
},
];
}
return true;
},
});
const selection: StepSelection<typeof step> = yield step;
if (!QuickCommand.canPickStepContinue(step, state, selection)) {
// Since we simulated a step above, we need to remove it here
state.counter--;
return StepResult.Break;
}
// Since we simulated a step above, we need to remove it here
state.counter--;
return selection[0].item.trim();
}
} | random_line_split |
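SearchPattern.toKey above exists so the command can reuse an in-flight results promise whenever the effective query (pattern plus the three match flags) has not changed. A rough Python equivalent of that keyed caching; search_commits is an invented stand-in for repo.searchForCommits:

import json

_cache = {'key': None, 'results': None}

def to_key(pattern, match_all=False, match_case=False, match_regex=False):
    # normalise the query so logically identical searches share one key
    return json.dumps([pattern, match_all, match_case, match_regex])

def cached_search(repo, pattern, **flags):
    key = to_key(pattern, **flags)
    if _cache['key'] != key:              # only re-run for a new query
        _cache['results'] = repo.search_commits(pattern, **flags)
        _cache['key'] = key
    return _cache['results']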
|
search.ts | import { ContextKeys, GlyphChars } from '../../constants';
import { Container } from '../../container';
import { getContext } from '../../context';
import { GitCommit, GitLog, Repository } from '../../git/models';
import { searchOperators, SearchOperators, SearchPattern } from '../../git/search';
import { ActionQuickPickItem, QuickPickItemOfT } from '../../quickpicks/items/common';
import { pluralize } from '../../system/string';
import { SearchResultsNode } from '../../views/nodes';
import { ViewsWithRepositoryFolders } from '../../views/viewBase';
import { getSteps } from '../gitCommands.utils';
import {
appendReposToTitle,
PartialStepState,
pickCommitStep,
pickRepositoryStep,
QuickCommand,
QuickCommandButtons,
StepGenerator,
StepResult,
StepResultGenerator,
StepSelection,
StepState,
} from '../quickCommand';
interface Context {
repos: Repository[];
associatedView: ViewsWithRepositoryFolders;
commit: GitCommit | undefined;
hasVirtualFolders: boolean;
resultsKey: string | undefined;
resultsPromise: Promise<GitLog | undefined> | undefined;
title: string;
}
interface State extends Required<SearchPattern> {
repo: string | Repository;
showResultsInSideBar: boolean | SearchResultsNode;
}
export interface SearchGitCommandArgs {
readonly command: 'search' | 'grep';
prefillOnly?: boolean;
state?: Partial<State>;
}
const searchOperatorToTitleMap = new Map<SearchOperators, string>([
['', 'Search by Message'],
['=:', 'Search by Message'],
['message:', 'Search by Message'],
['@:', 'Search by Author'],
['author:', 'Search by Author'],
['#:', 'Search by Commit SHA'],
['commit:', 'Search by Commit SHA'],
['?:', 'Search by File'],
['file:', 'Search by File'],
['~:', 'Search by Changes'],
['change:', 'Search by Changes'],
]);
type SearchStepState<T extends State = State> = ExcludeSome<StepState<T>, 'repo', string>;
export class SearchGitCommand extends QuickCommand<State> {
constructor(container: Container, args?: SearchGitCommandArgs) {
super(container, 'search', 'search', 'Commit Search', {
description: 'aka grep, searches for commits',
});
let counter = 0;
if (args?.state?.repo != null) {
counter++;
}
if (args?.state?.pattern != null && !args.prefillOnly) {
counter++;
}
this.initialState = {
counter: counter,
confirm: false,
...args?.state,
};
}
override get canConfirm(): boolean |
override isMatch(key: string) {
return super.isMatch(key) || key === 'grep';
}
override isFuzzyMatch(name: string) {
return super.isFuzzyMatch(name) || name === 'grep';
}
protected async *steps(state: PartialStepState<State>): StepGenerator {
const context: Context = {
repos: this.container.git.openRepositories,
associatedView: this.container.searchAndCompareView,
commit: undefined,
hasVirtualFolders: getContext<boolean>(ContextKeys.HasVirtualFolders, false),
resultsKey: undefined,
resultsPromise: undefined,
title: this.title,
};
const cfg = this.container.config.gitCommands.search;
if (state.matchAll == null) {
state.matchAll = cfg.matchAll;
}
if (state.matchCase == null) {
state.matchCase = cfg.matchCase;
}
if (state.matchRegex == null) {
state.matchRegex = cfg.matchRegex;
}
if (state.showResultsInSideBar == null) {
state.showResultsInSideBar = cfg.showResultsInSideBar ?? undefined;
}
let skippedStepOne = false;
while (this.canStepsContinue(state)) {
context.title = this.title;
if (state.counter < 1 || state.repo == null || typeof state.repo === 'string') {
skippedStepOne = false;
if (context.repos.length === 1) {
skippedStepOne = true;
if (state.repo == null) {
state.counter++;
}
state.repo = context.repos[0];
} else {
const result = yield* pickRepositoryStep(state, context);
// Always break on the first step (so we will go back)
if (result === StepResult.Break) break;
state.repo = result;
}
}
if (state.counter < 2 || state.pattern == null) {
const result = yield* this.pickSearchOperatorStep(state as SearchStepState, context);
if (result === StepResult.Break) {
// If we skipped the previous step, make sure we back up past it
if (skippedStepOne) {
state.counter--;
}
state.pattern = undefined;
continue;
}
state.pattern = result;
}
const search: SearchPattern = {
pattern: state.pattern,
matchAll: state.matchAll,
matchCase: state.matchCase,
matchRegex: state.matchRegex,
};
const searchKey = SearchPattern.toKey(search);
if (context.resultsPromise == null || context.resultsKey !== searchKey) {
context.resultsPromise = state.repo.searchForCommits(search);
context.resultsKey = searchKey;
}
// eslint-disable-next-line @typescript-eslint/strict-boolean-expressions
if (state.showResultsInSideBar) {
void this.container.searchAndCompareView.search(
state.repo.path,
search,
{
label: { label: `for ${state.pattern}` },
},
context.resultsPromise,
state.showResultsInSideBar instanceof SearchResultsNode ? state.showResultsInSideBar : undefined,
);
break;
}
if (state.counter < 3 || context.commit == null) {
const repoPath = state.repo.path;
const result = yield* pickCommitStep(state as SearchStepState, context, {
ignoreFocusOut: true,
log: await context.resultsPromise,
onDidLoadMore: log => (context.resultsPromise = Promise.resolve(log)),
placeholder: (context, log) =>
log == null
? `No results for ${state.pattern}`
: `${pluralize('result', log.count, {
format: c => (log.hasMore ? `${c}+` : undefined),
})} for ${state.pattern}`,
picked: context.commit?.ref,
showInSideBarCommand: new ActionQuickPickItem(
'$(link-external) Show Results in Side Bar',
() =>
void this.container.searchAndCompareView.search(
repoPath,
search,
{
label: { label: `for ${state.pattern}` },
reveal: {
select: true,
focus: false,
expand: true,
},
},
context.resultsPromise,
),
),
showInSideBarButton: {
button: QuickCommandButtons.ShowResultsInSideBar,
onDidClick: () =>
void this.container.searchAndCompareView.search(
repoPath,
search,
{
label: { label: `for ${state.pattern}` },
reveal: {
select: true,
focus: false,
expand: true,
},
},
context.resultsPromise,
),
},
});
if (result === StepResult.Break) {
state.counter--;
continue;
}
context.commit = result;
}
const result = yield* getSteps(
this.container,
{
command: 'show',
state: {
repo: state.repo,
reference: context.commit,
},
},
this.pickedVia,
);
state.counter--;
if (result === StepResult.Break) {
QuickCommand.endSteps(state);
}
}
return state.counter < 0 ? StepResult.Break : undefined;
}
private *pickSearchOperatorStep(state: SearchStepState, context: Context): StepResultGenerator<string> {
const items: QuickPickItemOfT<SearchOperators>[] = [
{
label: searchOperatorToTitleMap.get('')!,
description: `pattern or message: pattern or =: pattern ${GlyphChars.Dash} use quotes to search for phrases`,
item: 'message:' as const,
},
{
label: searchOperatorToTitleMap.get('author:')!,
description: 'author: pattern or @: pattern',
item: 'author:' as const,
},
{
label: searchOperatorToTitleMap.get('commit:')!,
description: 'commit: sha or #: sha',
item: 'commit:' as const,
},
context.hasVirtualFolders
? undefined
: {
label: searchOperatorToTitleMap.get('file:')!,
description: 'file: glob or ?: glob',
item: 'file:' as const,
},
context.hasVirtualFolders
? undefined
: {
label: searchOperatorToTitleMap.get('change:')!,
description: 'change: pattern or ~: pattern',
item: 'change:' as const,
},
].filter(<T>(i?: T): i is T => i != null);
const matchCaseButton = new QuickCommandButtons.MatchCaseToggle(state.matchCase);
const matchAllButton = new QuickCommandButtons.MatchAllToggle(state.matchAll);
const matchRegexButton = new QuickCommandButtons.MatchRegexToggle(state.matchRegex);
const step = QuickCommand.createPickStep<QuickPickItemOfT<SearchOperators>>({
title: appendReposToTitle(context.title, state, context),
placeholder: 'e.g. "Updates dependencies" author:eamodio',
matchOnDescription: true,
matchOnDetail: true,
additionalButtons: [matchCaseButton, matchAllButton, matchRegexButton],
items: items,
value: state.pattern,
onDidAccept: (quickpick): boolean => {
const pick = quickpick.selectedItems[0];
if (!searchOperators.has(pick.item)) return true;
const value = quickpick.value.trim();
if (value.length === 0 || searchOperators.has(value)) {
quickpick.value = pick.item;
} else {
quickpick.value = `${value} ${pick.item}`;
}
void step.onDidChangeValue!(quickpick);
return false;
},
onDidClickButton: (quickpick, button) => {
if (button === matchCaseButton) {
state.matchCase = !state.matchCase;
matchCaseButton.on = state.matchCase;
} else if (button === matchAllButton) {
state.matchAll = !state.matchAll;
matchAllButton.on = state.matchAll;
} else if (button === matchRegexButton) {
state.matchRegex = !state.matchRegex;
matchRegexButton.on = state.matchRegex;
}
},
onDidChangeValue: (quickpick): boolean => {
const value = quickpick.value.trim();
// Simulate an extra step if we have a value
state.counter = value ? 3 : 2;
const operations = SearchPattern.parseSearchOperations(value);
quickpick.title = appendReposToTitle(
operations.size === 0 || operations.size > 1
? context.title
: `Commit ${searchOperatorToTitleMap.get(operations.keys().next().value)!}`,
state,
context,
);
if (quickpick.value.length === 0) {
quickpick.items = items;
} else {
// If something was typed/selected, keep the quick pick open on focus loss
quickpick.ignoreFocusOut = true;
step.ignoreFocusOut = true;
quickpick.items = [
{
label: 'Search for',
description: quickpick.value,
item: quickpick.value as SearchOperators,
},
];
}
return true;
},
});
const selection: StepSelection<typeof step> = yield step;
if (!QuickCommand.canPickStepContinue(step, state, selection)) {
// Since we simulated a step above, we need to remove it here
state.counter--;
return StepResult.Break;
}
// Since we simulated a step above, we need to remove it here
state.counter--;
return selection[0].item.trim();
}
}
| {
return false;
} | identifier_body |
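searchOperatorToTitleMap above treats the shorthand operators (=:, @:, #:, ?:, ~:) as aliases of the long forms and derives the quick pick title from whichever single operator the user typed. The alias resolution in isolation, as a small Python sketch (tables copied from the map above):

ALIASES = {'': 'message:', '=:': 'message:', '@:': 'author:',
           '#:': 'commit:', '?:': 'file:', '~:': 'change:'}
TITLES = {'message:': 'Search by Message', 'author:': 'Search by Author',
          'commit:': 'Search by Commit SHA', 'file:': 'Search by File',
          'change:': 'Search by Changes'}

def title_for(op):
    op = ALIASES.get(op, op)
    return TITLES.get(op, 'Commit Search')   # fall back to the generic title

assert title_for('@:') == 'Search by Author'
assert title_for('commit:') == 'Search by Commit SHA'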
search.ts | import { ContextKeys, GlyphChars } from '../../constants';
import { Container } from '../../container';
import { getContext } from '../../context';
import { GitCommit, GitLog, Repository } from '../../git/models';
import { searchOperators, SearchOperators, SearchPattern } from '../../git/search';
import { ActionQuickPickItem, QuickPickItemOfT } from '../../quickpicks/items/common';
import { pluralize } from '../../system/string';
import { SearchResultsNode } from '../../views/nodes';
import { ViewsWithRepositoryFolders } from '../../views/viewBase';
import { getSteps } from '../gitCommands.utils';
import {
appendReposToTitle,
PartialStepState,
pickCommitStep,
pickRepositoryStep,
QuickCommand,
QuickCommandButtons,
StepGenerator,
StepResult,
StepResultGenerator,
StepSelection,
StepState,
} from '../quickCommand';
interface Context {
repos: Repository[];
associatedView: ViewsWithRepositoryFolders;
commit: GitCommit | undefined;
hasVirtualFolders: boolean;
resultsKey: string | undefined;
resultsPromise: Promise<GitLog | undefined> | undefined;
title: string;
}
interface State extends Required<SearchPattern> {
repo: string | Repository;
showResultsInSideBar: boolean | SearchResultsNode;
}
export interface SearchGitCommandArgs {
readonly command: 'search' | 'grep';
prefillOnly?: boolean;
state?: Partial<State>;
}
const searchOperatorToTitleMap = new Map<SearchOperators, string>([
['', 'Search by Message'],
['=:', 'Search by Message'],
['message:', 'Search by Message'],
['@:', 'Search by Author'],
['author:', 'Search by Author'],
['#:', 'Search by Commit SHA'],
['commit:', 'Search by Commit SHA'],
['?:', 'Search by File'],
['file:', 'Search by File'],
['~:', 'Search by Changes'],
['change:', 'Search by Changes'],
]);
type SearchStepState<T extends State = State> = ExcludeSome<StepState<T>, 'repo', string>;
export class | extends QuickCommand<State> {
constructor(container: Container, args?: SearchGitCommandArgs) {
super(container, 'search', 'search', 'Commit Search', {
description: 'aka grep, searches for commits',
});
let counter = 0;
if (args?.state?.repo != null) {
counter++;
}
if (args?.state?.pattern != null && !args.prefillOnly) {
counter++;
}
this.initialState = {
counter: counter,
confirm: false,
...args?.state,
};
}
override get canConfirm(): boolean {
return false;
}
override isMatch(key: string) {
return super.isMatch(key) || key === 'grep';
}
override isFuzzyMatch(name: string) {
return super.isFuzzyMatch(name) || name === 'grep';
}
protected async *steps(state: PartialStepState<State>): StepGenerator {
const context: Context = {
repos: this.container.git.openRepositories,
associatedView: this.container.searchAndCompareView,
commit: undefined,
hasVirtualFolders: getContext<boolean>(ContextKeys.HasVirtualFolders, false),
resultsKey: undefined,
resultsPromise: undefined,
title: this.title,
};
const cfg = this.container.config.gitCommands.search;
if (state.matchAll == null) {
state.matchAll = cfg.matchAll;
}
if (state.matchCase == null) {
state.matchCase = cfg.matchCase;
}
if (state.matchRegex == null) {
state.matchRegex = cfg.matchRegex;
}
if (state.showResultsInSideBar == null) {
state.showResultsInSideBar = cfg.showResultsInSideBar ?? undefined;
}
let skippedStepOne = false;
while (this.canStepsContinue(state)) {
context.title = this.title;
if (state.counter < 1 || state.repo == null || typeof state.repo === 'string') {
skippedStepOne = false;
if (context.repos.length === 1) {
skippedStepOne = true;
if (state.repo == null) {
state.counter++;
}
state.repo = context.repos[0];
} else {
const result = yield* pickRepositoryStep(state, context);
// Always break on the first step (so we will go back)
if (result === StepResult.Break) break;
state.repo = result;
}
}
if (state.counter < 2 || state.pattern == null) {
const result = yield* this.pickSearchOperatorStep(state as SearchStepState, context);
if (result === StepResult.Break) {
// If we skipped the previous step, make sure we back up past it
if (skippedStepOne) {
state.counter--;
}
state.pattern = undefined;
continue;
}
state.pattern = result;
}
const search: SearchPattern = {
pattern: state.pattern,
matchAll: state.matchAll,
matchCase: state.matchCase,
matchRegex: state.matchRegex,
};
const searchKey = SearchPattern.toKey(search);
if (context.resultsPromise == null || context.resultsKey !== searchKey) {
context.resultsPromise = state.repo.searchForCommits(search);
context.resultsKey = searchKey;
}
// eslint-disable-next-line @typescript-eslint/strict-boolean-expressions
if (state.showResultsInSideBar) {
void this.container.searchAndCompareView.search(
state.repo.path,
search,
{
label: { label: `for ${state.pattern}` },
},
context.resultsPromise,
state.showResultsInSideBar instanceof SearchResultsNode ? state.showResultsInSideBar : undefined,
);
break;
}
if (state.counter < 3 || context.commit == null) {
const repoPath = state.repo.path;
const result = yield* pickCommitStep(state as SearchStepState, context, {
ignoreFocusOut: true,
log: await context.resultsPromise,
onDidLoadMore: log => (context.resultsPromise = Promise.resolve(log)),
placeholder: (context, log) =>
log == null
? `No results for ${state.pattern}`
: `${pluralize('result', log.count, {
format: c => (log.hasMore ? `${c}+` : undefined),
})} for ${state.pattern}`,
picked: context.commit?.ref,
showInSideBarCommand: new ActionQuickPickItem(
'$(link-external) Show Results in Side Bar',
() =>
void this.container.searchAndCompareView.search(
repoPath,
search,
{
label: { label: `for ${state.pattern}` },
reveal: {
select: true,
focus: false,
expand: true,
},
},
context.resultsPromise,
),
),
showInSideBarButton: {
button: QuickCommandButtons.ShowResultsInSideBar,
onDidClick: () =>
void this.container.searchAndCompareView.search(
repoPath,
search,
{
label: { label: `for ${state.pattern}` },
reveal: {
select: true,
focus: false,
expand: true,
},
},
context.resultsPromise,
),
},
});
if (result === StepResult.Break) {
state.counter--;
continue;
}
context.commit = result;
}
const result = yield* getSteps(
this.container,
{
command: 'show',
state: {
repo: state.repo,
reference: context.commit,
},
},
this.pickedVia,
);
state.counter--;
if (result === StepResult.Break) {
QuickCommand.endSteps(state);
}
}
return state.counter < 0 ? StepResult.Break : undefined;
}
private *pickSearchOperatorStep(state: SearchStepState, context: Context): StepResultGenerator<string> {
const items: QuickPickItemOfT<SearchOperators>[] = [
{
label: searchOperatorToTitleMap.get('')!,
description: `pattern or message: pattern or =: pattern ${GlyphChars.Dash} use quotes to search for phrases`,
item: 'message:' as const,
},
{
label: searchOperatorToTitleMap.get('author:')!,
description: 'author: pattern or @: pattern',
item: 'author:' as const,
},
{
label: searchOperatorToTitleMap.get('commit:')!,
description: 'commit: sha or #: sha',
item: 'commit:' as const,
},
context.hasVirtualFolders
? undefined
: {
label: searchOperatorToTitleMap.get('file:')!,
description: 'file: glob or ?: glob',
item: 'file:' as const,
},
context.hasVirtualFolders
? undefined
: {
label: searchOperatorToTitleMap.get('change:')!,
description: 'change: pattern or ~: pattern',
item: 'change:' as const,
},
].filter(<T>(i?: T): i is T => i != null);
const matchCaseButton = new QuickCommandButtons.MatchCaseToggle(state.matchCase);
const matchAllButton = new QuickCommandButtons.MatchAllToggle(state.matchAll);
const matchRegexButton = new QuickCommandButtons.MatchRegexToggle(state.matchRegex);
const step = QuickCommand.createPickStep<QuickPickItemOfT<SearchOperators>>({
title: appendReposToTitle(context.title, state, context),
placeholder: 'e.g. "Updates dependencies" author:eamodio',
matchOnDescription: true,
matchOnDetail: true,
additionalButtons: [matchCaseButton, matchAllButton, matchRegexButton],
items: items,
value: state.pattern,
onDidAccept: (quickpick): boolean => {
const pick = quickpick.selectedItems[0];
if (!searchOperators.has(pick.item)) return true;
const value = quickpick.value.trim();
if (value.length === 0 || searchOperators.has(value)) {
quickpick.value = pick.item;
} else {
quickpick.value = `${value} ${pick.item}`;
}
void step.onDidChangeValue!(quickpick);
return false;
},
onDidClickButton: (quickpick, button) => {
if (button === matchCaseButton) {
state.matchCase = !state.matchCase;
matchCaseButton.on = state.matchCase;
} else if (button === matchAllButton) {
state.matchAll = !state.matchAll;
matchAllButton.on = state.matchAll;
} else if (button === matchRegexButton) {
state.matchRegex = !state.matchRegex;
matchRegexButton.on = state.matchRegex;
}
},
onDidChangeValue: (quickpick): boolean => {
const value = quickpick.value.trim();
// Simulate an extra step if we have a value
state.counter = value ? 3 : 2;
const operations = SearchPattern.parseSearchOperations(value);
quickpick.title = appendReposToTitle(
operations.size === 0 || operations.size > 1
? context.title
: `Commit ${searchOperatorToTitleMap.get(operations.keys().next().value)!}`,
state,
context,
);
if (quickpick.value.length === 0) {
quickpick.items = items;
} else {
// If something was typed/selected, keep the quick pick open on focus loss
quickpick.ignoreFocusOut = true;
step.ignoreFocusOut = true;
quickpick.items = [
{
label: 'Search for',
description: quickpick.value,
item: quickpick.value as SearchOperators,
},
];
}
return true;
},
});
const selection: StepSelection<typeof step> = yield step;
if (!QuickCommand.canPickStepContinue(step, state, selection)) {
// Since we simulated a step above, we need to remove it here
state.counter--;
return StepResult.Break;
}
// Since we simulated a step above, we need to remove it here
state.counter--;
return selection[0].item.trim();
}
}
| SearchGitCommand | identifier_name |
lib.rs | #![crate_name = "librespot"]
#![cfg_attr(feature = "cargo-clippy", allow(unused_io_amount))]
#[macro_use] extern crate error_chain;
#[macro_use] extern crate futures;
#[macro_use] extern crate lazy_static;
#[macro_use] extern crate log;
#[macro_use] extern crate serde_json;
#[macro_use] extern crate serde_derive;
extern crate base64;
extern crate bit_set;
extern crate byteorder;
extern crate crypto;
extern crate getopts;
extern crate hyper;
extern crate linear_map;
extern crate mdns;
extern crate num_bigint;
extern crate num_integer;
extern crate num_traits;
extern crate protobuf;
extern crate rand;
extern crate rpassword;
extern crate serde;
extern crate shannon;
extern crate tempfile;
extern crate tokio_core;
extern crate tokio_proto;
extern crate url;
pub extern crate librespot_protocol as protocol;
#[cfg(not(feature = "with-tremor"))]
extern crate vorbis;
#[cfg(feature = "with-tremor")] | extern crate tremor as vorbis;
#[cfg(feature = "alsa-backend")]
extern crate alsa;
#[cfg(feature = "portaudio")]
extern crate portaudio;
#[cfg(feature = "libpulse-sys")]
extern crate libpulse_sys;
#[macro_use] mod component;
pub mod album_cover;
pub mod apresolve;
pub mod audio_backend;
pub mod audio_decrypt;
pub mod audio_file;
pub mod audio_key;
pub mod authentication;
pub mod cache;
pub mod channel;
pub mod diffie_hellman;
pub mod mercury;
pub mod metadata;
pub mod player;
pub mod session;
pub mod util;
pub mod version;
pub mod mixer;
include!(concat!(env!("OUT_DIR"), "/lib.rs")); | random_line_split |
|
keras_vectorizer.py | import sys
import numpy as np
from normalization import tokenize
from helpers import ahash
class KerasVectorizer():
'''
Convert list of documents to numpy array for input into Keras model | def __init__(self, n_features=100000, maxlen=None, maxper=100, hash_function=ahash):
self.maxlen = maxlen
self.maxper = maxper
self.n_features = n_features
self.hash_function = hash_function
def _exact_hash(self, word, n_features):
return self.token_lookup.get(word, 0)
def fit_transform(self, raw_documents, y=None, suffix='', verbose=True):
if verbose:
print >> sys.stderr, 'splitting raw documents'
# Some way to print progress?
tokens = map(self._split_function, raw_documents)
if self.maxlen:
maxlen = self.maxlen
else:
maxlen = int(np.percentile(map(len, tokens), self.maxper))
self.maxlen = maxlen
X = np.zeros((len(tokens), maxlen))
for i,t in enumerate(tokens):
if verbose:
if not i % 10000:
print >> sys.stderr, 'processed %d tokens' % i
if len(t) > 0:
X[i,-len(t):] = map(lambda x: self.hash_function(x + suffix, self.n_features), t[:maxlen])
return X
class KerasCharacterVectorizer(KerasVectorizer):
'''
Split a string into characters
'''
def _split_function(self, doc):
return list(doc)
class KerasTokenVectorizer(KerasVectorizer):
'''
Split a string into words,
'''
def _split_function(self, doc):
return tokenize(doc, keep_punctuation=True)
class KerasPretokenizedVectorizer(KerasVectorizer):
def _split_function(self, doc):
return doc
'''
from keras_vectorizer import KerasTokenVectorizer, KerasCharacterVectorizer
ktv = KerasTokenVectorizer()
ktv.fit_transform(['this is a test'])
ktv.fit_transform(['this is a test', 'this is a another test'])
ktv = KerasTokenVectorizer(maxlen=2)
ktv.fit_transform(['this is a test', 'this is a another test'])
kcv = KerasCharacterVectorizer()
kcv.fit_transform(['something', 'else'])
''' | ''' | random_line_split |
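fit_transform above left-pads each hashed token sequence into a fixed-width matrix (X[i,-len(t):] = ...), the usual layout for sequence models in Keras. A self-contained sketch of that padding step; the lambda is a stand-in for the imported ahash, and Python's built-in hash is process-salted, so a real pipeline needs a stable hash:

import numpy as np

def hash_pad(docs, n_features=1000, maxlen=4):
    h = lambda tok: (hash(tok) % (n_features - 1)) + 1   # 0 stays reserved for padding
    X = np.zeros((len(docs), maxlen))
    for i, doc in enumerate(docs):
        toks = doc.split()[:maxlen]
        if toks:
            X[i, -len(toks):] = [h(t) for t in toks]     # right-aligned, zeros pad the left
    return X

print(hash_pad(['a b', 'a b c d e']))   # first row keeps two leading zeros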
keras_vectorizer.py | import sys
import numpy as np
from normalization import tokenize
from helpers import ahash
class KerasVectorizer():
'''
Convert list of documents to numpy array for input into Keras model
'''
def __init__(self, n_features=100000, maxlen=None, maxper=100, hash_function=ahash):
self.maxlen = maxlen
self.maxper = maxper
self.n_features = n_features
self.hash_function = hash_function
def _exact_hash(self, word, n_features):
return self.token_lookup.get(word, 0)
def fit_transform(self, raw_documents, y=None, suffix='', verbose=True):
if verbose:
print >> sys.stderr, 'splitting raw documents'
# Some way to print progress?
tokens = map(self._split_function, raw_documents)
if self.maxlen:
maxlen = self.maxlen
else:
maxlen = int(np.percentile(map(len, tokens), self.maxper))
self.maxlen = maxlen
X = np.zeros((len(tokens), maxlen))
for i,t in enumerate(tokens):
if verbose:
if not i % 10000:
print >> sys.stderr, 'processed %d tokens' % i
if len(t) > 0:
X[i,-len(t):] = map(lambda x: self.hash_function(x + suffix, self.n_features), t[:maxlen])
return X
class KerasCharacterVectorizer(KerasVectorizer):
'''
Split a string into characters
'''
def _split_function(self, doc):
return list(doc)
class KerasTokenVectorizer(KerasVectorizer):
'''
Split a string into words,
'''
def _split_function(self, doc):
return tokenize(doc, keep_punctuation=True)
class | (KerasVectorizer):
def _split_function(self, doc):
return doc
'''
from keras_vectorizer import KerasTokenVectorizer, KerasCharacterVectorizer
ktv = KerasTokenVectorizer()
ktv.fit_transform(['this is a test'])
ktv.fit_transform(['this is a test', 'this is a another test'])
ktv = KerasTokenVectorizer(maxlen=2)
ktv.fit_transform(['this is a test', 'this is a another test'])
kcv = KerasCharacterVectorizer()
kcv.fit_transform(['something', 'else'])
'''
| KerasPretokenizedVectorizer | identifier_name |
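The hashing trick these vectorizers rely on maps an unbounded vocabulary into n_features buckets at the cost of collisions; with m tokens and n buckets the colliding fraction is roughly 1 - exp(-m/n). An empirical check of that estimate, using a stable md5 bucket function as a stand-in for the project's hash:

import hashlib

def bucket(tok, n):
    return int(hashlib.md5(tok.encode()).hexdigest(), 16) % n

def collision_rate(vocab, n):
    seen = {}
    for tok in vocab:
        seen.setdefault(bucket(tok, n), []).append(tok)
    collided = sum(len(v) for v in seen.values() if len(v) > 1)
    return collided / len(vocab)

vocab = ['tok%d' % i for i in range(10000)]
print(collision_rate(vocab, 100000))   # close to 1 - exp(-0.1), i.e. about 10%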
keras_vectorizer.py | import sys
import numpy as np
from normalization import tokenize
from helpers import ahash
class KerasVectorizer():
'''
Convert list of documents to numpy array for input into Keras model
'''
def __init__(self, n_features=100000, maxlen=None, maxper=100, hash_function=ahash):
self.maxlen = maxlen
self.maxper = maxper
self.n_features = n_features
self.hash_function = hash_function
def _exact_hash(self, word, n_features):
return self.token_lookup.get(word, 0)
def fit_transform(self, raw_documents, y=None, suffix='', verbose=True):
if verbose:
print >> sys.stderr, 'splitting raw documents'
# Some way to print progress?
tokens = map(self._split_function, raw_documents)
if self.maxlen:
maxlen = self.maxlen
else:
maxlen = int(np.percentile(map(len, tokens), self.maxper))
self.maxlen = maxlen
X = np.zeros((len(tokens), maxlen))
for i,t in enumerate(tokens):
if verbose:
if not i % 10000:
|
if len(t) > 0:
X[i,-len(t):] = map(lambda x: self.hash_function(x + suffix, self.n_features), t[:maxlen])
return X
class KerasCharacterVectorizer(KerasVectorizer):
'''
Split a string into characters
'''
def _split_function(self, doc):
return list(doc)
class KerasTokenVectorizer(KerasVectorizer):
'''
Split a string into words,
'''
def _split_function(self, doc):
return tokenize(doc, keep_punctuation=True)
class KerasPretokenizedVectorizer(KerasVectorizer):
def _split_function(self, doc):
return doc
'''
from keras_vectorizer import KerasTokenVectorizer, KerasCharacterVectorizer
ktv = KerasTokenVectorizer()
ktv.fit_transform(['this is a test'])
ktv.fit_transform(['this is a test', 'this is a another test'])
ktv = KerasTokenVectorizer(maxlen=2)
ktv.fit_transform(['this is a test', 'this is a another test'])
kcv = KerasCharacterVectorizer()
kcv.fit_transform(['something', 'else'])
'''
| print >> sys.stderr, 'processed %d tokens' % i | conditional_block |
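When maxlen is not supplied, fit_transform above sizes the matrix from np.percentile of the token counts (maxper defaults to 100, i.e. the longest document); lowering maxper trades truncation of long outliers for a much smaller matrix. The trade-off on a toy length distribution:

import numpy as np

lengths = np.array([5, 7, 8, 9, 10, 12, 15, 400])   # one pathological document

for maxper in (100, 90):
    maxlen = int(np.percentile(lengths, maxper))
    truncated = int((lengths > maxlen).sum())
    print('maxper=%d -> maxlen=%d, truncated=%d' % (maxper, maxlen, truncated))
# maxper=100 keeps everything but allocates width 400;
# maxper=90 gives maxlen 130 here and truncates only the outlier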
keras_vectorizer.py | import sys
import numpy as np
from normalization import tokenize
from helpers import ahash
class KerasVectorizer():
'''
Convert list of documents to numpy array for input into Keras model
'''
def __init__(self, n_features=100000, maxlen=None, maxper=100, hash_function=ahash):
self.maxlen = maxlen
self.maxper = maxper
self.n_features = n_features
self.hash_function = hash_function
def _exact_hash(self, word, n_features):
return self.token_lookup.get(word, 0)
def fit_transform(self, raw_documents, y=None, suffix='', verbose=True):
if verbose:
print >> sys.stderr, 'splitting raw documents'
# Some way to print progress?
tokens = map(self._split_function, raw_documents)
if self.maxlen:
maxlen = self.maxlen
else:
maxlen = int(np.percentile(map(len, tokens), self.maxper))
self.maxlen = maxlen
X = np.zeros((len(tokens), maxlen))
for i,t in enumerate(tokens):
if verbose:
if not i % 10000:
print >> sys.stderr, 'processed %d tokens' % i
if len(t) > 0:
X[i,-len(t):] = map(lambda x: self.hash_function(x + suffix, self.n_features), t[:maxlen])
return X
class KerasCharacterVectorizer(KerasVectorizer):
'''
Split a string into characters
'''
def _split_function(self, doc):
|
class KerasTokenVectorizer(KerasVectorizer):
'''
Split a string into words,
'''
def _split_function(self, doc):
return tokenize(doc, keep_punctuation=True)
class KerasPretokenizedVectorizer(KerasVectorizer):
def _split_function(self, doc):
return doc
'''
from keras_vectorizer import KerasTokenVectorizer, KerasCharacterVectorizer
ktv = KerasTokenVectorizer()
ktv.fit_transform(['this is a test'])
ktv.fit_transform(['this is a test', 'this is a another test'])
ktv = KerasTokenVectorizer(maxlen=2)
ktv.fit_transform(['this is a test', 'this is a another test'])
kcv = KerasCharacterVectorizer()
kcv.fit_transform(['something', 'else'])
'''
| return list(doc) | identifier_body |
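Each subclass above overrides only _split_function, so supporting a new input format is a one-method job. For illustration, a hypothetical whitespace-only variant (not part of the original module) would be:

class KerasWhitespaceVectorizer(KerasVectorizer):
    '''
    Split a string on whitespace only, keeping punctuation attached,
    e.g. "don't stop" -> ["don't", "stop"].
    '''
    def _split_function(self, doc):
        return doc.split()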
testNightwatchApi.js | var Api = require('../../lib/api.js');
module.exports = {
setUp: function (callback) {
this.client = require('../nightwatch.js').init();
callback();
},
testAddCommand : function(test) {
var client = this.client;
var api = client.api;
client.on('selenium:session_create', function(sessionId) {
test.equals(api.sessionId, sessionId);
test.deepEqual(api.capabilities, {
'javascriptEnabled': true,
'browserName': 'firefox'
});
test.done();
}); | return 'testCommand action';
};
test.deepEqual(api.globals, {
myGlobal : 'test'
});
Api.addCommand('testCommand', command, this.client);
test.ok('testCommand' in this.client.api, 'Test if the command was added');
test.throws(function() {
Api.addCommand('testCommand', command, client);
});
},
testAddCustomCommand : function(test) {
var client = this.client;
client.on('selenium:session_create', function(sessionId) {
test.done();
});
client.options.custom_commands_path = './extra';
Api.init(client);
Api.loadCustomCommands();
test.ok('customCommand' in this.client.api, 'Test if the custom command was added');
test.ok('customCommandConstructor' in this.client.api, 'Test if the custom command with constructor style was added');
var queue = client.enqueueCommand('customCommandConstructor', []);
var command = queue.currentNode.children[0];
test.equal(command.name, 'customCommandConstructor');
test.equal(command.context.client, client, 'Command should contain a reference to main client instance.');
},
testLocatorStrategy : function(test) {
var client = this.client;
client.on('selenium:session_create', function(sessionId) {
test.done();
});
Api.init(client);
client.api.useXpath();
test.equal(client.locateStrategy, 'xpath');
client.api.useCss();
test.equal(client.locateStrategy, 'css selector');
},
tearDown : function(callback) {
this.client = null;
// clean up
callback();
}
}; | var command = function() { | random_line_split |
MessagesNoticesResponse.ts | /* istanbul ignore file */
/* tslint:disable */
/* eslint-disable */
import type { Card } from './Card';
import type { Error } from './Error';
import type { Image } from './Image';
import type { MessagesNotice } from './MessagesNotice';
export type MessagesNoticesResponse = {
messagesNotices: Array<MessagesNotice>;
/**
* MessagesNotice
*/
description?: string;
/**
* MessagesNotice
*/
summary?: string;
image?: Image;
/**
* Square icon png url
*/
avatar?: string;
/**
* Ex: ion-ios-person
*/
ionIcon?: string;
/**
* Embeddable list of study summaries with explanation at the top
*/
html?: string;
/**
* Array of error objects with message property
*/
errors?: Array<Error>;
/**
* Error message
*/
error?: string;
/**
* Error message
*/
errorMessage?: string;
/**
* ex. OK or ERROR
*/
status?: string;
/**
* true or false
*/
success?: boolean;
/**
* Response code such as 200
*/
code?: number;
/**
* A super neat url you might want to share with your users! | */
link?: string;
card?: Card;
} | random_line_split |
|
legacy.ts | import * as Babylon from "../index";
import * as DebugImport from "../Debug/index";
declare var global: any;
/**
* Legacy support, defining window.BABYLON (global variable).
*
* This is the entry point for the UMD module.
* The entry point for a future ESM package should be index.ts
*/
var globalObject = (typeof global !== 'undefined') ? global : ((typeof window !== 'undefined') ? window : undefined);
if (typeof globalObject !== "undefined") |
export * from "../index";
export const Debug = {
AxesViewer: DebugImport.AxesViewer,
BoneAxesViewer: DebugImport.BoneAxesViewer,
PhysicsViewer: DebugImport.PhysicsViewer,
SkeletonViewer: DebugImport.SkeletonViewer,
}; | {
(<any>globalObject).BABYLON = BABYLON;
(<any>globalObject).BABYLON = (<any>globalObject).BABYLON || {};
var BABYLON = (<any>globalObject).BABYLON;
BABYLON.Debug = BABYLON.Debug || {};
const keys = [];
for (var key in DebugImport) {
BABYLON.Debug[key] = (<any>DebugImport)[key];
keys.push(key);
}
for (var key in Babylon) {
BABYLON[key] = (<any>Babylon)[key];
}
} | conditional_block |
legacy.ts | import * as Babylon from "../index";
import * as DebugImport from "../Debug/index";
declare var global: any;
/**
* Legacy support, defining window.BABYLON (global variable).
*
* This is the entry point for the UMD module.
* The entry point for a future ESM package should be index.ts
*/
var globalObject = (typeof global !== 'undefined') ? global : ((typeof window !== 'undefined') ? window : undefined);
if (typeof globalObject !== "undefined") {
(<any>globalObject).BABYLON = BABYLON;
(<any>globalObject).BABYLON = (<any>globalObject).BABYLON || {};
var BABYLON = (<any>globalObject).BABYLON;
BABYLON.Debug = BABYLON.Debug || {};
const keys = [];
for (var key in DebugImport) {
BABYLON.Debug[key] = (<any>DebugImport)[key];
keys.push(key);
}
for (var key in Babylon) {
BABYLON[key] = (<any>Babylon)[key];
| }
export * from "../index";
export const Debug = {
AxesViewer: DebugImport.AxesViewer,
BoneAxesViewer: DebugImport.BoneAxesViewer,
PhysicsViewer: DebugImport.PhysicsViewer,
SkeletonViewer: DebugImport.SkeletonViewer,
}; | }
| random_line_split |
Confirm.js | /**
* Confirm Component
*
* Same as window.confirm but using Filter component and divs.
*
 * @param text The text to confirm.
* @param onOK (optional) Javascript function to call when OK button is clicked.
* Make sure that it is a String!
* @param onCancel (optional) Javascript function to call when Cancel button is clicked.
* Make sure that it is a String!
* @return a Confirm object
*/
function Confirm(text, onOK, onCancel) | {
this.id = 'zuiConfirm'; // getElementById id of this DOM
this.DOM = init();
window.dialog = this;
function init() {
window.filter = new Filter(); // create new filter at window.filter
var h1 = createHeader('Confirm', id, window.filter);
var p1 = document.createElement('P');
p1.className = 'zuiAlertText';
p1.innerHTML = text;
var ok = createButton('OK', id, window.filter, onOK);
var cancel = createButton('Cancel', id, window.filter, onCancel);
if(text.match(/\?$/)) {
ok.value = LABEL_YES;
cancel.value = LABEL_NO;
}
var p2 = document.createElement('P');
p2.className = 'center';
p2.appendChild(ok);
p2.appendChild(cancel);
var container = document.createElement('DIV');
container.className = 'zuiDialog';
container.id = id;
container.onkeydown = function(e) {
e = e || event;
if(e.keyCode==ESC_KEY) {
removeObject(id, window.filter);
}
}
container.style.display = 'block';
container.appendChild(h1);
container.appendChild(p1);
container.appendChild(p2);
document.body.insertBefore(container, document.body.firstChild);
centerDiv(container);
return container;
}
} | identifier_body |
|
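One behaviour worth calling out in init() above: when the prompt ends in a question mark, the OK/Cancel captions are swapped for the LABEL_YES/LABEL_NO constants. The rule in isolation, as a tiny Python sketch with the label constants assumed:

import re

LABEL_YES, LABEL_NO = 'Yes', 'No'

def button_labels(text):
    if re.search(r'\?$', text):          # mirrors text.match(/\?$/) above
        return LABEL_YES, LABEL_NO
    return 'OK', 'Cancel'

assert button_labels('Delete this file?') == ('Yes', 'No')
assert button_labels('Deletion complete') == ('OK', 'Cancel')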
Confirm.js | /**
* Confirm Component
*
* Same as window.confirm but using Filter component and divs.
*
 * @param text The text to confirm.
* @param onOK (optional) Javascript function to call when OK button is clicked.
* Make sure that it is a String!
* @param onCancel (optional) Javascript function to call when Cancel button is clicked.
* Make sure that it is a String!
* @return a Confirm object
*/
function | (text, onOK, onCancel) {
this.id = 'zuiConfirm'; // getElementById id of this DOM
this.DOM = init();
window.dialog = this;
function init() {
window.filter = new Filter(); // create new filter at window.filter
var h1 = createHeader('Confirm', id, window.filter);
var p1 = document.createElement('P');
p1.className = 'zuiAlertText';
p1.innerHTML = text;
var ok = createButton('OK', id, window.filter, onOK);
var cancel = createButton('Cancel', id, window.filter, onCancel);
if(text.match(/\?$/)) {
ok.value = LABEL_YES;
cancel.value = LABEL_NO;
}
var p2 = document.createElement('P');
p2.className = 'center';
p2.appendChild(ok);
p2.appendChild(cancel);
var container = document.createElement('DIV');
container.className = 'zuiDialog';
container.id = id;
container.onkeydown = function(e) {
e = e || event;
if(e.keyCode==ESC_KEY) {
removeObject(id, window.filter);
}
}
container.style.display = 'block';
container.appendChild(h1);
container.appendChild(p1);
container.appendChild(p2);
document.body.insertBefore(container, document.body.firstChild);
centerDiv(container);
return container;
}
} | Confirm | identifier_name |
Confirm.js | /**
* Confirm Component
*
* Same as window.confirm but using Filter component and divs.
*
 * @param text The text to confirm.
* @param onOK (optional) Javascript function to call when OK button is clicked.
* Make sure that it is a String!
* @param onCancel (optional) Javascript function to call when Cancel button is clicked.
* Make sure that it is a String!
* @return a Confirm object
*/
function Confirm(text, onOK, onCancel) {
this.id = 'zuiConfirm'; // getElementById id of this DOM
this.DOM = init();
window.dialog = this;
function init() {
window.filter = new Filter(); // create new filter at window.filter
var h1 = createHeader('Confirm', id, window.filter);
var p1 = document.createElement('P');
p1.className = 'zuiAlertText';
p1.innerHTML = text;
var ok = createButton('OK', id, window.filter, onOK);
var cancel = createButton('Cancel', id, window.filter, onCancel);
if(text.match(/\?$/)) |
var p2 = document.createElement('P');
p2.className = 'center';
p2.appendChild(ok);
p2.appendChild(cancel);
var container = document.createElement('DIV');
container.className = 'zuiDialog';
container.id = id;
container.onkeydown = function(e) {
e = e || event;
if(e.keyCode==ESC_KEY) {
removeObject(id, window.filter);
}
}
container.style.display = 'block';
container.appendChild(h1);
container.appendChild(p1);
container.appendChild(p2);
document.body.insertBefore(container, document.body.firstChild);
centerDiv(container);
return container;
}
} | {
ok.value = LABEL_YES;
cancel.value = LABEL_NO;
} | conditional_block |
Confirm.js | /**
* Confirm Component
*
* Same as window.confirm but using Filter component and divs.
*
 * @param text The text to confirm.
* @param onOK (optional) Javascript function to call when OK button is clicked.
* Make sure that it is a String!
* @param onCancel (optional) Javascript function to call when Cancel button is clicked.
* Make sure that it is a String!
* @return a Confirm object
*/
function Confirm(text, onOK, onCancel) {
this.id = 'zuiConfirm'; // getElementById id of this DOM
this.DOM = init();
| window.filter = new Filter(); // create new filter at window.filter
var h1 = createHeader('Confirm', id, window.filter);
var p1 = document.createElement('P');
p1.className = 'zuiAlertText';
p1.innerHTML = text;
var ok = createButton('OK', id, window.filter, onOK);
var cancel = createButton('Cancel', id, window.filter, onCancel);
if(text.match(/\?$/)) {
ok.value = LABEL_YES;
cancel.value = LABEL_NO;
}
var p2 = document.createElement('P');
p2.className = 'center';
p2.appendChild(ok);
p2.appendChild(cancel);
var container = document.createElement('DIV');
container.className = 'zuiDialog';
container.id = id;
container.onkeydown = function(e) {
e = e || event;
if(e.keyCode==ESC_KEY) {
removeObject(id, window.filter);
}
}
container.style.display = 'block';
container.appendChild(h1);
container.appendChild(p1);
container.appendChild(p2);
document.body.insertBefore(container, document.body.firstChild);
centerDiv(container);
return container;
}
} | window.dialog = this;
function init() {
| random_line_split |
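The dialog's onkeydown handler above dismisses the dialog when e.keyCode equals ESC_KEY (27 in classic DOM code). The same dispatch pattern sketched in Python, with a hypothetical close callback standing in for removeObject:

ESC_KEY = 27   # classic DOM keyCode for Escape

def make_keydown_handler(close_dialog):
    def on_keydown(event):
        if event.get('keyCode') == ESC_KEY:
            close_dialog()               # stands in for removeObject(id, window.filter)
    return on_keydown

closed = []
handler = make_keydown_handler(lambda: closed.append(True))
handler({'keyCode': 27})
assert closed == [True]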
lnbin.py | import numpy as np
#x must be a np array
def lnbin(x, BinNum):
"""
Logarithmically bins a numpy array, returns (midpoints, Freq)
	This function takes the input of a data vector x, which is to be binned;
	it also takes in the number of bins one would like the data binned into. The
	output is two vectors, one containing the normalised frequency of each bin
	(Freq), the other, the midpoint of each bin (midpts).
	Added an error to the binned frequency: eFreq (as of June 30 2010). If this
	option is not required, just call the function without including the third
	output; i.e.: [midpts Freq]=lnbin(x,BinNum).
Updated 2/6/14 to change the min to scale automatically
"""
if type(x) != np.ndarray:
try:
x = np.array(x)
except:
print 'Improper input format!'
raise
x = np.sort(x)
i = 0
while x[i] <= 0:
|
percent_binned = float((x.size-(i+1))) / x.size*100
#print 'Percentage of input vec binned {}'.format(percent_binned)
FPT = x[i:]
LFPT = np.log(FPT)
max1 = np.log( np.ceil(np.amax(FPT)))
#min1 = 1
min1 = np.log(np.floor(np.min(FPT)))
LFreq = np.zeros((BinNum, 1))
LTime = np.zeros((BinNum, 1))
Lends = np.zeros((BinNum, 2))
step = (max1-min1) / BinNum
#LOG Binning Data ###########################
for i in range(FPT.size):
for k in range(BinNum):
if( k*step+min1 <= LFPT[i] and LFPT[i] < (k+1)*step+min1):
LFreq[k] += 1 #check LFreq on the first bin
LTime[k] = (k+1)*step-(0.5*step)+min1
Lends[k, 0] = k*step+min1
Lends[k, 1] = (k+1)*step+min1
ends = np.exp(Lends)
widths = ends[:,1] - ends[:,0]
Freq = LFreq.T / widths / x.size
eFreq = 1.0 / np.sqrt(LFreq) * Freq
midpts = np.exp(LTime)
return (midpts[:,0], Freq.T[:,0]) | i += 1 | conditional_block |
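A quick usage sketch for lnbin on heavy-tailed data: on log-log axes the binned frequencies of a power law fall on a straight line, which is the main reason to bin logarithmically at all. The Pareto sample and slope check below are illustrative, not from the original module:

import numpy as np

np.random.seed(0)
x = np.random.pareto(2.5, 10000) + 1.0     # classical Pareto tail, density ~ x**-3.5

midpts, freq = lnbin(x, 20)
mask = freq > 0
slope = np.polyfit(np.log(midpts[mask]), np.log(freq[mask]), 1)[0]
print(slope)   # should land near -3.5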
lnbin.py | import numpy as np
#x must be a np array
def lnbin(x, BinNum):
| """
Logarithmically bins a numpy array, returns (midpoints, Freq)
	This function takes the input of a data vector x, which is to be binned;
	it also takes in the number of bins one would like the data binned into. The
	output is two vectors, one containing the normalised frequency of each bin
	(Freq), the other, the midpoint of each bin (midpts).
	Added an error to the binned frequency: eFreq (as of June 30 2010). If this
	option is not required, just call the function without including the third
	output; i.e.: [midpts Freq]=lnbin(x,BinNum).
Updated 2/6/14 to change the min to scale automatically
"""
if type(x) != np.ndarray:
try:
x = np.array(x)
except:
print 'Improper input format!'
raise
x = np.sort(x)
i = 0
while x[i] <= 0:
i += 1
percent_binned = float((x.size-(i+1))) / x.size*100
#print 'Percentage of input vec binned {}'.format(percent_binned)
FPT = x[i:]
LFPT = np.log(FPT)
max1 = np.log( np.ceil(np.amax(FPT)))
#min1 = 1
min1 = np.log(np.floor(np.min(FPT)))
LFreq = np.zeros((BinNum, 1))
LTime = np.zeros((BinNum, 1))
Lends = np.zeros((BinNum, 2))
step = (max1-min1) / BinNum
#LOG Binning Data ###########################
for i in range(FPT.size):
for k in range(BinNum):
if( k*step+min1 <= LFPT[i] and LFPT[i] < (k+1)*step+min1):
LFreq[k] += 1 #check LFreq on the first bin
LTime[k] = (k+1)*step-(0.5*step)+min1
Lends[k, 0] = k*step+min1
Lends[k, 1] = (k+1)*step+min1
ends = np.exp(Lends)
widths = ends[:,1] - ends[:,0]
Freq = LFreq.T / widths / x.size
eFreq = 1.0 / np.sqrt(LFreq) * Freq
midpts = np.exp(LTime)
return (midpts[:,0], Freq.T[:,0]) | identifier_body |
|
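The normalisation above (Freq = LFreq.T / widths / x.size) divides each count by the bin's linear width and the sample size, so Freq behaves like a density and integrates to roughly one. A standalone check of that identity on uniform data; the width reconstruction is approximate because of the floor/ceil applied to the range:

import numpy as np

x = np.random.uniform(1.0, 100.0, 5000)
midpts, freq = lnbin(x, 15)

r = np.exp((np.log(100.0) - np.log(1.0)) / 15)        # constant edge ratio
widths = midpts * (np.sqrt(r) - 1.0 / np.sqrt(r))     # linear width of each log bin
print(np.sum(freq * widths))   # close to 1.0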
lnbin.py | import numpy as np
#x must be a np array
def lnbin(x, BinNum):
"""
Logarithmically bins a numpy array, returns (midpoints, Freq)
	This function takes the input of a data vector x, which is to be binned;
	it also takes in the number of bins one would like the data binned into. The
	output is two vectors, one containing the normalised frequency of each bin
	(Freq), the other, the midpoint of each bin (midpts).
	Added an error to the binned frequency: eFreq (as of June 30 2010). If this
option is not required, just call the function without including the third out | if type(x) != np.ndarray:
try:
x = np.array(x)
except:
print 'Improper input format!'
raise
x = np.sort(x)
i = 0
while x[i] <= 0:
i += 1
percent_binned = float((x.size-(i+1))) / x.size*100
#print 'Percentage of input vec binned {}'.format(percent_binned)
FPT = x[i:]
LFPT = np.log(FPT)
max1 = np.log( np.ceil(np.amax(FPT)))
#min1 = 1
min1 = np.log(np.floor(np.min(FPT)))
LFreq = np.zeros((BinNum, 1))
LTime = np.zeros((BinNum, 1))
Lends = np.zeros((BinNum, 2))
step = (max1-min1) / BinNum
#LOG Binning Data ###########################
for i in range(FPT.size):
for k in range(BinNum):
if( k*step+min1 <= LFPT[i] and LFPT[i] < (k+1)*step+min1):
LFreq[k] += 1 #check LFreq on the first bin
LTime[k] = (k+1)*step-(0.5*step)+min1
Lends[k, 0] = k*step+min1
Lends[k, 1] = (k+1)*step+min1
ends = np.exp(Lends)
widths = ends[:,1] - ends[:,0]
Freq = LFreq.T / widths / x.size
eFreq = 1.0 / np.sqrt(LFreq) * Freq
midpts = np.exp(LTime)
return (midpts[:,0], Freq.T[:,0]) | put; i.e.: [midpts Freq]=lnbin(x,BinNum).
Updated 2/6/14 to change the min to scale automatically
""" | random_line_split |
lnbin.py | import numpy as np
#x must be a np array
def | (x, BinNum):
"""
Logarithmically bins a numpy array, returns (midpoints, Freq)
This function takes the input of a data vector x, which is to be binned;
it also takes in the number of bins one would like the data binned into. The
output is two vectors, one containing the normalised frequency of each bin
(Freq), the other, the midpoint of each bin (midpts).
Added an error to the binned frequency: eFreq (as of June 30 2010). If this
option is not required, just call the function without including the third out
put; i.e.: [midpts Freq]=lnbin(x,BinNum).
Updated 2/6/14 to change the min to scale automatically
"""
if type(x) != np.ndarray:
try:
x = np.array(x)
except:
print 'Improper input format!'
raise
x = np.sort(x)
i = 0
while x[i] <= 0:
i += 1
percent_binned = float((x.size-(i+1))) / x.size*100
#print 'Percentage of input vec binned {}'.format(percent_binned)
FPT = x[i:]
LFPT = np.log(FPT)
max1 = np.log( np.ceil(np.amax(FPT)))
#min1 = 1
min1 = np.log(np.floor(np.min(FPT)))
LFreq = np.zeros((BinNum, 1))
LTime = np.zeros((BinNum, 1))
Lends = np.zeros((BinNum, 2))
step = (max1-min1) / BinNum
#LOG Binning Data ###########################
for i in range(FPT.size):
for k in range(BinNum):
if( k*step+min1 <= LFPT[i] and LFPT[i] < (k+1)*step+min1):
LFreq[k] += 1 #check LFreq on the first bin
LTime[k] = (k+1)*step-(0.5*step)+min1
Lends[k, 0] = k*step+min1
Lends[k, 1] = (k+1)*step+min1
ends = np.exp(Lends)
widths = ends[:,1] - ends[:,0]
Freq = LFreq.T / widths / x.size
eFreq = 1.0 / np.sqrt(LFreq) * Freq
midpts = np.exp(LTime)
return (midpts[:,0], Freq.T[:,0]) | lnbin | identifier_name |
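The lnbin.py rows above all slice the same logarithmic-binning routine: log-spaced bin edges between log(floor(min)) and log(ceil(max)), with counts normalized by each bin's linear width and the sample size. As an illustration only (not part of the dataset), here is a minimal JavaScript sketch of the same scheme; like the numpy original, it assumes the smallest positive value is at least 1, since floor() would otherwise send the lower edge to log(0).

// Minimal sketch of the binning scheme in lnbin.py (illustrative, not from the dataset).
function lnbinSketch(xs, binNum) {
  const pos = xs.filter((v) => v > 0).sort((a, b) => a - b);
  const logMin = Math.log(Math.floor(pos[0]));             // assumes pos[0] >= 1
  const logMax = Math.log(Math.ceil(pos[pos.length - 1]));
  const step = (logMax - logMin) / binNum;
  const counts = new Array(binNum).fill(0);
  for (const v of pos) {
    let k = Math.floor((Math.log(v) - logMin) / step);
    if (k === binNum) k = binNum - 1;                      // keep an exact maximum in the last bin
    if (k >= 0 && k < binNum) counts[k] += 1;
  }
  const midpts = [];
  const freq = [];
  for (let k = 0; k < binNum; k++) {
    const lo = Math.exp(logMin + k * step);
    const hi = Math.exp(logMin + (k + 1) * step);
    midpts.push(Math.exp(logMin + (k + 0.5) * step));      // geometric midpoint, like exp(LTime)
    freq.push(counts[k] / (hi - lo) / xs.length);          // normalize by linear width and sample size
  }
  return { midpts, freq };
}

For heavy-tailed samples, lnbinSketch(samples, 20) returns (midpts, freq) pairs suitable for a log-log plot, the same shape the Python version returns.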
get_users.py | import emission.analysis.modelling.tour_model.data_preprocessing as preprocess
# to determine if the user is valid:
# a valid user should have >= 10 trips for further analysis, and the proportion of filter_trips should be >= 50%
def valid_user(filter_trips,trips):
valid = False
if len(filter_trips) >= 10 and len(filter_trips) / len(trips) >= 0.5:
valid = True
return valid
# - user_ls: a list of strings representing short user names, such as [user1, user2, user3...]
# - valid_user_ls: a subset of `user_ls` for valid users, so also string representation of user names
# - all_users: a collection of all user ids, in terms of user id objects
def get_user_ls(all_users,radius):
user_ls = []
valid_user_ls = []
for i in range(len(all_users)):
curr_user = 'user' + str(i + 1)
user = all_users[i]
trips = preprocess.read_data(user)
filter_trips = preprocess.filter_data(trips,radius)
if valid_user(filter_trips,trips):
valid_user_ls.append(curr_user) | user_ls.append(curr_user)
else:
user_ls.append(curr_user)
continue
return user_ls,valid_user_ls | random_line_split |
|
get_users.py | import emission.analysis.modelling.tour_model.data_preprocessing as preprocess
# to determine if the user is valid:
# a valid user should have >= 10 trips for further analysis, and the proportion of filter_trips should be >= 50%
def valid_user(filter_trips,trips):
valid = False
if len(filter_trips) >= 10 and len(filter_trips) / len(trips) >= 0.5:
valid = True
return valid
# - user_ls: a list of strings representing short user names, such as [user1, user2, user3...]
# - valid_user_ls: a subset of `user_ls` for valid users, so also string representation of user names
# - all_users: a collection of all user ids, in terms of user id objects
def get_user_ls(all_users,radius):
user_ls = []
valid_user_ls = []
for i in range(len(all_users)):
curr_user = 'user' + str(i + 1)
user = all_users[i]
trips = preprocess.read_data(user)
filter_trips = preprocess.filter_data(trips,radius)
if valid_user(filter_trips,trips):
|
else:
user_ls.append(curr_user)
continue
return user_ls,valid_user_ls
| valid_user_ls.append(curr_user)
user_ls.append(curr_user) | conditional_block |
get_users.py | import emission.analysis.modelling.tour_model.data_preprocessing as preprocess
# to determine if the user is valid:
# a valid user should have >= 10 trips for further analysis, and the proportion of filter_trips should be >= 50%
def valid_user(filter_trips,trips):
valid = False
if len(filter_trips) >= 10 and len(filter_trips) / len(trips) >= 0.5:
valid = True
return valid
# - user_ls: a list of strings representing short user names, such as [user1, user2, user3...]
# - valid_user_ls: a subset of `user_ls` for valid users, so also string representation of user names
# - all_users: a collection of all user ids, in terms of user id objects
def | (all_users,radius):
user_ls = []
valid_user_ls = []
for i in range(len(all_users)):
curr_user = 'user' + str(i + 1)
user = all_users[i]
trips = preprocess.read_data(user)
filter_trips = preprocess.filter_data(trips,radius)
if valid_user(filter_trips,trips):
valid_user_ls.append(curr_user)
user_ls.append(curr_user)
else:
user_ls.append(curr_user)
continue
return user_ls,valid_user_ls
| get_user_ls | identifier_name |
get_users.py | import emission.analysis.modelling.tour_model.data_preprocessing as preprocess
# to determine if the user is valid:
# a valid user should have >= 10 trips for further analysis, and the proportion of filter_trips should be >= 50%
def valid_user(filter_trips,trips):
|
# - user_ls: a list of strings representing short user names, such as [user1, user2, user3...]
# - valid_user_ls: a subset of `user_ls` for valid users, so also string representation of user names
# - all_users: a collection of all user ids, in terms of user id objects
def get_user_ls(all_users,radius):
user_ls = []
valid_user_ls = []
for i in range(len(all_users)):
curr_user = 'user' + str(i + 1)
user = all_users[i]
trips = preprocess.read_data(user)
filter_trips = preprocess.filter_data(trips,radius)
if valid_user(filter_trips,trips):
valid_user_ls.append(curr_user)
user_ls.append(curr_user)
else:
user_ls.append(curr_user)
continue
return user_ls,valid_user_ls
| valid = False
if len(filter_trips) >= 10 and len(filter_trips) / len(trips) >= 0.5:
valid = True
return valid | identifier_body |
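The validity rule repeated in the get_users.py rows above is compact enough to restate on its own. A hypothetical JavaScript one-liner mirroring valid_user() (names are illustrative, not from the emission codebase):

// A user is "valid" when at least 10 trips survive filtering and at least
// half of all recorded trips survive, the same test as valid_user() above.
const isValidUser = (filterTrips, trips) =>
  filterTrips.length >= 10 && filterTrips.length / trips.length >= 0.5;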
Facebook.js | import React from 'react';
import SvgIcon from '@material-ui/core/SvgIcon';
/* eslint-disable max-len */
const FacebookIcon = props => (
<SvgIcon viewBox="0 0 216 216" {...props}>
<path d="M204.1,0H11.9C5.3,0,0,5.3,0,11.9v192.2c0,6.6,5.3,11.9,11.9,11.9h103.5v-83.6H87.2V99.8h28.1 v-24c0-27.9,17-43.1,41.9-43.1c11.9,0,22.2,0.9,25.2,1.3v29.2l-17.3,0c-13.5,0-16.2,6.4-16.2,15.9v20.8h32.3l-4.2,32.6H149V216h55 c6.6,0,11.9-5.3,11.9-11.9V11.9C216,5.3,210.7,0,204.1,0z" />
</SvgIcon> | /* eslint-enable max-len */
export default FacebookIcon; | ); | random_line_split |
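Since the component above is only a thin SvgIcon wrapper, any SvgIcon prop passes through. A hypothetical usage (the prop values are examples, not from the dataset):

// e.g. in a toolbar; fontSize and htmlColor are standard SvgIcon props.
<FacebookIcon fontSize="small" htmlColor="#3b5998" />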
structure.js | var ref = require("prosemirror-model");
var Slice = ref.Slice;
var Fragment = ref.Fragment;
var ref$1 = require("./transform");
var Transform = ref$1.Transform;
var ref$2 = require("./replace_step");
var ReplaceStep = ref$2.ReplaceStep;
var ReplaceAroundStep = ref$2.ReplaceAroundStep;
function canCut(node, start, end) {
return (start == 0 || node.canReplace(start, node.childCount)) &&
(end == node.childCount || node.canReplace(0, end))
}
// :: (NodeRange) → ?number
// Try to find a target depth to which the content in the given range
// can be lifted.
function liftTarget(range) { | var parent = range.parent
var content = parent.content.cutByIndex(range.startIndex, range.endIndex)
for (var depth = range.depth;; --depth) {
var node = range.$from.node(depth), index = range.$from.index(depth), endIndex = range.$to.indexAfter(depth)
if (depth < range.depth && node.canReplace(index, endIndex, content))
{ return depth }
if (depth == 0 || !canCut(node, index, endIndex)) { break }
}
}
exports.liftTarget = liftTarget
// :: (NodeRange, number) → Transform
// Split the content in the given range off from its parent, if there
// is sibling content before or after it, and move it up the tree to
// the depth specified by `target`. You'll probably want to use
// `liftTarget` to compute `target`, in order to be sure the lift is
// valid.
Transform.prototype.lift = function(range, target) {
var $from = range.$from;
var $to = range.$to;
var depth = range.depth;
var gapStart = $from.before(depth + 1), gapEnd = $to.after(depth + 1)
var start = gapStart, end = gapEnd
var before = Fragment.empty, openLeft = 0
for (var d = depth, splitting = false; d > target; d--)
{ if (splitting || $from.index(d) > 0) {
splitting = true
before = Fragment.from($from.node(d).copy(before))
openLeft++
} else {
start--
} }
var after = Fragment.empty, openRight = 0
for (var d$1 = depth, splitting$1 = false; d$1 > target; d$1--)
{ if (splitting$1 || $to.after(d$1 + 1) < $to.end(d$1)) {
splitting$1 = true
after = Fragment.from($to.node(d$1).copy(after))
openRight++
} else {
end++
} }
return this.step(new ReplaceAroundStep(start, end, gapStart, gapEnd,
new Slice(before.append(after), openLeft, openRight),
before.size - openLeft, true))
}
// :: (NodeRange, NodeType, ?Object) → ?[{type: NodeType, attrs: ?Object}]
// Try to find a valid way to wrap the content in the given range in a
// node of the given type. May introduce extra nodes around and inside
// the wrapper node, if necessary. Returns null if no valid wrapping
// could be found.
function findWrapping(range, nodeType, attrs, innerRange) {
if ( innerRange === void 0 ) innerRange = range;
var wrap = {type: nodeType, attrs: attrs}
var around = findWrappingOutside(range, wrap)
var inner = around && findWrappingInside(innerRange, wrap)
if (!inner) { return null }
return around.concat(wrap).concat(inner)
}
exports.findWrapping = findWrapping
function findWrappingOutside(range, wrap) {
var parent = range.parent;
var startIndex = range.startIndex;
var endIndex = range.endIndex;
var around = parent.contentMatchAt(startIndex).findWrapping(wrap.type, wrap.attrs)
if (!around) { return null }
var outer = around.length ? around[0] : wrap
if (!parent.canReplaceWith(startIndex, endIndex, outer.type, outer.attrs))
{ return null }
return around
}
function findWrappingInside(range, wrap) {
var parent = range.parent;
var startIndex = range.startIndex;
var endIndex = range.endIndex;
var inner = parent.child(startIndex)
var inside = wrap.type.contentExpr.start(wrap.attrs).findWrappingFor(inner)
if (!inside) { return null }
var last = inside.length ? inside[inside.length - 1] : wrap
var innerMatch = last.type.contentExpr.start(last.attrs)
for (var i = startIndex; i < endIndex; i++)
{ innerMatch = innerMatch && innerMatch.matchNode(parent.child(i)) }
if (!innerMatch || !innerMatch.validEnd()) { return null }
return inside
}
// :: (NodeRange, [{type: NodeType, attrs: ?Object}]) → Transform
// Wrap the given [range](#model.NodeRange) in the given set of wrappers.
// The wrappers are assumed to be valid in this position, and should
// probably be computed with `findWrapping`.
Transform.prototype.wrap = function(range, wrappers) {
var content = Fragment.empty
for (var i = wrappers.length - 1; i >= 0; i--)
{ content = Fragment.from(wrappers[i].type.create(wrappers[i].attrs, content)) }
var start = range.start, end = range.end
return this.step(new ReplaceAroundStep(start, end, start, end, new Slice(content, 0, 0), wrappers.length, true))
}
// :: (number, ?number, NodeType, ?Object) → Transform
// Set the type of all textblocks (partly) between `from` and `to` to
// the given node type with the given attributes.
Transform.prototype.setBlockType = function(from, to, type, attrs) {
var this$1 = this;
if ( to === void 0 ) to = from;
if (!type.isTextblock) { throw new RangeError("Type given to setBlockType should be a textblock") }
var mapFrom = this.steps.length
this.doc.nodesBetween(from, to, function (node, pos) {
if (node.isTextblock && !node.hasMarkup(type, attrs)) {
// Ensure all markup that isn't allowed in the new node type is cleared
this$1.clearNonMatching(this$1.mapping.slice(mapFrom).map(pos, 1), type.contentExpr.start(attrs))
var mapping = this$1.mapping.slice(mapFrom)
var startM = mapping.map(pos, 1), endM = mapping.map(pos + node.nodeSize, 1)
this$1.step(new ReplaceAroundStep(startM, endM, startM + 1, endM - 1,
new Slice(Fragment.from(type.create(attrs)), 0, 0), 1, true))
return false
}
})
return this
}
// :: (number, ?NodeType, ?Object, ?[Mark]) → Transform
// Change the type and attributes of the node after `pos`.
Transform.prototype.setNodeType = function(pos, type, attrs, marks) {
var node = this.doc.nodeAt(pos)
if (!node) { throw new RangeError("No node at given position") }
if (!type) { type = node.type }
var newNode = type.create(attrs, null, marks || node.marks)
if (node.isLeaf)
{ return this.replaceWith(pos, pos + node.nodeSize, newNode) }
if (!type.validContent(node.content, attrs))
{ throw new RangeError("Invalid content for node type " + type.name) }
return this.step(new ReplaceAroundStep(pos, pos + node.nodeSize, pos + 1, pos + node.nodeSize - 1,
new Slice(Fragment.from(newNode), 0, 0), 1, true))
}
// :: (Node, number, ?[?{type: NodeType, attrs: ?Object}]) → bool
// Check whether splitting at the given position is allowed.
function canSplit(doc, pos, depth, typesAfter) {
if ( depth === void 0 ) depth = 1;
var $pos = doc.resolve(pos), base = $pos.depth - depth
if (base < 0 ||
!$pos.parent.canReplace($pos.index(), $pos.parent.childCount) ||
!$pos.parent.canReplace(0, $pos.indexAfter()))
{ return false }
for (var d = $pos.depth - 1, i = depth - 1; d > base; d--, i--) {
var node = $pos.node(d), index$1 = $pos.index(d)
var typeAfter = typesAfter && typesAfter[i]
if (!node.canReplace(0, index$1) ||
!node.canReplaceWith(index$1, node.childCount, typeAfter ? typeAfter.type : $pos.node(d + 1).type,
typeAfter ? typeAfter.attrs : $pos.node(d + 1).attrs))
{ return false }
}
var index = $pos.indexAfter(base)
var baseType = typesAfter && typesAfter[0]
return $pos.node(base).canReplaceWith(index, index, baseType ? baseType.type : $pos.node(base + 1).type,
baseType ? baseType.attrs : $pos.node(base + 1).attrs)
}
exports.canSplit = canSplit
// :: (number, ?number, ?[?{type: NodeType, attrs: ?Object}]) → Transform
// Split the node at the given position, and optionally, if `depth` is
// greater than one, any number of nodes above that. By default, the
// parts split off will inherit the node type of the original node.
// This can be changed by passing an array of types and attributes to
// use after the split.
Transform.prototype.split = function(pos, depth, typesAfter) {
if ( depth === void 0 ) depth = 1;
var $pos = this.doc.resolve(pos), before = Fragment.empty, after = Fragment.empty
for (var d = $pos.depth, e = $pos.depth - depth, i = depth - 1; d > e; d--, i--) {
before = Fragment.from($pos.node(d).copy(before))
var typeAfter = typesAfter && typesAfter[i]
after = Fragment.from(typeAfter ? typeAfter.type.create(typeAfter.attrs, after) : $pos.node(d).copy(after))
}
return this.step(new ReplaceStep(pos, pos, new Slice(before.append(after), depth, depth, true)))
}
// :: (Node, number) → bool
// Test whether the blocks before and after a given position can be
// joined.
function canJoin(doc, pos) {
var $pos = doc.resolve(pos), index = $pos.index()
return joinable($pos.nodeBefore, $pos.nodeAfter) &&
$pos.parent.canReplace(index, index + 1)
}
exports.canJoin = canJoin
function joinable(a, b) {
return a && b && !a.isLeaf && a.canAppend(b)
}
// :: (Node, number, ?number) → ?number
// Find an ancestor of the given position that can be joined to the
// block before (or after if `dir` is positive). Returns the joinable
// point, if any.
function joinPoint(doc, pos, dir) {
if ( dir === void 0 ) dir = -1;
var $pos = doc.resolve(pos)
for (var d = $pos.depth;; d--) {
var before = (void 0), after = (void 0)
if (d == $pos.depth) {
before = $pos.nodeBefore
after = $pos.nodeAfter
} else if (dir > 0) {
before = $pos.node(d + 1)
after = $pos.node(d).maybeChild($pos.index(d) + 1)
} else {
before = $pos.node(d).maybeChild($pos.index(d) - 1)
after = $pos.node(d + 1)
}
if (before && !before.isTextblock && joinable(before, after)) { return pos }
if (d == 0) { break }
pos = dir < 0 ? $pos.before(d) : $pos.after(d)
}
}
exports.joinPoint = joinPoint
// :: (number, ?number, ?bool) → Transform
// Join the blocks around the given position. If depth is 2, their
// last and first siblings are also joined, and so on.
Transform.prototype.join = function(pos, depth) {
if ( depth === void 0 ) depth = 1;
var step = new ReplaceStep(pos - depth, pos + depth, Slice.empty, true)
return this.step(step)
}
// :: (Node, number, NodeType, ?Object) → ?number
// Try to find a point where a node of the given type can be inserted
// near `pos`, by searching up the node hierarchy when `pos` itself
// isn't a valid place but is at the start or end of a node. Return
// null if no position was found.
function insertPoint(doc, pos, nodeType, attrs) {
var $pos = doc.resolve(pos)
if ($pos.parent.canReplaceWith($pos.index(), $pos.index(), nodeType, attrs)) { return pos }
if ($pos.parentOffset == 0)
{ for (var d = $pos.depth - 1; d >= 0; d--) {
var index = $pos.index(d)
if ($pos.node(d).canReplaceWith(index, index, nodeType, attrs)) { return $pos.before(d + 1) }
if (index > 0) { return null }
} }
if ($pos.parentOffset == $pos.parent.content.size)
{ for (var d$1 = $pos.depth - 1; d$1 >= 0; d$1--) {
var index$1 = $pos.indexAfter(d$1)
if ($pos.node(d$1).canReplaceWith(index$1, index$1, nodeType, attrs)) { return $pos.after(d$1 + 1) }
if (index$1 < $pos.node(d$1).childCount) { return null }
} }
}
exports.insertPoint = insertPoint | random_line_split |
|
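The structure.js row above (and the three rows after it) carry the prosemirror-transform structure module. Its lift machinery pairs a query, liftTarget, with a step, Transform.lift. A minimal usage sketch, assuming the published prosemirror-transform package and a document whose selected blocks sit inside a liftable parent such as a blockquote:

// Sketch only: lift the blocks between `from` and `to` out of their parent.
const { Transform, liftTarget } = require("prosemirror-transform");

function liftOut(doc, from, to) {
  const range = doc.resolve(from).blockRange(doc.resolve(to));
  const target = range && liftTarget(range);
  if (target == null) return doc;                // no legal depth to lift to
  return new Transform(doc).lift(range, target).doc;
}

Checking liftTarget first matters: as the comments in the source note, lift assumes the target depth is valid.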
structure.js | var ref = require("prosemirror-model");
var Slice = ref.Slice;
var Fragment = ref.Fragment;
var ref$1 = require("./transform");
var Transform = ref$1.Transform;
var ref$2 = require("./replace_step");
var ReplaceStep = ref$2.ReplaceStep;
var ReplaceAroundStep = ref$2.ReplaceAroundStep;
function canCut(node, start, end) |
// :: (NodeRange) → ?number
// Try to find a target depth to which the content in the given range
// can be lifted.
function liftTarget(range) {
var parent = range.parent
var content = parent.content.cutByIndex(range.startIndex, range.endIndex)
for (var depth = range.depth;; --depth) {
var node = range.$from.node(depth), index = range.$from.index(depth), endIndex = range.$to.indexAfter(depth)
if (depth < range.depth && node.canReplace(index, endIndex, content))
{ return depth }
if (depth == 0 || !canCut(node, index, endIndex)) { break }
}
}
exports.liftTarget = liftTarget
// :: (NodeRange, number) → Transform
// Split the content in the given range off from its parent, if there
// is sibling content before or after it, and move it up the tree to
// the depth specified by `target`. You'll probably want to use
// `liftTarget` to compute `target`, in order to be sure the lift is
// valid.
Transform.prototype.lift = function(range, target) {
var $from = range.$from;
var $to = range.$to;
var depth = range.depth;
var gapStart = $from.before(depth + 1), gapEnd = $to.after(depth + 1)
var start = gapStart, end = gapEnd
var before = Fragment.empty, openLeft = 0
for (var d = depth, splitting = false; d > target; d--)
{ if (splitting || $from.index(d) > 0) {
splitting = true
before = Fragment.from($from.node(d).copy(before))
openLeft++
} else {
start--
} }
var after = Fragment.empty, openRight = 0
for (var d$1 = depth, splitting$1 = false; d$1 > target; d$1--)
{ if (splitting$1 || $to.after(d$1 + 1) < $to.end(d$1)) {
splitting$1 = true
after = Fragment.from($to.node(d$1).copy(after))
openRight++
} else {
end++
} }
return this.step(new ReplaceAroundStep(start, end, gapStart, gapEnd,
new Slice(before.append(after), openLeft, openRight),
before.size - openLeft, true))
}
// :: (NodeRange, NodeType, ?Object) → ?[{type: NodeType, attrs: ?Object}]
// Try to find a valid way to wrap the content in the given range in a
// node of the given type. May introduce extra nodes around and inside
// the wrapper node, if necessary. Returns null if no valid wrapping
// could be found.
function findWrapping(range, nodeType, attrs, innerRange) {
if ( innerRange === void 0 ) innerRange = range;
var wrap = {type: nodeType, attrs: attrs}
var around = findWrappingOutside(range, wrap)
var inner = around && findWrappingInside(innerRange, wrap)
if (!inner) { return null }
return around.concat(wrap).concat(inner)
}
exports.findWrapping = findWrapping
function findWrappingOutside(range, wrap) {
var parent = range.parent;
var startIndex = range.startIndex;
var endIndex = range.endIndex;
var around = parent.contentMatchAt(startIndex).findWrapping(wrap.type, wrap.attrs)
if (!around) { return null }
var outer = around.length ? around[0] : wrap
if (!parent.canReplaceWith(startIndex, endIndex, outer.type, outer.attrs))
{ return null }
return around
}
function findWrappingInside(range, wrap) {
var parent = range.parent;
var startIndex = range.startIndex;
var endIndex = range.endIndex;
var inner = parent.child(startIndex)
var inside = wrap.type.contentExpr.start(wrap.attrs).findWrappingFor(inner)
if (!inside) { return null }
var last = inside.length ? inside[inside.length - 1] : wrap
var innerMatch = last.type.contentExpr.start(last.attrs)
for (var i = startIndex; i < endIndex; i++)
{ innerMatch = innerMatch && innerMatch.matchNode(parent.child(i)) }
if (!innerMatch || !innerMatch.validEnd()) { return null }
return inside
}
// :: (NodeRange, [{type: NodeType, attrs: ?Object}]) → Transform
// Wrap the given [range](#model.NodeRange) in the given set of wrappers.
// The wrappers are assumed to be valid in this position, and should
// probably be computed with `findWrapping`.
Transform.prototype.wrap = function(range, wrappers) {
var content = Fragment.empty
for (var i = wrappers.length - 1; i >= 0; i--)
{ content = Fragment.from(wrappers[i].type.create(wrappers[i].attrs, content)) }
var start = range.start, end = range.end
return this.step(new ReplaceAroundStep(start, end, start, end, new Slice(content, 0, 0), wrappers.length, true))
}
// :: (number, ?number, NodeType, ?Object) → Transform
// Set the type of all textblocks (partly) between `from` and `to` to
// the given node type with the given attributes.
Transform.prototype.setBlockType = function(from, to, type, attrs) {
var this$1 = this;
if ( to === void 0 ) to = from;
if (!type.isTextblock) { throw new RangeError("Type given to setBlockType should be a textblock") }
var mapFrom = this.steps.length
this.doc.nodesBetween(from, to, function (node, pos) {
if (node.isTextblock && !node.hasMarkup(type, attrs)) {
// Ensure all markup that isn't allowed in the new node type is cleared
this$1.clearNonMatching(this$1.mapping.slice(mapFrom).map(pos, 1), type.contentExpr.start(attrs))
var mapping = this$1.mapping.slice(mapFrom)
var startM = mapping.map(pos, 1), endM = mapping.map(pos + node.nodeSize, 1)
this$1.step(new ReplaceAroundStep(startM, endM, startM + 1, endM - 1,
new Slice(Fragment.from(type.create(attrs)), 0, 0), 1, true))
return false
}
})
return this
}
// :: (number, ?NodeType, ?Object, ?[Mark]) → Transform
// Change the type and attributes of the node after `pos`.
Transform.prototype.setNodeType = function(pos, type, attrs, marks) {
var node = this.doc.nodeAt(pos)
if (!node) { throw new RangeError("No node at given position") }
if (!type) { type = node.type }
var newNode = type.create(attrs, null, marks || node.marks)
if (node.isLeaf)
{ return this.replaceWith(pos, pos + node.nodeSize, newNode) }
if (!type.validContent(node.content, attrs))
{ throw new RangeError("Invalid content for node type " + type.name) }
return this.step(new ReplaceAroundStep(pos, pos + node.nodeSize, pos + 1, pos + node.nodeSize - 1,
new Slice(Fragment.from(newNode), 0, 0), 1, true))
}
// :: (Node, number, ?[?{type: NodeType, attrs: ?Object}]) → bool
// Check whether splitting at the given position is allowed.
function canSplit(doc, pos, depth, typesAfter) {
if ( depth === void 0 ) depth = 1;
var $pos = doc.resolve(pos), base = $pos.depth - depth
if (base < 0 ||
!$pos.parent.canReplace($pos.index(), $pos.parent.childCount) ||
!$pos.parent.canReplace(0, $pos.indexAfter()))
{ return false }
for (var d = $pos.depth - 1, i = depth - 1; d > base; d--, i--) {
var node = $pos.node(d), index$1 = $pos.index(d)
var typeAfter = typesAfter && typesAfter[i]
if (!node.canReplace(0, index$1) ||
!node.canReplaceWith(index$1, node.childCount, typeAfter ? typeAfter.type : $pos.node(d + 1).type,
typeAfter ? typeAfter.attrs : $pos.node(d + 1).attrs))
{ return false }
}
var index = $pos.indexAfter(base)
var baseType = typesAfter && typesAfter[0]
return $pos.node(base).canReplaceWith(index, index, baseType ? baseType.type : $pos.node(base + 1).type,
baseType ? baseType.attrs : $pos.node(base + 1).attrs)
}
exports.canSplit = canSplit
// :: (number, ?number, ?[?{type: NodeType, attrs: ?Object}]) → Transform
// Split the node at the given position, and optionally, if `depth` is
// greater than one, any number of nodes above that. By default, the
// parts split off will inherit the node type of the original node.
// This can be changed by passing an array of types and attributes to
// use after the split.
Transform.prototype.split = function(pos, depth, typesAfter) {
if ( depth === void 0 ) depth = 1;
var $pos = this.doc.resolve(pos), before = Fragment.empty, after = Fragment.empty
for (var d = $pos.depth, e = $pos.depth - depth, i = depth - 1; d > e; d--, i--) {
before = Fragment.from($pos.node(d).copy(before))
var typeAfter = typesAfter && typesAfter[i]
after = Fragment.from(typeAfter ? typeAfter.type.create(typeAfter.attrs, after) : $pos.node(d).copy(after))
}
return this.step(new ReplaceStep(pos, pos, new Slice(before.append(after), depth, depth, true)))
}
// :: (Node, number) → bool
// Test whether the blocks before and after a given position can be
// joined.
function canJoin(doc, pos) {
var $pos = doc.resolve(pos), index = $pos.index()
return joinable($pos.nodeBefore, $pos.nodeAfter) &&
$pos.parent.canReplace(index, index + 1)
}
exports.canJoin = canJoin
function joinable(a, b) {
return a && b && !a.isLeaf && a.canAppend(b)
}
// :: (Node, number, ?number) → ?number
// Find an ancestor of the given position that can be joined to the
// block before (or after if `dir` is positive). Returns the joinable
// point, if any.
function joinPoint(doc, pos, dir) {
if ( dir === void 0 ) dir = -1;
var $pos = doc.resolve(pos)
for (var d = $pos.depth;; d--) {
var before = (void 0), after = (void 0)
if (d == $pos.depth) {
before = $pos.nodeBefore
after = $pos.nodeAfter
} else if (dir > 0) {
before = $pos.node(d + 1)
after = $pos.node(d).maybeChild($pos.index(d) + 1)
} else {
before = $pos.node(d).maybeChild($pos.index(d) - 1)
after = $pos.node(d + 1)
}
if (before && !before.isTextblock && joinable(before, after)) { return pos }
if (d == 0) { break }
pos = dir < 0 ? $pos.before(d) : $pos.after(d)
}
}
exports.joinPoint = joinPoint
// :: (number, ?number, ?bool) → Transform
// Join the blocks around the given position. If depth is 2, their
// last and first siblings are also joined, and so on.
Transform.prototype.join = function(pos, depth) {
if ( depth === void 0 ) depth = 1;
var step = new ReplaceStep(pos - depth, pos + depth, Slice.empty, true)
return this.step(step)
}
// :: (Node, number, NodeType, ?Object) → ?number
// Try to find a point where a node of the given type can be inserted
// near `pos`, by searching up the node hierarchy when `pos` itself
// isn't a valid place but is at the start or end of a node. Return
// null if no position was found.
function insertPoint(doc, pos, nodeType, attrs) {
var $pos = doc.resolve(pos)
if ($pos.parent.canReplaceWith($pos.index(), $pos.index(), nodeType, attrs)) { return pos }
if ($pos.parentOffset == 0)
{ for (var d = $pos.depth - 1; d >= 0; d--) {
var index = $pos.index(d)
if ($pos.node(d).canReplaceWith(index, index, nodeType, attrs)) { return $pos.before(d + 1) }
if (index > 0) { return null }
} }
if ($pos.parentOffset == $pos.parent.content.size)
{ for (var d$1 = $pos.depth - 1; d$1 >= 0; d$1--) {
var index$1 = $pos.indexAfter(d$1)
if ($pos.node(d$1).canReplaceWith(index$1, index$1, nodeType, attrs)) { return $pos.after(d$1 + 1) }
if (index$1 < $pos.node(d$1).childCount) { return null }
} }
}
exports.insertPoint = insertPoint
| {
return (start == 0 || node.canReplace(start, node.childCount)) &&
(end == node.childCount || node.canReplace(0, end))
} | identifier_body |
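findWrapping and Transform.wrap follow the same query-then-apply pattern as lifting. A sketch, assuming a schema with a blockquote node (as in prosemirror-schema-basic):

// Sketch only: wrap the blocks between `from` and `to` in a blockquote.
const { Transform, findWrapping } = require("prosemirror-transform");

function wrapInBlockquote(doc, from, to, schema) {
  const range = doc.resolve(from).blockRange(doc.resolve(to));
  const wrappers = range && findWrapping(range, schema.nodes.blockquote);
  if (!wrappers) return doc;                     // no valid wrapping exists here
  return new Transform(doc).wrap(range, wrappers).doc;
}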
structure.js | var ref = require("prosemirror-model");
var Slice = ref.Slice;
var Fragment = ref.Fragment;
var ref$1 = require("./transform");
var Transform = ref$1.Transform;
var ref$2 = require("./replace_step");
var ReplaceStep = ref$2.ReplaceStep;
var ReplaceAroundStep = ref$2.ReplaceAroundStep;
function | (node, start, end) {
return (start == 0 || node.canReplace(start, node.childCount)) &&
(end == node.childCount || node.canReplace(0, end))
}
// :: (NodeRange) → ?number
// Try to find a target depth to which the content in the given range
// can be lifted.
function liftTarget(range) {
var parent = range.parent
var content = parent.content.cutByIndex(range.startIndex, range.endIndex)
for (var depth = range.depth;; --depth) {
var node = range.$from.node(depth), index = range.$from.index(depth), endIndex = range.$to.indexAfter(depth)
if (depth < range.depth && node.canReplace(index, endIndex, content))
{ return depth }
if (depth == 0 || !canCut(node, index, endIndex)) { break }
}
}
exports.liftTarget = liftTarget
// :: (NodeRange, number) → Transform
// Split the content in the given range off from its parent, if there
// is sibling content before or after it, and move it up the tree to
// the depth specified by `target`. You'll probably want to use
// `liftTarget` to compute `target`, in order to be sure the lift is
// valid.
Transform.prototype.lift = function(range, target) {
var $from = range.$from;
var $to = range.$to;
var depth = range.depth;
var gapStart = $from.before(depth + 1), gapEnd = $to.after(depth + 1)
var start = gapStart, end = gapEnd
var before = Fragment.empty, openLeft = 0
for (var d = depth, splitting = false; d > target; d--)
{ if (splitting || $from.index(d) > 0) {
splitting = true
before = Fragment.from($from.node(d).copy(before))
openLeft++
} else {
start--
} }
var after = Fragment.empty, openRight = 0
for (var d$1 = depth, splitting$1 = false; d$1 > target; d$1--)
{ if (splitting$1 || $to.after(d$1 + 1) < $to.end(d$1)) {
splitting$1 = true
after = Fragment.from($to.node(d$1).copy(after))
openRight++
} else {
end++
} }
return this.step(new ReplaceAroundStep(start, end, gapStart, gapEnd,
new Slice(before.append(after), openLeft, openRight),
before.size - openLeft, true))
}
// :: (NodeRange, NodeType, ?Object) → ?[{type: NodeType, attrs: ?Object}]
// Try to find a valid way to wrap the content in the given range in a
// node of the given type. May introduce extra nodes around and inside
// the wrapper node, if necessary. Returns null if no valid wrapping
// could be found.
function findWrapping(range, nodeType, attrs, innerRange) {
if ( innerRange === void 0 ) innerRange = range;
var wrap = {type: nodeType, attrs: attrs}
var around = findWrappingOutside(range, wrap)
var inner = around && findWrappingInside(innerRange, wrap)
if (!inner) { return null }
return around.concat(wrap).concat(inner)
}
exports.findWrapping = findWrapping
function findWrappingOutside(range, wrap) {
var parent = range.parent;
var startIndex = range.startIndex;
var endIndex = range.endIndex;
var around = parent.contentMatchAt(startIndex).findWrapping(wrap.type, wrap.attrs)
if (!around) { return null }
var outer = around.length ? around[0] : wrap
if (!parent.canReplaceWith(startIndex, endIndex, outer.type, outer.attrs))
{ return null }
return around
}
function findWrappingInside(range, wrap) {
var parent = range.parent;
var startIndex = range.startIndex;
var endIndex = range.endIndex;
var inner = parent.child(startIndex)
var inside = wrap.type.contentExpr.start(wrap.attrs).findWrappingFor(inner)
if (!inside) { return null }
var last = inside.length ? inside[inside.length - 1] : wrap
var innerMatch = last.type.contentExpr.start(last.attrs)
for (var i = startIndex; i < endIndex; i++)
{ innerMatch = innerMatch && innerMatch.matchNode(parent.child(i)) }
if (!innerMatch || !innerMatch.validEnd()) { return null }
return inside
}
// :: (NodeRange, [{type: NodeType, attrs: ?Object}]) → Transform
// Wrap the given [range](#model.NodeRange) in the given set of wrappers.
// The wrappers are assumed to be valid in this position, and should
// probably be computed with `findWrapping`.
Transform.prototype.wrap = function(range, wrappers) {
var content = Fragment.empty
for (var i = wrappers.length - 1; i >= 0; i--)
{ content = Fragment.from(wrappers[i].type.create(wrappers[i].attrs, content)) }
var start = range.start, end = range.end
return this.step(new ReplaceAroundStep(start, end, start, end, new Slice(content, 0, 0), wrappers.length, true))
}
// :: (number, ?number, NodeType, ?Object) → Transform
// Set the type of all textblocks (partly) between `from` and `to` to
// the given node type with the given attributes.
Transform.prototype.setBlockType = function(from, to, type, attrs) {
var this$1 = this;
if ( to === void 0 ) to = from;
if (!type.isTextblock) { throw new RangeError("Type given to setBlockType should be a textblock") }
var mapFrom = this.steps.length
this.doc.nodesBetween(from, to, function (node, pos) {
if (node.isTextblock && !node.hasMarkup(type, attrs)) {
// Ensure all markup that isn't allowed in the new node type is cleared
this$1.clearNonMatching(this$1.mapping.slice(mapFrom).map(pos, 1), type.contentExpr.start(attrs))
var mapping = this$1.mapping.slice(mapFrom)
var startM = mapping.map(pos, 1), endM = mapping.map(pos + node.nodeSize, 1)
this$1.step(new ReplaceAroundStep(startM, endM, startM + 1, endM - 1,
new Slice(Fragment.from(type.create(attrs)), 0, 0), 1, true))
return false
}
})
return this
}
// :: (number, ?NodeType, ?Object, ?[Mark]) → Transform
// Change the type and attributes of the node after `pos`.
Transform.prototype.setNodeType = function(pos, type, attrs, marks) {
var node = this.doc.nodeAt(pos)
if (!node) { throw new RangeError("No node at given position") }
if (!type) { type = node.type }
var newNode = type.create(attrs, null, marks || node.marks)
if (node.isLeaf)
{ return this.replaceWith(pos, pos + node.nodeSize, newNode) }
if (!type.validContent(node.content, attrs))
{ throw new RangeError("Invalid content for node type " + type.name) }
return this.step(new ReplaceAroundStep(pos, pos + node.nodeSize, pos + 1, pos + node.nodeSize - 1,
new Slice(Fragment.from(newNode), 0, 0), 1, true))
}
// :: (Node, number, ?[?{type: NodeType, attrs: ?Object}]) → bool
// Check whether splitting at the given position is allowed.
function canSplit(doc, pos, depth, typesAfter) {
if ( depth === void 0 ) depth = 1;
var $pos = doc.resolve(pos), base = $pos.depth - depth
if (base < 0 ||
!$pos.parent.canReplace($pos.index(), $pos.parent.childCount) ||
!$pos.parent.canReplace(0, $pos.indexAfter()))
{ return false }
for (var d = $pos.depth - 1, i = depth - 1; d > base; d--, i--) {
var node = $pos.node(d), index$1 = $pos.index(d)
var typeAfter = typesAfter && typesAfter[i]
if (!node.canReplace(0, index$1) ||
!node.canReplaceWith(index$1, node.childCount, typeAfter ? typeAfter.type : $pos.node(d + 1).type,
typeAfter ? typeAfter.attrs : $pos.node(d + 1).attrs))
{ return false }
}
var index = $pos.indexAfter(base)
var baseType = typesAfter && typesAfter[0]
return $pos.node(base).canReplaceWith(index, index, baseType ? baseType.type : $pos.node(base + 1).type,
baseType ? baseType.attrs : $pos.node(base + 1).attrs)
}
exports.canSplit = canSplit
// :: (number, ?number, ?[?{type: NodeType, attrs: ?Object}]) → Transform
// Split the node at the given position, and optionally, if `depth` is
// greater than one, any number of nodes above that. By default, the
// parts split off will inherit the node type of the original node.
// This can be changed by passing an array of types and attributes to
// use after the split.
Transform.prototype.split = function(pos, depth, typesAfter) {
if ( depth === void 0 ) depth = 1;
var $pos = this.doc.resolve(pos), before = Fragment.empty, after = Fragment.empty
for (var d = $pos.depth, e = $pos.depth - depth, i = depth - 1; d > e; d--, i--) {
before = Fragment.from($pos.node(d).copy(before))
var typeAfter = typesAfter && typesAfter[i]
after = Fragment.from(typeAfter ? typeAfter.type.create(typeAfter.attrs, after) : $pos.node(d).copy(after))
}
return this.step(new ReplaceStep(pos, pos, new Slice(before.append(after), depth, depth, true)))
}
// :: (Node, number) → bool
// Test whether the blocks before and after a given position can be
// joined.
function canJoin(doc, pos) {
var $pos = doc.resolve(pos), index = $pos.index()
return joinable($pos.nodeBefore, $pos.nodeAfter) &&
$pos.parent.canReplace(index, index + 1)
}
exports.canJoin = canJoin
function joinable(a, b) {
return a && b && !a.isLeaf && a.canAppend(b)
}
// :: (Node, number, ?number) → ?number
// Find an ancestor of the given position that can be joined to the
// block before (or after if `dir` is positive). Returns the joinable
// point, if any.
function joinPoint(doc, pos, dir) {
if ( dir === void 0 ) dir = -1;
var $pos = doc.resolve(pos)
for (var d = $pos.depth;; d--) {
var before = (void 0), after = (void 0)
if (d == $pos.depth) {
before = $pos.nodeBefore
after = $pos.nodeAfter
} else if (dir > 0) {
before = $pos.node(d + 1)
after = $pos.node(d).maybeChild($pos.index(d) + 1)
} else {
before = $pos.node(d).maybeChild($pos.index(d) - 1)
after = $pos.node(d + 1)
}
if (before && !before.isTextblock && joinable(before, after)) { return pos }
if (d == 0) { break }
pos = dir < 0 ? $pos.before(d) : $pos.after(d)
}
}
exports.joinPoint = joinPoint
// :: (number, ?number, ?bool) → Transform
// Join the blocks around the given position. If depth is 2, their
// last and first siblings are also joined, and so on.
Transform.prototype.join = function(pos, depth) {
if ( depth === void 0 ) depth = 1;
var step = new ReplaceStep(pos - depth, pos + depth, Slice.empty, true)
return this.step(step)
}
// :: (Node, number, NodeType, ?Object) → ?number
// Try to find a point where a node of the given type can be inserted
// near `pos`, by searching up the node hierarchy when `pos` itself
// isn't a valid place but is at the start or end of a node. Return
// null if no position was found.
function insertPoint(doc, pos, nodeType, attrs) {
var $pos = doc.resolve(pos)
if ($pos.parent.canReplaceWith($pos.index(), $pos.index(), nodeType, attrs)) { return pos }
if ($pos.parentOffset == 0)
{ for (var d = $pos.depth - 1; d >= 0; d--) {
var index = $pos.index(d)
if ($pos.node(d).canReplaceWith(index, index, nodeType, attrs)) { return $pos.before(d + 1) }
if (index > 0) { return null }
} }
if ($pos.parentOffset == $pos.parent.content.size)
{ for (var d$1 = $pos.depth - 1; d$1 >= 0; d$1--) {
var index$1 = $pos.indexAfter(d$1)
if ($pos.node(d$1).canReplaceWith(index$1, index$1, nodeType, attrs)) { return $pos.after(d$1 + 1) }
if (index$1 < $pos.node(d$1).childCount) { return null }
} }
}
exports.insertPoint = insertPoint
| canCut | identifier_name |
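canSplit/split and canJoin/join are mirror operations, and each boolean query guards a step that would otherwise throw or build an invalid document. A sketch of the pairing (published package import assumed):

// Sketch only: guarded split and join around a document position.
const { Transform, canSplit, canJoin } = require("prosemirror-transform");

const splitAt = (doc, pos) =>
  canSplit(doc, pos) ? new Transform(doc).split(pos).doc : doc;

const joinAt = (doc, pos) =>
  canJoin(doc, pos) ? new Transform(doc).join(pos).doc : doc;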
structure.js | var ref = require("prosemirror-model");
var Slice = ref.Slice;
var Fragment = ref.Fragment;
var ref$1 = require("./transform");
var Transform = ref$1.Transform;
var ref$2 = require("./replace_step");
var ReplaceStep = ref$2.ReplaceStep;
var ReplaceAroundStep = ref$2.ReplaceAroundStep;
function canCut(node, start, end) {
return (start == 0 || node.canReplace(start, node.childCount)) &&
(end == node.childCount || node.canReplace(0, end))
}
// :: (NodeRange) → ?number
// Try to find a target depth to which the content in the given range
// can be lifted.
function liftTarget(range) {
var parent = range.parent
var content = parent.content.cutByIndex(range.startIndex, range.endIndex)
for (var depth = range.depth;; --depth) {
var node = range.$from.node(depth), index = range.$from.index(depth), endIndex = range.$to.indexAfter(depth)
if (depth < range.depth && node.canReplace(index, endIndex, content))
{ return depth }
if (depth == 0 || !canCut(node, index, endIndex)) { break }
}
}
exports.liftTarget = liftTarget
// :: (NodeRange, number) → Transform
// Split the content in the given range off from its parent, if there
// is sibling content before or after it, and move it up the tree to
// the depth specified by `target`. You'll probably want to use
// `liftTarget` to compute `target`, in order to be sure the lift is
// valid.
Transform.prototype.lift = function(range, target) {
var $from = range.$from;
var $to = range.$to;
var depth = range.depth;
var gapStart = $from.before(depth + 1), gapEnd = $to.after(depth + 1)
var start = gapStart, end = gapEnd
var before = Fragment.empty, openLeft = 0
for (var d = depth, splitting = false; d > target; d--)
{ if (splitting || $from.index(d) > 0) {
splitting = true
before = Fragment.from($from.node(d).copy(before))
openLeft++
} else {
start--
} }
var after = Fragment.empty, openRight = 0
for (var d$1 = depth, splitting$1 = false; d$1 > target; d$1--)
{ if (splitting$1 || $to.after(d$1 + 1) < $to.end(d$1)) {
splitting$1 = true
after = Fragment.from($to.node(d$1).copy(after))
openRight++
} else {
end++
} }
return this.step(new ReplaceAroundStep(start, end, gapStart, gapEnd,
new Slice(before.append(after), openLeft, openRight),
before.size - openLeft, true))
}
// :: (NodeRange, NodeType, ?Object) → ?[{type: NodeType, attrs: ?Object}]
// Try to find a valid way to wrap the content in the given range in a
// node of the given type. May introduce extra nodes around and inside
// the wrapper node, if necessary. Returns null if no valid wrapping
// could be found.
function findWrapping(range, nodeType, attrs, innerRange) {
if ( innerRange === void 0 ) innerRange = range;
var wrap = {type: nodeType, attrs: attrs}
var around = findWrappingOutside(range, wrap)
var inner = around && findWrappingInside(innerRange, wrap)
if (!inner) { return null }
return around.concat(wrap).concat(inner)
}
exports.findWrapping = findWrapping
function findWrappingOutside(range, wrap) {
var parent = range.parent;
var startIndex = range.startIndex;
var endIndex = range.endIndex;
var around = parent.contentMatchAt(startIndex).findWrapping(wrap.type, wrap.attrs)
if (!around) { return null }
var outer = around.length ? around[0] : wrap
if (!parent.canReplaceWith(startIndex, endIndex, outer.type, outer.attrs))
{ return null }
return around
}
function findWrappingInside(range, wrap) {
var parent = range.parent;
var startIndex = range.startIndex;
var endIndex = range.endIndex;
var inner = parent.child(startIndex)
var inside = wrap.type.contentExpr.start(wrap.attrs).findWrappingFor(inner)
if (!inside) { return null }
var last = inside.length ? inside[inside.length - 1] : wrap
var innerMatch = last.type.contentExpr.start(last.attrs)
for (var i = startIndex; i < endIndex; i++)
{ innerMatch = innerMatch && innerMatch.matchNode(parent.child(i)) }
if (!innerMatch || !innerMatch.validEnd()) { return null }
return inside
}
// :: (NodeRange, [{type: NodeType, attrs: ?Object}]) → Transform
// Wrap the given [range](#model.NodeRange) in the given set of wrappers.
// The wrappers are assumed to be valid in this position, and should
// probably be computed with `findWrapping`.
Transform.prototype.wrap = function(range, wrappers) {
var content = Fragment.empty
for (var i = wrappers.length - 1; i >= 0; i--)
{ content = Fragment.from(wrappers[i].type.create(wrappers[i].attrs, content)) }
var start = range.start, end = range.end
return this.step(new ReplaceAroundStep(start, end, start, end, new Slice(content, 0, 0), wrappers.length, true))
}
// :: (number, ?number, NodeType, ?Object) → Transform
// Set the type of all textblocks (partly) between `from` and `to` to
// the given node type with the given attributes.
Transform.prototype.setBlockType = function(from, to, type, attrs) {
var this$1 = this;
if ( to === void 0 ) to = from;
if (!type.isTextblock) { throw new RangeError("Type given to setBlockType should be a textblock") }
var mapFrom = this.steps.length
this.doc.nodesBetween(from, to, function (node, pos) {
if (node.isTextblock && !node.hasMarkup(type, attrs)) {
// Ensure all markup that isn't allowed in the new node type is cleared
this$1.clearNonMatching(this$1.mapping.slice(mapFrom).map(pos, 1), type.contentExpr.start(attrs))
var mapping = this$1.mapping.slice(mapFrom)
var startM = mapping.map(pos, 1), endM = mapping.map(pos + node.nodeSize, 1)
this$1.step(new ReplaceAroundStep(startM, endM, startM + 1, endM - 1,
new Slice(Fragment.from(type.create(attrs)), 0, 0), 1, true))
return false
}
})
return this
}
// :: (number, ?NodeType, ?Object, ?[Mark]) → Transform
// Change the type and attributes of the node after `pos`.
Transform.prototype.setNodeType = function(pos, type, attrs, marks) {
var node = this.doc.nodeAt(pos)
if (!node) { throw new RangeError("No node at given position") }
if (!type) { type = node.type }
var newNode = type.create(attrs, null, marks || node.marks)
if (node.isLeaf)
{ return this.replaceWith(pos, pos + node.nodeSize, newNode) }
if (!type.validContent(node.content, attrs))
{ throw new RangeError("Invalid content for node type " + type.name) }
return this.step(new ReplaceAroundStep(pos, pos + node.nodeSize, pos + 1, pos + node.nodeSize - 1,
new Slice(Fragment.from(newNode), 0, 0), 1, true))
}
// :: (Node, number, ?[?{type: NodeType, attrs: ?Object}]) → bool
// Check whether splitting at the given position is allowed.
function canSplit(doc, pos, depth, typesAfter) {
if ( depth === void 0 ) depth = 1;
var $pos = doc.resolve(pos), base = $pos.depth - depth
if (base < 0 ||
!$pos.parent.canReplace($pos.index(), $pos.parent.childCount) ||
!$pos.parent.canReplace(0, $pos.indexAfter()))
{ return false }
for (var d = $pos.depth - 1, i = depth - 1; d > base; d--, i--) {
var node = $pos.node(d), index$1 = $pos.index(d)
var typeAfter = typesAfter && typesAfter[i]
if (!node.canReplace(0, index$1) ||
!node.canReplaceWith(index$1, node.childCount, typeAfter ? typeAfter.type : $pos.node(d + 1).type,
typeAfter ? typeAfter.attrs : $pos.node(d + 1).attrs))
{ return false }
}
var index = $pos.indexAfter(base)
var baseType = typesAfter && typesAfter[0]
return $pos.node(base).canReplaceWith(index, index, baseType ? baseType.type : $pos.node(base + 1).type,
baseType ? baseType.attrs : $pos.node(base + 1).attrs)
}
exports.canSplit = canSplit
// :: (number, ?number, ?[?{type: NodeType, attrs: ?Object}]) → Transform
// Split the node at the given position, and optionally, if `depth` is
// greater than one, any number of nodes above that. By default, the
// parts split off will inherit the node type of the original node.
// This can be changed by passing an array of types and attributes to
// use after the split.
Transform.prototype.split = function(pos, depth, typesAfter) {
if ( depth === void 0 ) depth = 1;
var $pos = this.doc.resolve(pos), before = Fragment.empty, after = Fragment.empty
for (var d = $pos.depth, e = $pos.depth - depth, i = depth - 1; d > e; d--, i--) {
before = Fragment.from($pos.node(d).copy(before))
var typeAfter = typesAfter && typesAfter[i]
after = Fragment.from(typeAfter ? typeAfter.type.create(typeAfter.attrs, after) : $pos.node(d).copy(after))
}
return this.step(new ReplaceStep(pos, pos, new Slice(before.append(after), depth, depth, true)))
}
// :: (Node, number) → bool
// Test whether the blocks before and after a given position can be
// joined.
function canJoin(doc, pos) {
var $pos = doc.resolve(pos), index = $pos.index()
return joinable($pos.nodeBefore, $pos.nodeAfter) &&
$pos.parent.canReplace(index, index + 1)
}
exports.canJoin = canJoin
function joinable(a, b) {
return a && b && !a.isLeaf && a.canAppend(b)
}
// :: (Node, number, ?number) → ?number
// Find an ancestor of the given position that can be joined to the
// block before (or after if `dir` is positive). Returns the joinable
// point, if any.
function joinPoint(doc, pos, dir) {
if ( dir === void 0 ) dir = -1;
var $pos = doc.resolve(pos)
for (var d = $pos.depth;; d--) {
var before = (void 0), after = (void 0)
if (d == $pos.depth) {
before = $pos.nodeBefore
after = $pos.nodeAfter
} else if (dir > 0) {
before = $pos.node(d + 1)
after = $pos.node(d).maybeChild($pos.index(d) + 1)
} else {
before = $pos.node(d).maybeChild($pos.index(d) - 1)
after = $pos.node(d + 1)
}
if (before && !before.isTextblock && joinable(before, after)) { return pos }
if (d == 0) { break }
pos = | $pos.before(d) : $pos.after(d)
}
}
exports.joinPoint = joinPoint
// :: (number, ?number, ?bool) → Transform
// Join the blocks around the given position. If depth is 2, their
// last and first siblings are also joined, and so on.
Transform.prototype.join = function(pos, depth) {
if ( depth === void 0 ) depth = 1;
var step = new ReplaceStep(pos - depth, pos + depth, Slice.empty, true)
return this.step(step)
}
// :: (Node, number, NodeType, ?Object) → ?number
// Try to find a point where a node of the given type can be inserted
// near `pos`, by searching up the node hierarchy when `pos` itself
// isn't a valid place but is at the start or end of a node. Return
// null if no position was found.
function insertPoint(doc, pos, nodeType, attrs) {
var $pos = doc.resolve(pos)
if ($pos.parent.canReplaceWith($pos.index(), $pos.index(), nodeType, attrs)) { return pos }
if ($pos.parentOffset == 0)
{ for (var d = $pos.depth - 1; d >= 0; d--) {
var index = $pos.index(d)
if ($pos.node(d).canReplaceWith(index, index, nodeType, attrs)) { return $pos.before(d + 1) }
if (index > 0) { return null }
} }
if ($pos.parentOffset == $pos.parent.content.size)
{ for (var d$1 = $pos.depth - 1; d$1 >= 0; d$1--) {
var index$1 = $pos.indexAfter(d$1)
if ($pos.node(d$1).canReplaceWith(index$1, index$1, nodeType, attrs)) { return $pos.after(d$1 + 1) }
if (index$1 < $pos.node(d$1).childCount) { return null }
} }
}
exports.insertPoint = insertPoint
| dir < 0 ? | conditional_block |
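insertPoint only locates a legal position; performing the insertion is left to the caller. A sketch, assuming a schema with a horizontal_rule node (as in prosemirror-schema-basic) and the package's Transform.insert convenience:

// Sketch only: insert a horizontal rule at or near `pos`.
const { Transform, insertPoint } = require("prosemirror-transform");

function insertRule(doc, pos, schema) {
  const hr = schema.nodes.horizontal_rule;
  const point = insertPoint(doc, pos, hr);
  if (point == null) return doc;                 // no legal spot found nearby
  return new Transform(doc).insert(point, hr.create()).doc;
}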
autoref-intermediate-types-issue-3585.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
trait Foo {
fn foo(&self) -> ~str;
}
impl<T:Foo> Foo for @T {
fn foo(&self) -> ~str {
fmt!("@%s", (**self).foo())
}
}
impl Foo for uint {
fn foo(&self) -> ~str {
fmt!("%u", *self)
}
}
pub fn main() | {
let x = @3u;
assert_eq!(x.foo(), ~"@3");
} | identifier_body |
|
autoref-intermediate-types-issue-3585.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
trait Foo {
fn foo(&self) -> ~str;
}
impl<T:Foo> Foo for @T {
fn | (&self) -> ~str {
fmt!("@%s", (**self).foo())
}
}
impl Foo for uint {
fn foo(&self) -> ~str {
fmt!("%u", *self)
}
}
pub fn main() {
let x = @3u;
assert_eq!(x.foo(), ~"@3");
}
| foo | identifier_name |
autoref-intermediate-types-issue-3585.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
trait Foo {
fn foo(&self) -> ~str;
}
impl<T:Foo> Foo for @T {
fn foo(&self) -> ~str {
fmt!("@%s", (**self).foo())
}
}
impl Foo for uint {
fn foo(&self) -> ~str {
fmt!("%u", *self)
} | }
pub fn main() {
let x = @3u;
assert_eq!(x.foo(), ~"@3");
} | random_line_split |
|
L0FailsIfThereIsMkdirError.ts | import fs = require('fs');
import mockanswer = require('azure-pipelines-task-lib/mock-answer');
import mockrun = require('azure-pipelines-task-lib/mock-run');
import path = require('path');
let taskPath = path.join(__dirname, '..', 'copyfiles.js');
let runner: mockrun.TaskMockRunner = new mockrun.TaskMockRunner(taskPath);
runner.setInput('Contents', '**');
runner.setInput('SourceFolder', path.normalize('/srcDir'));
runner.setInput('TargetFolder', path.normalize('/destDir'));
runner.setInput('CleanTargetFolder', 'false');
runner.setInput('ignoreMakeDirErrors', 'false');
runner.setInput('Overwrite', 'false'); | let answers = <mockanswer.TaskLibAnswers> {
checkPath: { },
find: { },
};
answers.checkPath[path.normalize('/srcDir')] = true;
answers.find[path.normalize('/srcDir')] = [
path.normalize('/srcDir'),
path.normalize('/srcDir/someOtherDir'),
path.normalize('/srcDir/someOtherDir/file1.file'),
path.normalize('/srcDir/someOtherDir/file2.file'),
];
runner.setAnswers(answers);
runner.registerMockExport('stats', (itemPath: string) => {
console.log('##vso[task.debug]stats ' + itemPath);
switch (itemPath) {
case path.normalize('/srcDir/someOtherDir'):
return { isDirectory: () => true };
case path.normalize('/srcDir/someOtherDir/file1.file'):
case path.normalize('/srcDir/someOtherDir/file2.file'):
return { isDirectory: () => false };
default:
throw { code: 'ENOENT' };
}
});
runner.registerMockExport('mkdirP', (p: string) => {
console.log(`mkdirP: ${p}`);
throw "Error during creation of target folder."
});
// As a precaution, disable fs.chmodSync; it should not be called during this scenario.
fs.chmodSync = null;
runner.registerMock('fs', fs);
runner.run(); | random_line_split |
|
GameRenderer.ts | import {Field, GameSetting} from './../logic/logic'
import {CellRenderer} from './cellrenderer'
class GameRenderer {
private cellRenderer : CellRenderer;
private ctx : CanvasRenderingContext2D;
/**
     * Creates a new instance of the GameRenderer class
     * @param canvas The canvas on which to render the game
* @param setting The settings for the current game
*/
constructor(private canvas: HTMLCanvasElement, private setting: GameSetting) {
this.ctx = <CanvasRenderingContext2D>this.canvas.getContext('2d');
this.cellRenderer = new CellRenderer(this.ctx, setting);
}
/**
* Renders the game
* @param field The field of the game
*/
public render(field: Field): void |
/**
     * Renders a specific cell on a field
* @param field The field to render the cell
* @param x The number of the cell on the X axis
* @param y The number of the cell on the Y axis
*/
public renderAt(field: Field, x: number, y: number) {
var cell = field.getCellAt(x, y);
if (cell)
this.cellRenderer.renderCell(x, y, cell);
}
}
export {GameRenderer} | {
this.canvas.height = field.getYCellCount() * this.setting.cellHeight;
this.canvas.width = field.getXCellCount() * this.setting.cellWidth;
this.ctx.fillStyle = "white";
this.ctx.fillRect(0, 0, field.getXCellCount() * this.setting.cellWidth, field.getYCellCount() * this.setting.cellHeight);
field.forEachField((x, y, cell) => this.cellRenderer.renderCell(x, y, cell));
} | identifier_body |
GameRenderer.ts | import {Field, GameSetting} from './../logic/logic'
import {CellRenderer} from './cellrenderer'
class GameRenderer {
private cellRenderer : CellRenderer;
private ctx : CanvasRenderingContext2D;
/**
     * Creates a new instance of the GameRenderer class
     * @param canvas The canvas on which to render the game
* @param setting The settings for the current game
*/
constructor(private canvas: HTMLCanvasElement, private setting: GameSetting) {
this.ctx = <CanvasRenderingContext2D>this.canvas.getContext('2d');
this.cellRenderer = new CellRenderer(this.ctx, setting);
}
/**
* Renders the game
* @param field The field of the game
*/
public render(field: Field): void { |
field.forEachField((x, y, cell) => this.cellRenderer.renderCell(x, y, cell));
}
/**
     * Renders a specific cell on a field
* @param field The field to render the cell
* @param x The number of the cell on the X axis
* @param y The number of the cell on the Y axis
*/
public renderAt(field: Field, x: number, y: number) {
var cell = field.getCellAt(x, y);
if (cell)
this.cellRenderer.renderCell(x, y, cell);
}
}
export {GameRenderer} | this.canvas.height = field.getYCellCount() * this.setting.cellHeight;
this.canvas.width = field.getXCellCount() * this.setting.cellWidth;
this.ctx.fillStyle = "white";
this.ctx.fillRect(0, 0, field.getXCellCount() * this.setting.cellWidth, field.getYCellCount() * this.setting.cellHeight); | random_line_split |
GameRenderer.ts | import {Field, GameSetting} from './../logic/logic'
import {CellRenderer} from './cellrenderer'
class GameRenderer {
private cellRenderer : CellRenderer;
private ctx : CanvasRenderingContext2D;
/**
* Creates a new instance of the GameRenderer class
* @param canvas The canvas to render the game
* @param setting The settings for the current game
*/
constructor(private canvas: HTMLCanvasElement, private setting: GameSetting) {
this.ctx = <CanvasRenderingContext2D>this.canvas.getContext('2d');
this.cellRenderer = new CellRenderer(this.ctx, setting);
}
/**
* Renders the game
* @param field The field of the game
*/
public render(field: Field): void {
this.canvas.height = field.getYCellCount() * this.setting.cellHeight;
this.canvas.width = field.getXCellCount() * this.setting.cellWidth;
this.ctx.fillStyle = "white";
this.ctx.fillRect(0, 0, field.getXCellCount() * this.setting.cellWidth, field.getYCellCount() * this.setting.cellHeight);
field.forEachField((x, y, cell) => this.cellRenderer.renderCell(x, y, cell));
}
/**
* Renders a specific cell on a field
* @param field The field to render the cell
* @param x The number of the cell on the X axis
* @param y The number of the cell on the Y axis
*/
public | (field: Field, x: number, y: number) {
var cell = field.getCellAt(x, y);
if (cell)
this.cellRenderer.renderCell(x, y, cell);
}
}
export {GameRenderer} | renderAt | identifier_name |
final.test.ts | import { Machine, interpret, assign, AnyEventObject } from '../src';
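// A traffic-light machine: the red state runs two crosswalk regions in parallel and only fires its onDone once both regions reach a final state.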
const finalMachine = Machine({
id: 'final',
initial: 'green',
states: {
green: {
on: {
TIMER: 'yellow'
}
},
yellow: { on: { TIMER: 'red' } },
red: {
type: 'parallel',
states: {
crosswalk1: {
initial: 'walk',
states: {
walk: {
on: { PED_WAIT: 'wait' }
},
wait: {
on: { PED_STOP: 'stop' }
},
stop: {
type: 'final',
data: { signal: 'stop' }
}
},
onDone: {
cond: (_, e) => e.data.signal === 'stop',
actions: 'stopCrosswalk1'
}
},
crosswalk2: {
initial: 'walk',
states: {
walk: {
on: { PED_WAIT: 'wait' }
},
wait: {
on: { PED_STOP: 'stop' }
},
stop: {
on: { PED_STOP: 'stop2' }
},
stop2: {
type: 'final'
}
},
onDone: {
actions: 'stopCrosswalk2'
}
}
},
onDone: {
target: 'green',
actions: 'prepareGreenLight'
}
}
},
onDone: {
// this action should never occur because final states are not direct children of machine
actions: 'shouldNeverOccur'
}
});
describe('final states', () => {
it('should emit the "done.state.final.red" event when all nested states are in their final states', () => {
const redState = finalMachine.transition('yellow', 'TIMER');
expect(redState.value).toEqual({
red: {
crosswalk1: 'walk',
crosswalk2: 'walk'
}
});
const waitState = finalMachine.transition(redState, 'PED_WAIT');
expect(waitState.value).toEqual({
red: {
crosswalk1: 'wait',
crosswalk2: 'wait'
}
});
const stopState = finalMachine.transition(waitState, 'PED_STOP');
expect(stopState.value).toEqual({
red: {
crosswalk1: 'stop',
crosswalk2: 'stop'
}
});
expect(stopState.actions).toEqual([
{ type: 'stopCrosswalk1', exec: undefined }
]);
const stopState2 = finalMachine.transition(stopState, 'PED_STOP');
expect(stopState2.actions).toEqual([
{ type: 'stopCrosswalk2', exec: undefined },
{ type: 'prepareGreenLight', exec: undefined }
]);
const greenState = finalMachine.transition(stopState, 'TIMER');
expect(greenState.actions).toHaveLength(0);
});
it('should execute final child state actions first', () => {
const nestedFinalMachine = Machine({
id: 'nestedFinal',
initial: 'foo',
states: {
foo: {
initial: 'bar',
onDone: { actions: 'fooAction' },
states: {
bar: {
initial: 'baz',
onDone: 'barFinal',
states: {
baz: {
type: 'final',
onEntry: 'bazAction'
}
}
},
barFinal: {
type: 'final',
onDone: { actions: 'barAction' }
}
}
}
}
});
const { initialState } = nestedFinalMachine;
expect(initialState.actions.map((a) => a.type)).toEqual([
'bazAction',
'barAction',
'fooAction'
]);
});
it('should call data expressions on nested final nodes', (done) => {
interface Ctx {
revealedSecret?: string;
}
const machine = Machine<Ctx>({
initial: 'secret',
context: {
revealedSecret: undefined
},
states: {
secret: {
initial: 'wait',
states: {
wait: {
on: {
REQUEST_SECRET: 'reveal'
}
},
reveal: {
type: 'final',
data: {
secret: () => 'the secret'
}
}
},
onDone: {
target: 'success',
actions: assign<Ctx, AnyEventObject>({
revealedSecret: (_, event) => {
return event.data.secret;
}
})
}
},
success: {
type: 'final'
}
}
});
let _context: any;
const service = interpret(machine)
.onTransition((state) => (_context = state.context))
.onDone(() => {
expect(_context).toEqual({ revealedSecret: 'the secret' });
done();
})
.start();
service.send('REQUEST_SECRET');
});
it("should only call data expression once when entering root's final state", () => {
const spy = jest.fn();
const machine = Machine({
initial: 'start',
states: { | on: {
FINISH: 'end'
}
},
end: {
type: 'final',
data: spy
}
}
});
const service = interpret(machine).start();
service.send({ type: 'FINISH', value: 1 });
expect(spy).toBeCalledTimes(1);
});
}); | start: { | random_line_split |
system_data.py | """
.. module:: system_data
:platform: linux
:synopsis: The module containing the system data.
.. moduleauthor:: Paul Fanelli <[email protected]>
.. modulecreated:: 6/26/15
"""
import bunch
import sys
from yaml.parser import ParserError
from zope.interface import implements
from planet_alignment.data.interface import ISystemData
class SystemData(bunch.Bunch):
"""This class houses the system data as a bunch object.
The system data consists of a name, theta, radius and period for each planet.
- **parameters** and **types**::
:param data: The system data as a bunch object.
:type data: Bunch object.
"""
implements(ISystemData)
def | (self, data):
try:
super(SystemData, self).__init__(data)
except ParserError as pe:
print("ERROR: Error parsing data!")
sys.exit("ERROR: {}".format(pe))
except Exception as e:
print("ERROR: Unknown exception '{}'".format(e))
sys.exit("ERROR: {}".format(e))
def __iter__(self):
return iter(self.system)
def __len__(self):
return len(self.system)
| __init__ | identifier_name |
system_data.py | """
.. module:: system_data
:platform: linux
:synopsis: The module containing the system data.
.. moduleauthor:: Paul Fanelli <[email protected]>
.. modulecreated:: 6/26/15
"""
import bunch
import sys
from yaml.parser import ParserError
from zope.interface import implements
from planet_alignment.data.interface import ISystemData
class SystemData(bunch.Bunch):
"""This class houses the system data as a bunch object.
The system data consists of a name, theta, radius and period for each planet.
- **parameters** and **types**::
:param data: The system data as a bunch object.
:type data: Bunch object.
"""
implements(ISystemData)
def __init__(self, data):
|
def __iter__(self):
return iter(self.system)
def __len__(self):
return len(self.system)
| try:
super(SystemData, self).__init__(data)
except ParserError as pe:
print("ERROR: Error parsing data!")
sys.exit("ERROR: {}".format(pe))
except Exception as e:
print("ERROR: Unknown exception '{}'".format(e))
sys.exit("ERROR: {}".format(e)) | identifier_body |
system_data.py | """
.. module:: system_data
:platform: linux
:synopsis: The module containing the system data.
.. moduleauthor:: Paul Fanelli <[email protected]>
.. modulecreated:: 6/26/15
"""
import bunch
import sys
from yaml.parser import ParserError
from zope.interface import implements
from planet_alignment.data.interface import ISystemData
class SystemData(bunch.Bunch):
"""This class houses the system data as a bunch object. |
- **parameters** and **types**::
:param data: The system data as a bunch object.
:type data: Bunch object.
"""
implements(ISystemData)
def __init__(self, data):
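# Bunch's constructor does the actual parsing; wrap parser errors (and anything unexpected) in a clean exit.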
try:
super(SystemData, self).__init__(data)
except ParserError as pe:
print("ERROR: Error parsing data!")
sys.exit("ERROR: {}".format(pe))
except Exception as e:
print("ERROR: Unknown exception '{}'".format(e))
sys.exit("ERROR: {}".format(e))
def __iter__(self):
return iter(self.system)
def __len__(self):
return len(self.system) |
The system data consists of a name, theta, radius and period for each planet. | random_line_split |
fitsdiff.py | """fitsdiff is now a part of Astropy.
Now this module just provides a wrapper around astropy.io.fits.diff for backwards
compatibility with the old interface in case anyone uses it.
"""
import os
import sys
from astropy.io.fits.diff import FITSDiff
from astropy.io.fits.scripts.fitsdiff import log, main
def fitsdiff(input1, input2, comment_excl_list='', value_excl_list='',
field_excl_list='', maxdiff=10, delta=0.0, neglect_blanks=True,
output=None):
|
def list_parse(name_list):
"""Parse a comma-separated list of values, or a filename (starting with @)
containing a list value on each line.
"""
if name_list and name_list[0] == '@':
value = name_list[1:]
if not os.path.exists(value):
log.warning('The file %s does not exist' % value)
return
try:
return [v.strip() for v in open(value, 'r').readlines()]
except IOError as e:
log.warning('reading %s failed: %s; ignoring this file' %
(value, e))
else:
return [v.strip() for v in name_list.split(',')]
if __name__ == "__main__":
sys.exit(main())
| if isinstance(comment_excl_list, str):
comment_excl_list = list_parse(comment_excl_list)
if isinstance(value_excl_list, str):
value_excl_list = list_parse(value_excl_list)
if isinstance(field_excl_list, str):
field_excl_list = list_parse(field_excl_list)
diff = FITSDiff(input1, input2, ignore_keywords=value_excl_list,
ignore_comments=comment_excl_list,
ignore_fields=field_excl_list, numdiffs=maxdiff,
tolerance=delta, ignore_blanks=neglect_blanks)
if output is None:
output = sys.stdout
diff.report(output)
return diff.identical | identifier_body |
fitsdiff.py | """fitsdiff is now a part of Astropy.
Now this module just provides a wrapper around astropy.io.fits.diff for backwards
compatibility with the old interface in case anyone uses it.
"""
import os
import sys
from astropy.io.fits.diff import FITSDiff
from astropy.io.fits.scripts.fitsdiff import log, main
def fitsdiff(input1, input2, comment_excl_list='', value_excl_list='',
field_excl_list='', maxdiff=10, delta=0.0, neglect_blanks=True,
output=None):
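# The exclusion lists may arrive as comma-separated strings (or an @filename); normalize them to lists first.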
if isinstance(comment_excl_list, str):
comment_excl_list = list_parse(comment_excl_list)
if isinstance(value_excl_list, str):
value_excl_list = list_parse(value_excl_list)
if isinstance(field_excl_list, str):
field_excl_list = list_parse(field_excl_list)
diff = FITSDiff(input1, input2, ignore_keywords=value_excl_list,
ignore_comments=comment_excl_list,
ignore_fields=field_excl_list, numdiffs=maxdiff,
tolerance=delta, ignore_blanks=neglect_blanks)
if output is None:
output = sys.stdout
diff.report(output)
return diff.identical
def | (name_list):
"""Parse a comma-separated list of values, or a filename (starting with @)
containing a list value on each line.
"""
if name_list and name_list[0] == '@':
value = name_list[1:]
if not os.path.exists(value):
log.warning('The file %s does not exist' % value)
return
try:
return [v.strip() for v in open(value, 'r').readlines()]
except IOError as e:
log.warning('reading %s failed: %s; ignoring this file' %
(value, e))
else:
return [v.strip() for v in name_list.split(',')]
if __name__ == "__main__":
sys.exit(main())
| list_parse | identifier_name |
fitsdiff.py | """fitsdiff is now a part of Astropy.
Now this module just provides a wrapper around astropy.io.fits.diff for backwards
compatibility with the old interface in case anyone uses it.
"""
import os
import sys
from astropy.io.fits.diff import FITSDiff
from astropy.io.fits.scripts.fitsdiff import log, main
def fitsdiff(input1, input2, comment_excl_list='', value_excl_list='',
field_excl_list='', maxdiff=10, delta=0.0, neglect_blanks=True,
output=None):
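# The exclusion lists may arrive as comma-separated strings (or an @filename); normalize them to lists first.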
if isinstance(comment_excl_list, str):
comment_excl_list = list_parse(comment_excl_list)
if isinstance(value_excl_list, str):
value_excl_list = list_parse(value_excl_list)
if isinstance(field_excl_list, str):
field_excl_list = list_parse(field_excl_list)
diff = FITSDiff(input1, input2, ignore_keywords=value_excl_list,
ignore_comments=comment_excl_list,
ignore_fields=field_excl_list, numdiffs=maxdiff,
tolerance=delta, ignore_blanks=neglect_blanks)
if output is None: |
return diff.identical
def list_parse(name_list):
"""Parse a comma-separated list of values, or a filename (starting with @)
containing a list value on each line.
"""
if name_list and name_list[0] == '@':
value = name_list[1:]
if not os.path.exists(value):
log.warning('The file %s does not exist' % value)
return
try:
return [v.strip() for v in open(value, 'r').readlines()]
except IOError as e:
log.warning('reading %s failed: %s; ignoring this file' %
(value, e))
else:
return [v.strip() for v in name_list.split(',')]
if __name__ == "__main__":
sys.exit(main()) | output = sys.stdout
diff.report(output) | random_line_split |
fitsdiff.py | """fitsdiff is now a part of Astropy.
Now this module just provides a wrapper around astropy.io.fits.diff for backwards
compatibility with the old interface in case anyone uses it.
"""
import os
import sys
from astropy.io.fits.diff import FITSDiff
from astropy.io.fits.scripts.fitsdiff import log, main
def fitsdiff(input1, input2, comment_excl_list='', value_excl_list='',
field_excl_list='', maxdiff=10, delta=0.0, neglect_blanks=True,
output=None):
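# The exclusion lists may arrive as comma-separated strings (or an @filename); normalize them to lists first.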
if isinstance(comment_excl_list, str):
|
if isinstance(value_excl_list, str):
value_excl_list = list_parse(value_excl_list)
if isinstance(field_excl_list, str):
field_excl_list = list_parse(field_excl_list)
diff = FITSDiff(input1, input2, ignore_keywords=value_excl_list,
ignore_comments=comment_excl_list,
ignore_fields=field_excl_list, numdiffs=maxdiff,
tolerance=delta, ignore_blanks=neglect_blanks)
if output is None:
output = sys.stdout
diff.report(output)
return diff.identical
def list_parse(name_list):
"""Parse a comma-separated list of values, or a filename (starting with @)
containing a list value on each line.
"""
if name_list and name_list[0] == '@':
value = name_list[1:]
if not os.path.exists(value):
log.warning('The file %s does not exist' % value)
return
try:
return [v.strip() for v in open(value, 'r').readlines()]
except IOError as e:
log.warning('reading %s failed: %s; ignoring this file' %
(value, e))
else:
return [v.strip() for v in name_list.split(',')]
if __name__ == "__main__":
sys.exit(main())
| comment_excl_list = list_parse(comment_excl_list) | conditional_block |
_util.py | import matplotlib.transforms
import numpy
def has_legend(axes):
return axes.get_legend() is not None
def get_legend_text(obj):
"""Check if line is in legend."""
leg = obj.axes.get_legend()
if leg is None:
return None
keys = [h.get_label() for h in leg.legendHandles if h is not None]
values = [t.get_text() for t in leg.texts]
label = obj.get_label()
d = dict(zip(keys, values))
if label in d:
return d[label]
return None
def transform_to_data_coordinates(obj, xdata, ydata):
"""The coordinates might not be in data coordinates, but could be sometimes in axes
coordinates. For example, the matplotlib command
axes.axvline(2)
will have the y coordinates set to 0 and 1, not to the limits. Therefore, a
two-stage transform has to be applied:
1. first transforming to display coordinates, then
2. from display to data.
"""
if obj.axes is not None and obj.get_transform() != obj.axes.transData:
|
return xdata, ydata
| points = numpy.array([xdata, ydata]).T
transform = matplotlib.transforms.composite_transform_factory(
obj.get_transform(), obj.axes.transData.inverted()
)
return transform.transform(points).T | conditional_block |
_util.py | import matplotlib.transforms
import numpy
def has_legend(axes):
return axes.get_legend() is not None
def | (obj):
"""Check if line is in legend."""
leg = obj.axes.get_legend()
if leg is None:
return None
keys = [h.get_label() for h in leg.legendHandles if h is not None]
values = [t.get_text() for t in leg.texts]
label = obj.get_label()
d = dict(zip(keys, values))
if label in d:
return d[label]
return None
def transform_to_data_coordinates(obj, xdata, ydata):
"""The coordinates might not be in data coordinates, but could be sometimes in axes
coordinates. For example, the matplotlib command
axes.axvline(2)
will have the y coordinates set to 0 and 1, not to the limits. Therefore, a
two-stage transform has to be applied:
1. first transforming to display coordinates, then
2. from display to data.
"""
if obj.axes is not None and obj.get_transform() != obj.axes.transData:
points = numpy.array([xdata, ydata]).T
transform = matplotlib.transforms.composite_transform_factory(
obj.get_transform(), obj.axes.transData.inverted()
)
return transform.transform(points).T
return xdata, ydata
| get_legend_text | identifier_name |
_util.py | import matplotlib.transforms
import numpy
def has_legend(axes):
return axes.get_legend() is not None
def get_legend_text(obj):
"""Check if line is in legend."""
leg = obj.axes.get_legend()
if leg is None:
return None
keys = [h.get_label() for h in leg.legendHandles if h is not None]
values = [t.get_text() for t in leg.texts]
label = obj.get_label()
d = dict(zip(keys, values))
if label in d:
return d[label]
return None
def transform_to_data_coordinates(obj, xdata, ydata):
"""The coordinates might not be in data coordinates, but could be sometimes in axes
coordinates. For example, the matplotlib command
axes.axvline(2)
will have the y coordinates set to 0 and 1, not to the limits. Therefore, a
two-stage transform has to be applied:
1. first transforming to display coordinates, then
2. from display to data.
"""
if obj.axes is not None and obj.get_transform() != obj.axes.transData:
points = numpy.array([xdata, ydata]).T
transform = matplotlib.transforms.composite_transform_factory(
obj.get_transform(), obj.axes.transData.inverted()
)
return transform.transform(points).T | return xdata, ydata | random_line_split |
|
_util.py | import matplotlib.transforms
import numpy
def has_legend(axes):
return axes.get_legend() is not None
def get_legend_text(obj):
"""Check if line is in legend."""
leg = obj.axes.get_legend()
if leg is None:
return None
keys = [h.get_label() for h in leg.legendHandles if h is not None]
values = [t.get_text() for t in leg.texts]
label = obj.get_label()
d = dict(zip(keys, values))
if label in d:
return d[label]
return None
def transform_to_data_coordinates(obj, xdata, ydata):
| """The coordinates might not be in data coordinates, but could be sometimes in axes
coordinates. For example, the matplotlib command
axes.axvline(2)
will have the y coordinates set to 0 and 1, not to the limits. Therefore, a
two-stage transform has to be applied:
1. first transforming to display coordinates, then
2. from display to data.
"""
if obj.axes is not None and obj.get_transform() != obj.axes.transData:
points = numpy.array([xdata, ydata]).T
transform = matplotlib.transforms.composite_transform_factory(
obj.get_transform(), obj.axes.transData.inverted()
)
return transform.transform(points).T
return xdata, ydata | identifier_body |
|
mod.rs | // Copyright 2020 The Tink-Rust Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
////////////////////////////////////////////////////////////////////////////////
//! Provides constants and convenience methods that define the format of ciphertexts and signatures.
use crate::TinkError;
use tink_proto::OutputPrefixType;
/// Prefix size of Tink and Legacy key types.
pub const NON_RAW_PREFIX_SIZE: usize = 5;
/// Prefix size of legacy key types.
/// The prefix starts with \x00 and is followed by a 4-byte key ID.
pub const LEGACY_PREFIX_SIZE: usize = NON_RAW_PREFIX_SIZE;
/// First byte of the prefix of legacy key types.
pub const LEGACY_START_BYTE: u8 = 0;
/// Prefix size of Tink key types.
/// The prefix starts with \x01 and is followed by a 4-byte key ID.
pub const TINK_PREFIX_SIZE: usize = NON_RAW_PREFIX_SIZE;
/// First byte of the prefix of Tink key types.
pub const TINK_START_BYTE: u8 = 1;
/// Prefix size of Raw key types.
/// Raw prefix is empty.
pub const RAW_PREFIX_SIZE: usize = 0;
/// Empty prefix for Raw key types.
pub const RAW_PREFIX: Vec<u8> = Vec::new();
/// Generate the prefix of ciphertexts produced by the crypto primitive obtained from key. The
/// prefix can be either empty (for RAW-type prefix), or consists of a 1-byte indicator of the type
/// of the prefix, followed by 4 bytes of the key ID in big endian encoding.
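/// For example, a Tink-type prefix for key ID 0x01020304 is the five bytes [0x01, 0x01, 0x02, 0x03, 0x04].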
pub fn output_prefix(key: &tink_proto::keyset::Key) -> Result<Vec<u8>, TinkError> |
/// Build a vector of requested size with key ID prefix pre-filled.
fn create_output_prefix(size: usize, start_byte: u8, key_id: crate::KeyId) -> Vec<u8> {
let mut prefix = Vec::with_capacity(size);
prefix.push(start_byte);
prefix.extend_from_slice(&key_id.to_be_bytes());
prefix
}
| {
match OutputPrefixType::from_i32(key.output_prefix_type) {
Some(OutputPrefixType::Legacy) | Some(OutputPrefixType::Crunchy) => Ok(
create_output_prefix(LEGACY_PREFIX_SIZE, LEGACY_START_BYTE, key.key_id),
),
Some(OutputPrefixType::Tink) => Ok(create_output_prefix(
TINK_PREFIX_SIZE,
TINK_START_BYTE,
key.key_id,
)),
Some(OutputPrefixType::Raw) => Ok(RAW_PREFIX),
Some(OutputPrefixType::UnknownPrefix) | None => {
Err("cryptofmt: unknown output prefix type".into())
}
}
} | identifier_body |
mod.rs | // Copyright 2020 The Tink-Rust Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
////////////////////////////////////////////////////////////////////////////////
//! Provides constants and convenience methods that define the format of ciphertexts and signatures.
use crate::TinkError;
use tink_proto::OutputPrefixType;
/// Prefix size of Tink and Legacy key types.
pub const NON_RAW_PREFIX_SIZE: usize = 5;
/// Prefix size of legacy key types.
/// The prefix starts with \x00 and is followed by a 4-byte key ID.
pub const LEGACY_PREFIX_SIZE: usize = NON_RAW_PREFIX_SIZE;
/// First byte of the prefix of legacy key types.
pub const LEGACY_START_BYTE: u8 = 0;
/// Prefix size of Tink key types.
/// The prefix starts with \x01 and is followed by a 4-byte key ID.
pub const TINK_PREFIX_SIZE: usize = NON_RAW_PREFIX_SIZE;
/// First byte of the prefix of Tink key types.
pub const TINK_START_BYTE: u8 = 1;
/// Prefix size of Raw key types.
/// Raw prefix is empty.
pub const RAW_PREFIX_SIZE: usize = 0;
/// Empty prefix for Raw key types.
pub const RAW_PREFIX: Vec<u8> = Vec::new();
/// Generate the prefix of ciphertexts produced by the crypto primitive obtained from key. The
/// prefix can be either empty (for RAW-type prefix), or consists of a 1-byte indicator of the type
/// of the prefix, followed by 4 bytes of the key ID in big endian encoding.
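/// For example, a Tink-type prefix for key ID 0x01020304 is the five bytes [0x01, 0x01, 0x02, 0x03, 0x04].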
pub fn output_prefix(key: &tink_proto::keyset::Key) -> Result<Vec<u8>, TinkError> {
match OutputPrefixType::from_i32(key.output_prefix_type) {
Some(OutputPrefixType::Legacy) | Some(OutputPrefixType::Crunchy) => Ok(
create_output_prefix(LEGACY_PREFIX_SIZE, LEGACY_START_BYTE, key.key_id),
),
Some(OutputPrefixType::Tink) => Ok(create_output_prefix(
TINK_PREFIX_SIZE,
TINK_START_BYTE,
key.key_id,
)),
Some(OutputPrefixType::Raw) => Ok(RAW_PREFIX),
Some(OutputPrefixType::UnknownPrefix) | None => {
Err("cryptofmt: unknown output prefix type".into())
}
}
}
/// Build a vector of requested size with key ID prefix pre-filled.
fn | (size: usize, start_byte: u8, key_id: crate::KeyId) -> Vec<u8> {
let mut prefix = Vec::with_capacity(size);
prefix.push(start_byte);
prefix.extend_from_slice(&key_id.to_be_bytes());
prefix
}
| create_output_prefix | identifier_name |
mod.rs | // Copyright 2020 The Tink-Rust Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
////////////////////////////////////////////////////////////////////////////////
//! Provides constants and convenience methods that define the format of ciphertexts and signatures.
use crate::TinkError;
use tink_proto::OutputPrefixType;
/// Prefix size of Tink and Legacy key types.
pub const NON_RAW_PREFIX_SIZE: usize = 5;
/// Prefix size of legacy key types.
/// The prefix starts with \x00 and is followed by a 4-byte key ID.
pub const LEGACY_PREFIX_SIZE: usize = NON_RAW_PREFIX_SIZE; | pub const LEGACY_START_BYTE: u8 = 0;
/// Prefix size of Tink key types.
/// The prefix starts with \x01 and is followed by a 4-byte key ID.
pub const TINK_PREFIX_SIZE: usize = NON_RAW_PREFIX_SIZE;
/// First byte of the prefix of Tink key types.
pub const TINK_START_BYTE: u8 = 1;
/// Prefix size of Raw key types.
/// Raw prefix is empty.
pub const RAW_PREFIX_SIZE: usize = 0;
/// Empty prefix for Raw key types.
pub const RAW_PREFIX: Vec<u8> = Vec::new();
/// Generate the prefix of ciphertexts produced by the crypto primitive obtained from key. The
/// prefix can be either empty (for RAW-type prefix), or consists of a 1-byte indicator of the type
/// of the prefix, followed by 4 bytes of the key ID in big endian encoding.
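/// For example, a Tink-type prefix for key ID 0x01020304 is the five bytes [0x01, 0x01, 0x02, 0x03, 0x04].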
pub fn output_prefix(key: &tink_proto::keyset::Key) -> Result<Vec<u8>, TinkError> {
match OutputPrefixType::from_i32(key.output_prefix_type) {
Some(OutputPrefixType::Legacy) | Some(OutputPrefixType::Crunchy) => Ok(
create_output_prefix(LEGACY_PREFIX_SIZE, LEGACY_START_BYTE, key.key_id),
),
Some(OutputPrefixType::Tink) => Ok(create_output_prefix(
TINK_PREFIX_SIZE,
TINK_START_BYTE,
key.key_id,
)),
Some(OutputPrefixType::Raw) => Ok(RAW_PREFIX),
Some(OutputPrefixType::UnknownPrefix) | None => {
Err("cryptofmt: unknown output prefix type".into())
}
}
}
/// Build a vector of requested size with key ID prefix pre-filled.
fn create_output_prefix(size: usize, start_byte: u8, key_id: crate::KeyId) -> Vec<u8> {
let mut prefix = Vec::with_capacity(size);
prefix.push(start_byte);
prefix.extend_from_slice(&key_id.to_be_bytes());
prefix
} | /// First byte of the prefix of legacy key types. | random_line_split |
frequency_status.rs | use rosrust::Duration;
use rosrust_diagnostics::{FrequencyStatus, Level, Status, Task};
mod util;
#[test]
fn | () {
let _roscore = util::run_roscore_for(util::Feature::FrequencyStatusTest);
rosrust::init("frequency_status_test");
let fs = FrequencyStatus::builder()
.window_size(2)
.min_frequency(10.0)
.max_frequency(20.0)
.tolerance(0.5)
.build();
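// The tick/sleep cadence below drives the measured rate above the max, back within bounds, below the min, and finally clears the window.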
fs.tick();
rosrust::sleep(Duration::from_nanos(20_000_000));
let mut status0 = Status::default();
fs.run(&mut status0);
rosrust::sleep(Duration::from_nanos(50_000_000));
fs.tick();
let mut status1 = Status::default();
fs.run(&mut status1);
rosrust::sleep(Duration::from_nanos(300_000_000));
fs.tick();
let mut status2 = Status::default();
fs.run(&mut status2);
rosrust::sleep(Duration::from_nanos(150_000_000));
fs.tick();
let mut status3 = Status::default();
fs.run(&mut status3);
fs.clear();
let mut status4 = Status::default();
fs.run(&mut status4);
assert_eq!(
status0.level,
Level::Warn,
"Max frequency exceeded but not reported"
);
assert_eq!(
status1.level,
Level::Ok,
"Within max frequency but reported error"
);
assert_eq!(
status2.level,
Level::Ok,
"Within min frequency but reported error"
);
assert_eq!(
status3.level,
Level::Warn,
"Min frequency exceeded but not reported"
);
assert_eq!(status4.level, Level::Error, "Freshly cleared should fail");
assert_eq!(
status0.name, "",
"Name should not be set by FrequencyStatus"
);
assert_eq!(
fs.name(),
"Frequency Status",
"Name should be \"Frequency Status\""
);
}
| frequency_status_test | identifier_name |
frequency_status.rs | use rosrust::Duration;
use rosrust_diagnostics::{FrequencyStatus, Level, Status, Task};
mod util;
#[test]
fn frequency_status_test() {
let _roscore = util::run_roscore_for(util::Feature::FrequencyStatusTest);
rosrust::init("frequency_status_test");
let fs = FrequencyStatus::builder()
.window_size(2)
.min_frequency(10.0)
.max_frequency(20.0)
.tolerance(0.5)
.build();
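// The tick/sleep cadence below drives the measured rate above the max, back within bounds, below the min, and finally clears the window.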
fs.tick();
rosrust::sleep(Duration::from_nanos(20_000_000));
let mut status0 = Status::default();
fs.run(&mut status0);
rosrust::sleep(Duration::from_nanos(50_000_000));
fs.tick();
let mut status1 = Status::default();
fs.run(&mut status1);
rosrust::sleep(Duration::from_nanos(300_000_000));
fs.tick();
let mut status2 = Status::default();
fs.run(&mut status2);
rosrust::sleep(Duration::from_nanos(150_000_000));
fs.tick();
let mut status3 = Status::default();
fs.run(&mut status3);
fs.clear();
let mut status4 = Status::default();
fs.run(&mut status4);
assert_eq!(
status0.level,
Level::Warn,
"Max frequency exceeded but not reported"
);
assert_eq!(
status1.level,
Level::Ok,
"Within max frequency but reported error"
);
assert_eq!(
status2.level,
Level::Ok,
"Within min frequency but reported error"
);
assert_eq!(
status3.level,
Level::Warn,
"Min frequency exceeded but not reported"
); | );
assert_eq!(
fs.name(),
"Frequency Status",
"Name should be \"Frequency Status\""
);
} | assert_eq!(status4.level, Level::Error, "Freshly cleared should fail");
assert_eq!(
status0.name, "",
"Name should not be set by FrequencyStatus" | random_line_split |
frequency_status.rs | use rosrust::Duration;
use rosrust_diagnostics::{FrequencyStatus, Level, Status, Task};
mod util;
#[test]
fn frequency_status_test() | {
let _roscore = util::run_roscore_for(util::Feature::FrequencyStatusTest);
rosrust::init("frequency_status_test");
let fs = FrequencyStatus::builder()
.window_size(2)
.min_frequency(10.0)
.max_frequency(20.0)
.tolerance(0.5)
.build();
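// The tick/sleep cadence below drives the measured rate above the max, back within bounds, below the min, and finally clears the window.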
fs.tick();
rosrust::sleep(Duration::from_nanos(20_000_000));
let mut status0 = Status::default();
fs.run(&mut status0);
rosrust::sleep(Duration::from_nanos(50_000_000));
fs.tick();
let mut status1 = Status::default();
fs.run(&mut status1);
rosrust::sleep(Duration::from_nanos(300_000_000));
fs.tick();
let mut status2 = Status::default();
fs.run(&mut status2);
rosrust::sleep(Duration::from_nanos(150_000_000));
fs.tick();
let mut status3 = Status::default();
fs.run(&mut status3);
fs.clear();
let mut status4 = Status::default();
fs.run(&mut status4);
assert_eq!(
status0.level,
Level::Warn,
"Max frequency exceeded but not reported"
);
assert_eq!(
status1.level,
Level::Ok,
"Within max frequency but reported error"
);
assert_eq!(
status2.level,
Level::Ok,
"Within min frequency but reported error"
);
assert_eq!(
status3.level,
Level::Warn,
"Min frequency exceeded but not reported"
);
assert_eq!(status4.level, Level::Error, "Freshly cleared should fail");
assert_eq!(
status0.name, "",
"Name should not be set by FrequencyStatus"
);
assert_eq!(
fs.name(),
"Frequency Status",
"Name should be \"Frequency Status\""
);
} | identifier_body |
|
webviewEditor.ts | /*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as DOM from 'vs/base/browser/dom';
import { CancellationToken } from 'vs/base/common/cancellation';
import { Emitter, Event } from 'vs/base/common/event';
import { IDisposable } from 'vs/base/common/lifecycle';
import { URI } from 'vs/base/common/uri';
import { IContextKeyService } from 'vs/platform/contextkey/common/contextkey';
import { IInstantiationService } from 'vs/platform/instantiation/common/instantiation';
import { ITelemetryService } from 'vs/platform/telemetry/common/telemetry';
import { IThemeService } from 'vs/platform/theme/common/themeService';
import { IWorkspaceContextService } from 'vs/platform/workspace/common/workspace';
import { EditorOptions } from 'vs/workbench/common/editor';
import { WebviewEditorInput } from 'vs/workbench/parts/webview/electron-browser/webviewEditorInput';
import { IEditorService } from 'vs/workbench/services/editor/common/editorService';
import { IEditorGroup } from 'vs/workbench/services/group/common/editorGroupsService';
import { IPartService, Parts } from 'vs/workbench/services/part/common/partService';
import { BaseWebviewEditor, KEYBINDING_CONTEXT_WEBVIEW_FIND_WIDGET_VISIBLE } from './baseWebviewEditor';
import { WebviewElement } from './webviewElement';
import { IWindowService } from 'vs/platform/windows/common/windows';
import { IStorageService } from 'vs/platform/storage/common/storage';
export class WebviewEditor extends BaseWebviewEditor {
public static readonly ID = 'WebviewEditor';
private _editorFrame: HTMLElement;
private _content: HTMLElement;
private _webviewContent: HTMLElement | undefined;
private _webviewFocusTracker?: DOM.IFocusTracker;
private _webviewFocusListenerDisposable?: IDisposable;
private _onFocusWindowHandler?: IDisposable;
private readonly _onDidFocusWebview = new Emitter<void>();
constructor(
@ITelemetryService telemetryService: ITelemetryService,
@IThemeService themeService: IThemeService,
@IContextKeyService private _contextKeyService: IContextKeyService,
@IPartService private readonly _partService: IPartService,
@IWorkspaceContextService private readonly _contextService: IWorkspaceContextService,
@IInstantiationService private readonly _instantiationService: IInstantiationService,
@IEditorService private readonly _editorService: IEditorService,
@IWindowService private readonly _windowService: IWindowService,
@IStorageService storageService: IStorageService
) {
super(WebviewEditor.ID, telemetryService, themeService, _contextKeyService, storageService);
}
protected createEditor(parent: HTMLElement): void {
this._editorFrame = parent;
this._content = document.createElement('div');
parent.appendChild(this._content);
}
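// Absolutely positions the webview container so it exactly overlays the editor frame's bounding rect.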
private doUpdateContainer() {
const webviewContainer = this.input && (this.input as WebviewEditorInput).container;
if (webviewContainer && webviewContainer.parentElement) {
const frameRect = this._editorFrame.getBoundingClientRect();
const containerRect = webviewContainer.parentElement.getBoundingClientRect();
webviewContainer.style.position = 'absolute';
webviewContainer.style.top = `${frameRect.top - containerRect.top}px`;
webviewContainer.style.left = `${frameRect.left - containerRect.left}px`;
webviewContainer.style.width = `${frameRect.width}px`;
webviewContainer.style.height = `${frameRect.height}px`;
}
}
public layout(dimension: DOM.Dimension): void { | if (this._webview) {
this.doUpdateContainer();
}
super.layout(dimension);
}
public focus() {
super.focus();
if (this._onFocusWindowHandler) {
return;
}
// Make sure we restore focus when switching back to a VS Code window
this._onFocusWindowHandler = this._windowService.onDidChangeFocus(focused => {
if (focused && this._editorService.activeControl === this) {
this.focus();
}
});
}
public dispose(): void {
// Let the editor input dispose of the webview.
this._webview = undefined;
this._webviewContent = undefined;
if (this._content && this._content.parentElement) {
this._content.parentElement.removeChild(this._content);
this._content = undefined;
}
this._onDidFocusWebview.dispose();
if (this._webviewFocusTracker) {
this._webviewFocusTracker.dispose();
this._webviewFocusTracker = undefined;
}
if (this._webviewFocusListenerDisposable) {
this._webviewFocusListenerDisposable.dispose();
this._webviewFocusListenerDisposable = undefined;
}
if (this._onFocusWindowHandler) {
this._onFocusWindowHandler.dispose();
}
super.dispose();
}
public sendMessage(data: any): void {
if (this._webview) {
this._webview.sendMessage(data);
}
}
public get onDidFocus(): Event<any> {
return this._onDidFocusWebview.event;
}
protected setEditorVisible(visible: boolean, group: IEditorGroup): void {
if (this.input && this.input instanceof WebviewEditorInput) {
if (visible) {
this.input.claimWebview(this);
} else {
this.input.releaseWebview(this);
}
this.updateWebview(this.input as WebviewEditorInput);
}
if (this._webviewContent) {
if (visible) {
this._webviewContent.style.visibility = 'visible';
this.doUpdateContainer();
} else {
this._webviewContent.style.visibility = 'hidden';
}
}
super.setEditorVisible(visible, group);
}
public clearInput() {
if (this.input && this.input instanceof WebviewEditorInput) {
this.input.releaseWebview(this);
}
this._webview = undefined;
this._webviewContent = undefined;
super.clearInput();
}
setInput(input: WebviewEditorInput, options: EditorOptions, token: CancellationToken): Thenable<void> {
if (this.input) {
(this.input as WebviewEditorInput).releaseWebview(this);
this._webview = undefined;
this._webviewContent = undefined;
}
return super.setInput(input, options, token)
.then(() => input.resolve())
.then(() => {
if (token.isCancellationRequested) {
return;
}
input.updateGroup(this.group.id);
this.updateWebview(input);
});
}
private updateWebview(input: WebviewEditorInput) {
const webview = this.getWebview(input);
input.claimWebview(this);
webview.update(input.html, {
allowScripts: input.options.enableScripts,
allowSvgs: true,
enableWrappedPostMessage: true,
useSameOriginForRoot: false,
localResourceRoots: input.options.localResourceRoots || this.getDefaultLocalResourceRoots()
}, input.options.retainContextWhenHidden);
if (this._webviewContent) {
this._webviewContent.style.visibility = 'visible';
}
this.doUpdateContainer();
}
private getDefaultLocalResourceRoots(): URI[] {
const rootPaths = this._contextService.getWorkspace().folders.map(x => x.uri);
if ((this.input as WebviewEditorInput).extensionLocation) {
rootPaths.push((this.input as WebviewEditorInput).extensionLocation);
}
return rootPaths;
}
private getWebview(input: WebviewEditorInput): WebviewElement {
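// Reuse a webview that already exists on the input; otherwise create a fresh element and mount it.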
if (this._webview) {
return this._webview;
}
this._webviewContent = input.container;
this.trackFocus();
const existing = input.webview;
if (existing) {
this._webview = existing;
return existing;
}
if (input.options.enableFindWidget) {
this._contextKeyService = this._register(this._contextKeyService.createScoped(this._webviewContent));
this.findWidgetVisible = KEYBINDING_CONTEXT_WEBVIEW_FIND_WIDGET_VISIBLE.bindTo(this._contextKeyService);
}
this._webview = this._instantiationService.createInstance(WebviewElement,
this._partService.getContainer(Parts.EDITOR_PART),
{
enableWrappedPostMessage: true,
useSameOriginForRoot: false
});
this._webview.mountTo(this._webviewContent);
input.webview = this._webview;
if (input.options.tryRestoreScrollPosition) {
this._webview.initialScrollProgress = input.scrollYPercentage;
}
this._webview.state = input.webviewState;
this._content.setAttribute('aria-flowto', this._webviewContent.id);
this.doUpdateContainer();
return this._webview;
}
private trackFocus() {
if (this._webviewFocusTracker) {
this._webviewFocusTracker.dispose();
}
if (this._webviewFocusListenerDisposable) {
this._webviewFocusListenerDisposable.dispose();
}
this._webviewFocusTracker = DOM.trackFocus(this._webviewContent);
this._webviewFocusListenerDisposable = this._webviewFocusTracker.onDidFocus(() => {
this._onDidFocusWebview.fire();
});
}
} | random_line_split |
|
webviewEditor.ts | /*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as DOM from 'vs/base/browser/dom';
import { CancellationToken } from 'vs/base/common/cancellation';
import { Emitter, Event } from 'vs/base/common/event';
import { IDisposable } from 'vs/base/common/lifecycle';
import { URI } from 'vs/base/common/uri';
import { IContextKeyService } from 'vs/platform/contextkey/common/contextkey';
import { IInstantiationService } from 'vs/platform/instantiation/common/instantiation';
import { ITelemetryService } from 'vs/platform/telemetry/common/telemetry';
import { IThemeService } from 'vs/platform/theme/common/themeService';
import { IWorkspaceContextService } from 'vs/platform/workspace/common/workspace';
import { EditorOptions } from 'vs/workbench/common/editor';
import { WebviewEditorInput } from 'vs/workbench/parts/webview/electron-browser/webviewEditorInput';
import { IEditorService } from 'vs/workbench/services/editor/common/editorService';
import { IEditorGroup } from 'vs/workbench/services/group/common/editorGroupsService';
import { IPartService, Parts } from 'vs/workbench/services/part/common/partService';
import { BaseWebviewEditor, KEYBINDING_CONTEXT_WEBVIEW_FIND_WIDGET_VISIBLE } from './baseWebviewEditor';
import { WebviewElement } from './webviewElement';
import { IWindowService } from 'vs/platform/windows/common/windows';
import { IStorageService } from 'vs/platform/storage/common/storage';
export class WebviewEditor extends BaseWebviewEditor {
public static readonly ID = 'WebviewEditor';
private _editorFrame: HTMLElement;
private _content: HTMLElement;
private _webviewContent: HTMLElement | undefined;
private _webviewFocusTracker?: DOM.IFocusTracker;
private _webviewFocusListenerDisposable?: IDisposable;
private _onFocusWindowHandler?: IDisposable;
private readonly _onDidFocusWebview = new Emitter<void>();
constructor(
@ITelemetryService telemetryService: ITelemetryService,
@IThemeService themeService: IThemeService,
@IContextKeyService private _contextKeyService: IContextKeyService,
@IPartService private readonly _partService: IPartService,
@IWorkspaceContextService private readonly _contextService: IWorkspaceContextService,
@IInstantiationService private readonly _instantiationService: IInstantiationService,
@IEditorService private readonly _editorService: IEditorService,
@IWindowService private readonly _windowService: IWindowService,
@IStorageService storageService: IStorageService
) {
super(WebviewEditor.ID, telemetryService, themeService, _contextKeyService, storageService);
}
protected createEditor(parent: HTMLElement): void {
this._editorFrame = parent;
this._content = document.createElement('div');
parent.appendChild(this._content);
}
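// Absolutely positions the webview container so it exactly overlays the editor frame's bounding rect.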
private doUpdateContainer() {
const webviewContainer = this.input && (this.input as WebviewEditorInput).container;
if (webviewContainer && webviewContainer.parentElement) {
const frameRect = this._editorFrame.getBoundingClientRect();
const containerRect = webviewContainer.parentElement.getBoundingClientRect();
webviewContainer.style.position = 'absolute';
webviewContainer.style.top = `${frameRect.top - containerRect.top}px`;
webviewContainer.style.left = `${frameRect.left - containerRect.left}px`;
webviewContainer.style.width = `${frameRect.width}px`;
webviewContainer.style.height = `${frameRect.height}px`;
}
}
public layout(dimension: DOM.Dimension): void {
if (this._webview) {
this.doUpdateContainer();
}
super.layout(dimension);
}
public focus() {
super.focus();
if (this._onFocusWindowHandler) {
return;
}
// Make sure we restore focus when switching back to a VS Code window
this._onFocusWindowHandler = this._windowService.onDidChangeFocus(focused => {
if (focused && this._editorService.activeControl === this) {
this.focus();
}
});
}
public dispose(): void {
// Let the editor input dispose of the webview.
this._webview = undefined;
this._webviewContent = undefined;
if (this._content && this._content.parentElement) {
this._content.parentElement.removeChild(this._content);
this._content = undefined;
}
this._onDidFocusWebview.dispose();
if (this._webviewFocusTracker) {
this._webviewFocusTracker.dispose();
this._webviewFocusTracker = undefined;
}
if (this._webviewFocusListenerDisposable) {
this._webviewFocusListenerDisposable.dispose();
this._webviewFocusListenerDisposable = undefined;
}
if (this._onFocusWindowHandler) {
this._onFocusWindowHandler.dispose();
}
super.dispose();
}
public sendMessage(data: any): void {
if (this._webview) {
this._webview.sendMessage(data);
}
}
public get onDidFocus(): Event<any> {
return this._onDidFocusWebview.event;
}
protected setEditorVisible(visible: boolean, group: IEditorGroup): void {
if (this.input && this.input instanceof WebviewEditorInput) {
if (visible) {
this.input.claimWebview(this);
} else {
this.input.releaseWebview(this);
}
this.updateWebview(this.input as WebviewEditorInput);
}
if (this._webviewContent) {
if (visible) {
this._webviewContent.style.visibility = 'visible';
this.doUpdateContainer();
} else {
this._webviewContent.style.visibility = 'hidden';
}
}
super.setEditorVisible(visible, group);
}
public clearInput() {
if (this.input && this.input instanceof WebviewEditorInput) {
this.input.releaseWebview(this);
}
this._webview = undefined;
this._webviewContent = undefined;
super.clearInput();
}
setInput(input: WebviewEditorInput, options: EditorOptions, token: CancellationToken): Thenable<void> {
if (this.input) {
(this.input as WebviewEditorInput).releaseWebview(this);
this._webview = undefined;
this._webviewContent = undefined;
}
return super.setInput(input, options, token)
.then(() => input.resolve())
.then(() => {
if (token.isCancellationRequested) {
return;
}
input.updateGroup(this.group.id);
this.updateWebview(input);
});
}
private updateWebview(input: WebviewEditorInput) {
const webview = this.getWebview(input);
input.claimWebview(this);
webview.update(input.html, {
allowScripts: input.options.enableScripts,
allowSvgs: true,
enableWrappedPostMessage: true,
useSameOriginForRoot: false,
localResourceRoots: input.options.localResourceRoots || this.getDefaultLocalResourceRoots()
}, input.options.retainContextWhenHidden);
if (this._webviewContent) {
this._webviewContent.style.visibility = 'visible';
}
this.doUpdateContainer();
}
private getDefaultLocalResourceRoots(): URI[] {
const rootPaths = this._contextService.getWorkspace().folders.map(x => x.uri);
if ((this.input as WebviewEditorInput).extensionLocation) {
rootPaths.push((this.input as WebviewEditorInput).extensionLocation);
}
return rootPaths;
}
private getWebview(input: WebviewEditorInput): WebviewElement {
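// Reuse a webview that already exists on the input; otherwise create a fresh element and mount it.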
if (this._webview) {
return this._webview;
}
this._webviewContent = input.container;
this.trackFocus();
const existing = input.webview;
if (existing) {
this._webview = existing;
return existing;
}
if (input.options.enableFindWidget) {
this._contextKeyService = this._register(this._contextKeyService.createScoped(this._webviewContent));
this.findWidgetVisible = KEYBINDING_CONTEXT_WEBVIEW_FIND_WIDGET_VISIBLE.bindTo(this._contextKeyService);
}
this._webview = this._instantiationService.createInstance(WebviewElement,
this._partService.getContainer(Parts.EDITOR_PART),
{
enableWrappedPostMessage: true,
useSameOriginForRoot: false
});
this._webview.mountTo(this._webviewContent);
input.webview = this._webview;
if (input.options.tryRestoreScrollPosition) {
this._webview.initialScrollProgress = input.scrollYPercentage;
}
this._webview.state = input.webviewState;
this._content.setAttribute('aria-flowto', this._webviewContent.id);
this.doUpdateContainer();
return this._webview;
}
private | () {
if (this._webviewFocusTracker) {
this._webviewFocusTracker.dispose();
}
if (this._webviewFocusListenerDisposable) {
this._webviewFocusListenerDisposable.dispose();
}
this._webviewFocusTracker = DOM.trackFocus(this._webviewContent);
this._webviewFocusListenerDisposable = this._webviewFocusTracker.onDidFocus(() => {
this._onDidFocusWebview.fire();
});
}
}
| trackFocus | identifier_name |
webviewEditor.ts | /*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as DOM from 'vs/base/browser/dom';
import { CancellationToken } from 'vs/base/common/cancellation';
import { Emitter, Event } from 'vs/base/common/event';
import { IDisposable } from 'vs/base/common/lifecycle';
import { URI } from 'vs/base/common/uri';
import { IContextKeyService } from 'vs/platform/contextkey/common/contextkey';
import { IInstantiationService } from 'vs/platform/instantiation/common/instantiation';
import { ITelemetryService } from 'vs/platform/telemetry/common/telemetry';
import { IThemeService } from 'vs/platform/theme/common/themeService';
import { IWorkspaceContextService } from 'vs/platform/workspace/common/workspace';
import { EditorOptions } from 'vs/workbench/common/editor';
import { WebviewEditorInput } from 'vs/workbench/parts/webview/electron-browser/webviewEditorInput';
import { IEditorService } from 'vs/workbench/services/editor/common/editorService';
import { IEditorGroup } from 'vs/workbench/services/group/common/editorGroupsService';
import { IPartService, Parts } from 'vs/workbench/services/part/common/partService';
import { BaseWebviewEditor, KEYBINDING_CONTEXT_WEBVIEW_FIND_WIDGET_VISIBLE } from './baseWebviewEditor';
import { WebviewElement } from './webviewElement';
import { IWindowService } from 'vs/platform/windows/common/windows';
import { IStorageService } from 'vs/platform/storage/common/storage';
export class WebviewEditor extends BaseWebviewEditor {
public static readonly ID = 'WebviewEditor';
private _editorFrame: HTMLElement;
private _content: HTMLElement;
private _webviewContent: HTMLElement | undefined;
private _webviewFocusTracker?: DOM.IFocusTracker;
private _webviewFocusListenerDisposable?: IDisposable;
private _onFocusWindowHandler?: IDisposable;
private readonly _onDidFocusWebview = new Emitter<void>();
constructor(
@ITelemetryService telemetryService: ITelemetryService,
@IThemeService themeService: IThemeService,
@IContextKeyService private _contextKeyService: IContextKeyService,
@IPartService private readonly _partService: IPartService,
@IWorkspaceContextService private readonly _contextService: IWorkspaceContextService,
@IInstantiationService private readonly _instantiationService: IInstantiationService,
@IEditorService private readonly _editorService: IEditorService,
@IWindowService private readonly _windowService: IWindowService,
@IStorageService storageService: IStorageService
) {
super(WebviewEditor.ID, telemetryService, themeService, _contextKeyService, storageService);
}
protected createEditor(parent: HTMLElement): void {
this._editorFrame = parent;
this._content = document.createElement('div');
parent.appendChild(this._content);
}
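// Absolutely positions the webview container so it exactly overlays the editor frame's bounding rect.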
private doUpdateContainer() {
const webviewContainer = this.input && (this.input as WebviewEditorInput).container;
if (webviewContainer && webviewContainer.parentElement) {
const frameRect = this._editorFrame.getBoundingClientRect();
const containerRect = webviewContainer.parentElement.getBoundingClientRect();
webviewContainer.style.position = 'absolute';
webviewContainer.style.top = `${frameRect.top - containerRect.top}px`;
webviewContainer.style.left = `${frameRect.left - containerRect.left}px`;
webviewContainer.style.width = `${frameRect.width}px`;
webviewContainer.style.height = `${frameRect.height}px`;
}
}
public layout(dimension: DOM.Dimension): void {
if (this._webview) {
this.doUpdateContainer();
}
super.layout(dimension);
}
public focus() {
super.focus();
if (this._onFocusWindowHandler) {
return;
}
// Make sure we restore focus when switching back to a VS Code window
this._onFocusWindowHandler = this._windowService.onDidChangeFocus(focused => {
if (focused && this._editorService.activeControl === this) {
this.focus();
}
});
}
public dispose(): void {
// Let the editor input dispose of the webview.
this._webview = undefined;
this._webviewContent = undefined;
if (this._content && this._content.parentElement) {
this._content.parentElement.removeChild(this._content);
this._content = undefined;
}
this._onDidFocusWebview.dispose();
if (this._webviewFocusTracker) |
if (this._webviewFocusListenerDisposable) {
this._webviewFocusListenerDisposable.dispose();
this._webviewFocusListenerDisposable = undefined;
}
if (this._onFocusWindowHandler) {
this._onFocusWindowHandler.dispose();
}
super.dispose();
}
public sendMessage(data: any): void {
if (this._webview) {
this._webview.sendMessage(data);
}
}
public get onDidFocus(): Event<any> {
return this._onDidFocusWebview.event;
}
protected setEditorVisible(visible: boolean, group: IEditorGroup): void {
if (this.input && this.input instanceof WebviewEditorInput) {
if (visible) {
this.input.claimWebview(this);
} else {
this.input.releaseWebview(this);
}
this.updateWebview(this.input as WebviewEditorInput);
}
if (this._webviewContent) {
if (visible) {
this._webviewContent.style.visibility = 'visible';
this.doUpdateContainer();
} else {
this._webviewContent.style.visibility = 'hidden';
}
}
super.setEditorVisible(visible, group);
}
public clearInput() {
if (this.input && this.input instanceof WebviewEditorInput) {
this.input.releaseWebview(this);
}
this._webview = undefined;
this._webviewContent = undefined;
super.clearInput();
}
setInput(input: WebviewEditorInput, options: EditorOptions, token: CancellationToken): Thenable<void> {
if (this.input) {
(this.input as WebviewEditorInput).releaseWebview(this);
this._webview = undefined;
this._webviewContent = undefined;
}
return super.setInput(input, options, token)
.then(() => input.resolve())
.then(() => {
if (token.isCancellationRequested) {
return;
}
input.updateGroup(this.group.id);
this.updateWebview(input);
});
}
private updateWebview(input: WebviewEditorInput) {
const webview = this.getWebview(input);
input.claimWebview(this);
webview.update(input.html, {
allowScripts: input.options.enableScripts,
allowSvgs: true,
enableWrappedPostMessage: true,
useSameOriginForRoot: false,
localResourceRoots: input.options.localResourceRoots || this.getDefaultLocalResourceRoots()
}, input.options.retainContextWhenHidden);
if (this._webviewContent) {
this._webviewContent.style.visibility = 'visible';
}
this.doUpdateContainer();
}
private getDefaultLocalResourceRoots(): URI[] {
const rootPaths = this._contextService.getWorkspace().folders.map(x => x.uri);
if ((this.input as WebviewEditorInput).extensionLocation) {
rootPaths.push((this.input as WebviewEditorInput).extensionLocation);
}
return rootPaths;
}
private getWebview(input: WebviewEditorInput): WebviewElement {
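// Reuse a webview that already exists on the input; otherwise create a fresh element and mount it.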
if (this._webview) {
return this._webview;
}
this._webviewContent = input.container;
this.trackFocus();
const existing = input.webview;
if (existing) {
this._webview = existing;
return existing;
}
if (input.options.enableFindWidget) {
this._contextKeyService = this._register(this._contextKeyService.createScoped(this._webviewContent));
this.findWidgetVisible = KEYBINDING_CONTEXT_WEBVIEW_FIND_WIDGET_VISIBLE.bindTo(this._contextKeyService);
}
this._webview = this._instantiationService.createInstance(WebviewElement,
this._partService.getContainer(Parts.EDITOR_PART),
{
enableWrappedPostMessage: true,
useSameOriginForRoot: false
});
this._webview.mountTo(this._webviewContent);
input.webview = this._webview;
if (input.options.tryRestoreScrollPosition) {
this._webview.initialScrollProgress = input.scrollYPercentage;
}
this._webview.state = input.webviewState;
this._content.setAttribute('aria-flowto', this._webviewContent.id);
this.doUpdateContainer();
return this._webview;
}
private trackFocus() {
if (this._webviewFocusTracker) {
this._webviewFocusTracker.dispose();
}
if (this._webviewFocusListenerDisposable) {
this._webviewFocusListenerDisposable.dispose();
}
this._webviewFocusTracker = DOM.trackFocus(this._webviewContent);
this._webviewFocusListenerDisposable = this._webviewFocusTracker.onDidFocus(() => {
this._onDidFocusWebview.fire();
});
}
}
| {
this._webviewFocusTracker.dispose();
this._webviewFocusTracker = undefined;
} | conditional_block |
mod.rs | extern crate android_glue;
use libc;
use std::ffi::{CString};
use std::sync::mpsc::{Receiver, channel};
use {CreationError, Event, MouseCursor};
use CreationError::OsError;
use events::ElementState::{Pressed, Released};
use events::Event::{MouseInput, MouseMoved};
use events::MouseButton;
use std::collections::VecDeque;
use Api;
use BuilderAttribs;
use GlRequest;
use native_monitor::NativeMonitorId;
pub struct Window {
display: ffi::egl::types::EGLDisplay,
context: ffi::egl::types::EGLContext,
surface: ffi::egl::types::EGLSurface,
event_rx: Receiver<android_glue::Event>,
}
pub struct MonitorID;
mod ffi;
pub fn get_available_monitors() -> VecDeque <MonitorID> {
let mut rb = VecDeque::new();
rb.push_back(MonitorID);
rb
}
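// Android exposes no display-enumeration API through android_glue, so this
// backend reports exactly one placeholder MonitorID.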
pub fn get_primary_monitor() -> MonitorID {
MonitorID
}
impl MonitorID {
pub fn get_name(&self) -> Option<String> {
Some("Primary".to_string())
}
pub fn get_native_identifier(&self) -> NativeMonitorId {
NativeMonitorId::Unavailable
}
pub fn get_dimensions(&self) -> (u32, u32) {
unimplemented!()
}
}
#[cfg(feature = "headless")]
pub struct HeadlessContext(i32);
#[cfg(feature = "headless")]
impl HeadlessContext {
/// See the docs in the crate root file.
pub fn new(_builder: BuilderAttribs) -> Result<HeadlessContext, CreationError> {
unimplemented!()
}
/// See the docs in the crate root file.
pub unsafe fn make_current(&self) {
unimplemented!()
}
/// See the docs in the crate root file.
pub fn is_current(&self) -> bool {
unimplemented!()
}
/// See the docs in the crate root file.
pub fn get_proc_address(&self, _addr: &str) -> *const () {
unimplemented!()
}
pub fn get_api(&self) -> ::Api {
::Api::OpenGlEs
}
}
#[cfg(feature = "headless")]
unsafe impl Send for HeadlessContext {}
#[cfg(feature = "headless")]
unsafe impl Sync for HeadlessContext {}
pub struct PollEventsIterator<'a> {
window: &'a Window,
}
impl<'a> Iterator for PollEventsIterator<'a> {
type Item = Event;
fn next(&mut self) -> Option<Event> {
match self.window.event_rx.try_recv() {
Ok(event) => {
match event {
android_glue::Event::EventDown => Some(MouseInput(Pressed, MouseButton::Left)),
android_glue::Event::EventUp => Some(MouseInput(Released, MouseButton::Left)),
android_glue::Event::EventMove(x, y) => Some(MouseMoved((x as i32, y as i32))),
_ => None,
}
}
Err(_) => {
None
}
}
}
}
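// Polling never blocks: try_recv() returns immediately, so the iterator yields
// None as soon as the android_glue channel is empty. A hypothetical caller
// (not part of this module) would drain it like this:
//
// for event in window.poll_events() {
//     match event { /* MouseInput, MouseMoved, ... */ _ => {} }
// }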
pub struct WaitEventsIterator<'a> {
window: &'a Window,
}
impl<'a> Iterator for WaitEventsIterator<'a> {
type Item = Event;
fn next(&mut self) -> Option<Event> {
use std::time::Duration;
use std::old_io::timer;
loop {
// Try to pull one already-queued event via poll_events().
if let Some(ev) = self.window.poll_events().next() {
return Some(ev);
}
// TODO: Implement a proper way of sleeping on the event queue
timer::sleep(Duration::milliseconds(16));
}
}
}
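// Note: this "wait" is really a poll-and-sleep loop; the 16 ms nap approximates
// one frame at 60 Hz rather than truly blocking on the event queue.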
impl Window {
pub fn new(builder: BuilderAttribs) -> Result<Window, CreationError> {
use std::{mem, ptr};
if builder.sharing.is_some() {
unimplemented!()
}
let native_window = unsafe { android_glue::get_native_window() };
if native_window.is_null() {
return Err(OsError(format!("Android's native window is null")));
}
let display = unsafe {
let display = ffi::egl::GetDisplay(mem::transmute(ffi::egl::DEFAULT_DISPLAY));
if display.is_null() {
return Err(OsError("No EGL display connection available".to_string()));
}
display
};
android_glue::write_log("eglGetDisplay succeeded");
let (_major, _minor) = unsafe {
let mut major: ffi::egl::types::EGLint = mem::uninitialized();
let mut minor: ffi::egl::types::EGLint = mem::uninitialized();
if ffi::egl::Initialize(display, &mut major, &mut minor) == 0 {
return Err(OsError(format!("eglInitialize failed")))
}
(major, minor)
};
android_glue::write_log("eglInitialize succeeded");
let use_gles2 = match builder.gl_version {
GlRequest::Specific(Api::OpenGlEs, (2, _)) => true,
GlRequest::Specific(Api::OpenGlEs, _) => false,
GlRequest::Specific(_, _) => panic!("Only OpenGL ES is supported"), // FIXME: return a result
GlRequest::GlThenGles { opengles_version: (2, _), .. } => true,
_ => false,
};
let mut attribute_list = vec!();
if use_gles2 {
attribute_list.push_all(&[
ffi::egl::RENDERABLE_TYPE as i32,
ffi::egl::OPENGL_ES2_BIT as i32,
]);
}
{
let (red, green, blue) = match builder.color_bits.unwrap_or(24) {
24 => (8, 8, 8),
16 => (5, 6, 5), // RGB565: 5 bits red, 6 green, 5 blue
_ => panic!("Bad color_bits"),
};
attribute_list.push_all(&[ffi::egl::RED_SIZE as i32, red]);
attribute_list.push_all(&[ffi::egl::GREEN_SIZE as i32, green]);
attribute_list.push_all(&[ffi::egl::BLUE_SIZE as i32, blue]);
}
attribute_list.push_all(&[
ffi::egl::DEPTH_SIZE as i32,
builder.depth_bits.unwrap_or(8) as i32,
]);
attribute_list.push(ffi::egl::NONE as i32);
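// EGL attribute lists are flat (name, value) pairs closed by a single EGL_NONE
// sentinel, which is why every push above adds a pair and NONE comes last.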
let config = unsafe {
let mut num_config: ffi::egl::types::EGLint = mem::uninitialized();
let mut config: ffi::egl::types::EGLConfig = mem::uninitialized();
if ffi::egl::ChooseConfig(display, attribute_list.as_ptr(), &mut config, 1,
&mut num_config) == 0
{
return Err(OsError(format!("eglChooseConfig failed")))
}
if num_config <= 0 {
return Err(OsError(format!("eglChooseConfig returned no available config")))
}
config
};
android_glue::write_log("eglChooseConfig succeeded");
let context = unsafe {
let mut context_attributes = vec!();
if use_gles2 {
context_attributes.push_all(&[ffi::egl::CONTEXT_CLIENT_VERSION as i32, 2]);
}
context_attributes.push(ffi::egl::NONE as i32);
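// ptr::null() below stands in for EGL_NO_CONTEXT: sharing was rejected at the
// top of new(), so no share context is passed to eglCreateContext.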
let context = ffi::egl::CreateContext(display, config, ptr::null(),
context_attributes.as_ptr());
if context.is_null() {
return Err(OsError(format!("eglCreateContext failed")))
}
context
};
android_glue::write_log("eglCreateContext succeeded");
let surface = unsafe {
let surface = ffi::egl::CreateWindowSurface(display, config, native_window, ptr::null());
if surface.is_null() {
return Err(OsError(format!("eglCreateWindowSurface failed")))
}
surface
};
android_glue::write_log("eglCreateWindowSurface succeeded");
let (tx, rx) = channel();
android_glue::add_sender(tx);
Ok(Window {
display: display,
context: context,
surface: surface,
event_rx: rx,
})
}
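// Construction above follows EGL's dependency chain: display, then config,
// then context, then surface, with the android_glue event channel hooked up last.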
pub fn is_closed(&self) -> bool {
false
}
pub fn set_title(&self, _: &str) {
}
pub fn show(&self) {
}
pub fn hide(&self) {
}
pub fn get_position(&self) -> Option<(i32, i32)> {
None
}
pub fn set_position(&self, _x: i32, _y: i32) {
}
pub fn get_inner_size(&self) -> Option<(u32, u32)> {
let native_window = unsafe { android_glue::get_native_window() };
if native_window.is_null() {
None
} else {
Some((
unsafe { ffi::ANativeWindow_getWidth(native_window) } as u32,
unsafe { ffi::ANativeWindow_getHeight(native_window) } as u32
))
}
}
pub fn get_outer_size(&self) -> Option<(u32, u32)> {
self.get_inner_size()
}
pub fn set_inner_size(&self, _x: u32, _y: u32) {
}
pub fn create_window_proxy(&self) -> WindowProxy {
WindowProxy
}
pub fn poll_events(&self) -> PollEventsIterator {
PollEventsIterator {
window: self
}
}
pub fn wait_events(&self) -> WaitEventsIterator {
WaitEventsIterator {
window: self
}
}
pub fn make_current(&self) {
unsafe {
ffi::egl::MakeCurrent(self.display, self.surface, self.surface, self.context);
}
}
pub fn | (&self) -> bool {
unsafe { ffi::egl::GetCurrentContext() == self.context }
}
pub fn get_proc_address(&self, addr: &str) -> *const () {
let addr = CString::from_slice(addr.as_bytes());
let addr = addr.as_ptr();
unsafe {
ffi::egl::GetProcAddress(addr) as *const ()
}
}
pub fn swap_buffers(&self) {
unsafe {
ffi::egl::SwapBuffers(self.display, self.surface);
}
}
pub fn platform_display(&self) -> *mut libc::c_void {
self.display as *mut libc::c_void
}
pub fn platform_window(&self) -> *mut libc::c_void {
unimplemented!()
}
pub fn get_api(&self) -> ::Api {
::Api::OpenGlEs
}
pub fn set_window_resize_callback(&mut self, _: Option<fn(u32, u32)>) {
}
pub fn set_cursor(&self, _: MouseCursor) {
}
pub fn hidpi_factor(&self) -> f32 {
1.0
}
pub fn set_cursor_position(&self, _x: i32, _y: i32) -> Result<(), ()> {
unimplemented!();
}
}
unsafe impl Send for Window {}
unsafe impl Sync for Window {}
#[cfg(feature = "window")]
#[derive(Clone)]
pub struct WindowProxy;
impl WindowProxy {
pub fn wakeup_event_loop(&self) {
unimplemented!()
}
}
#[unsafe_destructor]
impl Drop for Window {
fn drop(&mut self) {
use std::ptr;
unsafe {
// We don't call MakeCurrent(0, 0) first because this context may no
// longer be the current one on this thread.
android_glue::write_log("Destroying gl-init window");
ffi::egl::DestroySurface(self.display, self.surface);
ffi::egl::DestroyContext(self.display, self.context);
ffi::egl::Terminate(self.display);
}
}
}
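// End-to-end usage sketch (hypothetical caller code, assuming some
// `builder: BuilderAttribs`; not part of the original module):
//
// let window = Window::new(builder).unwrap();
// window.make_current();
// while !window.is_closed() {
//     for event in window.poll_events() {
//         /* react to input */
//     }
//     window.swap_buffers();
// }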
| is_current | identifier_name |